code (string: 5 to 1.03M) | repo_name (string: 5 to 90) | path (string: 4 to 158) | license (15 classes) | size (int64: 5 to 1.03M) | n_ast_errors (int64: 0 to 53.9k) | ast_max_depth (int64: 2 to 4.17k) | n_whitespaces (int64: 0 to 365k) | n_ast_nodes (int64: 3 to 317k) | n_ast_terminals (int64: 1 to 171k) | n_ast_nonterminals (int64: 1 to 146k) | loc (int64: -1 to 37.3k) | cycloplexity (int64: -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
import Test.QuickCheck
myButLast :: [a] -> a
myButLast [x, y] = x
myButLast (x:y:xs) = myButLast (y:xs)
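-- Illustrative example (not part of the original file): myButLast returns
-- the second-to-last element of a list.
--
-- >>> myButLast [1, 2, 3, 4]
-- 3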
prop_myButLast :: [Int] -> Int -> Property
prop_myButLast xs y = (length xs > 1) ==>
myButLast (xs ++ [y]) == last xs
prop_myButLastRev :: [Int] -> Property
prop_myButLastRev xs = (length xs > 1) ==>
(myButLast xs) == (head $ reverse $ init xs)
main = do
quickCheck prop_myButLast
quickCheck prop_myButLastRev
| SandeepTuniki/99-Haskell-Problems | src/problem2.hs | bsd-3-clause | 475 | 0 | 10 | 130 | 196 | 101 | 95 | 13 | 1 |
{-# LANGUAGE TransformListComp #-}
module Numeric.LinearProgramming.Easy
( -- * The Simplex monad
Simplex
, runSimplex
, maximize
, minimize
-- ** Values & variables
, SimplexValue
, SimplexVar
, newVar
, newVars
-- ** Constraints
, (.<=)
, (.==)
, (.>=)
) where
import Control.Arrow
import Control.Monad.State
import Data.Function
import Data.List
import Text.Printf
import Numeric.LinearProgramming
type SimplexValue = SimplexExpr
-- | The simplex expression representation
data SimplexExpr
= Vars [ (Double, Int) ]
| Val Double
deriving Eq
instance Show SimplexExpr where
show (Val v) = show v
show (Vars vars) = intercalate " + " [ count ++ "v" ++ show i
| (d,i) <- vars
, let count = if d == 1 then "" else show d ++ " * "
]
instance Num SimplexExpr where
(Vars a) + (Vars b) = Vars (a++b)
a + b = impossibleExpression a "+" b
(Val a) * (Vars b) = Vars (map (first (*a)) b)
(Vars a) * (Val b) = Vars (map (first (*b)) a)
(Val a) * (Val b) = Val (a*b)
a * b = impossibleExpression a "*" b
negate (Val a) = Val (negate a)
negate (Vars a) = Vars (map (first negate) a)
abs (Val a) = Val (abs a)
abs (Vars a) = Vars (map (first abs) a)
signum (Val a) = Val (signum a)
signum a = impossibleExpression' "signum" a
fromInteger = Val . fromInteger
instance Fractional SimplexExpr where
fromRational a = Val (fromRational a)
(Vars a) / (Val b) = Vars (map (first (/b)) a)
(Val a) / (Val b) = Val (a/b)
a / b = nonlinearExpression a "/" b
-- Error messages
impossibleExpression :: (Show a, Show b) => a -> String -> b -> c
impossibleExpression' :: (Show b) => String -> b -> c
nonlinearExpression :: (Show a, Show b) => a -> String -> b -> c
impossibleExpression a e b = error $ printf "Impossible expression: %s %s %s" (show a) e (show b)
impossibleExpression' e b = error $ printf "Impossible expression: %s %s" e (show b)
nonlinearExpression a e b = error $ printf "Nonlinear expression: %s %s %s" (show a) e (show b)
type SimplexVar = SimplexExpr
newtype Simplex a = Simplex { unsimplex :: State Int a }
instance Monad Simplex where
return a = Simplex (return a)
(Simplex a) >>= b = Simplex (a >>= unsimplex . b)
runSimplex :: Simplex a -> a
runSimplex (Simplex s) = evalState s 0
type SimplexConstraints = Bound [(Double, Int)]
-- | Turn simplex expressions into a list with their value and id.
-- Ensures that every variable occurs exactly once.
constraints :: SimplexExpr -> [(Double,Int)]
constraints (Val v) = impossibleExpression' "constraints" (Val v)
constraints (Vars c) =
[ (sum d,head i)
| (d,i) <- c
, then group by i
]
(.<=) :: SimplexExpr -> SimplexExpr -> SimplexConstraints
(Vars a) .<= (Val b) = a :<=: b
(Vars a) .<= (Vars b) = constraints (Vars a - Vars b) :<=: 0
(Val a) .<= (Vars b) = b :=>: a
(Val a) .<= (Val b) = error $ printf "Empty constraint: %f <= %f" a b
infix 4 .<=
(.>=) :: SimplexExpr -> SimplexExpr -> SimplexConstraints
(Vars a) .>= (Val b) = a :=>: b
(Vars a) .>= (Vars b) = constraints (Vars a - Vars b) :=>: 0
(Val a) .>= (Vars b) = b :<=: a
(Val a) .>= (Val b) = error $ printf "Empty constraint: %f >= %f" a b
infix 4 .>=
(.==) :: SimplexExpr -> SimplexExpr -> SimplexConstraints
(Vars a) .== (Val b) = a :==: b
(Vars a) .== (Vars b) = constraints (Vars a - Vars b) :==: 0
(Val a) .== (Vars b) = b :==: a
(Val a) .== (Val b) = error $ printf "Empty constraint: %f == %f" a b
infix 4 .==
-- | Create a new variable
newVar :: Simplex SimplexVar
newVar = Simplex $ do
i <- gets (+1)
put i
return $ Vars [(1,i)]
-- | Create a list of @n@ new variables.
newVars :: Int -> Simplex [SimplexVar]
newVars n = replicateM n newVar
minimize :: SimplexExpr -> [SimplexConstraints] -> Simplex Solution
minimize ex c = return $ simplex opt (Sparse c) []
where
opt :: Optimization
opt = Minimize $ map fst sorted
sorted = sortBy (compare `on` snd) (constraints ex)
maximize :: SimplexExpr -> [SimplexConstraints] -> Simplex Solution
maximize ex c = return $ simplex opt (Sparse c) []
where
opt :: Optimization
opt = Maximize $ map fst sorted
sorted = sortBy (compare `on` snd) (constraints ex)
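-- A minimal usage sketch (illustrative, not part of the original module):
-- maximise 4x + 3y subject to two linear constraints, using the 'Solution'
-- type imported above from Numeric.LinearProgramming.
exampleLP :: Solution
exampleLP = runSimplex $ do
  x <- newVar
  y <- newVar
  maximize (4*x + 3*y)
    [ 2*x + y .<= 10
    , x + 3*y .<= 15
    ]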
| mcmaniac/simplex | src/Numeric/LinearProgramming/Easy.hs | bsd-3-clause | 4,501 | 1 | 14 | 1,249 | 1,826 | 937 | 889 | 104 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module Context.Type.Error (
ErrUnboundTypeVariable(..)
, AsUnboundTypeVariable(..)
) where
import Control.Lens.Prism (Prism')
import Control.Lens.TH (makePrisms)
import Ast.Error
data ErrUnboundTypeVariable a = ErrUnboundTypeVariable a
deriving (Eq, Ord, Show)
makePrisms ''ErrUnboundTypeVariable
class AsUnboundTypeVariable e a where -- | e -> a where
_UnboundTypeVariable :: Prism' e a
instance AsUnboundTypeVariable (ErrUnboundTypeVariable a) a where
_UnboundTypeVariable = _ErrUnboundTypeVariable
instance {-# OVERLAPPABLE #-} AsUnboundTypeVariable (ErrSum xs) a => AsUnboundTypeVariable (ErrSum (x ': xs)) a where
_UnboundTypeVariable = _ErrNext . _UnboundTypeVariable
instance {-# OVERLAPPING #-} AsUnboundTypeVariable (ErrSum (ErrUnboundTypeVariable a ': xs)) a where
_UnboundTypeVariable = _ErrNow . _UnboundTypeVariable
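-- A hedged usage sketch (not part of the original module): with the classy
-- prism above, an error value can be constructed abstractly via review (#)
-- from Control.Lens, e.g.
--
-- > unbound :: AsUnboundTypeVariable e String => e
-- > unbound = _UnboundTypeVariable # "x"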
| dalaing/type-systems | src/Context/Type/Error.hs | bsd-3-clause | 1,279 | 0 | 10 | 178 | 223 | 127 | 96 | 25 | 0 |
module Text.Highlighter.Lexers.RagelEmbedded (lexer) where
import qualified Text.Highlighter.Lexers.Ragel as Ragel
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
lexer :: Lexer
lexer = Lexer
{ lName = "Embedded Ragel"
, lAliases = ["ragel-em"]
, lExtensions = [".rl"]
, lMimetypes = []
, lStart = root'
, lFlags = [multiline]
}
root' :: TokenMatcher
root' =
[ tok "([^%\\'\"/#]+|%(?=[^%]|$)|\"(\\\\\\\\|\\\\\"|[^\"])*\"|'(\\\\\\\\|\\\\'|[^'])*'|/\\*(.|\\n)*?\\*/|//.*$\\n?|\\#.*$\\n?|/(?!\\*)(\\\\\\\\|\\\\/|[^/])*/|/)+" (Arbitrary "Other")
, tok "(%%)(?![{%])(.*)($|;)(\\n?)" (ByGroups [(Arbitrary "Punctuation"), (Using Ragel.lexer), (Arbitrary "Punctuation"), (Arbitrary "Text")])
, tokNext "(%%%%|%%){" (Arbitrary "Punctuation") (GoTo multiLineFsm')
]
multiLineFsm' :: TokenMatcher
multiLineFsm' =
[ tok "(([^}\\'\"\\[/#]|}(?=[^%]|$)|}%(?=[^%]|$)|[^\\\\][\\\\][{}]|(>|\\$|%|<|@|<>)/|/(?!\\*)(\\\\\\\\|\\\\/|[^/])*/\\*|/(?=[^/\\*]|$))+|\"(\\\\\\\\|\\\\\"|[^\"])*\"|'(\\\\\\\\|\\\\'|[^'])*'|\\[(\\\\\\\\|\\\\\\]|[^\\]])*\\]|/\\*(.|\\n)*?\\*/|//.*$\\n?|\\#.*$\\n?)+" (Using Ragel.lexer)
, tokNext "}%%" (Arbitrary "Punctuation") Pop
]
| chemist/highlighter | src/Text/Highlighter/Lexers/RagelEmbedded.hs | bsd-3-clause | 1,264 | 0 | 12 | 157 | 245 | 146 | 99 | 22 | 1 |
{-|
Module : Main
Description : MRT Export Information Format parser
License : BSD3
Stability : Experimental
MRT is a library for parsing Multi-Threaded Routing Toolkit (MRT) export
files, of the kind you might find on the RouteViews archive.
-}
module Main where
import Data.Maybe (listToMaybe)
import qualified Codec.Compression.BZip as BZ
import Control.Monad (liftM)
import Data.Network.MRT
import qualified Data.ByteString.Lazy as BL
import System.Environment
loadStream :: FilePath -> IO BL.ByteString
loadStream = liftM BZ.decompress . BL.readFile
printRib :: IPRange -> RIBEntry -> IO ()
printRib ip rib = do
let path = listToMaybe $ filter isPath $ getBGPAttributes rib
putStrLn $ concat [show ip, "|", show path]
--let attrs = filter (getBGPAttributes rib
where
isPath (BGPAttribute _ x) = case x of {ASPath _ -> True; _ -> False}
printEntry :: MRTMessage -> IO ()
printEntry msg = case getRecord msg of
(TableDumpV2 _ ip ribs) -> mapM_ (printRib ip) ribs
_ -> return ()
main :: IO ()
main = do
args <- getArgs
mapM_ ((mapM_ printEntry =<<) . (liftM readMessages <$> loadStream)) args
| codebje/hask-mrt | test/Main.hs | bsd-3-clause | 1,205 | 0 | 12 | 284 | 322 | 169 | 153 | 22 | 2 |
{- |
Running a transducer with some input
-}
module FST.RunTransducer (
-- * Run functions
applyUp, applyDown
) where
import FST.Transducer
import Data.Maybe (catMaybes)
-- | A transition between states in a transducer
type TransitionFunction a = (Transducer a -> (StateTy,Symbol a) ->
[(Symbol a,StateTy)])
-- | Apply a transducer upwards
applyUp :: Eq a => Transducer a -> [a] -> Maybe [[a]]
applyUp transducer input
= apply transducer transitionsD input (initial transducer) []
-- | Apply a transducer downwards
applyDown :: Eq a => Transducer a -> [a] -> Maybe [[a]]
applyDown transducer input
= apply transducer transitionsU input (initial transducer) []
-- | Generic function for applying a transducer
apply :: Eq a => Transducer a -> TransitionFunction a -> [a] -> StateTy ->
[Symbol a] -> Maybe [[a]]
apply transducer transFun input s result =
case (runEpsilon transducer transFun input s result,
runSymbol transducer transFun input s result) of
(Just xs, Just ys) -> Just (xs ++ ys)
(a, Nothing) -> a
(Nothing, b ) -> b
runEpsilon :: Eq a => Transducer a -> TransitionFunction a -> [a] -> StateTy ->
[Symbol a] -> Maybe [[a]]
runEpsilon transducer transFun input s result =
case transFun transducer (s, Eps) of
[] -> Nothing
tl -> case concat $ catMaybes $
map (\(a,s1) -> apply transducer transFun input s1 (a:result)) tl of
[] -> Nothing
xs -> Just xs
runSymbol :: Eq a => Transducer a -> TransitionFunction a -> [a] -> StateTy ->
[Symbol a] -> Maybe [[a]]
runSymbol transducer _ [] s result
| isFinal transducer s = Just [transform result]
| otherwise = Nothing
runSymbol transducer transFun (i:input) s result =
case (transFun transducer (s,S i)) of
[] -> Nothing
tl -> case concat $ catMaybes $
map (\(a,s1) -> apply transducer transFun input s1 (a:result)) tl of
[] -> Nothing
xs -> Just xs
transform :: [Symbol a] -> [a]
transform ys = reverse [ a | S a <- ys ]
| johnjcamilleri/fst | FST/RunTransducer.hs | bsd-3-clause | 2,096 | 0 | 15 | 548 | 798 | 412 | 386 | 43 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
#endif
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Text.RE.PCRE.RE
( re
, reMS
, reMI
, reBS
, reBI
, reMultilineSensitive
, reMultilineInsensitive
, reBlockSensitive
, reBlockInsensitive
, re_
, cp
, regexType
, RE
, reOptions
, reSource
, reCaptureNames
, reRegex
, Options
, prelude
, preludeEnv
, preludeTestsFailing
, preludeTable
, preludeSummary
, preludeSources
, preludeSource
, noPreludeOptions
, defaultOptions
, unpackSimpleRegexOptions
, compileRegex
, escape
, escapeREString
) where
import Data.Bits
import Data.Functor.Identity
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Prelude.Compat
import Text.RE
import Text.RE.Internal.EscapeREString
import Text.RE.Internal.NamedCaptures
import Text.RE.Internal.PreludeMacros
import Text.RE.Internal.QQ
import Text.RE.TestBench
import Text.Regex.PCRE
re
, reMS
, reMI
, reBS
, reBI
, reMultilineSensitive
, reMultilineInsensitive
, reBlockSensitive
, reBlockInsensitive
, re_ :: QuasiQuoter
re = re' $ Just minBound
reMS = reMultilineSensitive
reMI = reMultilineInsensitive
reBS = reBlockSensitive
reBI = reBlockInsensitive
reMultilineSensitive = re' $ Just MultilineSensitive
reMultilineInsensitive = re' $ Just MultilineInsensitive
reBlockSensitive = re' $ Just BlockSensitive
reBlockInsensitive = re' $ Just BlockInsensitive
re_ = re' Nothing
regexType :: RegexType
regexType = PCRE
data RE =
RE
{ _re_options :: !Options
, _re_source :: !String
, _re_cnames :: !CaptureNames
, _re_regex :: !Regex
}
reOptions :: RE -> Options
reOptions = _re_options
reSource :: RE -> String
reSource = _re_source
reCaptureNames :: RE -> CaptureNames
reCaptureNames = _re_cnames
reRegex :: RE -> Regex
reRegex = _re_regex
type Options = Options_ RE CompOption ExecOption
instance IsOption SimpleRegexOptions RE CompOption ExecOption where
makeOptions = unpackSimpleRegexOptions
instance IsOption (Macros RE) RE CompOption ExecOption where
makeOptions ms = Options ms def_comp_option def_exec_option
instance IsOption CompOption RE CompOption ExecOption where
makeOptions co = Options prelude co def_exec_option
instance IsOption ExecOption RE CompOption ExecOption where
makeOptions eo = Options prelude def_comp_option eo
instance IsOption Options RE CompOption ExecOption where
makeOptions = id
instance IsOption () RE CompOption ExecOption where
makeOptions _ = unpackSimpleRegexOptions minBound
def_comp_option :: CompOption
def_comp_option = optionsComp defaultOptions
def_exec_option :: ExecOption
def_exec_option = optionsExec defaultOptions
noPreludeOptions :: Options
noPreludeOptions = defaultOptions { optionsMacs = emptyMacros }
defaultOptions :: Options
defaultOptions = makeOptions (minBound::SimpleRegexOptions)
unpackSimpleRegexOptions :: SimpleRegexOptions -> Options
unpackSimpleRegexOptions sro =
Options
{ optionsMacs = prelude
, optionsComp = comp
, optionsExec = defaultExecOpt
}
where
comp =
wiggle ml compMultiline $
wiggle ci compCaseless
defaultCompOpt
wiggle True m v = v .|. m
wiggle False m v = v .&. complement m
(ml,ci) = case sro of
MultilineSensitive -> (,) True False
MultilineInsensitive -> (,) True True
BlockSensitive -> (,) False False
BlockInsensitive -> (,) False True
compileRegex :: ( IsOption o RE CompOption ExecOption
, Functor m
, Monad m
)
=> o
-> String
-> m RE
compileRegex = compileRegex_ . makeOptions
compileRegex_ :: ( Functor m , Monad m )
=> Options
-> String
-> m RE
compileRegex_ os re_s = uncurry mk <$> compileRegex' os re_s
where
mk cnms rex =
RE
{ _re_options = os
, _re_source = re_s
, _re_cnames = cnms
, _re_regex = rex
}
re' :: Maybe SimpleRegexOptions -> QuasiQuoter
re' mb = case mb of
Nothing ->
(qq0 "re_")
{ quoteExp = parse minBound (\rs->[|flip unsafeCompileRegex rs|])
}
Just sro ->
(qq0 "re")
{ quoteExp = parse sro (\rs->[|unsafeCompileRegexSimple sro rs|])
}
where
parse :: SimpleRegexOptions -> (String->Q Exp) -> String -> Q Exp
parse sro mk rs = either error (\_->mk rs) $ compileRegex_ os rs
where
os = unpackSimpleRegexOptions sro
unsafeCompileRegexSimple :: SimpleRegexOptions -> String -> RE
unsafeCompileRegexSimple sro re_s = unsafeCompileRegex os re_s
where
os = unpackSimpleRegexOptions sro
unsafeCompileRegex :: IsOption o RE CompOption ExecOption
=> o
-> String
-> RE
unsafeCompileRegex = unsafeCompileRegex_ . makeOptions
unsafeCompileRegex_ :: Options -> String -> RE
unsafeCompileRegex_ os = either oops id . compileRegex os
where
oops = error . ("unsafeCompileRegex: " ++)
compileRegex' :: (Functor m,Monad m)
=> Options
-> String
-> m (CaptureNames,Regex)
compileRegex' Options{..} s0 = do
(cnms,s2) <- either fail return $ extractNamedCaptures s1
(,) cnms <$> makeRegexOptsM optionsComp optionsExec s2
where
s1 = expandMacros reSource optionsMacs s0
prelude :: Macros RE
prelude = runIdentity $ preludeMacros mk PCRE ExclCaptures
where
mk = Identity . unsafeCompileRegex_ noPreludeOptions
preludeTestsFailing :: [MacroID]
preludeTestsFailing = badMacros preludeEnv
preludeEnv :: MacroEnv
preludeEnv = preludeMacroEnv PCRE
preludeTable :: String
preludeTable = preludeMacroTable PCRE
preludeSummary :: PreludeMacro -> String
preludeSummary = preludeMacroSummary PCRE
preludeSources :: String
preludeSources = preludeMacroSources PCRE
preludeSource :: PreludeMacro -> String
preludeSource = preludeMacroSource PCRE
escape :: (String->String) -> String -> RE
escape f = unsafeCompileRegex () . f . escapeREString
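-- A hedged usage sketch (names illustrative, not part of the original
-- module): patterns are normally written with the quasiquoters above,
-- e.g. [re|[0-9]{4}-[0-9]{2}-[0-9]{2}|], or compiled at run time in a
-- monad that supports failure:
--
-- > dateRE :: Maybe RE
-- > dateRE = compileRegex () "[0-9]{4}-[0-9]{2}-[0-9]{2}"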
| cdornan/idiot | Text/RE/PCRE/RE.hs | bsd-3-clause | 6,807 | 0 | 12 | 1,834 | 1,527 | 842 | 685 | 201 | 5 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Bead.Persistence.SQL.Entities where
import Control.Monad.Logger
import Control.Monad.Trans.Resource
import Data.ByteString.Char8 (ByteString)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Time hiding (TimeZone)
import Database.Persist.Sql
import Database.Persist.TH
import qualified Bead.Domain.Entities as Domain
#ifdef TEST
import Database.Persist.Sqlite
#endif
-- String represents a JSON value
type JSONText = String
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Assessment
description Text
evalConfig JSONText
deriving Show
Assignment
name Text
description Text
type JSONText
start UTCTime
end UTCTime
created UTCTime
evalConfig JSONText
deriving Show
Comment
text Text
author Text
date UTCTime
type JSONText
deriving Show
Course
name Text
description Text
testScriptType JSONText
deriving Show
Evaluation
result JSONText
written Text
deriving Show
Feedback
info JSONText
date UTCTime
deriving Show
Group
name Text
description Text
deriving Show
Notification
message Text
deriving Show
Score
score JSONText
deriving Show
Submission
simple Text Maybe
zipped ByteString Maybe sqltype=longblob
postDate UTCTime
deriving Show
TestCase
name Text
description Text
simpleValue Text Maybe
zippedValue ByteString Maybe sqltype=longblob
info Text
deriving Show
TestScript
name Text
description Text
notes Text
script Text
testScriptType JSONText
deriving Show
User
role JSONText
username Text
email Text
name Text
timeZone JSONText
language Text
uid Text
UniqueUsername username
deriving Show
UserRegistration
username Text
email Text
name Text
token Text
timeout UTCTime
deriving Show
-- Connections between objects
-- Submission -> [Feedback]
-- Feedback -> Submission
FeedbacksOfSubmission
submission SubmissionId
feedback FeedbackId
UniqueSubmissionFeedbackPair submission feedback
UniqueSubmisisonFeedback feedback
deriving Show
-- Assignment -> [Submission]
SubmissionsOfAssignment
assignment AssignmentId
submission SubmissionId
UniqueSubmissionsOfAssignmentPair assignment submission
deriving Show
-- Assignment -> TestCase
-- Only one assignment is allowed for the test case
TestCaseOfAssignment
assignment AssignmentId
testCase TestCaseId
UniqueTestCaseToAssignment assignment testCase
UniqueAssignmentOfTestCase assignment
deriving Show
-- Course -> [User]
AdminsOfCourse
course CourseId
admin UserId
UniqueAdminsOfCourse course admin
deriving Show
-- Course -> [Assignment]
AssignmentsOfCourse
course CourseId
assignment AssignmentId
UniqueAssignmentsOfCoursePair course assignment
deriving Show
-- Course -> [Assesment]
AssessmentsOfCourse
course CourseId
assessment AssessmentId
UniqueAssessmentsOfCoursePair course assessment
deriving Show
-- Group -> [Assesment]
AssessmentsOfGroup
group GroupId
assessment AssessmentId
UniqueAssessmentsOfGroupPair group assessment
deriving Show
-- Score -> (Username, Assessment)
-- (Username, Assessment) -> [Score]
ScoresOfUsernameAssessment
score ScoreId
user UserId
assessment AssessmentId
UniqueScoresOfUsernameAssessment score user assessment
UniqueScoreOfUsernameAssessment score
-- Course -> [Group]
-- Group -> Course
GroupsOfCourse
course CourseId
group GroupId
UniqueGroupCoursePair course group
UniqueGroupCourseGroup group
deriving Show
-- Course -> [TestScript]
TestScriptsOfCourse
course CourseId
testScript TestScriptId
UniqueTestScriptsOfCourse course testScript
deriving Show
-- Course -> [User]
UnsubscribedUsersFromCourse
course CourseId
user UserId
UniqueUnsubscribedUsersFromCourse course user
deriving Show
-- Course -> [User]
UsersOfCourse
course CourseId
user UserId
UniqueUsersOfCoursePair course user
deriving Show
-- Group -> [User]
AdminsOfGroup
group GroupId
admin UserId
UniqueAdminsOfGroupPair group admin
deriving Show
-- Group -> [Assignment]
AssignmentsOfGroup
group GroupId
assignment AssignmentId
UniqueAssignmentsOfGroupPair group assignment
deriving Show
-- Group -> [User]
UsersOfGroup
group GroupId
user UserId
UniqueUsersOfGroupPair group user
deriving Show
-- Group -> [User]
UnsubscribedUsersFromGroup
group GroupId
user UserId
UniqueUnsubscribedUsersFromGroup group user
deriving Show
-- Submission -> [Comment]
-- Comment -> Submission
CommentsOfSubmission
submission SubmissionId
comment CommentId
UniqueCommentsOfSubmissionPair submission comment
UniqueCommentsOfSubmissionComment comment
deriving Show
-- Submission -> User
UserOfSubmission
submission SubmissionId
user UserId
UniqueUserOfSubmission user submission
deriving Show
-- Assignment -> User -> [Submission]
UserSubmissionOfAssignment
submission SubmissionId
assignment AssignmentId
user UserId
UniqueUserSubmissionOfAssignmentTriplet submission assignment user
deriving Show
-- Assignment -> User -> [Submission]
OpenedSubmission
submission SubmissionId
assignment AssignmentId
user UserId
UniqueOpenedSubmissionTriplet submission assignment user
deriving Show
-- TestCase -> TestScript
TestScriptOfTestCase
testCase TestCaseId
testScript TestScriptId
UniqueTestScriptOfTestCase testCase testScript
UniqueTestScriptOfTestCaseTestCase testCase
deriving Show
-- Evaluation -> Submission
SubmissionOfEvaluation
submission SubmissionId
evaluation EvaluationId
UniqueSubmissionOfEvaluationPair submission evaluation
UniqueSubmissionOfEvaluation evaluation
deriving Show
-- Evaluation -> Score
ScoreOfEvaluation
score ScoreId
evaluation EvaluationId
UniqueScoreOfEvaluationPair score evaluation
UniqueScoreOfEvaluation evaluation
CommentNotification
comment CommentId
notification NotificationId
UniqueCommentNotification comment notification
FeedbackNotification
feedback FeedbackId
notification NotificationId
UniqueFeedbackNotification feedback notification
UserNotification
user UserId
notification NotificationId
UniqueUserNotification user notification
|]
-- * Persist
type Persist = SqlPersistT (NoLoggingT (ResourceT IO))
-- * Helpers
entity f (Entity key value) = f key value
withEntity e f = entity f e
-- Forgets the result of a given computation
void :: Monad m => m a -> m ()
void = (>> return ())
-- Throws an error indicating this module as the source of the error
persistError function msg = error (concat ["Bead.Persistent.SQL.", function, ": ", msg])
getByUsername username =
fmap (fromMaybe (persistError "getByUsername" $ "User is not found" ++ show username))
(getBy (Domain.usernameCata (UniqueUsername . Text.pack) username))
-- Selects a user from the database by the given username; if the user is
-- not found, runs the nothing computation, otherwise runs the just
-- computation with the user as a parameter.
withUser username nothing just =
getBy (UniqueUsername $ Domain.usernameCata Text.pack username) >>= maybe nothing just
userKey username =
fmap (fmap entityKey) $ getBy (UniqueUsername $ Domain.usernameCata Text.pack username)
#ifdef TEST
-- * Test helpers
runSql :: Persist a -> IO a
runSql = runSqlite ":memory:"
initDB :: Persist ()
initDB = void $ runMigrationSilent migrateAll
#endif
| pgj/bead | src/Bead/Persistence/SQL/Entities.hs | bsd-3-clause | 8,017 | 0 | 12 | 1,561 | 461 | 259 | 202 | 37 | 1 |
{-# LANGUAGE GADTs, KindSignatures,
TypeOperators, TemplateHaskell, QuasiQuotes #-}
module Language.PiEtaEpsilon.BNFMeta.Term where
--from bnfc-meta
import Language.LBNF(lbnf, dumpCode, bnfc)
import Language.LBNF.Compiletime
import qualified Language.LBNF.Grammar
bnfc [lbnf|
-- This is a new pragma. The rest of the grammar is original JL.
antiquote "[" ":" ":]" ;
--Iso
BIdentityS. BaseIso0 ::= "<=+=>" ;
BIdentityP. BaseIso1 ::= "<=*=>" ;
BCommutativeS. BaseIso2 ::= "x+x" ;
BCommutativeP. BaseIso3 ::= "x*x" ;
BAssociativeS. BaseIso4 ::= "|+|+|" ;
BAssociativeP. BaseIso5 ::= "|*|*|" ;
BSplitS. BaseIso6 ::= "-+<" ;
BSplitP. BaseIso7 ::= "-*<" ;
BDistributiveZero. BaseIso8 ::= "^0^" ;
BDistributivePlus. BaseIso9 ::= "^+^" ;
--
IEliminate. Iso0 ::= "#" BaseIso ;
IIntroduce. Iso1 ::= "'" BaseIso ;
--
--Terms
TCompose. Term0 ::= Term1 ";" Term1 ;
TPlus. Term1 ::= Term1 "+" Term2 ;
TTimes. Term2 ::= Term2 "*" Term3 ;
TBase. Term3 ::= "<" Iso ;
TId. Term4 ::= "<=>" ;
_. Term ::= Term0 ;
_. Term0 ::= Term1 ;
_. Term1 ::= Term2 ;
_. Term2 ::= Term3 ;
_. Term3 ::= Term4 ;
_. Term4 ::= "(" Term ")" ;
_. Iso ::= Iso0 ;
_. Iso0 ::= Iso1 ;
_. Iso1 ::= "(" Iso ")" ;
_. BaseIso ::= BaseIso0 ;
_. BaseIso0 ::= BaseIso1 ;
_. BaseIso1 ::= BaseIso2 ;
_. BaseIso2 ::= BaseIso3 ;
_. BaseIso3 ::= BaseIso4 ;
_. BaseIso4 ::= BaseIso5 ;
_. BaseIso5 ::= BaseIso6 ;
_. BaseIso6 ::= BaseIso7 ;
_. BaseIso7 ::= BaseIso8 ;
_. BaseIso8 ::= BaseIso9 ;
_. BaseIso9 ::= "(" BaseIso ")" ;
TDouble. Typ ::= "double" ;
-- pragmas
comment "/*" "*/" ;
comment "//" ;
entrypoints Term, Iso, BaseIso ;
|]
| dmwit/pi-eta-epsilon | src/Language/PiEtaEpsilon/BNFMeta/Term.hs | bsd-3-clause | 2,294 | 0 | 5 | 980 | 49 | 33 | 16 | 7 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-|
Module : Main
Description : Token Server
Copyright : (c) Traderwave Ltd, 2016
License : All Rights Reserved
Maintainer : [email protected]
Stability : experimental
Portability : POSIX, WINDOWS
The token server temporarily stores JSON data. It works by allowing a user to
store arbitrary JSON data and returning a token for it. The user can specify an
expiry period; otherwise a default period of 24 hours is assumed.
After expiry the token, and hence the data, expires.
-}
module Main where
import qualified Database.Redis as R
import Web.Scotty.Trans
import TConfig
import Token
import Options.Applicative as O
import Control.Lens
import Network.Wai.Middleware.RequestLogger
import Network.Wai.Middleware.Cors
import qualified Data.Text.Lazy as LT
import Data.Configurator
import Control.Monad.Trans
import Network.HTTP.Types.Status
data TOptions = TOptions { _port :: Int, _config :: String } deriving Show
makeLenses ''TOptions
optionParser :: Parser TOptions
optionParser =
TOptions
<$> option auto
( long "port" <> short 'p' <> metavar "PORT" <> help "port to start the service" )
<*> strOption
( long "config" <> short 'c' <> metavar "CONFIG" <> help "config file for twergo")
tokenOptions = execParser optParser
where optParser = O.info (helper <*> optionParser)
( fullDesc
<> progDesc "Start token service on PORT"
<> O.header "Token service for Traderwave" )
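-- An illustrative invocation (binary name hypothetical):
--
-- > token-server --port 8080 --config /etc/traderwave/token.cfg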
-- | The Scotty routes for registering and retrieving token data.
tokenService :: R.Connection -> ScottyT LT.Text IO ()
tokenService conn = do
middleware logStdout
middleware simpleCors
let retrieve = "/retrieve"
options retrieve $ status status200
get retrieve $ do
token <- jsonData
result <- liftIO $ retrieveData conn token
case result of
Nothing -> do
status notFound404
json $ UserError "Invalid token or expired token"
Just r -> json $ UserData r
let register = "/register"
options register $ status status200
post register $ do
(RegistryData e (UserData v)) <- jsonData
token <- liftIO $ registerData conn v (toInteger e)
json token
main :: IO ()
main = do
opts <- tokenOptions
putStrLn $ "Loading configuration " ++ (opts ^. config)
cfg <- load [ Required $ opts ^. config ]
cinfo <- getConnectInfo cfg
conn <- R.connect cinfo
scottyT (opts ^. port) id $ tokenService conn
| yogeshsajanikar/token-service | app/Main.hs | bsd-3-clause | 2,520 | 0 | 16 | 609 | 574 | 284 | 290 | 57 | 2 |
{-|
Module : Idris.IdrisDoc
Description : Generation of HTML documentation for Idris code
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE OverloadedStrings, PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.IdrisDoc (generateDocs) where
import Idris.AbsSyntax
import Idris.Core.Evaluate (Accessibility(..), ctxtAlist, isDConName, isFnName,
isTConName, lookupDefAcc)
import Idris.Core.TT (Name(..), OutputAnnotation(..), TextFormatting(..),
constIsType, nsroot, sUN, str, toAlist, txt)
import Idris.Docs
import Idris.Docstrings (nullDocstring)
import qualified Idris.Docstrings as Docstrings
import Idris.Options
import Idris.Parser.Helpers (opChars)
import IRTS.System (getIdrisDataFileByName)
import Control.Applicative ((<|>))
import Control.Monad (forM_)
import Control.Monad.Trans.Except
import Control.Monad.Trans.State.Strict
import qualified Data.ByteString.Lazy as BS2
import qualified Data.List as L
import qualified Data.Map as M hiding ((!))
import Data.Maybe
import Data.Monoid (mempty)
import qualified Data.Set as S
import qualified Data.Text as T
import System.Directory
import System.FilePath
import System.IO
import System.IO.Error
import Text.Blaze (contents, toValue)
import qualified Text.Blaze.Html.Renderer.String as R
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import Text.Blaze.Html5 (preEscapedToHtml, toHtml, (!))
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Text.Blaze.Renderer.String (renderMarkup)
import Text.PrettyPrint.Annotated.Leijen (displayDecorated, renderCompact)
-- ---------------------------------------------------------------- [ Public ]
-- | Generates HTML documentation for a series of loaded namespaces
-- and their dependencies.
generateDocs :: IState -- ^ IState where all necessary information is
-- extracted from.
-> [Name] -- ^ List of namespaces to generate
-- documentation for.
-> FilePath -- ^ The directory to which documentation will
-- be written.
-> IO (Either String ())
generateDocs ist nss' out =
do let nss = map toNsName nss'
docs <- fetchInfo ist nss
let (c, io) = foldl (checker docs) (0, return ()) nss
io
if c < length nss
then catchIOError (createDocs ist docs out) (err . show)
else err "No namespaces to generate documentation for"
where checker docs st ns | M.member ns docs = st
checker docs (c, io) ns = (c+1, do prev <- io; warnMissing ns)
warnMissing ns =
putStrLn $ "Warning: Ignoring empty or non-existing namespace '" ++
(nsName2Str ns) ++ "'"
-- ----------------------------------------------------------------- [ Types ]
-- | Either an error message or a result
type Failable = Either String
-- | Internal representation of a fully qualified namespace name
type NsName = [T.Text]
-- | All information to be documented about a single namespace member
type NsItem = (Name, Maybe Docs, Accessibility)
-- | Docstrings containing fully elaborated term annotations
type FullDocstring = Docstrings.Docstring Docstrings.DocTerm
-- | All information to be documented about a namespace
data NsInfo = NsInfo { nsDocstring :: Maybe FullDocstring,
nsContents :: [NsItem]
}
-- | A map from namespace names to information about them
type NsDict = M.Map NsName NsInfo
-- --------------------------------------------------------------- [ Utility ]
-- | Make an error message
err :: String -> IO (Failable ())
err s = return $ Left s
-- | IdrisDoc version
version :: String
version = "1.0"
-- | Converts a Name into a [Text] corresponding to the namespace
-- part of a NS Name.
toNsName :: Name -- ^ Name to convert
-> NsName
toNsName (UN n) = [n]
toNsName (NS n ns) = (toNsName n) ++ ns
toNsName _ = []
-- | Retrieves the namespace part of a Name
getNs :: Name -- ^ Name to retrieve namespace for
-> NsName
getNs (NS _ ns) = ns
getNs _ = []
-- | String to replace for the root namespace
rootNsStr :: String
rootNsStr = "[builtins]"
-- | Converts a NsName to string form
nsName2Str :: NsName -- ^ NsName to convert
-> String
nsName2Str n = if null n then rootNsStr else name n
where name [] = []
name [ns] = str ns
name (ns:nss) = (name nss) ++ ('.' : str ns)
-- --------------------------------------------------------- [ Info Fetching ]
-- | Fetch info about namespaces and their contents
fetchInfo :: IState -- ^ IState to fetch info from
-> [NsName] -- ^ List of namespaces to fetch info for
-> IO NsDict -- ^ Mapping from namespace name to
-- info about its contents
fetchInfo ist nss =
do let originNss = S.fromList nss
info <- nsDict ist
let accessible = M.map (filterContents filterInclude) info
nonOrphan = M.map (updateContents removeOrphans) accessible
nonEmpty = M.filter (not . null . nsContents) nonOrphan
reachedNss = traceNss nonEmpty originNss S.empty
return $ M.filterWithKey (\k _ -> S.member k reachedNss) nonEmpty
where
-- TODO: lensify
filterContents p (NsInfo md ns) = NsInfo md (filter p ns)
updateContents f x = x { nsContents = f (nsContents x) }
-- | Removes loose interface methods and data constructors,
-- leaving them documented only under their parent.
removeOrphans :: [NsItem] -- ^ List to remove orphans from
-> [NsItem] -- ^ Orphan-free list
removeOrphans list =
let children = S.fromList $ concatMap (names . (\(_, d, _) -> d)) list
in filter ((flip S.notMember children) . (\(n, _, _) -> n)) list
where names (Just (DataDoc _ fds)) = map (\(FD n _ _ _ _) -> n) fds
names (Just (InterfaceDoc _ _ fds _ _ _ _ _ c)) = map (\(FD n _ _ _ _) -> n) fds ++ map (\(FD n _ _ _ _) -> n) (maybeToList c)
names _ = []
-- | Whether a Name names something which should be documented
filterName :: Name -- ^ Name to check
-> Bool -- ^ Predicate result
filterName (UN _) = True
filterName (NS n _) = filterName n
filterName _ = False
-- | Whether a NsItem should be included in the documentation.
-- It must not be Hidden/Private and filterName must return True for the name.
-- Also it must have Docs -- without Docs, nothing can be done.
filterInclude :: NsItem -- ^ Accessibility to check
-> Bool -- ^ Predicate result
filterInclude (name, Just _, Public) | filterName name = True
filterInclude (name, Just _, Frozen) | filterName name = True
filterInclude _ = False
-- | Finds all namespaces indirectly referred by a set of namespaces.
-- The NsItems of the namespaces are searched for references.
traceNss :: NsDict -- ^ Mappings of namespaces and their contents
-> S.Set NsName -- ^ Set of namespaces to trace
-> S.Set NsName -- ^ Set of namespaces which has been traced
-> S.Set NsName -- ^ Set of namespaces to trace and all traced one
traceNss nsd sT sD =
let nsTracer ns | Just nsis <- M.lookup ns nsd = map referredNss (nsContents nsis)
nsTracer _ = [S.empty] -- Ignore
reached = S.unions $ concatMap nsTracer (S.toList sT)
processed = S.union sT sD
untraced = S.difference reached processed
in if S.null untraced then processed
else traceNss nsd untraced processed
-- | Gets all namespaces directly referred by a NsItem
referredNss :: NsItem -- ^ The name to get all directly
-- referred namespaces for
-> S.Set NsName
referredNss (_, Nothing, _) = S.empty
referredNss (n, Just d, _) =
let fds = getFunDocs d
ts = concatMap types fds
names = concatMap (extractPTermNames) ts
in S.map getNs $ S.fromList names
where getFunDocs (FunDoc f) = [f]
getFunDocs (DataDoc f fs) = f:fs
getFunDocs (InterfaceDoc _ _ fs _ _ _ _ _ _) = fs
getFunDocs (RecordDoc _ _ f fs _) = f:fs
getFunDocs (NamedImplementationDoc _ fd) = [fd]
getFunDocs (ModDoc _ _) = []
types (FD _ _ args t _) = t:(map second args)
second (_, x, _, _) = x
-- | Returns an NsDict of containing all known namespaces and their contents
nsDict :: IState
-> IO NsDict
nsDict ist = flip (foldl addModDoc) modDocs $ foldl adder (return M.empty) nameDefList
where nameDefList = ctxtAlist $ tt_ctxt ist
adder m (n, _) = do map <- m
doc <- loadDocs ist n
let access = getAccess ist n
nInfo = NsInfo Nothing [(n, doc, access)]
return $ M.insertWith addNameInfo (getNs n) nInfo map
addNameInfo (NsInfo m ns) (NsInfo m' ns') = NsInfo (m <|> m') (ns ++ ns')
modDocs = map (\(mn, d) -> (mn, NsInfo (Just d) [])) $ toAlist (idris_moduledocs ist)
addModDoc :: IO NsDict -> (Name, NsInfo) -> IO NsDict
addModDoc dict (mn, d) = fmap (M.insertWith addNameInfo (getNs mn) d) dict
-- | Gets the Accessibility for a Name
getAccess :: IState -- ^ IState containing accessibility information
-> Name -- ^ The Name to retrieve access for
-> Accessibility
getAccess ist n =
let res = lookupDefAcc n False (tt_ctxt ist)
in case res of
[(_, acc)] -> acc
_ -> Private
-- | Predicate saying whether a Name may have docs defined
-- Without this, getDocs from Idris.Docs may fail a pattern match.
mayHaveDocs :: Name -- ^ The Name to test
-> Bool -- ^ The result
mayHaveDocs (UN _) = True
mayHaveDocs (NS n _) = mayHaveDocs n
mayHaveDocs _ = False
-- | Retrieves the Docs for a Name
loadDocs :: IState -- ^ IState to extract information from
-> Name -- ^ Name to load Docs for
-> IO (Maybe Docs)
loadDocs ist n
| mayHaveDocs n = do docs <- runExceptT $ evalStateT (getDocs n FullDocs) ist
case docs of Right d -> return (Just d)
Left _ -> return Nothing
| otherwise = return Nothing
-- | Extracts names referred from a type.
-- The covering of all PTerms ensures that we avoid unanticipated cases,
-- though not all of them are needed. The author just did not know which!
-- TODO: Remove unnecessary cases
extractPTermNames :: PTerm -- ^ Where to extract names from
-> [Name] -- ^ Extracted names
extractPTermNames (PRef _ _ n) = [n]
extractPTermNames (PInferRef _ _ n) = [n]
extractPTermNames (PPatvar _ n) = [n]
extractPTermNames (PLam _ n _ p1 p2) = n : concatMap extract [p1, p2]
extractPTermNames (PPi _ n _ p1 p2) = n : concatMap extract [p1, p2]
extractPTermNames (PLet _ _ n _ p1 p2 p3) = n : concatMap extract [p1, p2, p3]
extractPTermNames (PTyped p1 p2) = concatMap extract [p1, p2]
extractPTermNames (PApp _ p pas) = let names = concatMap extractPArg pas
in (extract p) ++ names
extractPTermNames (PAppBind _ p pas) = let names = concatMap extractPArg pas
in (extract p) ++ names
extractPTermNames (PMatchApp _ n) = [n]
extractPTermNames (PCase _ p ps) = let (ps1, ps2) = unzip ps
in concatMap extract (p:(ps1 ++ ps2))
extractPTermNames (PIfThenElse _ c t f) = concatMap extract [c, t, f]
extractPTermNames (PRewrite _ _ a b m) | Just c <- m =
concatMap extract [a, b, c]
extractPTermNames (PRewrite _ _ a b _) = concatMap extract [a, b]
extractPTermNames (PPair _ _ _ p1 p2) = concatMap extract [p1, p2]
extractPTermNames (PDPair _ _ _ a b c) = concatMap extract [a, b, c]
extractPTermNames (PAlternative _ _ l) = concatMap extract l
extractPTermNames (PHidden p) = extract p
extractPTermNames (PGoal _ p1 n p2) = n : concatMap extract [p1, p2]
extractPTermNames (PDoBlock pdos) = concatMap extractPDo pdos
extractPTermNames (PIdiom _ p) = extract p
extractPTermNames (PMetavar _ n) = [n]
extractPTermNames (PProof tacts) = concatMap extractPTactic tacts
extractPTermNames (PTactics tacts) = concatMap extractPTactic tacts
extractPTermNames (PCoerced p) = extract p
extractPTermNames (PDisamb _ p) = extract p
extractPTermNames (PUnifyLog p) = extract p
extractPTermNames (PNoImplicits p) = extract p
extractPTermNames (PRunElab _ p _) = extract p
extractPTermNames (PConstSugar _ tm) = extract tm
extractPTermNames _ = []
-- | Shorter name for extractPTermNames
extract :: PTerm -- ^ Where to extract names from
-> [Name] -- ^ Extracted names
extract = extractPTermNames
-- | Helper function for extractPTermNames
extractPArg :: PArg -> [Name]
extractPArg (PImp {pname=n, getTm=p}) = n : extract p
extractPArg (PExp {getTm=p}) = extract p
extractPArg (PConstraint {getTm=p}) = extract p
extractPArg (PTacImplicit {pname=n, getScript=p1, getTm=p2})
= n : (concatMap extract [p1, p2])
-- | Helper function for extractPTermNames
extractPDo :: PDo -> [Name]
extractPDo (DoExp _ p) = extract p
extractPDo (DoBind _ n _ p) = n : extract p
extractPDo (DoBindP _ p1 p2 ps) = let (ps1, ps2) = unzip ps
ps' = ps1 ++ ps2
in concatMap extract (p1 : p2 : ps')
extractPDo (DoLet _ _ n _ p1 p2) = n : concatMap extract [p1, p2]
extractPDo (DoLetP _ p1 p2) = concatMap extract [p1, p2]
extractPDo (DoRewrite _ p) = extract p
-- | Helper function for extractPTermNames
extractPTactic :: PTactic -> [Name]
extractPTactic (Intro ns) = ns
extractPTactic (Focus n) = [n]
extractPTactic (Refine n _) = [n]
extractPTactic (Rewrite p) = extract p
extractPTactic (Induction p) = extract p
extractPTactic (CaseTac p) = extract p
extractPTactic (Equiv p) = extract p
extractPTactic (MatchRefine n) = [n]
extractPTactic (LetTac n p) = n : extract p
extractPTactic (LetTacTy n p1 p2) = n : concatMap extract [p1, p2]
extractPTactic (Exact p) = extract p
extractPTactic (ProofSearch _ _ _ m _ ns) | Just n <- m = n : ns
extractPTactic (ProofSearch _ _ _ _ _ ns) = ns
extractPTactic (Try t1 t2) = concatMap extractPTactic [t1, t2]
extractPTactic (TSeq t1 t2) = concatMap extractPTactic [t1, t2]
extractPTactic (ApplyTactic p) = extract p
extractPTactic (ByReflection p) = extract p
extractPTactic (Reflect p) = extract p
extractPTactic (Fill p) = extract p
extractPTactic (GoalType _ t) = extractPTactic t
extractPTactic (TCheck p) = extract p
extractPTactic (TEval p) = extract p
extractPTactic _ = []
-- ------------------------------------------------------- [ HTML Generation ]
-- | Generates the actual HTML output based on info from a NsDict
-- A merge of the new docs and any existing docs located in the output dir
-- is attempted.
-- TODO: Ensure the merge always succeeds.
-- Currently the content of 'docs/<builtins>.html' may change between
-- runs, thus not always containing all items referred from other
-- namespace .html files.
createDocs :: IState -- ^ Needed to determine the types of names
-> NsDict -- ^ All info from which to generate docs
-> FilePath -- ^ The base directory to which
-- documentation will be written.
-> IO (Failable ())
createDocs ist nsd out =
do new <- not `fmap` (doesFileExist $ out </> "IdrisDoc")
existing_nss <- existingNamespaces out
let nss = S.union (M.keysSet nsd) existing_nss
dExists <- doesDirectoryExist out
if new && dExists then err $ "Output directory (" ++ out ++ ") is" ++
" already in use for other than IdrisDoc."
else do
createDirectoryIfMissing True out
foldl docGen (return ()) (M.toList nsd)
createIndex nss out
-- Create an empty IdrisDoc file to signal 'out' is used for IdrisDoc
if new -- But only if it not already existed...
then withFile (out </> "IdrisDoc") WriteMode ((flip hPutStr) "")
else return ()
copyDependencies out
return $ Right ()
where docGen io (n, c) = do io; createNsDoc ist n c out
-- | (Over)writes the 'index.html' file in the given directory with
-- an (updated) index of namespaces in the documentation
createIndex :: S.Set NsName -- ^ Set of namespace names to
-- include in the index
-> FilePath -- ^ The base directory to which
-- documentation will be written.
-> IO ()
createIndex nss out =
do (path, h) <- openTempFile out "index.html"
BS2.hPut h $ renderHtml $ wrapper Nothing $ do
H.h1 "Namespaces"
H.ul ! class_ "names" $ do
let path ns = "docs" ++ "/" ++ genRelNsPath ns "html"
item ns = do let n = toHtml $ nsName2Str ns
link = toValue $ path ns
H.li $ H.a ! href link ! class_ "code" $ n
sort = L.sortBy (\n1 n2 -> reverse n1 `compare` reverse n2)
forM_ (sort $ S.toList nss) item
hClose h
renameFile path (out </> "index.html")
-- | Generates an HTML file for a namespace and its contents.
-- The location for e.g. Prelude.Algebra is <base>/docs/Prelude.Algebra.html
createNsDoc :: IState -- ^ Needed to determine the types of names
-> NsName -- ^ The name of the namespace to
-- create documentation for
-> NsInfo -- ^ The contents of the namespace
-> FilePath -- ^ The base directory to which
-- documentation will be written.
-> IO ()
createNsDoc ist ns content out =
do let tpath = out </> "docs" </> (genRelNsPath ns "html")
dir = takeDirectory tpath
file = takeFileName tpath
haveDocs (_, Just d, _) = [d]
haveDocs _ = []
-- We cannot do anything without a Doc
content' = concatMap haveDocs (nsContents content)
createDirectoryIfMissing True dir
(path, h) <- openTempFile dir file
BS2.hPut h $ renderHtml $ wrapper (Just ns) $ do
H.h1 $ toHtml (nsName2Str ns)
case nsDocstring content of
Nothing -> mempty
Just docstring -> Docstrings.renderHtml docstring
H.dl ! class_ "decls" $ forM_ content' (createOtherDoc ist)
hClose h
renameFile path tpath
-- | Generates a relative filepath for a namespace, appending an extension
genRelNsPath :: NsName -- ^ Namespace to generate a path for
-> String -- ^ Extension suffix
-> FilePath
genRelNsPath ns suffix = nsName2Str ns <.> suffix
-- | Generates an HTML type signature with proper tags
-- TODO: Turn docstrings into title attributes more robustly
genTypeHeader :: IState -- ^ Needed to determine the types of names
-> FunDoc -- ^ Type to generate type declaration for
-> H.Html -- ^ Resulting HTML
genTypeHeader ist (FD n _ args ftype _) = do
H.span ! class_ (toValue $ "name " ++ getType n)
! title (toValue $ show n)
$ toHtml $ name $ nsroot n
H.span ! class_ "word" $ do nbsp; ":"; nbsp
H.span ! class_ "signature" $ preEscapedToHtml htmlSignature
where
htmlSignature = displayDecorated decorator $ renderCompact signature
signature = pprintPTerm defaultPPOption [] names (idris_infixes ist) ftype
names = [ n | (n@(UN n'), _, _, _) <- args,
not (T.isPrefixOf (txt "__") n') ]
decorator (AnnConst c) str | constIsType c = htmlSpan str "type" str
| otherwise = htmlSpan str "data" str
decorator (AnnData _ _) str = htmlSpan str "data" str
decorator (AnnType _ _) str = htmlSpan str "type" str
decorator AnnKeyword str = htmlSpan "" "keyword" str
decorator (AnnBoundName n i) str | Just t <- M.lookup n docs =
let cs = (if i then "implicit " else "") ++ "documented boundvar"
in htmlSpan t cs str
decorator (AnnBoundName _ i) str =
let cs = (if i then "implicit " else "") ++ "boundvar"
in htmlSpan "" cs str
decorator (AnnName n _ _ _) str
| filterName n = htmlLink (show n) (getType n) (link n) str
| otherwise = htmlSpan "" (getType n) str
decorator (AnnTextFmt BoldText) str = "<b>" ++ str ++ "</b>"
decorator (AnnTextFmt UnderlineText) str = "<u>" ++ str ++ "</u>"
decorator (AnnTextFmt ItalicText) str = "<i>" ++ str ++ "</i>"
decorator _ str = str
htmlSpan :: String -> String -> String -> String
htmlSpan t cs str = do
R.renderHtml $ H.span ! class_ (toValue cs)
! title (toValue t)
$ toHtml str
htmlLink :: String -> String -> String -> String -> String
htmlLink t cs a str = do
R.renderHtml $ H.a ! class_ (toValue cs)
! title (toValue t) ! href (toValue a)
$ toHtml str
docs = M.fromList $ mapMaybe docExtractor args
docExtractor (_, _, _, Nothing) = Nothing
docExtractor (n, _, _, Just d) = Just (n, doc2Str d)
-- TODO: Remove <p> tags more robustly
doc2Str d = let dirty = renderMarkup $ contents $ Docstrings.renderHtml d
in take (length dirty - 8) $ drop 3 dirty
name (NS n ns) = show (NS (sUN $ name n) ns)
name n = let n' = show n
in if (head n') `elem` opChars
then '(':(n' ++ ")")
else n'
link n = let path = genRelNsPath (getNs n) "html"
in path ++ "#" ++ (show n)
getType :: Name -> String
getType n = let ctxt = tt_ctxt ist
in case () of
_ | isDConName n ctxt -> "constructor"
_ | isFnName n ctxt -> "function"
_ | isTConName n ctxt -> "type"
_ | otherwise -> ""
-- | Generates HTML documentation for a function.
createFunDoc :: IState -- ^ Needed to determine the types of names
-> FunDoc -- ^ Function to generate block for
-> H.Html -- ^ Resulting HTML
createFunDoc ist fd@(FD name docstring args ftype fixity) = do
H.dt ! (A.id $ toValue $ show name) $ genTypeHeader ist fd
H.dd $ do
(if nullDocstring docstring then mempty else Docstrings.renderHtml docstring)
let args' = filter (\(_, _, _, d) -> isJust d) args
if (not $ null args') || (isJust fixity)
then H.dl $ do
if (isJust fixity) then do
H.dt ! class_ "fixity" $ "Fixity"
let f = fromJust fixity
H.dd ! class_ "fixity" ! title (toValue $ show f) $ genFix f
else mempty
forM_ args' genArg
else mempty
where genFix (Infixl {prec=p}) =
toHtml $ "Left associative, precedence " ++ show p
genFix (Infixr {prec=p}) =
toHtml $ "Left associative, precedence " ++ show p
genFix (InfixN {prec=p}) =
toHtml $ "Non-associative, precedence " ++ show p
genFix (PrefixN {prec=p}) =
toHtml $ "Prefix, precedence " ++ show p
genArg (_, _, _, Nothing) = mempty
genArg (name, _, _, Just docstring) = do
H.dt $ toHtml $ show name
H.dd $ Docstrings.renderHtml docstring
-- | Generates HTML documentation for any Docs type
-- TODO: Generate actual signatures for interfaces
createOtherDoc :: IState -- ^ Needed to determine the types of names
-> Docs -- ^ Namespace item to generate HTML block for
-> H.Html -- ^ Resulting HTML
createOtherDoc ist (FunDoc fd) = createFunDoc ist fd
createOtherDoc ist (InterfaceDoc n docstring fds _ _ _ _ _ c) = do
H.dt ! (A.id $ toValue $ show n) $ do
H.span ! class_ "word" $ do "interface"; nbsp
H.span ! class_ "name type"
! title (toValue $ show n)
$ toHtml $ name $ nsroot n
H.span ! class_ "signature" $ nbsp
H.dd $ do
(if nullDocstring docstring then mempty else Docstrings.renderHtml docstring)
H.dl ! class_ "decls" $ (forM_ (maybeToList c ++ fds) (createFunDoc ist))
where name (NS n ns) = show (NS (sUN $ name n) ns)
name n = let n' = show n
in if (head n') `elem` opChars
then '(':(n' ++ ")")
else n'
createOtherDoc ist (RecordDoc n doc ctor projs params) = do
H.dt ! (A.id $ toValue $ show n) $ do
H.span ! class_ "word" $ do "record"; nbsp
H.span ! class_ "name type"
! title (toValue $ show n)
$ toHtml $ name $ nsroot n
H.span ! class_ "type" $ do nbsp ; prettyParameters
H.dd $ do
(if nullDocstring doc then mempty else Docstrings.renderHtml doc)
if not $ null params
then H.dl $ forM_ params genParam
else mempty
H.dl ! class_ "decls" $ createFunDoc ist ctor
H.dl ! class_ "decls" $ forM_ projs (createFunDoc ist)
where name (NS n ns) = show (NS (sUN $ name n) ns)
name n = let n' = show n
in if (head n') `elem` opChars
then '(':(n' ++ ")")
else n'
genParam (name, pt, docstring) = do
H.dt $ toHtml $ show (nsroot name)
H.dd $ maybe nbsp Docstrings.renderHtml docstring
prettyParameters = toHtml $ unwords [show $ nsroot n | (n,_,_) <- params]
createOtherDoc ist (DataDoc fd@(FD n docstring args _ _) fds) = do
H.dt ! (A.id $ toValue $ show n) $ do
H.span ! class_ "word" $ do "data"; nbsp
genTypeHeader ist fd
H.dd $ do
(if nullDocstring docstring then mempty else Docstrings.renderHtml docstring)
let args' = filter (\(_, _, _, d) -> isJust d) args
if not $ null args'
then H.dl $ forM_ args' genArg
else mempty
H.dl ! class_ "decls" $ forM_ fds (createFunDoc ist)
where genArg (_, _, _, Nothing) = mempty
genArg (name, _, _, Just docstring) = do
H.dt $ toHtml $ show name
H.dd $ Docstrings.renderHtml docstring
createOtherDoc ist (NamedImplementationDoc _ fd) = createFunDoc ist fd
createOtherDoc ist (ModDoc _ docstring) = do
Docstrings.renderHtml docstring
-- | Generates everything but the actual content of the page
wrapper :: Maybe NsName -- ^ Namespace name, unless it is the index
-> H.Html -- ^ Inner HTML
-> H.Html
wrapper ns inner =
let (index, str) = extract ns
base = if index then "" else "../"
styles = base ++ "styles.css" :: String
indexPage = base ++ "index.html" :: String
in H.docTypeHtml $ do
H.head $ do
H.title $ do
"IdrisDoc"
if index then " Index" else do
": "
toHtml str
H.link ! type_ "text/css" ! rel "stylesheet"
! href (toValue styles)
H.body ! class_ (if index then "index" else "namespace") $ do
H.div ! class_ "wrapper" $ do
H.header $ do
H.strong "IdrisDoc"
if index then mempty else do
": "
toHtml str
H.nav $ H.a ! href (toValue indexPage) $ "Index"
H.div ! class_ "container" $ inner
H.footer $ do
"Produced by IdrisDoc version "
toHtml version
where extract (Just ns) = (False, nsName2Str ns)
extract _ = (True, "")
-- | Non-break space character
nbsp :: H.Html
nbsp = preEscapedToHtml (" " :: String)
-- | Returns a list of namespaces already documented in a IdrisDoc directory
existingNamespaces :: FilePath -- ^ The base directory containing the
-- 'docs' directory with existing
-- namespace pages
-> IO (S.Set NsName)
existingNamespaces out = do
let docs = out ++ "/" ++ "docs"
str2Ns s | s == rootNsStr = []
str2Ns s = reverse $ T.splitOn (T.singleton '.') (txt s)
toNs fp = do isFile <- doesFileExist $ docs </> fp
let isHtml = ".html" == takeExtension fp
name = dropExtension fp
ns = str2Ns name
return $ if isFile && isHtml then Just ns else Nothing
docsExists <- doesDirectoryExist docs
if not docsExists
then return S.empty
else do contents <- getDirectoryContents docs
namespaces <- catMaybes `fmap` (sequence $ map toNs contents)
return $ S.fromList namespaces
-- | Copies IdrisDoc dependencies such as stylesheets to a directory
copyDependencies :: FilePath -- ^ The base directory to which
-- dependencies should be written
-> IO ()
copyDependencies dir =
do styles <- getIdrisDataFileByName $ "idrisdoc" </> "styles.css"
copyFile styles (dir </> "styles.css")
| uuhan/Idris-dev | src/Idris/IdrisDoc.hs | bsd-3-clause | 30,163 | 0 | 23 | 9,558 | 8,699 | 4,430 | 4,269 | 521 | 19 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.Polynomials.Polynomials
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Simple usage of polynomials over GF(2^n), using Rijndael's
-- finite field: <http://en.wikipedia.org/wiki/Finite_field_arithmetic#Rijndael.27s_finite_field>
--
-- The functions available are:
--
-- [/pMult/] GF(2^n) Multiplication
--
-- [/pDiv/] GF(2^n) Division
--
-- [/pMod/] GF(2^n) Modulus
--
-- [/pDivMod/] GF(2^n) Division/Modulus, packed together
--
-- Note that addition in GF(2^n) is simply `xor`, so no custom function is provided.
-----------------------------------------------------------------------------
module Data.SBV.Examples.Polynomials.Polynomials where
import Data.SBV
import Data.SBV.Tools.Polynomial
-- | Helper synonym for representing GF(2^8) values, which are merely 8-bit unsigned words. The largest
-- term in such a polynomial has degree 7.
type GF28 = SWord8
-- | Multiplication in Rijndael's field; usual polynomial multiplication followed by reduction
-- by the irreducible polynomial. The irreducible used by Rijndael's field is the polynomial
-- @x^8 + x^4 + x^3 + x + 1@, which we write by giving its /exponents/ in SBV.
-- See: <http://en.wikipedia.org/wiki/Finite_field_arithmetic#Rijndael.27s_finite_field>.
-- Note that the irreducible itself is not in GF28! It has a degree of 8.
--
-- NB. You can use the 'showPoly' function to print polynomials nicely, as a mathematician would write.
gfMult :: GF28 -> GF28 -> GF28
a `gfMult` b = pMult (a, b, [8, 4, 3, 1, 0])
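-- A small worked example (illustrative, not part of the original module):
-- 0x53 and 0xCA are multiplicative inverses in Rijndael's field, the standard
-- example from the Wikipedia article referenced above.
gfMultInverseExample :: SBool
gfMultInverseExample = (0x53 `gfMult` 0xCA) .== 1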
-- | States that the unit polynomial @1@ is the unit element
multUnit :: GF28 -> SBool
multUnit x = (x `gfMult` unit) .== x
where unit = polynomial [0] -- x@0
-- | States that multiplication is commutative
multComm :: GF28 -> GF28 -> SBool
multComm x y = (x `gfMult` y) .== (y `gfMult` x)
-- | States that multiplication is associative, note that associativity
-- proofs are notoriously hard for SAT/SMT solvers
multAssoc :: GF28 -> GF28 -> GF28 -> SBool
multAssoc x y z = ((x `gfMult` y) `gfMult` z) .== (x `gfMult` (y `gfMult` z))
-- | States that the usual multiplication rule holds over GF(2^n) polynomials
-- Checks:
--
-- @
-- if (a, b) = x `pDivMod` y then x = y `pMult` a + b
-- @
--
-- being careful about @y = 0@. When the divisor is 0, the quotient is
-- defined to be 0 and the remainder is the numerator.
-- (Note that addition is simply `xor` in GF(2^8).)
polyDivMod :: GF28 -> GF28 -> SBool
polyDivMod x y = ite (y .== 0) ((0, x) .== (a, b)) (x .== (y `gfMult` a) `xor` b)
where (a, b) = x `pDivMod` y
-- | Queries
testGF28 :: IO ()
testGF28 = do
print =<< prove multUnit
print =<< prove multComm
-- print =<< prove multAssoc -- takes too long; see above note..
print =<< prove polyDivMod
| josefs/sbv | Data/SBV/Examples/Polynomials/Polynomials.hs | bsd-3-clause | 2,900 | 0 | 10 | 507 | 429 | 267 | 162 | 21 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Core.Constructs.Array
where
import Data.List
import Data.Map (notMember)
import Language.Syntactic
import Language.Syntactic.Constructs.Binding.HigherOrder (CLambda)
import Feldspar.Range
import Feldspar.Lattice
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Feldspar.Core.Constructs.Bits
import Feldspar.Core.Constructs.Binding
import Feldspar.Core.Constructs.Complex
import Feldspar.Core.Constructs.Eq
import Feldspar.Core.Constructs.Integral
import Feldspar.Core.Constructs.Logic
import Feldspar.Core.Constructs.Num
import Feldspar.Core.Constructs.Ord
data Array a
where
Parallel :: Type a => Array (Length :-> (Index -> a) :-> Full [a])
Sequential :: (Type a, Type st) =>
Array (Length :-> st :-> (Index -> st -> (a,st)) :-> Full [a])
Append :: Type a => Array ([a] :-> [a] :-> Full [a])
GetIx :: Type a => Array ([a] :-> Index :-> Full a)
SetIx :: Type a => Array ([a] :-> Index :-> a :-> Full [a])
GetLength :: Type a => Array ([a] :-> Full Length)
SetLength :: Type a => Array (Length :-> [a] :-> Full [a])
instance Semantic Array
where
semantics Append = Sem "(++)" (++)
semantics GetIx = Sem "(!)" evalGetIx
where
evalGetIx as i
| 0 <= i && i < len = genericIndex as i
| otherwise = error $ unwords
[ "getIx: accessing index"
, show i
, "outside the bounds of an array of length"
, show len
]
where
len = genericLength as
semantics GetLength = Sem "getLength" genericLength
semantics SetLength = Sem "setLength"
(\n as -> genericTake n (as ++ repeat err))
where
err = error "reading uninitialized array element"
semantics Parallel = Sem "parallel"
(\len ixf -> genericTake len $ map ixf [0..])
semantics Sequential = Sem "sequential"
(\len i step -> genericTake len $
snd $ mapAccumL (\a ix -> swap (step ix a)) i [0..])
where swap (a,b) = (b,a)
semantics SetIx = Sem "setIx" evalSetIx
where
evalSetIx as i v
| 0 <= i && i < len = genericTake i as ++ [v] ++ genericDrop (i+1) as
| otherwise = error $ unwords
[ "setIx: assigning index"
, show i
, "outside the bounds of an array of length"
, show len
]
where
len = genericLength as
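-- An illustrative-only sketch (an addition, not part of the original module):
-- the interpretation of 'Parallel' given in 'semantics' above behaves like
-- this plain Haskell function; for instance, @parallelDenotation 4 (*2)@
-- yields @[0,2,4,6]@.
parallelDenotation :: Integer -> (Integer -> a) -> [a]
parallelDenotation len ixf = genericTake len (map ixf [0 ..])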
semanticInstances ''Array
instance EvalBind Array where evalBindSym = evalBindSymDefault
instance AlphaEq dom dom dom env => AlphaEq Array Array dom env
where
alphaEqSym = alphaEqSymDefault
instance Sharable Array
instance Cumulative Array
instance SizeProp (Array :|| Type)
where
sizeProp (C' Parallel) (WrapFull len :* WrapFull ixf :* Nil) =
infoSize len :> snd (infoSize ixf)
sizeProp (C' Sequential) (WrapFull len :* _ :* WrapFull step :* Nil) =
infoSize len :> fst (snd $ snd $ infoSize step)
sizeProp (C' Append) (WrapFull arra :* WrapFull arrb :* Nil) =
(alen + blen) :> (aelem \/ belem)
where
alen :> aelem = infoSize arra
blen :> belem = infoSize arrb
sizeProp (C' GetIx) (WrapFull arr :* _ :* Nil) = el
where
_ :> el = infoSize arr
sizeProp (C' SetIx) (WrapFull arr :* _ :* WrapFull e :* Nil) =
len :> (el \/ infoSize e)
where
len :> el = infoSize arr
sizeProp (C' GetLength) (WrapFull arr :* Nil) = len
where
len :> _ = infoSize arr
sizeProp (C' SetLength) (WrapFull len :* WrapFull arr :* Nil) =
infoSize len :> el
where
_ :> el = infoSize arr
instance
( Cumulative dom
, (Array :|| Type) :<: dom
, (BITS :|| Type) :<: dom
, (EQ :|| Type) :<: dom
, (NUM :|| Type) :<: dom
, Let :<: dom
, (ORD :|| Type) :<: dom
, (INTEGRAL :|| Type) :<: dom
, (COMPLEX :|| Type) :<: dom
, (Logic :|| Type) :<: dom
, (Variable :|| Type) :<: dom
, CLambda Type :<: dom
, OptimizeSuper dom
) =>
Optimize (Array :|| Type) dom
where
optimizeFeat opts sym@(C' Parallel) (len :* ixf :* Nil) = do
len' <- optimizeM opts len
let szI = infoSize (getInfo len')
ixRange = rangeByRange 0 (rangeSubSat szI 1)
ixf' <- optimizeFunction opts (optimizeM opts) (mkInfo ixRange) ixf
constructFeat opts sym (len' :* ixf' :* Nil)
optimizeFeat opts sym@(C' Sequential) (len :* inital :* step :* Nil) = do
len' <- optimizeM opts len
init' <- optimizeM opts inital
let szI = infoSize (getInfo len')
ixRange = rangeByRange 0 (rangeSubSat szI 1)
step' <- optimizeFunction opts
(optimizeM opts) -- TODO (optimizeFunctionFix optimizeM (mkInfo universal))
(mkInfo ixRange)
step
constructFeat opts sym (len' :* init' :* step' :* Nil)
-- TODO Should use fixed-point iteration, but `optimizeFunctionFix` only
-- works for functions of type `a -> a`.
optimizeFeat opts a args = optimizeFeatDefault opts a args
-- parallel l (\x -> let y = e in e') => let y = e in parallel l (\x -> e')
--
-- Test case: nothing simple. Look at the index calculation in the
-- outer second loop in turboDecode.
constructFeatOpt opts sym@(C' Parallel) (len :* (lam1 :$ (lt :$ e1 :$ (lam2 :$ bd))) :* Nil)
| Just (SubConstr2 (Lambda v1)) <- prjLambda lam1
, Just lam2'@(SubConstr2 (Lambda _ )) <- prjLambda lam2
, Just Let <- prj lt
, v1 `notMember` infoVars (getInfo e1)
, SICS `inTarget` opts
= do
sym' <- constructFeat opts sym (len :* (lam1 :$ bd) :* Nil)
sym'' <- constructFeat opts (reuseCLambda lam2') (sym' :* Nil)
constructFeat opts Let (e1 :* sym'' :* Nil)
constructFeatOpt _ (C' Parallel) (len :* _ :* Nil)
| Just 0 <- viewLiteral len
= return $ literalDecor []
-- TODO Optimize when length is one. This requires a way to create an
-- uninitialized array of length one, and setting the first element.
-- Use `betaReduce` to apply `ixf` to the literal 0.
constructFeatOpt opts (C' Parallel) (len :* (lam :$ (gix :$ arr2 :$ ix)) :* Nil)
| Just (SubConstr2 (Lambda v1)) <- prjLambda lam
, Just (C' GetIx) <- prjF gix
, Just (C' (Variable v2)) <- prjF ix
, v1 == v2
, v1 `notMember` infoVars (getInfo arr2)
= constructFeat opts (c' SetLength) (len :* arr2 :* Nil)
constructFeatOpt _ (C' Sequential) (len :* _ :* _ :* Nil)
| Just 0 <- viewLiteral len
= return $ literalDecor []
-- TODO Optimize when length is one. This requires a way to create an
-- uninitialized array of length one, and setting the first element.
-- Use `betaReduce` to apply the step function.
constructFeatOpt _ (C' Append) (a :* b :* Nil)
| Just [] <- viewLiteral a = return b
| Just [] <- viewLiteral b = return a
constructFeatOpt opts (C' GetIx) ((op :$ _ :$ ixf) :* ix :* Nil)
| Just (C' Parallel) <- prjF op
= optimizeM opts $ betaReduce (stripDecor ix) (stripDecor ixf)
-- TODO should not need to drop the decorations
constructFeatOpt opts s@(C' GetIx) ((op :$ _ :$ arr) :* ix :* Nil)
| Just (C' SetLength) <- prjF op
= constructFeat opts s (arr :* ix :* Nil)
constructFeatOpt _ (C' GetLength) (arr :* Nil)
| Just as <- viewLiteral arr = return $ literalDecor $ genericLength as
constructFeatOpt opts s@(C' GetLength) ((op :$ a :$ _ :$ _) :* Nil)
| Just (C' Sequential) <- prjF op = return a
| Just (C' SetIx) <- prjF op = constructFeat opts s (a :* Nil)
constructFeatOpt opts sym@(C' GetLength) ((op :$ a :$ b) :* Nil)
| Just (C' Append) <- prjF op = do
aLen <- constructFeat opts sym (a :* Nil)
bLen <- constructFeat opts sym (b :* Nil)
constructFeatOpt opts (c' Add) (aLen :* bLen :* Nil)
| Just (C' Parallel) <- prjF op = return a
| Just (C' SetLength) <- prjF op = return a
constructFeatOpt _ (C' SetLength) (len :* _ :* Nil)
| Just 0 <- viewLiteral len = return $ literalDecor []
constructFeatOpt _ (C' SetLength) ((getLength :$ arr') :* arr :* Nil)
| Just (C' GetLength) <- prjF getLength
, alphaEq arr arr'
= return arr
constructFeatOpt opts (C' SetLength) (len1 :* (par :$ len2 :$ ixf) :* Nil)
| Just p@(C' Parallel) <- prjF par
, alphaEq len1 len2
= constructFeat opts p (len2 :* ixf :* Nil)
constructFeatOpt opts (C' SetLength) (len1 :* (sq :$ len2 :$ ini :$ step) :* Nil)
| Just s@(C' Sequential) <- prjF sq
, alphaEq len1 len2
= constructFeat opts s (len2 :* ini :* step :* Nil)
constructFeatOpt _ (C' SetLength) (len :* arr :* Nil)
| rlen <- infoSize $ getInfo len
, rarr :> _ <- infoSize $ getInfo arr
, isSingleton rlen
, isSingleton rarr
, rlen == rarr
= return arr
constructFeatOpt opts a args = constructFeatUnOpt opts a args
constructFeatUnOpt opts x@(C' _) = constructFeatUnOptDefault opts x
| emwap/feldspar-language | src/Feldspar/Core/Constructs/Array.hs | bsd-3-clause | 11,302 | 0 | 16 | 3,256 | 3,384 | 1,701 | 1,683 | 188 | 0 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.Rsc.Typecheck.Environment (
-- * Create env
initModuleEnv
, initGlobalEnv
, initClassCtorEnv
, initClassMethEnv
, initCallableEnv
-- * Search env
, tcEnvFindTy
, tcEnvFindSymInfo
, tcEnvFindReturn
, tcEnvAdd
, tcEnvAdds
, tcEnvAddBounds
, Unif
) where
import Language.Fixpoint.Misc (safeZip)
import qualified Language.Fixpoint.Types as F
import Language.Rsc.Annotations
import Language.Rsc.AST
import Language.Rsc.ClassHierarchy
import Language.Rsc.Core.Env
import Language.Rsc.Environment
import Language.Rsc.Locations
import Language.Rsc.Names
import Language.Rsc.Pretty
import Language.Rsc.Program
import Language.Rsc.Symbols
import Language.Rsc.Typecheck.TCMonad
import Language.Rsc.Typecheck.Types
import Language.Rsc.Typecheck.Unify (Unif)
import Language.Rsc.Types
-- import Debug.Trace
--------------------------------------------------------------------------------
-- | Environment initialization
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
initGlobalEnv :: Unif r => TcRsc r -> ClassHierarchy r -> TCM r (TCEnv r)
--------------------------------------------------------------------------------
initGlobalEnv (Rsc { code = Src ss }) cha = do
nms <- either fatal return (symEnv ss)
return $ TCE nms bnds ctx pth cha mut tThis fId
where
bnds = mempty
ctx = emptyContext
pth = emptyPath
mut = Nothing
tThis = Nothing
fId = (-1)
-- This will be called *last* on every constructor, method, and function.
-- It is transparent to the incoming environment's path, cha, mut, and this.
--
-- TODO: Shadow `this` binding (in case this is a class context)
--
--------------------------------------------------------------------------------
initCallableEnv :: (PP f, IsLocated f, Unif r)
=> AnnTc r -> TCEnv r -> f
-> IOverloadSig r
-> [Id (AnnTc r)]
-> [Statement (AnnTc r)]
-> TCM r (TCEnv r)
--------------------------------------------------------------------------------
initCallableEnv l γ f fty xs s = do
locs <- either fatal return (symEnv s)
let nms1 = locs `mappend` nms0 -- favors first
let nms = envAddReturn f siRet nms1
return $ tcEnvAdds arg
$ tcEnvAdds varBs
$ tcEnvAdds tyBs
$ TCE nms bnds ctx pth cha mut tThis (fId l)
where
(i, sig) = fty
(bs,xts,t) = sig
nms0 = toFgn (envNames γ)
siRet = SI rSym Local ReturnVar t
rSym = returnSymbol
tyBs = [lsia α | α <- αs]
varBs = [(x, siw x t) | (x, t) <- safeZip "initCallableEnv" xs ts]
arg = [(getArgId (srcPos l), mkArgumentsSI l ts)]
bnds = envAdds [(s,t) | BTV s _ (Just t) <- bs] $ envBounds γ
ctx = pushContext i (envCtx γ)
pth = envPath γ
cha = envCHA γ
mut = envMut γ
tThis = envThis γ
ts = map b_type xts
αs = map btvToTV bs
lsia x = (Loc (srcPos l) x, sia x (tVar x))
sia x t = SI (F.symbol x) Local Ambient t
siw x t = SI (F.symbol x) Local WriteLocal t
-- | `initClassCtorEnv` makes `this` a Unique binding
--------------------------------------------------------------------------------
initClassCtorEnv :: Unif r => TypeSigQ AK r -> TCEnv r -> TCEnv r
--------------------------------------------------------------------------------
initClassCtorEnv (TS _ (BGen nm bs) _) γ = tcEnvAdd eThis γ'
where
γ' = γ { tce_mut = Just tUQ
, tce_this = Just tThis
, tce_bounds = envAdds bts (tce_bounds γ)
}
bts = [(s,t) | BTV s _ (Just t) <- bs]
tThis = adjUQ (TRef (Gen nm (map btVar bs)) fTop)
eThis = SI thisSym Local RdOnly tThis
adjUQ (TRef (Gen n (_:ps)) r) = TRef (Gen n (tUQ:ps)) r
adjUQ t = t
--------------------------------------------------------------------------------
initClassMethEnv :: Unif r => MutabilityR r -> TypeSig r -> TCEnv r -> TCEnv r
--------------------------------------------------------------------------------
initClassMethEnv m (TS _ (BGen nm bs) _) γ = tcEnvAdd eThis γ'
where
γ' = γ { tce_bounds = envAdds bts (tce_bounds γ)
, tce_mut = Just m
}
bts = [(s,t) | BTV s _ (Just t) <- bs]
tThis = adjMut $ TRef (Gen nm (map btVar bs)) fTop
eThis = SI thisSym Local RdOnly tThis
adjMut (TRef (Gen n (_:ps)) r) = TRef (Gen n (m:ps)) r
adjMut t = t
--------------------------------------------------------------------------------
initModuleEnv :: (Unif r, F.Symbolic n, PP n)
=> TCEnv r -> n -> [Statement (AnnTc r)] -> TCM r (TCEnv r)
--------------------------------------------------------------------------------
initModuleEnv γ n s = do
nms1 <- either fatal return (symEnv s)
let nms = nms1 `mappend` nms0
return $ TCE nms bnds ctx pth cha mut tThis fnId
where
nms0 = toFgn (envNames γ)
bnds = envBounds γ
ctx = envCtx γ
pth = extendAbsPath (envPath γ) n
cha = envCHA γ
mut = Nothing -- 'this' gets out of scope when entering a module
tThis = Nothing
fnId = envFnId γ
--------------------------------------------------------------------------------
-- | Environment wrappers
--------------------------------------------------------------------------------
tcEnvAdds xs γ = γ { tce_names = envAdds xs $ tce_names γ }
--------------------------------------------------------------------------------
tcEnvAdd :: SymInfo r -> TCEnv r -> TCEnv r
--------------------------------------------------------------------------------
tcEnvAdd s γ = γ { tce_names = envAdd (F.symbol s) s $ tce_names γ }
--------------------------------------------------------------------------------
tcEnvFindTy :: (Unif r, F.Symbolic x, IsLocated x) => x -> TCEnv r -> Maybe (RType r)
--------------------------------------------------------------------------------
tcEnvFindTy x γ = fmap v_type (tcEnvFindSymInfo x γ)
--------------------------------------------------------------------------------
tcEnvFindSymInfo :: (Unif r, F.Symbolic x) => x -> TCEnv r -> Maybe (SymInfo r)
--------------------------------------------------------------------------------
tcEnvFindSymInfo x γ = envFindTy x (tce_names γ)
tcEnvFindReturn = v_type . envFindReturn . tce_names
--------------------------------------------------------------------------------
tcEnvAddBounds :: [BTVar r] -> TCEnv r -> TCEnv r
--------------------------------------------------------------------------------
tcEnvAddBounds = flip $ foldr go
where
go (BTV α _ (Just t)) γ = γ { tce_bounds = envAdd α t $ tce_bounds γ }
go (BTV _ _ _ ) γ = γ
| UCSD-PL/RefScript | src/Language/Rsc/Typecheck/Environment.hs | bsd-3-clause | 7,476 | 0 | 15 | 1,859 | 1,953 | 1,028 | 925 | 124 | 2 |
{-# LANGUAGE QuasiQuotes #-}
{-@ LIQUID "--no-termination" @-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude
---------------------------------------------------------------------
----------------------- Datatype Definition -------------------------
---------------------------------------------------------------------
type Bndr
= Int
data Expr
= Lam Bndr Expr
| Var Bndr
| App Expr Expr
[lq|
data Expr [elen]
= Lam (x::Bndr) (e::Expr)
| Var (x::Bndr)
| App (e1::Expr) (e2::Expr)
|]
[lq| measure elen :: Expr -> Int
elen(Var x) = 0
elen(Lam x e) = 1 + (elen e)
elen(App e1 e2) = 1 + (elen e1) + (elen e2)
|]
[lq| invariant {v:Expr | (elen v) >= 0} |]
[lq| measure isValue :: Expr -> Prop
isValue (Lam x e) = true
isValue (Var x) = false
isValue (App e1 e2) = false
|]
[lq| type Value = {v: Expr | isValue v } |]
---------------------------------------------------------------------
-------------------------- The Evaluator ----------------------------
---------------------------------------------------------------------
evalVar x ((y, v):sto)
| x == y
= v
| otherwise
= evalVar x sto
evalVar x []
= error "unbound variable"
-- A "value" is simply: {v: Expr | isValue v }
[lq| Decrease eval 2 |]
[lq| eval :: [(Bndr, Value)] -> Expr -> ([(Bndr, Value)], Value) |]
eval sto (Var x)
= (sto, evalVar x sto)
eval sto (App e1 e2)
= let (_, v2 ) = eval sto e2
(sto1XXX, e1') = eval sto e1
in case e1' of
(Lam x e) -> eval ((x, v2): sto1XXX) e
_ -> error "non-function application"
eval sto (Lam x e)
= (sto, Lam x e)
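-- An illustrative-only example (an addition, not part of the original file):
-- the identity combinator applied to itself evaluates back to the identity,
-- leaving the (empty) store unchanged.
exampleIdSelfApp :: ([(Bndr, Expr)], Expr)
exampleIdSelfApp = eval [] (App (Lam 0 (Var 0)) (Lam 0 (Var 0)))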
-----------------------------------------------------------------------
---------------------------- Value Checker ----------------------------
-----------------------------------------------------------------------
check (Lam _ _) = True
check (Var _) = liquidAssertB False
check (App _ _) = liquidAssertB False
-----------------------------------------------------------------------
---------------------------- Unit Tests -------------------------------
-----------------------------------------------------------------------
mysnd (x, y) = y
tests =
let (f,g,x) = (0,1,2)
e1 = Lam x (Var x)
e2 = App e1 e1
e3 = Lam f (Lam g (Lam x (App (Var f) (App (Var g) (Var x)))))
vs = map (mysnd . eval []) [e1, e2, e3]
in map check vs
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/LambdaEvalMini.hs | bsd-3-clause | 2,491 | 0 | 19 | 558 | 548 | 300 | 248 | 44 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module TemplateSpec where
import Data.Morgue.Agenda.Render
import Data.Morgue.Agenda.Types
import Test.Hspec
import Text.Mustache
spec :: Spec
spec = describe "Data.Morgue.Agenda.Render" $ do
elementTemplateSpec
treeTemplateSpec
elementTemplate :: Template
elementTemplate = (someTemplate Plain) { templateActual = "element" }
treeTemplate :: Template
treeTemplate = (someTemplate Plain) { templateActual = "tree" }
elementTemplateSpec :: Spec
elementTemplateSpec = describe "elementTemplate" $ do
it "displays simple values correctly" True -- TODO: fix
{-render elementTemplate (Elem ["test"] (Just True) Nothing [])
`shouldBe` "todo indicator here \ttest"-}
it "copes with random input" True -- TODO: quickcheck
treeTemplateSpec :: Spec
treeTemplateSpec = describe "treeTemplate" $ do
it "displays simple values correctly" $
render treeTemplate (AgendaFile "" [AgendaTree (Elem ["root"] Nothing Nothing []) []])
`shouldBe` "root\n"
it "handles indentation correctly" True -- TODO
{- The template is unusable for obvious reasons
Template
{ templateActual = PName {unPName = "tree"}
, templateCache = fromList
[ (PName {unPName = "both"},[])
, ( PName {unPName = "element"}
, [ Section (Key {unKey = ["toDo"]}) [TextBlock "todo indicator here \t"]
, Section (Key {unKey = ["time"]}) [TextBlock "time format here \t"]
, UnescapedVar (Key {unKey = ["description"]})
, Section (Key {unKey = ["tags"]}) [TextBlock " \ttags here"]
, TextBlock ""
]
)
, ( PName {unPName = "main"}
, [Partial (PName {unPName = "tree"}) (Just (Pos 1))]
)
, ( PName {unPName = "timed"}
, [ TextBlock "Week agenda ("
, UnescapedVar (Key {unKey = ["weeks"]})
, TextBlock "):\n"
, Section (Key {unKey = ["days"]})
[ Partial (PName {unPName = "date"}) Nothing
, TextBlock ":\n"
, Partial (PName {unPName = "tree"}) (Just (Pos 1))
, InvertedSection (Key {unKey = ["tree"]}) [TextBlock "\n"]
]
]
)
, ( PName {unPName = "todo"}
, [ TextBlock "Global list of TODO entries:\n"
, Partial (PName {unPName = "tree"}) (Just (Pos 1))
]
)
, ( PName {unPName = "tree"}
, [ Section (Key {unKey = ["element"]})
[Partial (PName {unPName = "element"}) Nothing]
, TextBlock "\n",Section (Key {unKey = ["children"]})
[ Section (Key {unKey = ["element"]})
[ UnescapedVar (Key {unKey = ["indent"]})
, TextBlock ": "
, Partial (PName {unPName = "element"}) Nothing
]
]
, TextBlock "\n"
]
)
]
}
-}
| ibabushkin/morgue | tests/TemplateSpec.hs | bsd-3-clause | 3,002 | 0 | 17 | 978 | 219 | 119 | 100 | 24 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Internal
-- Copyright : (C) 2012-14 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : Rank2Types
--
-- These are some of the explicit 'Functor' instances that leak into the
-- type signatures of @Control.Lens@. You shouldn't need to import this
-- module directly for most use-cases.
--
----------------------------------------------------------------------------
module Control.Lens.Internal
( module Control.Lens.Internal.Action
, module Control.Lens.Internal.Bazaar
, module Control.Lens.Internal.Context
, module Control.Lens.Internal.Fold
, module Control.Lens.Internal.Getter
, module Control.Lens.Internal.Indexed
, module Control.Lens.Internal.Iso
, module Control.Lens.Internal.Level
, module Control.Lens.Internal.Magma
, module Control.Lens.Internal.Prism
, module Control.Lens.Internal.Review
, module Control.Lens.Internal.Setter
, module Control.Lens.Internal.Zoom
) where
import Control.Lens.Internal.Action
import Control.Lens.Internal.Bazaar
import Control.Lens.Internal.Context
import Control.Lens.Internal.Fold
import Control.Lens.Internal.Getter
import Control.Lens.Internal.Indexed
import Control.Lens.Internal.Instances ()
import Control.Lens.Internal.Iso
import Control.Lens.Internal.Level
import Control.Lens.Internal.Magma
import Control.Lens.Internal.Prism
import Control.Lens.Internal.Review
import Control.Lens.Internal.Setter
import Control.Lens.Internal.Zoom
#ifdef HLINT
{-# ANN module "HLint: ignore Use import/export shortcut" #-}
#endif
| hvr/lens | src/Control/Lens/Internal.hs | bsd-3-clause | 1,752 | 0 | 5 | 207 | 232 | 174 | 58 | 29 | 0 |
-- |
-- Module: Math.NumberTheory.MoebiusInversion
-- Copyright: (c) 2012 Daniel Fischer
-- Licence: MIT
-- Maintainer: Daniel Fischer <[email protected]>
--
-- Generalised Möbius inversion
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Math.NumberTheory.MoebiusInversion
( generalInversion
, totientSum
) where
import Control.Monad
import Control.Monad.ST
import Data.Proxy
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic.Mutable as MG
import Math.NumberTheory.Roots
import Math.NumberTheory.Utils.FromIntegral
-- | @totientSum n@ is, for @n > 0@, the sum of @[totient k | k <- [1 .. n]]@,
-- computed via generalised Möbius inversion.
-- See <http://mathworld.wolfram.com/TotientSummatoryFunction.html> for the
-- formula used for @totientSum@.
--
-- >>> import Data.Proxy
-- >>> totientSum (Proxy :: Proxy Data.Vector.Unboxed.Vector) 100 :: Int
-- 3044
-- >>> totientSum (Proxy :: Proxy Data.Vector.Vector) 100 :: Integer
-- 3044
totientSum
:: (Integral t, G.Vector v t)
=> Proxy v
-> Word
-> t
totientSum _ 0 = 0
totientSum proxy n = generalInversion proxy (triangle . fromIntegral) n
where
triangle k = (k * (k + 1)) `quot` 2
-- | @generalInversion g n@ evaluates the generalised Möbius inversion of @g@
-- at the argument @n@.
--
-- The generalised Möbius inversion implemented here allows an efficient
-- calculation of isolated values of the function @f : N -> Z@ if the function
-- @g@ defined by
--
-- >
-- > g n = sum [f (n `quot` k) | k <- [1 .. n]]
-- >
--
-- can be cheaply computed. By the generalised Möbius inversion formula, then
--
-- >
-- > f n = sum [moebius k * g (n `quot` k) | k <- [1 .. n]]
-- >
--
-- which allows the computation in /O/(n) steps, if the values of the
-- Möbius function are known. A slightly different formula, used here,
-- does not need the values of the Möbius function and allows the
-- computation in /O/(n^0.75) steps, using /O/(n^0.5) memory.
--
-- An example of a pair of such functions where the inversion allows a
-- more efficient computation than the direct approach is
--
-- >
-- > f n = number of reduced proper fractions with denominator <= n
-- >
-- > g n = number of proper fractions with denominator <= n
-- >
--
-- (a /proper fraction/ is a fraction @0 < n/d < 1@). Then @f n@ is the
-- cardinality of the Farey sequence of order @n@ (minus 1 or 2 if 0 and/or
-- 1 are included in the Farey sequence), or the sum of the totients of
-- the numbers @2 <= k <= n@. @f n@ is not easily directly computable,
-- but then @g n = n*(n-1)/2@ is very easy to compute, and hence the inversion
-- gives an efficient method of computing @f n@.
--
-- Since the function arguments are used as array indices, the domain of
-- @f@ is restricted to 'Int'.
--
-- The value @f n@ is then computed by @generalInversion g n@. Note that when
-- many values of @f@ are needed, there are far more efficient methods, this
-- method is only appropriate to compute isolated values of @f@.
generalInversion
:: (Num t, G.Vector v t)
=> Proxy v
-> (Word -> t)
-> Word
-> t
generalInversion proxy fun n = case n of
  0 -> error "Möbius inversion only defined on positive domain"
1 -> fun 1
2 -> fun 2 - fun 1
3 -> fun 3 - 2*fun 1
_ -> runST (fastInvertST proxy (fun . intToWord) (wordToInt n))
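-- A hedged sketch of the Farey example described above (the helper name is an
-- illustration, not part of this module's API). With @g n = n*(n-1)/2@ the
-- inversion counts the reduced proper fractions with denominator at most @n@,
-- which should equal @totientSum n - 1@ (e.g. 3043 for @n = 100@, given the
-- 3044 quoted for 'totientSum' above):
--
-- > fareyLength :: Word -> Int
-- > fareyLength = generalInversion (Proxy :: Proxy Data.Vector.Unboxed.Vector)
-- >     (\n -> fromIntegral (n * (n - 1) `quot` 2))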
fastInvertST
:: forall s t v.
(Num t, G.Vector v t)
=> Proxy v
-> (Int -> t)
-> Int
-> ST s t
fastInvertST _ fun n = do
let !k0 = integerSquareRoot (n `quot` 2)
!mk0 = n `quot` (2*k0+1)
kmax a m = (a `quot` m - 1) `quot` 2
small <- MG.unsafeNew (mk0 + 1) :: ST s (G.Mutable v s t)
MG.unsafeWrite small 0 0
MG.unsafeWrite small 1 $! fun 1
when (mk0 >= 2) $
MG.unsafeWrite small 2 $! (fun 2 - fun 1)
let calcit :: Int -> Int -> Int -> ST s (Int, Int)
calcit switch change i
| mk0 < i = return (switch,change)
| i == change = calcit (switch+1) (change + 4*switch+6) i
| otherwise = do
let mloop !acc k !m
| k < switch = kloop acc k
| otherwise = do
val <- MG.unsafeRead small m
let nxtk = kmax i (m+1)
mloop (acc - fromIntegral (k-nxtk)*val) nxtk (m+1)
kloop !acc k
| k == 0 = do
MG.unsafeWrite small i $! acc
calcit switch change (i+1)
| otherwise = do
val <- MG.unsafeRead small (i `quot` (2*k+1))
kloop (acc-val) (k-1)
mloop (fun i - fun (i `quot` 2)) ((i-1) `quot` 2) 1
(sw, ch) <- calcit 1 8 3
large <- MG.unsafeNew k0 :: ST s (G.Mutable v s t)
let calcbig :: Int -> Int -> Int -> ST s (G.Mutable v s t)
calcbig switch change j
| j == 0 = return large
| (2*j-1)*change <= n = calcbig (switch+1) (change + 4*switch+6) j
| otherwise = do
let i = n `quot` (2*j-1)
mloop !acc k m
| k < switch = kloop acc k
| otherwise = do
val <- MG.unsafeRead small m
let nxtk = kmax i (m+1)
mloop (acc - fromIntegral (k-nxtk)*val) nxtk (m+1)
kloop !acc k
| k == 0 = do
MG.unsafeWrite large (j-1) $! acc
calcbig switch change (j-1)
| otherwise = do
let m = i `quot` (2*k+1)
val <- if m <= mk0
then MG.unsafeRead small m
else MG.unsafeRead large (k*(2*j-1)+j-1)
kloop (acc-val) (k-1)
mloop (fun i - fun (i `quot` 2)) ((i-1) `quot` 2) 1
mvec <- calcbig sw ch k0
MG.unsafeRead mvec 0
| Bodigrim/arithmoi | Math/NumberTheory/MoebiusInversion.hs | mit | 6,279 | 0 | 28 | 2,144 | 1,587 | 824 | 763 | 95 | 5 |
module Handler.Info where
import Import
import Text.Shakespeare.Text
--import Text.Hamlet
getInfoR :: Handler Html
getInfoR = do
defaultLayout $ do
addScript $ StaticR js_proof_js
addScript $ StaticR js_popper_min_js
addScript $ StaticR ghcjs_rts_js
addScript $ StaticR ghcjs_allactions_lib_js
addScript $ StaticR ghcjs_allactions_out_js
addScript $ StaticR klement_proofs_js
addScript $ StaticR klement_syntax_js
setTitle "Carnap - About"
addStylesheet $ StaticR css_tree_css
addStylesheet $ StaticR css_proof_css
addStylesheet $ StaticR css_exercises_css
addStylesheet $ StaticR klement_proofs_css
$(widgetFile "infopage")
-- TODO : split out the stuff specifically relating to exercises
addScript $ StaticR ghcjs_allactions_runmain_js
-- TODO remove submit option on these.
checker :: Int -> Text -> Text -> Text -> Text -> Text -> HtmlUrl url
checker n thetype sys opts goal proof =
[hamlet|
<div class="exercise">
<span>example #{show n}
<div data-carnap-type="#{thetype}" data-carnap-options="#{opts}" data-carnap-system="#{sys}" data-carnap-goal="#{goal}">
#{strip proof}
|]
where strip = dropWhile (== '\n')
proofcheck n = checker n "proofchecker"
sequentcheck n = checker n "sequentchecker"
-- XXX function that strips text of indentation and line-numbers.
aristotleTheorem = [st|
Show: P\/-P
Show: --(P\/-P)
-(P\/-P):AS
Show: -P
P:AS
P\/-P:ADD 5
:ID 6 3
P\/-P:ADD 4
:ID 3 8
P\/-P:DNE 2
:DD 10|]
pierceTheorem = [st|
(P->Q)->P :A/CI
-P :A/~E
P :A/CI
Q :A/~E
P :3 R
-P :2 R
Q :4-6 -E
P->Q :3-7 CI
P :8 1 CE
-P :2 R
P :2-10 -E
((P->Q)->P)->P :1-11 CI |]
lemmonTheorem = [st|
[1] ExAy(Kxy -> Fxy) P
[2] AxEy(Kxy) P
[1,3] Ay(Kay -> Fay) (1)a EII
[2] Ey(Kay) (2) UI
[2,5] Kab (4)b EII
[1,3] Kab -> Fab (3) UI
[1,2,3,5] Fab (5) (6) TF
[1,2,3,5] EyFay (7) EG
[1,2,3,5] ExEyFxy (8) EG
[1,2,3] ExEyFxy [5](9) EIE
[1,2] ExEyFxy [3](10) EIE|]
comprehensionTheorem = [st|
Show EXAx(F(x)/\G(x)<->X(x))
Show Ax(F(x)/\G(x)<->\y[F(y)/\G(y)](x))
Show F(c)/\G(c)->\y[F(y)/\G(y)](c)
F(c)/\G(c):AS
\y[F(y)/\G(y)](c):ABS 4
:CD 5
Show \y[F(y)/\G(y)](c)->F(c)/\G(c)
\y[F(y)/\G(y)](c):AS
F(c)/\G(c):APP 8
:CD 9
F(c)/\G(c)<->\y[F(y)/\G(y)](c):CB 3 7
:UD 11
EXAx(F(x)/\G(x)<->X(x)):EG 2
:DD 13|]
russellTheorem = [st|
Show -ExAy(-F(y,y) <-> F(x,y))
ExAy(-F(y,y)<->F(x,y)) :AS
Show: -ExAy(-F(y,y) <-> F(x,y))
Ay(-F(y,y)<->F(c_1,y)) :AS
-F(c_1,c_1)<->F(c_1,c_1) :UI 4
Show:-F(c_1,c_1)
F(c_1,c_1) :AS
F(c_1,c_1)->-F(c_1,c_1) :BC 5
-F(c_1,c_1) :MP 8 7
:ID 7 9
-F(c_1,c_1) -> F(c_1,c_1) :BC 5
F(c_1,c_1) :MP 11 6
Show: -ExAy(-F(y,y) <-> F(x,y))
:ID 6 12
:ED 13 2 4
:ID 2 3|]
russellTheoremForallx = [st|
ExAy(-Fyy <-> Fxy):AS
Ay(-Fyy<->Fry):AS
-Frr<->Frr:AE 2
-Frr:AS
Frr:<->E 3 4
-Frr:R 4
Frr:-E 4-6
-Frr:<->E 7 3
ExAy(-Fyy <-> Fxy):AS
Frr:R 7
-Frr:R 8
-ExAy(-Fyy <-> Fxy):-I 9-11
-ExAy(-Fyy <-> Fxy):EE 1 2-12
ExAy(-Fyy <-> Fxy):R 1
-ExAy(-Fyy <-> Fxy):-I 1-14|]
russellTheoremCalgary = [st|
ExAy(-Fyy <-> Fxy):AS
Ay(-Fyy<->Fry):AS
-Frr<->Frr:AE 2
-Frr:AS
Frr:<->E 3 4
!?:~E 4 5
--
Frr:AS
-Frr:<->E 3 8
!?:~E 8 9
!?:LEM 4-6 8-10
!?:EE 1 2-11
-ExAy(-Fyy <-> Fxy):-I 1-12|]
inverseTheorem = [st|
Show: AX2EY2∀x∀y(X2(x,y) ↔ Y2(y,x))
Show: ∀x∀y(X2(x,y) ↔ \w\v[X2(v,w)](y,x))
Show: ∀y(X2(a,y) ↔ \w\v[X2(v,w)](y,a))
Show: X2(a,b) -> \w\v[X2(v,w)](b,a)
X2(a,b):AS
\w\v[X2(v,w)](b,a):ABS2 5
:CD 6
Show: \w\v[X2(v,w)](b,a)-> X2(a,b)
\w\v[X2(v,w)](b,a):AS
X2(a,b):APP2 9
:CD 10
X2(a,b) <-> \x_1\x_2[X2(x_2,x_1)](b,a):CB 4 8
:UD 12
:UD 3
EY2∀x∀y(X2(x,y) ↔ Y2(y,x)):EG2 2
:UD2 15|]
adjunctionTheorem = [st|
Show P->(Q->R):CD
P:AS
Show Q->R:CD
Q:AS
P/\Q->R:PR
P/\Q:&I 2 4
R:->O 5 6|]
axiomFiveTheorem = [st|
Show <>[]P->[]P /0 :CD
<>[]P /0 :AS
Show []P /0 :ND
Show P /0-1 :DD
[]P /0-2 :<>O 2
P /0-1 :[]O(5) 5|]
axiomBTheorem = [st|
Show <>[]P->P /0 :CD
<>[]P /0 :AS
[]P /0-1 :<>O 2
P /0 :[]O(b) 3|]
barcanTheorem = [st|
Show Ax[]Fx->[]AxFx /0 :CD
Ax[]Fx /0 :AS
Show []AxFx /0 :ND
Show AxFx /0-1 :UD
Show Fa /0-1 :DD
[]Fa /0 :AO 2
Fa /0-1 :[]O 6|]
bisectorTheorem = [st|
AxAyAz(F(x,g(y,z)) ↔ h(x,y) = h(x,z)) :PR
Show AwAxAyAz(F(w,g(x,y))∧F(w,g(x,z)) → F(w,g(y,z)))
Show AxAyAz(F(a,g(x,y))∧F(a,g(x,z)) → F(a,g(y,z)))
Show AyAz(F(a,g(b,y))∧F(a,g(b,z)) → F(a,g(y,z)))
Show Az(F(a,g(b,c))∧F(a,g(b,z)) → F(a,g(c,z)))
Show F(a,g(b,c))∧F(a,g(b,d)) → F(a,g(c,d))
F(a,g(b,c))∧F(a,g(b,d)) :AS
F(a,g(b,c)) :S 7
F(a,g(b,d)) :S 7
AyAz(F(a,g(y,z)) ↔ h(a,y) = h(a,z)) :UI 1
Az(F(a,g(b,z)) ↔ h(a,b) = h(a,z)) :UI 10
F(a,g(b,c)) ↔ h(a,b) = h(a,c) :UI 11
F(a,g(b,d)) ↔ h(a,b) = h(a,d) :UI 11
F(a,g(b,c)) → h(a,b) = h(a,c) :BC 12
F(a,g(b,d)) → h(a,b) = h(a,d) :BC 13
h(a,b) = h(a,c) :MP 8 14
h(a,b) = h(a,d) :MP 9 15
h(a,c) = h(a,d) :LL 16 17
Az(F(a,g(c,z)) ↔ h(a,c) = h(a,z)) :UI 10
F(a,g(c,d)) ↔ h(a,c) = h(a,d) :UI 19
h(a,c) = h(a,d) → F(a,g(c,d)) :BC 20
F(a,g(c,d)) :MP 18 21
:CD 22
:UD 6
:UD 5
:UD 4
:UD 3|]
transitiveTheorem = [st|
Show P(a) within P(P(a))
a within P(a):PR
Ax(x in a -> x in P(a)):Def-S 2
Show Ax(x in P(a) -> x in P(P(a)))
Show b in P(a) -> b in P(P(a))
b in P(a) :AS
b within a:Def-P 6
Ax(x in b -> x in a):Def-S 7
Show b in P(P(a))
Show Ax(x in b -> x in P(a))
Show c in b -> c in P(a)
c in b :AS
c in b -> c in a:UI 8
c in a :MP 12 13
c in a -> c in P(a) :UI 3
c in P(a):MP 14 15
:CD 16
:UD11
b within P(a):Def-S 10
b in P(P(a)):Def-P 19
:DD 20
:CD 9
:UD 5
P(a) within P(P(a)):Def-S 4
:DD 24|]
sequentDemo = [st|
{
"label": "AxEy(F(x)/\\G(y)):|-:EyAx(F(x)/\\G(y))",
"rule": "RE",
"forest": [
{
"label": "AxEy(F(x)/\\G(y)) :|-: Ax(F(x)/\\G(b)), EyAx(F(x)/\\G(y))",
"rule": "LA",
"forest": [
{
"label": "AxEy(F(x)/\\G(y)), Ey(F(a)/\\G(y)) :|-: Ax(F(x)/\\G(b)), EyAx(F(x)/\\G(y))",
"rule": "LE",
"forest": [
{
"label": "AxEy(F(x)/\\G(y)), F(a)/\\G(c) :|-: Ax(F(x)/\\G(b)), EyAx(F(x)/\\G(y))",
"rule": "L&2",
"forest": [
{
"label": "G(c), AxEy(F(x)/\\G(y)) :|-: Ax(F(x)/\\G(b)), EyAx(F(x)/\\G(y))",
"rule": "RA",
"forest": [
{
"label": "G(c), AxEy(F(x)/\\G(y)) :|-: F(d)/\\G(b), EyAx(F(x)/\\G(y))",
"rule": "R&",
"forest": [
{
"label": "G(c), AxEy(F(x)/\\G(y)) :|-: F(d), EyAx(F(x)/\\G(y))",
"rule": "LA",
"forest": [
{
"label": "G(c), Ey(F(d)/\\G(y)) :|-: F(d), EyAx(F(x)/\\G(y))",
"rule": "LE",
"forest": [
{
"label": "G(c), F(d)/\\G(e) :|-: F(d), EyAx(F(x)/\\G(y))",
"rule": "L&1",
"forest": [
{
"label": "G(c), F(d) :|-: F(d), EyAx(F(x)/\\G(y))",
"rule": "Ax",
"forest": [
{
"label": "",
"rule": "",
"forest": []
}
]
}
]
}
]
}
]
},
{
"label": "AxEy(F(x)/\\G(y)), G(c):|-:EyAx(F(x)/\\G(y)), G(b)",
"rule": "RE",
"forest": [
{
"label": "AxEy(F(x)/\\G(y)), G(c):|-:Ax(F(x)/\\G(c)), G(b)",
"rule": "RA",
"forest": [
{
"label": "G(c), AxEy(F(x)/\\G(y)), :|-:F(a)/\\G(c), G(b)",
"rule": "LA",
"forest": [
{
"label": "G(c), Ey(F(a)/\\G(y)), :|-:F(a)/\\G(c), G(b)",
"rule": "LE",
"forest": [
{
"label": "G(c), F(a)/\\G(d) :|-:F(a)/\\G(c), G(b)",
"rule": "L&1",
"forest": [
{
"label": "G(c), F(a) :|-:F(a)/\\G(c), G(b)",
"rule": "R&",
"forest": [
{
"label": "F(a),G(c) :|-:G(c), G(b)",
"rule": "Ax",
"forest": [
{
"label": "",
"rule": "",
"forest": []
}
]
},
{
"label": "G(c), F(a) :|-:F(a), G(b)",
"rule": "Ax",
"forest": [
{
"label": "",
"rule": "",
"forest": []
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
]
}
|]
| opentower/carnap | Carnap-Server/Handler/Info.hs | gpl-3.0 | 12,641 | 0 | 12 | 6,562 | 404 | 232 | 172 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module : Network.Google.Env
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- Environment and Google specific configuration for the "Network.Google" monad.
module Network.Google.Env where
import Control.Lens (Lens', lens, (<>~), (?~))
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Reader (MonadReader (local))
import Data.Function (on)
import Data.Monoid (Dual (..), Endo (..))
import Data.Proxy (Proxy (..))
import GHC.TypeLits (Symbol)
import Network.Google.Auth
import Network.Google.Internal.Logger (Logger)
import Network.Google.Types
import Network.HTTP.Conduit (Manager, newManager, tlsManagerSettings)
-- | The environment containing the parameters required to make Google requests.
data Env (s :: [Symbol]) = Env
{ _envOverride :: !(Dual (Endo ServiceConfig))
, _envLogger :: !Logger
, _envManager :: !Manager
, _envStore :: !(Store s)
}
-- Note: The strictness annotations above are applied to ensure
-- total field initialisation.
class HasEnv s a | a -> s where
environment :: Lens' a (Env s)
{-# MINIMAL environment #-}
-- | The currently applied overrides to all 'Service' configuration.
envOverride :: Lens' a (Dual (Endo ServiceConfig))
-- | The function used to output log messages.
envLogger :: Lens' a Logger
-- | The 'Manager' used to create and manage open HTTP connections.
envManager :: Lens' a Manager
-- | The credential store used to sign requests for authentication with Google.
envStore :: Lens' a (Store s)
-- | The authorised OAuth2 scopes.
--
-- /See:/ 'allow', '!', and the related scopes available for each service.
envScopes :: Lens' a (Proxy s)
envOverride = environment . lens _envOverride (\s a -> s { _envOverride = a })
envLogger = environment . lens _envLogger (\s a -> s { _envLogger = a })
envManager = environment . lens _envManager (\s a -> s { _envManager = a })
envStore = environment . lens _envStore (\s a -> s { _envStore = a })
envScopes = environment . lens (\_ -> Proxy :: Proxy s) (flip allow)
instance HasEnv s (Env s) where
environment = id
-- | Provide a function which will be added to the stack
-- of overrides, which are applied to all service configurations.
-- This provides a way to configure any request that is sent using the
-- modified 'Env'.
--
-- /See:/ 'override'.
configure :: HasEnv s a => (ServiceConfig -> ServiceConfig) -> a -> a
configure f = envOverride <>~ Dual (Endo f)
-- | Override a specific 'ServiceConfig'. All requests belonging to the
-- supplied service will use this configuration instead of the default.
--
-- Typically you would override a modified version of the default 'ServiceConfig'
-- for the desired service:
--
-- > override (gmailService & serviceHost .~ "localhost") env
--
-- Or when using "Network.Google" with "Control.Monad.Reader" or "Control.Lens.Zoom"
-- and the 'ServiceConfig' lenses:
--
-- > local (override (computeService & serviceHost .~ "localhost")) $ do
-- > ...
--
-- /See:/ 'configure'.
override :: HasEnv s a => ServiceConfig -> a -> a
override s = configure f
where
f x | on (==) _svcId s x = s
| otherwise = x
-- | Scope an action such that any HTTP response will use this timeout value.
--
-- Default timeouts are chosen by considering:
--
-- * This 'timeout', if set.
--
-- * The related 'Service' timeout for the sent request if set. (Default 70s)
--
-- * The 'envManager' timeout, if set.
--
-- * The 'ClientRequest' timeout. (Default 30s)
timeout :: (MonadReader r m, HasEnv s r) => Seconds -> m a -> m a
timeout s = local (configure (serviceTimeout ?~ s))
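-- For example (a sketch only: 'send' and the request value come from outside
-- this module, and the literal assumes the 'Num' instance of 'Seconds'):
--
-- > timeout 30 (send someRequest)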
-- | Creates a new environment with a newly initialized 'Manager', without logging.
-- and Credentials that are determined by calling 'getApplicationDefault'.
-- Use 'newEnvWith' to supply custom credentials such as an 'OAuthClient'
-- and 'OAuthCode'.
--
-- The 'Allow'ed 'OAuthScope's are used to authorize any @service_account@ that is
-- found with the appropriate scopes. See the top-level module of each individual
-- @gogol-*@ library for a list of available scopes, such as
-- @Network.Google.Compute.authComputeScope@.
-- Lenses from 'HasEnv' can be used to further configure the resulting 'Env'.
--
-- /See:/ 'newEnvWith', 'getApplicationDefault'.
newEnv :: (MonadIO m, MonadCatch m, AllowScopes s) => m (Env s)
newEnv = do
m <- liftIO (newManager tlsManagerSettings)
c <- getApplicationDefault m
newEnvWith c (\_ _ -> pure ()) m
-- | Create a new environment.
--
-- /See:/ 'newEnv'.
newEnvWith :: (MonadIO m, MonadCatch m, AllowScopes s)
=> Credentials s
-> Logger
-> Manager
-> m (Env s)
newEnvWith c l m = Env mempty l m <$> initStore c l m
| brendanhay/gogol | gogol/src/Network/Google/Env.hs | mpl-2.0 | 5,391 | 0 | 13 | 1,206 | 948 | 548 | 400 | 66 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.UpdateMyUserProfile
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Updates a user's SSH public key.
--
-- Required Permissions: To use this action, an IAM user must have
-- self-management enabled or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_UpdateMyUserProfile.html>
module Network.AWS.OpsWorks.UpdateMyUserProfile
(
-- * Request
UpdateMyUserProfile
-- ** Request constructor
, updateMyUserProfile
-- ** Request lenses
, umupSshPublicKey
-- * Response
, UpdateMyUserProfileResponse
-- ** Response constructor
, updateMyUserProfileResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
newtype UpdateMyUserProfile = UpdateMyUserProfile
{ _umupSshPublicKey :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'UpdateMyUserProfile' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'umupSshPublicKey' @::@ 'Maybe' 'Text'
--
updateMyUserProfile :: UpdateMyUserProfile
updateMyUserProfile = UpdateMyUserProfile
{ _umupSshPublicKey = Nothing
}
-- | The user's SSH public key.
umupSshPublicKey :: Lens' UpdateMyUserProfile (Maybe Text)
umupSshPublicKey = lens _umupSshPublicKey (\s a -> s { _umupSshPublicKey = a })
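-- For example (a sketch only: the '(&)' and '(?~)' operators are assumed to be
-- re-exported by "Network.AWS.Prelude", and the key text is a placeholder):
--
-- > updateMyUserProfile & umupSshPublicKey ?~ "ssh-rsa AAAA..."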
data UpdateMyUserProfileResponse = UpdateMyUserProfileResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'UpdateMyUserProfileResponse' constructor.
updateMyUserProfileResponse :: UpdateMyUserProfileResponse
updateMyUserProfileResponse = UpdateMyUserProfileResponse
instance ToPath UpdateMyUserProfile where
toPath = const "/"
instance ToQuery UpdateMyUserProfile where
toQuery = const mempty
instance ToHeaders UpdateMyUserProfile
instance ToJSON UpdateMyUserProfile where
toJSON UpdateMyUserProfile{..} = object
[ "SshPublicKey" .= _umupSshPublicKey
]
instance AWSRequest UpdateMyUserProfile where
type Sv UpdateMyUserProfile = OpsWorks
type Rs UpdateMyUserProfile = UpdateMyUserProfileResponse
request = post "UpdateMyUserProfile"
response = nullResponse UpdateMyUserProfileResponse
| kim/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/UpdateMyUserProfile.hs | mpl-2.0 | 3,399 | 0 | 9 | 652 | 357 | 219 | 138 | 47 | 1 |
{-# LANGUAGE CPP, BangPatterns, MagicHash, NondecreasingIndentation, OverloadedStrings #-}
-------------------------------------------------------------------------------
--
-- | Main API for compiling plain Haskell source code.
--
-- This module implements compilation of a Haskell source. It is
-- /not/ concerned with preprocessing of source files; this is handled
-- in "DriverPipeline".
--
-- There are various entry points depending on what mode we're in:
-- "batch" mode (@--make@), "one-shot" mode (@-c@, @-S@ etc.), and
-- "interactive" mode (GHCi). There are also entry points for
-- individual passes: parsing, typechecking/renaming, desugaring, and
-- simplification.
--
-- All the functions here take an 'HscEnv' as a parameter, but none of
-- them return a new one: 'HscEnv' is treated as an immutable value
-- from here on in (although it has mutable components, for the
-- caches).
--
-- Warning messages are dealt with consistently throughout this API:
-- during compilation warnings are collected, and before any function
-- in @HscMain@ returns, the warnings are either printed, or turned
-- into a real compilation error if the @-Werror@ flag is enabled.
--
-- (c) The GRASP/AQUA Project, Glasgow University, 1993-2000
--
-------------------------------------------------------------------------------
module Eta.Main.HscMain
(
-- * Making an HscEnv
newHscEnv
-- * Compiling complete source files
, Messager, batchMsg
, HscStatus (..)
, hscCompileOneShot
, hscCompileCore
, genericHscCompileGetFrontendResult
, genModDetails
, hscSimpleIface
, hscWriteIface
, hscNormalIface
, hscGenHardCode
, hscInteractive
-- * Running passes separately
, hscParse
, hscTypecheckRename
, hscDesugar
, makeSimpleIface
, makeSimpleDetails
, hscSimplify -- ToDo, shouldn't really export this
-- * Support for interactive evaluation
, hscParseIdentifier
, hscTcRcLookupName
, hscTcRnGetInfo
, hscCheckSafe
, hscGetSafe
#ifdef ETA_REPL
, hscIsGHCiMonad
, hscGetModuleInterface
, hscRnImportDecls
, hscTcRnLookupRdrName
, hscStmt, hscStmtWithLocation
, hscDecls, hscDeclsWithLocation
, hscTcExpr, hscImport, hscKcType
, hscCompileCoreExpr
, hscParseExpr
, hscParsedStmt
-- * Low-level exports for hooks
, hscCompileCoreExpr'
#endif
-- We want to make sure that we export enough to be able to redefine
-- hscFileFrontEnd in client code
, hscParse', hscSimplify', hscDesugar', tcRnModule'
, getHscEnv
, hscSimpleIface', hscNormalIface'
, oneShotMsg
, hscFileFrontEnd, genericHscFrontend, dumpIfaceStats
, ioMsgMaybe, showModuleIndex
) where
#ifdef ETA_REPL
import Eta.REPL.RemoteTypes ( ForeignHValue )
import Eta.REPL.Linker
import Eta.BasicTypes.Id
import Eta.Core.CoreTidy ( tidyExpr )
import Eta.Types.Type ( Type )
import Eta.Prelude.PrelNames
import {- Kind parts of -} Eta.Types.Type ( Kind )
import Eta.Core.CoreLint ( lintInteractiveExpr )
import Eta.BasicTypes.VarEnv ( emptyTidyEnv )
import Eta.Utils.Panic
import Eta.BasicTypes.ConLike
#endif
import Eta.BasicTypes.Module
import Eta.BasicTypes.RdrName
import Eta.HsSyn.HsSyn
import Eta.Core.CoreSyn
import Eta.Utils.StringBuffer
import Eta.Parser.Parser
import qualified Eta.Parser.Lexer as Lexer
import Eta.Parser.Lexer
import Eta.BasicTypes.SrcLoc
import Eta.TypeCheck.TcRnDriver
import Eta.Iface.TcIface ( typecheckIface )
import Eta.TypeCheck.TcRnMonad
import Eta.BasicTypes.NameCache ( initNameCache )
import Eta.Iface.LoadIface ( ifaceStats, initExternalPackageState )
import Eta.Prelude.PrelInfo
import Eta.Iface.MkIface
import Eta.DeSugar.DeSugar
import Eta.SimplCore.SimplCore
import Eta.Main.TidyPgm
import Eta.Core.CorePrep
import Eta.Core.CoreUtils (exprType)
import Eta.StgSyn.CoreToStg ( coreToStg )
import Eta.StgSyn.StgSyn
import Eta.Profiling.CostCentre
import Eta.Types.TyCon
import Eta.BasicTypes.Name
import Eta.SimplStg.SimplStg ( stg2stg )
import Eta.BasicTypes.NameEnv ( emptyNameEnv )
import Eta.Types.InstEnv
import Eta.Types.FamInstEnv
import Eta.Utils.Fingerprint ( Fingerprint )
import Eta.Utils.PprColor
import Eta.Main.Hooks
import Eta.TypeCheck.TcEnv
import Eta.Main.DynFlags
import Eta.Main.ErrUtils
import Eta.Utils.Outputable
import Eta.Main.HscStats ( ppSourceStats )
import Eta.Main.HscTypes
import Eta.Utils.FastString
import Eta.BasicTypes.UniqSupply
import Eta.Utils.Bag
import Eta.Utils.Exception
import Eta.Utils.Util
import Eta.CodeGen.Main
import Eta.CodeGen.Name
import Eta.Utils.JAR
import Eta.Main.Packages
import Codec.JVM
import Data.List
import Data.IORef
import System.FilePath as FilePath
import System.Directory
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Set as S
import Data.Set (Set)
import Control.Arrow((&&&))
import Control.Monad
import Data.Data hiding (Fixity)
import Control.Concurrent
#include "HsVersions.h"
{- **********************************************************************
%* *
Initialisation
%* *
%********************************************************************* -}
newHscEnv :: DynFlags -> IO HscEnv
newHscEnv dflags = do
eps_var <- newIORef initExternalPackageState
us <- mkSplitUniqSupply 'r'
nc_var <- newIORef (initNameCache us knownKeyNames)
fc_var <- newIORef emptyInstalledModuleEnv
iserv_mvar <- newMVar Nothing
mlc_var <- newIORef emptyModuleEnv
ic <- newInteractiveContext dflags
idx_var <- newMVar emptyClassIndex
return HscEnv { hsc_dflags = dflags,
hsc_targets = [],
hsc_mod_graph = emptyMG,
hsc_IC = ic,
hsc_HPT = emptyHomePackageTable,
hsc_EPS = eps_var,
hsc_NC = nc_var,
hsc_FC = fc_var,
hsc_iserv = iserv_mvar,
hsc_MLC = mlc_var,
hsc_type_env_var = Nothing,
hsc_classIndex = idx_var }
-- -----------------------------------------------------------------------------
getWarnings :: Hsc WarningMessages
getWarnings = Hsc $ \_ w -> return (w, w)
clearWarnings :: Hsc ()
clearWarnings = Hsc $ \_ _ -> return ((), emptyBag)
logWarnings :: WarningMessages -> Hsc ()
logWarnings w = Hsc $ \_ w0 -> return ((), w0 `unionBags` w)
getHscEnv :: Hsc HscEnv
getHscEnv = Hsc $ \e w -> return (e, w)
handleWarnings :: Hsc ()
handleWarnings = do
dflags <- getDynFlags
w <- getWarnings
liftIO $ printOrThrowWarnings dflags w
clearWarnings
-- | log warning in the monad, and if there are errors then
-- throw a SourceError exception.
logWarningsReportErrors :: Messages -> Hsc ()
logWarningsReportErrors (warns,errs) = do
logWarnings warns
when (not $ isEmptyBag errs) $ throwErrors errs
-- | Throw some errors.
throwErrors :: ErrorMessages -> Hsc a
throwErrors = liftIO . throwIO . mkSrcErr
-- | Deal with errors and warnings returned by a compilation step
--
-- In order to reduce dependencies to other parts of the compiler, functions
-- outside the "main" parts of GHC return warnings and errors as a parameter
-- and signal success via by wrapping the result in a 'Maybe' type. This
-- function logs the returned warnings and propagates errors as exceptions
-- (of type 'SourceError').
--
-- This function assumes the following invariants:
--
-- 1. If the second result indicates success (is of the form 'Just x'),
-- there must be no error messages in the first result.
--
-- 2. If there are no error messages, but the second result indicates failure
-- there should be warnings in the first result. That is, if the action
-- failed, it must have been due to the warnings (i.e., @-Werror@).
ioMsgMaybe :: IO (Messages, Maybe a) -> Hsc a
ioMsgMaybe ioA = do
((warns,errs), mb_r) <- liftIO ioA
logWarnings warns
case mb_r of
Nothing -> throwErrors errs
Just r -> ASSERT( isEmptyBag errs ) return r
-- | like ioMsgMaybe, except that we ignore error messages and return
-- 'Nothing' instead.
ioMsgMaybe' :: IO (Messages, Maybe a) -> Hsc (Maybe a)
ioMsgMaybe' ioA = do
((warns,_errs), mb_r) <- liftIO $ ioA
logWarnings warns
return mb_r
-- -----------------------------------------------------------------------------
-- | Lookup things in the compiler's environment
#ifdef ETA_REPL
hscTcRnLookupRdrName :: HscEnv -> Located RdrName -> IO [Name]
hscTcRnLookupRdrName hsc_env0 rdr_name
= runInteractiveHsc hsc_env0 $
do { hsc_env <- getHscEnv
; ioMsgMaybe $ tcRnLookupRdrName hsc_env rdr_name }
#endif
hscTcRcLookupName :: HscEnv -> Name -> IO (Maybe TyThing)
hscTcRcLookupName hsc_env0 name = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe' $ tcRnLookupName hsc_env name
-- ignore errors: the only error we're likely to get is
-- "name not found", and the Maybe in the return type
-- is used to indicate that.
hscTcRnGetInfo :: HscEnv -> Name -> IO (Maybe (TyThing, Fixity, [ClsInst], [FamInst], SDoc))
hscTcRnGetInfo hsc_env0 name
= runInteractiveHsc hsc_env0 $
do { hsc_env <- getHscEnv
; ioMsgMaybe' $ tcRnGetInfo hsc_env name }
#ifdef ETA_REPL
hscIsGHCiMonad :: HscEnv -> String -> IO Name
hscIsGHCiMonad hsc_env name
= runHsc hsc_env $ ioMsgMaybe $ isGHCiMonad hsc_env name
hscGetModuleInterface :: HscEnv -> Module -> IO ModIface
hscGetModuleInterface hsc_env0 mod = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe $ getModuleInterface hsc_env mod
-- -----------------------------------------------------------------------------
-- | Rename some import declarations
hscRnImportDecls :: HscEnv -> [LImportDecl RdrName] -> IO GlobalRdrEnv
hscRnImportDecls hsc_env0 import_decls = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe $ tcRnImportDecls hsc_env import_decls
#endif
-- -----------------------------------------------------------------------------
-- | parse a file, returning the abstract syntax
hscParse :: HscEnv -> ModSummary -> IO HsParsedModule
hscParse hsc_env mod_summary = runHsc hsc_env $ hscParse' mod_summary
-- internal version, that doesn't fail due to -Werror
hscParse' :: ModSummary -> Hsc HsParsedModule
hscParse' mod_summary
| Just r <- ms_parsed_mod mod_summary = return r
| otherwise = do
dflags <- getDynFlags
let src_filename = ms_hspp_file mod_summary
maybe_src_buf = ms_hspp_buf mod_summary
-------------------------- Parser ----------------
liftIO $ showPass dflags "Parser"
{-# SCC "Parser" #-} do
-- sometimes we already have the buffer in memory, perhaps
-- because we needed to parse the imports out of it, or get the
-- module name.
buf <- case maybe_src_buf of
Just b -> return b
Nothing -> liftIO $ hGetStringBuffer src_filename
let loc = mkRealSrcLoc (mkFastString src_filename) 1 1
let parseMod | HsigFile == ms_hsc_src mod_summary
= parseSignature
| otherwise = parseModule
case unP parseMod (mkPState dflags buf loc) of
PFailed span err ->
liftIO $ throwOneError (mkPlainErrMsg dflags span err)
POk pst rdr_module -> do
logWarningsReportErrors (getMessages pst)
liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" $
ppr rdr_module
liftIO $ dumpIfSet_dyn dflags Opt_D_source_stats "Source Statistics" $
ppSourceStats False rdr_module
-- To get the list of extra source files, we take the list
-- that the parser gave us,
-- - eliminate files beginning with '<'. gcc likes to use
-- pseudo-filenames like "<built-in>" and "<command-line>"
        -- - normalise them (eliminate differences between ./f and f)
-- - filter out the preprocessed source file
-- - filter out anything beginning with tmpdir
-- - remove duplicates
-- - filter out the .hs/.lhs source filename if we have one
--
let n_hspp = FilePath.normalise src_filename
srcs0 = nub $ filter (not . (tmpDir dflags `isPrefixOf`))
$ filter (not . (== n_hspp))
$ map FilePath.normalise
$ filter (not . (isPrefixOf "<"))
$ map unpackFS
$ srcfiles pst
srcs1 = case ml_hs_file (ms_location mod_summary) of
Just f -> filter (/= FilePath.normalise f) srcs0
Nothing -> srcs0
-- sometimes we see source files from earlier
-- preprocessing stages that cannot be found, so just
-- filter them out:
srcs2 <- liftIO $ filterM doesFileExist srcs1
return HsParsedModule {
hpm_module = rdr_module,
hpm_src_files = srcs2,
hpm_annotations
= (M.fromListWith (++) $ annotations pst,
M.fromList $ ((noSrcSpan,comment_q pst)
:(annotations_comments pst)))
}
-- XXX: should this really be a Maybe X? Check under which circumstances this
-- can become a Nothing and decide whether this should instead throw an
-- exception/signal an error.
type RenamedStuff =
(Maybe (HsGroup Name, [LImportDecl Name], Maybe [LIE Name],
Maybe LHsDocString))
-- | Rename and typecheck a module, additionally returning the renamed syntax
hscTypecheckRename :: HscEnv -> ModSummary -> HsParsedModule
-> IO (TcGblEnv, RenamedStuff)
hscTypecheckRename hsc_env mod_summary rdr_module = runHsc hsc_env $ do
tc_result <- tcRnModule' hsc_env mod_summary False rdr_module
-- This 'do' is in the Maybe monad!
let rn_info = do decl <- tcg_rn_decls tc_result
let imports = tcg_rn_imports tc_result
exports = tcg_rn_exports tc_result
doc_hdr = tcg_doc_hdr tc_result
return (decl,imports,exports,doc_hdr)
return (tc_result, rn_info)
hscTypecheck :: Bool -- ^ Keep renamed source?
-> ModSummary -> Maybe HsParsedModule
-> Hsc TcGblEnv
hscTypecheck keep_rn mod_summary mb_rdr_module = do
hsc_env <- getHscEnv
let hsc_src = ms_hsc_src mod_summary
dflags = hsc_dflags hsc_env
outer_mod = ms_mod mod_summary
inner_mod = canonicalizeHomeModule dflags (moduleName outer_mod)
src_filename = ms_hspp_file mod_summary
real_loc = realSrcLocSpan $ mkRealSrcLoc (mkFastString src_filename) 1 1
MASSERT( moduleUnitId outer_mod == thisPackage dflags )
if hsc_src == HsigFile && not (isHoleModule inner_mod)
then ioMsgMaybe $ tcRnInstantiateSignature hsc_env outer_mod real_loc
else
do hpm <- case mb_rdr_module of
Just hpm -> return hpm
Nothing -> hscParse' mod_summary
tc_result0 <- tcRnModule' hsc_env mod_summary keep_rn hpm
if hsc_src == HsigFile
then do (iface, _, _) <- liftIO $ hscSimpleIface hsc_env tc_result0 Nothing
ioMsgMaybe $
tcRnMergeSignatures hsc_env (tcg_top_loc tc_result0) iface
else return tc_result0
-- wrapper around tcRnModule to handle safe haskell extras
tcRnModule' :: HscEnv -> ModSummary -> Bool -> HsParsedModule
-> Hsc TcGblEnv
tcRnModule' hsc_env sum save_rn_syntax mod = do
tcg_res <- {-# SCC "Typecheck-Rename" #-}
ioMsgMaybe $
tcRnModule hsc_env (ms_hsc_src sum) save_rn_syntax mod
tcSafeOK <- liftIO $ readIORef (tcg_safeInfer tcg_res)
dflags <- getDynFlags
let allSafeOK = safeInferred dflags && tcSafeOK
-- end of the safe haskell line, how to respond to user?
if not (safeHaskellOn dflags) || (safeInferOn dflags && not allSafeOK)
-- if safe Haskell off or safe infer failed, mark unsafe
then markUnsafeInfer tcg_res emptyBag
-- module (could be) safe, throw warning if needed
else do
tcg_res' <- hscCheckSafeImports tcg_res
safe <- liftIO $ readIORef (tcg_safeInfer tcg_res')
when safe $ do
case wopt Opt_WarnSafe dflags of
True -> (logWarnings $ unitBag $ mkPlainWarnMsg dflags
(warnSafeOnLoc dflags) $ errSafe tcg_res')
False | safeHaskell dflags == Sf_Trustworthy &&
wopt Opt_WarnTrustworthySafe dflags ->
(logWarnings $ unitBag $ mkPlainWarnMsg dflags
(trustworthyOnLoc dflags) $ errTwthySafe tcg_res')
False -> return ()
return tcg_res'
where
pprMod t = ppr $ moduleName $ tcg_mod t
errSafe t = quotes (pprMod t) <+> text "has been inferred as safe!"
errTwthySafe t = quotes (pprMod t)
<+> text "is marked as Trustworthy but has been inferred as safe!"
-- | Convert a typechecked module to Core
hscDesugar :: HscEnv -> ModSummary -> TcGblEnv -> IO ModGuts
hscDesugar hsc_env mod_summary tc_result =
runHsc hsc_env $ hscDesugar' (ms_location mod_summary) tc_result
hscDesugar' :: ModLocation -> TcGblEnv -> Hsc ModGuts
hscDesugar' mod_location tc_result = do
hsc_env <- getHscEnv
r <- ioMsgMaybe $
{-# SCC "deSugar" #-}
deSugar hsc_env mod_location tc_result
-- always check -Werror after desugaring; this is the last opportunity for
-- warnings to arise before the backend.
handleWarnings
return r
-- | Make a 'ModIface' from the results of typechecking. Used when
-- not optimising, and the interface doesn't need to contain any
-- unfoldings or other cross-module optimisation info.
-- ToDo: the old interface is only needed to get the version numbers,
-- we should use fingerprint versions instead.
makeSimpleIface :: HscEnv -> Maybe ModIface -> TcGblEnv -> ModDetails
-> IO (ModIface,Bool)
makeSimpleIface hsc_env maybe_old_iface tc_result details = runHsc hsc_env $ do
safe_mode <- hscGetSafeMode tc_result
ioMsgMaybe $ do
mkIfaceTc hsc_env (fmap mi_iface_hash maybe_old_iface) safe_mode
details tc_result
-- | Make a 'ModDetails' from the results of typechecking. Used when
-- typechecking only, as opposed to full compilation.
makeSimpleDetails :: HscEnv -> TcGblEnv -> IO ModDetails
makeSimpleDetails hsc_env tc_result = mkBootModDetailsTc hsc_env tc_result
{- **********************************************************************
%* *
The main compiler pipeline
%* *
%********************************************************************* -}
{-
--------------------------------
The compilation proper
--------------------------------
It's the task of the compilation proper to compile Haskell, hs-boot and core
files to either byte-code, hard-code (C, asm, LLVM, etc.) or to nothing at all
(the module is still parsed and type-checked; this feature is mostly used by
IDEs and the like). Compilation can happen in either 'one-shot', 'batch',
'nothing', or 'interactive' mode. 'One-shot' mode targets hard-code, 'batch'
mode targets hard-code, 'nothing' mode targets nothing and 'interactive' mode
targets byte-code.
The modes are kept separate because of their different types and meanings:
* In 'one-shot' mode, we're only compiling a single file and can therefore
discard the new ModIface and ModDetails. This is also the reason it only
targets hard-code; compiling to byte-code or nothing doesn't make sense when
we discard the result.
* 'Batch' mode is like 'one-shot' except that we keep the resulting ModIface
and ModDetails. 'Batch' mode doesn't target byte-code since that requires us to
return the newly compiled byte-code.
* 'Nothing' mode has exactly the same type as 'batch' mode but they're still
kept separate. This is because compiling to nothing is fairly special: We
don't output any interface files, we don't run the simplifier and we don't
generate any code.
* 'Interactive' mode is similar to 'batch' mode except that we return the
compiled byte-code together with the ModIface and ModDetails.
Trying to compile a hs-boot file to byte-code will result in a run-time error.
This is the only thing that isn't caught by the type-system.
-}
type Messager = HscEnv -> (Int,Int) -> RecompileRequired -> ModSummary -> IO ()
genericHscCompileGetFrontendResult ::
Bool -- always do basic recompilation check?
-> Maybe TcGblEnv
-> Maybe Messager
-> HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface -- Old interface, if available
-> (Int,Int) -- (i,n) = module i of n (for msgs)
-> IO (Either ModIface (TcGblEnv, Maybe Fingerprint))
genericHscCompileGetFrontendResult
always_do_basic_recompilation_check m_tc_result
mHscMessage hsc_env mod_summary source_modified mb_old_iface mod_index
= do
let msg what = case mHscMessage of
Just hscMessage -> hscMessage hsc_env mod_index what mod_summary
Nothing -> return ()
skip iface = do
msg UpToDate
return $ Left iface
compile mb_old_hash reason = do
msg reason
tc_result <- runHsc hsc_env $ genericHscFrontend mod_summary
return $ Right (tc_result, mb_old_hash)
stable = case source_modified of
SourceUnmodifiedAndStable -> True
_ -> False
case m_tc_result of
Just tc_result
| not always_do_basic_recompilation_check ->
return $ Right (tc_result, Nothing)
_ -> do
(recomp_reqd, mb_checked_iface)
<- {-# SCC "checkOldIface" #-}
checkOldIface hsc_env mod_summary
source_modified mb_old_iface
-- save the interface that comes back from checkOldIface.
-- In one-shot mode we don't have the old iface until this
-- point, when checkOldIface reads it from the disk.
let mb_old_hash = fmap mi_iface_hash mb_checked_iface
case mb_checked_iface of
Just iface | not (recompileRequired recomp_reqd) ->
-- If the module used TH splices when it was last
-- compiled, then the recompilation check is not
-- accurate enough (#481) and we must ignore
-- it. However, if the module is stable (none of
-- the modules it depends on, directly or
-- indirectly, changed), then we *can* skip
-- recompilation. This is why the SourceModified
-- type contains SourceUnmodifiedAndStable, and
-- it's pretty important: otherwise ghc --make
-- would always recompile TH modules, even if
-- nothing at all has changed. Stability is just
-- the same check that make is doing for us in
-- one-shot mode.
case m_tc_result of
Nothing
| mi_used_th iface && not stable ->
compile mb_old_hash (RecompBecause "TH")
_ ->
skip iface
_ ->
case m_tc_result of
Nothing -> compile mb_old_hash recomp_reqd
Just tc_result ->
return $ Right (tc_result, mb_old_hash)
genericHscFrontend :: ModSummary -> Hsc TcGblEnv
genericHscFrontend mod_summary =
getHooked hscFrontendHook genericHscFrontend' >>= ($ mod_summary)
genericHscFrontend' :: ModSummary -> Hsc TcGblEnv
genericHscFrontend' mod_summary = hscFileFrontEnd mod_summary
--------------------------------------------------------------
-- Compilers
--------------------------------------------------------------
hscCompileOneShot :: HscEnv
-> ModSummary
-> SourceModified
-> IO HscStatus
hscCompileOneShot env =
lookupHook hscCompileOneShotHook hscCompileOneShot' (hsc_dflags env) env
-- Compile Haskell/boot in OneShot mode.
hscCompileOneShot' :: HscEnv
-> ModSummary
-> SourceModified
-> IO HscStatus
hscCompileOneShot' hsc_env mod_summary src_changed
= do
-- One-shot mode needs a knot-tying mutable variable for interface
-- files. See TcRnTypes.TcGblEnv.tcg_type_env_var.
-- See also Note [hsc_type_env_var hack]
type_env_var <- newIORef emptyNameEnv
let mod = ms_mod mod_summary
hsc_env' = hsc_env{ hsc_type_env_var = Just (mod, type_env_var) }
msg what = oneShotMsg hsc_env' what
skip = do msg UpToDate
dumpIfaceStats hsc_env'
return HscUpToDate
compile mb_old_hash reason = runHsc hsc_env' $ do
liftIO $ msg reason
tc_result <- genericHscFrontend mod_summary
guts0 <- hscDesugar' (ms_location mod_summary) tc_result
dflags <- getDynFlags
case hscTarget dflags of
HscNothing -> do
when (gopt Opt_WriteInterface dflags) $ liftIO $ do
(iface, changed, _details) <- hscSimpleIface hsc_env tc_result mb_old_hash
hscWriteIface dflags iface changed mod_summary
return HscNotGeneratingCode
_ ->
case ms_hsc_src mod_summary of
t | isHsBootOrSig t ->
do (iface, changed, _) <- hscSimpleIface' tc_result mb_old_hash
liftIO $ hscWriteIface dflags iface changed mod_summary
return (case t of
HsBootFile -> HscUpdateBoot
HsigFile -> HscUpdateSig
HsSrcFile -> panic "hscCompileOneShot Src")
_ ->
do guts <- hscSimplify' guts0
(iface, changed, _details, cgguts) <- hscNormalIface' guts mb_old_hash
liftIO $ hscWriteIface dflags iface changed mod_summary
return $ HscRecomp cgguts mod_summary
-- XXX This is always False, because in one-shot mode the
-- concept of stability does not exist. The driver never
-- passes SourceUnmodifiedAndStable in here.
stable = case src_changed of
SourceUnmodifiedAndStable -> True
_ -> False
(recomp_reqd, mb_checked_iface)
<- {-# SCC "checkOldIface" #-}
checkOldIface hsc_env' mod_summary src_changed Nothing
-- save the interface that comes back from checkOldIface.
-- In one-shot mode we don't have the old iface until this
-- point, when checkOldIface reads it from the disk.
let mb_old_hash = fmap mi_iface_hash mb_checked_iface
case mb_checked_iface of
Just iface | not (recompileRequired recomp_reqd) ->
-- If the module used TH splices when it was last compiled,
-- then the recompilation check is not accurate enough (#481)
-- and we must ignore it. However, if the module is stable
-- (none of the modules it depends on, directly or indirectly,
-- changed), then we *can* skip recompilation. This is why
-- the SourceModified type contains SourceUnmodifiedAndStable,
-- and it's pretty important: otherwise ghc --make would
-- always recompile TH modules, even if nothing at all has
-- changed. Stability is just the same check that make is
-- doing for us in one-shot mode.
if mi_used_th iface && not stable
then compile mb_old_hash (RecompBecause "TH")
else skip
_ ->
compile mb_old_hash recomp_reqd
--------------------------------------------------------------
-- NoRecomp handlers
--------------------------------------------------------------
-- NB: this must be knot-tied appropriately, see hscIncrementalCompile
genModDetails :: HscEnv -> ModIface -> IO ModDetails
genModDetails hsc_env old_iface
= do
new_details <- {-# SCC "tcRnIface" #-}
initIfaceLoad hsc_env (typecheckIface old_iface)
dumpIfaceStats hsc_env
return new_details
--------------------------------------------------------------
-- Progress displayers.
--------------------------------------------------------------
oneShotMsg :: HscEnv -> RecompileRequired -> IO ()
oneShotMsg hsc_env recomp =
case recomp of
UpToDate ->
compilationProgressMsg (hsc_dflags hsc_env) $
"compilation IS NOT required"
_ ->
return ()
batchMsg :: Messager
batchMsg hsc_env mod_index recomp mod_summary =
case recomp of
MustCompile -> showMsg "Compiling " ""
UpToDate
| verbosity (hsc_dflags hsc_env) >= 2 -> showMsg "Skipping " ""
| otherwise -> return ()
RecompBecause reason -> showMsg "Compiling " (" [" ++ reason ++ "]")
where
dflags = hsc_dflags hsc_env
showMsg msg reason =
compilationProgressMsg dflags $
showSDocWithColor dflags $
colored colYellowFg (text $
showModuleIndex mod_index ++ msg ++
showModMsg dflags (hscTarget dflags) (recompileRequired recomp) mod_summary ++
reason)
--------------------------------------------------------------
-- FrontEnds
--------------------------------------------------------------
hscFileFrontEnd :: ModSummary -> Hsc TcGblEnv
hscFileFrontEnd mod_summary = hscTypecheck False mod_summary Nothing
-- hscFileFrontEnd mod_summary = do
-- hpm <- hscParse' mod_summary
-- hsc_env <- getHscEnv
-- tcg_env <- tcRnModule' hsc_env mod_summary False hpm
-- return tcg_env
--------------------------------------------------------------
-- Safe Haskell
--------------------------------------------------------------
-- Note [Safe Haskell Trust Check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell checks that an import is trusted according to the following
-- rules for an import of module M that resides in Package P:
--
-- * If M is recorded as Safe and all its trust dependencies are OK
-- then M is considered safe.
-- * If M is recorded as Trustworthy and P is considered trusted and
-- all M's trust dependencies are OK then M is considered safe.
--
-- By trust dependencies we mean that the check is transitive. So if
-- a module M that is Safe relies on a module N that is trustworthy,
-- importing module M will first check (according to the second case)
-- that N is trusted before checking M is trusted.
--
-- This is a minimal description, so please refer to the user guide
-- for more details. The user guide is also considered the authoritative
-- source in this matter, not the comments or code.
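-- A concrete illustration (editor's addition, not part of the original note):
-- with package trust checking enabled (e.g. -fpackage-trust), a Trustworthy
-- module in package P is only accepted if P itself has been trusted (e.g. via
-- -trust P or ghc-pkg trust); a Safe module in the same package needs no such
-- flag; only its own safe imports have to check out.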
-- Note [Safe Haskell Inference]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell does Safe inference on modules that don't have any specific
-- safe haskell mode flag. The basic approach to this is:
-- * When deciding if we need to do a Safe language check, treat
-- an unmarked module as having -XSafe mode specified.
-- * For checks, don't throw errors but return them to the caller.
-- * Caller checks if there are errors:
-- * For modules explicitly marked -XSafe, we throw the errors.
-- * For unmarked modules (inference mode), we drop the errors
-- and mark the module as being Unsafe.
--
-- It used to be that we only did safe inference on modules that had no Safe
-- Haskell flags, but now we perform safe inference on all modules as we want
-- to allow users to set the `-fwarn-safe`, `-fwarn-unsafe` and
-- `-fwarn-trustworthy-safe` flags on Trustworthy and Unsafe modules so that a
-- user can ensure their assumptions are correct and see reasons for why a
-- module is safe or unsafe.
--
-- This is tricky as we must be careful when we should throw an error compared
-- to just warnings. For checking safe imports we manage it as two steps. First
-- we check any imports that are required to be safe, then we check all other
-- imports to see if we can infer them to be safe.
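-- A concrete illustration (editor's addition, derived from the code below): an
-- unmarked module that defines a user-written RULE fails safe inference; it is
-- recorded as Unsafe via 'markUnsafeInfer' and compilation continues, with a
-- warning emitted only when -fwarn-unsafe is set. Under an explicit -XSafe the
-- RULE is instead dropped with a warning (see 'checkRULES' inside
-- 'hscCheckSafeImports' below).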
-- | Check that the safe imports of the module being compiled are valid.
-- If not we either issue a compilation error if the module is explicitly
-- using Safe Haskell, or mark the module as unsafe if we're in safe
-- inference mode.
hscCheckSafeImports :: TcGblEnv -> Hsc TcGblEnv
hscCheckSafeImports tcg_env = do
dflags <- getDynFlags
tcg_env' <- checkSafeImports dflags tcg_env
checkRULES dflags tcg_env'
where
checkRULES dflags tcg_env' = do
case safeLanguageOn dflags of
True -> do
-- XSafe: we nuke user written RULES
logWarnings $ warns dflags (tcg_rules tcg_env')
return tcg_env' { tcg_rules = [] }
False
-- SafeInferred: user defined RULES, so not safe
| safeInferOn dflags && not (null $ tcg_rules tcg_env')
-> markUnsafeInfer tcg_env' $ warns dflags (tcg_rules tcg_env')
-- Trustworthy OR SafeInferred: with no RULES
| otherwise
-> return tcg_env'
warns dflags rules = listToBag $ map (warnRules dflags) rules
warnRules dflags (L loc (HsRule n _ _ _ _ _ _)) =
mkPlainWarnMsg dflags loc $
text "Rule \"" <> ftext (unLoc n) <> text "\" ignored" $+$
text "User defined rules are disabled under Safe Haskell"
-- | Validate that safe imported modules are actually safe. For modules in the
-- HomePackage (the package in which the module we are compiling resides) this
-- just involves checking its trust type is 'Safe' or 'Trustworthy'. For modules
-- that reside in another package we also must check that the external package
-- is trusted. See the Note [Safe Haskell Trust Check] above for more
-- information.
--
-- The code for this is quite tricky as the whole algorithm is done in a few
-- distinct phases in different parts of the code base. See
-- RnNames.rnImportDecl for where package trust dependencies for a module are
-- collected and unioned. Specifically see the Note [RnNames . Tracking Trust
-- Transitively] and the Note [RnNames . Trust Own Package].
checkSafeImports :: DynFlags -> TcGblEnv -> Hsc TcGblEnv
checkSafeImports dflags tcg_env
= do
imps <- mapM condense imports'
let (safeImps, regImps) = partition (\(_,_,s) -> s) imps
-- We want to use the warning state specifically for detecting if safe
-- inference has failed, so store and clear any existing warnings.
oldErrs <- getWarnings
clearWarnings
-- Check safe imports are correct
safePkgs <- S.fromList <$> mapMaybeM checkSafe safeImps
safeErrs <- getWarnings
clearWarnings
-- Check non-safe imports are correct if inferring safety
-- See the Note [Safe Haskell Inference]
(infErrs, infPkgs) <- case (safeInferOn dflags) of
False -> return (emptyBag, S.empty)
True -> do infPkgs <- S.fromList <$> mapMaybeM checkSafe regImps
infErrs <- getWarnings
clearWarnings
return (infErrs, infPkgs)
-- restore old errors
logWarnings oldErrs
case (isEmptyBag safeErrs) of
-- Failed safe check
False -> liftIO . throwIO . mkSrcErr $ safeErrs
-- Passed safe check
True -> do
let infPassed = isEmptyBag infErrs
tcg_env' <- case (not infPassed) of
True -> markUnsafeInfer tcg_env infErrs
False -> return tcg_env
when (packageTrustOn dflags) $ checkPkgTrust dflags pkgReqs
let newTrust = pkgTrustReqs safePkgs infPkgs infPassed
return tcg_env' { tcg_imports = impInfo `plusImportAvails` newTrust }
where
impInfo = tcg_imports tcg_env -- ImportAvails
imports = imp_mods impInfo -- ImportedMods
imports1 = moduleEnvToList imports -- (Module, [ImportedBy])
imports' = map (fmap importedByUser) imports1 -- (Module, [ImportedModsVal])
pkgReqs = imp_trust_pkgs impInfo -- [InstalledUnitId]
condense :: (Module, [ImportedModsVal]) -> Hsc (Module, SrcSpan, IsSafeImport)
condense (_, []) = panic "HscMain.condense: Pattern match failure!"
condense (m, x:xs) = do imv <- foldlM cond' x xs
return (m, imv_span imv, imv_is_safe imv)
-- ImportedModsVal = (ModuleName, Bool, SrcSpan, IsSafeImport)
cond' :: ImportedModsVal -> ImportedModsVal -> Hsc ImportedModsVal
cond' v1 v2
| imv_is_safe v1 /= imv_is_safe v2
= do
dflags <- getDynFlags
throwErrors $ unitBag $ mkPlainErrMsg dflags (imv_span v1)
(text "Module" <+> ppr (imv_name v1) <+>
(text $ "is imported both as a safe and unsafe import!"))
| otherwise
= return v1
-- easier interface to work with
checkSafe :: (Module, SrcSpan, a) -> Hsc (Maybe InstalledUnitId)
checkSafe (m, l, _) = fst `fmap` hscCheckSafe' dflags m l
-- what pkg's to add to our trust requirements
pkgTrustReqs req inf infPassed | safeInferOn dflags
&& safeHaskell dflags == Sf_None && infPassed
= emptyImportAvails {
imp_trust_pkgs = req `S.union` inf
}
pkgTrustReqs _ _ _ | safeHaskell dflags == Sf_Unsafe
= emptyImportAvails
pkgTrustReqs req _ _ = emptyImportAvails { imp_trust_pkgs = req }
-- | Check that a module is safe to import.
--
-- We return True to indicate the import is safe and False otherwise
-- although in the False case an exception may be thrown first.
hscCheckSafe :: HscEnv -> Module -> SrcSpan -> IO Bool
hscCheckSafe hsc_env m l = runHsc hsc_env $ do
dflags <- getDynFlags
pkgs <- snd `fmap` hscCheckSafe' dflags m l
when (packageTrustOn dflags) $ checkPkgTrust dflags pkgs
errs <- getWarnings
return $ isEmptyBag errs
-- | Return if a module is trusted and the pkgs it depends on to be trusted.
hscGetSafe :: HscEnv -> Module -> SrcSpan -> IO (Bool, Set InstalledUnitId)
hscGetSafe hsc_env m l = runHsc hsc_env $ do
dflags <- getDynFlags
(self, pkgs) <- hscCheckSafe' dflags m l
good <- isEmptyBag `fmap` getWarnings
clearWarnings -- don't want them printed...
let pkgs' | Just p <- self = S.insert p pkgs
| otherwise = pkgs
return (good, pkgs')
-- | Is a module trusted? If not, throw or log errors depending on the type.
-- Return (regardless of whether the module is trusted) whether the trust type
-- requires the module's own package to be trusted, and a list of other
-- packages that are required to be trusted (these latter ones haven't been
-- checked yet, but the own-package trust has been).
hscCheckSafe' :: DynFlags -> Module -> SrcSpan -> Hsc (Maybe InstalledUnitId, Set InstalledUnitId)
hscCheckSafe' dflags m l = do
(tw, pkgs) <- isModSafe m l
case tw of
False -> return (Nothing, pkgs)
True | isHomePkg m -> return (Nothing, pkgs)
| otherwise -> return (Just $ toInstalledUnitId (moduleUnitId m), pkgs)
where
isModSafe :: Module -> SrcSpan -> Hsc (Bool, Set InstalledUnitId)
isModSafe m l = do
iface <- lookup' m
case iface of
-- can't load iface to check trust!
Nothing -> throwErrors $ unitBag $ mkPlainErrMsg dflags l
$ text "Can't load the interface file for" <+> ppr m
<> text ", to check that it can be safely imported"
-- got iface, check trust
Just iface' ->
let trust = getSafeMode $ mi_trust iface'
trust_own_pkg = mi_trust_pkg iface'
-- check module is trusted
safeM = trust `elem` [Sf_Safe, Sf_Trustworthy]
-- check package is trusted
safeP = packageTrusted trust trust_own_pkg m
-- pkg trust reqs
pkgRs = S.fromList . map fst $ filter snd $ dep_pkgs $ mi_deps iface'
-- General errors we throw but Safe errors we log
errs = case (safeM, safeP) of
(True, True ) -> emptyBag
(True, False) -> pkgTrustErr
(False, _ ) -> modTrustErr
in do
logWarnings errs
return (trust == Sf_Trustworthy, pkgRs)
where
pkgTrustErr = unitBag $ mkErrMsg dflags l (pkgQual dflags) $
sep [ ppr (moduleName m)
<> text ": Can't be safely imported!"
, text "The package (" <> ppr (moduleUnitId m)
<> text ") the module resides in isn't trusted."
]
modTrustErr = unitBag $ mkErrMsg dflags l (pkgQual dflags) $
sep [ ppr (moduleName m)
<> text ": Can't be safely imported!"
, text "The module itself isn't safe." ]
-- | Check the package a module resides in is trusted. Safe compiled
-- modules are trusted without requiring that their package is trusted. For
-- trustworthy modules, modules in the home package are trusted but
-- otherwise we check the package trust flag.
packageTrusted :: SafeHaskellMode -> Bool -> Module -> Bool
packageTrusted Sf_None _ _ = False -- shouldn't hit these cases
packageTrusted Sf_Unsafe _ _ = False -- prefer for completeness.
packageTrusted _ _ _
| not (packageTrustOn dflags) = True
packageTrusted Sf_Safe False _ = True
packageTrusted _ _ m
| isHomePkg m = True
| otherwise = trusted $ getPackageDetails dflags (moduleUnitId m)
lookup' :: Module -> Hsc (Maybe ModIface)
lookup' m = do
hsc_env <- getHscEnv
hsc_eps <- liftIO $ hscEPS hsc_env
let pkgIfaceT = eps_PIT hsc_eps
homePkgT = hsc_HPT hsc_env
iface = lookupIfaceByModule dflags homePkgT pkgIfaceT m
#ifdef ETA_REPL
-- the 'lookupIfaceByModule' method will always fail when calling from GHCi
-- as the compiler hasn't filled in the various module tables
-- so we need to call 'getModuleInterface' to load from disk
iface' <- case iface of
Just _ -> return iface
Nothing -> snd `fmap` (liftIO $ getModuleInterface hsc_env m)
return iface'
#else
return iface
#endif
isHomePkg :: Module -> Bool
isHomePkg m
| thisPackage dflags == moduleUnitId m = True
| otherwise = False
-- | Check that every package in the given set is trusted.
checkPkgTrust :: DynFlags -> Set InstalledUnitId -> Hsc ()
checkPkgTrust dflags pkgs =
case errors of
[] -> return ()
_ -> (liftIO . throwIO . mkSrcErr . listToBag) errors
where
errors = S.foldr go [] pkgs
go pkg acc
| trusted $ getInstalledPackageDetails dflags pkg
= acc
| otherwise
= (:acc) $ mkErrMsg dflags noSrcSpan (pkgQual dflags)
$ text "The package (" <> ppr pkg <> text ") is required" <>
text " to be trusted but it isn't!"
-- | Set module to unsafe and (potentially) wipe trust information.
--
-- Make sure to call this method to set a module to inferred unsafe; it should
-- be a central and single failure method. We only wipe the trust information
-- when we aren't in a specific Safe Haskell mode.
--
-- While we only use this for recording that a module was inferred unsafe, we
-- may call it on modules using Trustworthy or Unsafe flags so as to allow
-- warning flags for safety to function correctly. See Note [Safe Haskell
-- Inference].
markUnsafeInfer :: TcGblEnv -> WarningMessages -> Hsc TcGblEnv
markUnsafeInfer tcg_env whyUnsafe = do
dflags <- getDynFlags
when (wopt Opt_WarnUnsafe dflags)
(logWarnings $ unitBag $
mkPlainWarnMsg dflags (warnUnsafeOnLoc dflags) (whyUnsafe' dflags))
liftIO $ writeIORef (tcg_safeInfer tcg_env) False
-- NOTE: Only wipe trust when not in an explicit Safe Haskell mode. Other
-- times inference may be on but we are in Trustworthy mode -- so we want
-- to record safe-inference failed but not wipe the trust dependencies.
case safeHaskell dflags == Sf_None of
True -> return $ tcg_env { tcg_imports = wiped_trust }
False -> return tcg_env
where
wiped_trust = (tcg_imports tcg_env) { imp_trust_pkgs = S.empty }
pprMod = ppr $ moduleName $ tcg_mod tcg_env
whyUnsafe' df = vcat [ quotes pprMod <+> text "has been inferred as unsafe!"
, text "Reason:"
, nest 4 $ (vcat $ badFlags df) $+$
(vcat $ pprErrMsgBagWithLoc whyUnsafe) $+$
(vcat $ badInsts $ tcg_insts tcg_env)
]
badFlags df = concat $ map (badFlag df) unsafeFlagsForInfer
badFlag df (str,loc,on,_)
| on df = [mkLocMessage SevOutput (loc df) $
text str <+> text "is not allowed in Safe Haskell"]
| otherwise = []
badInsts insts = concat $ map badInst insts
checkOverlap (NoOverlap _) = False
checkOverlap _ = True
badInst ins | checkOverlap (overlapMode (is_flag ins))
= [mkLocMessage SevOutput (nameSrcSpan $ getName $ is_dfun ins) $
ppr (overlapMode $ is_flag ins) <+>
text "overlap mode isn't allowed in Safe Haskell"]
| otherwise = []
-- | Figure out the final correct safe haskell mode
hscGetSafeMode :: TcGblEnv -> Hsc SafeHaskellMode
hscGetSafeMode tcg_env = do
dflags <- getDynFlags
liftIO $ finalSafeMode dflags tcg_env
--------------------------------------------------------------
-- Simplifiers
--------------------------------------------------------------
hscSimplify :: HscEnv -> ModGuts -> IO ModGuts
hscSimplify hsc_env modguts = runHsc hsc_env $ hscSimplify' modguts
hscSimplify' :: ModGuts -> Hsc ModGuts
hscSimplify' ds_result = do
hsc_env <- getHscEnv
{-# SCC "Core2Core" #-}
liftIO $ core2core hsc_env ds_result
--------------------------------------------------------------
-- Interface generators
--------------------------------------------------------------
hscSimpleIface :: HscEnv
-> TcGblEnv
-> Maybe Fingerprint
-> IO (ModIface, Bool, ModDetails)
hscSimpleIface hsc_env tc_result mb_old_iface
= runHsc hsc_env $ hscSimpleIface' tc_result mb_old_iface
hscSimpleIface' :: TcGblEnv
-> Maybe Fingerprint
-> Hsc (ModIface, Bool, ModDetails)
hscSimpleIface' tc_result mb_old_iface = do
hsc_env <- getHscEnv
details <- liftIO $ mkBootModDetailsTc hsc_env tc_result
safe_mode <- hscGetSafeMode tc_result
(new_iface, no_change)
<- {-# SCC "MkFinalIface" #-}
ioMsgMaybe $
mkIfaceTc hsc_env mb_old_iface safe_mode details tc_result
-- And the answer is ...
liftIO $ dumpIfaceStats hsc_env
return (new_iface, no_change, details)
hscNormalIface :: HscEnv
-> ModGuts
-> Maybe Fingerprint
-> IO (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface hsc_env simpl_result mb_old_iface =
runHsc hsc_env $ hscNormalIface' simpl_result mb_old_iface
hscNormalIface' :: ModGuts
-> Maybe Fingerprint
-> Hsc (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface' simpl_result mb_old_iface = do
hsc_env <- getHscEnv
(cg_guts, details) <- {-# SCC "CoreTidy" #-}
liftIO $ tidyProgram hsc_env simpl_result
-- BUILD THE NEW ModIface and ModDetails
-- and emit external core if necessary
-- This has to happen *after* code gen so that the back-end
-- info has been set. Not yet clear if it matters waiting
-- until after code output
(new_iface, no_change)
<- {-# SCC "MkFinalIface" #-}
ioMsgMaybe $
mkIface hsc_env mb_old_iface details simpl_result
liftIO $ dumpIfaceStats hsc_env
-- Return the prepared code.
return (new_iface, no_change, details, cg_guts)
--------------------------------------------------------------
-- BackEnd combinators
--------------------------------------------------------------
hscWriteIface :: DynFlags -> ModIface -> Bool -> ModSummary -> IO ()
hscWriteIface dflags iface no_change mod_summary = do
let ifaceFile = ml_hi_file (ms_location mod_summary)
unless no_change $
{-# SCC "writeIface" #-}
writeIfaceFile dflags ifaceFile iface
whenGeneratingDynamicToo dflags $ do
-- TODO: We should do a no_change check for the dynamic
-- interface file too
-- TODO: Should handle the dynamic hi filename properly
let dynIfaceFile = replaceExtension ifaceFile (dynHiSuf dflags)
dynIfaceFile' = addBootSuffix_maybe (mi_boot iface) dynIfaceFile
dynDflags = dynamicTooMkDynamicDynFlags dflags
writeIfaceFile dynDflags dynIfaceFile' iface
-- | Compile to hard-code.
hscGenHardCode :: HscEnv -> CgGuts -> ModSummary -> FilePath
-> IO (FilePath, Maybe FilePath) -- ^ @Just f@ <=> _stub.c is f
hscGenHardCode hsc_env cgguts mod_summary output_filename = do
let CgGuts{ -- This is the last use of the ModGuts in a compilation.
-- From now on, we just use the bits we need.
cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs,
cg_dep_pkgs = _dependencies,
cg_hpc_info = hpc_info } = cgguts
dflags = hsc_dflags hsc_env
location = ms_location mod_summary
data_tycons = filter isDataTyCon tycons
-- cg_tycons includes newtypes, for the benefit of External Core,
-- but we don't generate any code for newtypes
-------------------
-- PREPARE FOR CODE GENERATION
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
corePrepPgm hsc_env this_mod location core_binds data_tycons ;
----------------- Convert to STG ------------------
(stg_binds, _cost_centre_info)
<- {-# SCC "CoreToStg" #-}
myCoreToStg dflags this_mod prepd_binds
let modClass = moduleJavaClass this_mod
modClasses <- codeGen hsc_env this_mod location data_tycons stg_binds hpc_info
(lookupStubs modClass foreign_stubs)
let stubClasses = outputForeignStubs dflags foreign_stubs modClass
classes = stubClasses ++ modClasses
jarContents = map (classFilePath &&& classFileBS) classes
-- createEmptyJar output_filename
addMultiByteStringsToJar' (gopt Opt_NormalizeJar dflags) output_filename
(compressionMethod dflags) jarContents
return (output_filename, Nothing)
outputForeignStubs :: DynFlags -> ForeignStubs -> T.Text -> [ClassFile]
outputForeignStubs _dflags NoStubs _ = []
outputForeignStubs _dflags (ForeignStubs _ _ classExports) modClass =
map f $ filter (\(cls, _) -> cls /= modClass) $ foreignExportsList classExports
where f (classSpec, (methodDefs, fieldDefs)) =
mkClassFile java7 [Public, Super] (jvmify className) (Just superClass)
interfaces fieldDefs methodDefs''
where className':specs = T.words classSpec
className = jvmify className'
methodDefs' = methodDefs
methodDefs'' = if hasConstructor
then methodDefs'
else mkDefaultConstructor className superClass
: methodDefs'
hasConstructor = any (\(MethodDef _ (UName n) _ _ _) ->
n == "<init>") methodDefs
(superClass, interfaces) = parseSpecs specs jobjectC []
parseSpecs ("extends":superClass:xs) _ is = parseSpecs xs (jvmify superClass) is
parseSpecs ("implements":interface:xs) sc is = parseSpecs xs sc (jvmify interface:is)
parseSpecs [] sc is = (sc, reverse is)
parseSpecs _ _ _ = error $ "Invalid foreign export spec."
jvmify = T.map (\c -> if c == '.' then '/' else c)
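-- Worked example (editor's illustration; the spec string below is
-- hypothetical): a foreign export class spec of
-- "com.example.Foo extends java.lang.Object implements java.io.Serializable"
-- yields the class name "com/example/Foo", superclass "java/lang/Object" and
-- interfaces ["java/io/Serializable"] once 'jvmify' has rewritten dots to
-- slashes; a spec with no "extends"/"implements" clauses falls back to
-- 'jobjectC' as the superclass and no interfaces.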
hscInteractive :: HscEnv
-> CgGuts
-> ModSummary
-> IO [ClassFile]
#ifdef ETA_REPL
hscInteractive hsc_env cgguts mod_summary = do
let dflags = hsc_dflags hsc_env
CgGuts{ -- This is the last use of the ModGuts in a compilation.
-- From now on, we just use the bits we need.
cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs } = cgguts
location = ms_location mod_summary
data_tycons = filter isDataTyCon tycons
-- cg_tycons includes newtypes, for the benefit of External Core,
-- but we don't generate any code for newtypes
-------------------
-- PREPARE FOR CODE GENERATION
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
corePrepPgm hsc_env this_mod location core_binds data_tycons
(stg_binds, _cost_centre_info)
<- {-# SCC "CoreToStg" #-}
myCoreToStg dflags this_mod prepd_binds
let modClass = moduleJavaClass this_mod
modClasses <- codeGen hsc_env this_mod location data_tycons stg_binds
(panic "hpc_info") (lookupStubs modClass foreign_stubs)
let stubClasses = outputForeignStubs dflags foreign_stubs modClass
classes = modClasses ++ stubClasses
return classes
#else
hscInteractive _ _ = panic "GHC not compiled with interpreter"
#endif
myCoreToStg :: DynFlags -> Module -> CoreProgram
-> IO ( [StgBinding] -- output program
, CollectedCCs) -- cost centre info (declared and used)
myCoreToStg dflags this_mod prepd_binds = do
stg_binds
<- {-# SCC "Core2Stg" #-}
coreToStg dflags this_mod prepd_binds
(stg_binds2, cost_centre_info)
<- {-# SCC "Stg2Stg" #-}
stg2stg dflags this_mod stg_binds
return (stg_binds2, cost_centre_info)
{- **********************************************************************
%* *
\subsection{Compiling a do-statement}
%* *
%********************************************************************* -}
{-
When the UnlinkedBCOExpr is linked you get an HValue of type *IO [HValue]*. When
you run it you get a list of HValues that should be the same length as the list
of names; add them to the ClosureEnv.
A naked expression returns a singleton Name [it]. The stmt is lifted into the
IO monad as explained in Note [Interactively-bound Ids in GHCi] in HscTypes
-}
#ifdef ETA_REPL
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmt :: HscEnv -> String -> IO (Maybe (Either Reinterpret ([Id], ForeignHValue, FixityEnv)))
hscStmt hsc_env stmt = hscStmtWithLocation hsc_env stmt "<interactive>" 1
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmtWithLocation :: HscEnv
-> String -- ^ The statement
-> String -- ^ The source
-> Int -- ^ Starting line
-> IO ( Maybe (Either Reinterpret ([Id]
, ForeignHValue {- IO [HValue] -}
, FixityEnv)))
hscStmtWithLocation hsc_env0 stmt source linenumber =
runInteractiveHsc hsc_env0 $ do
maybe_stmt <- hscParseStmtWithLocation source linenumber stmt
case maybe_stmt of
Nothing -> return Nothing
Just parsed_stmt -> do
hsc_env <- getHscEnv
liftIO $ hscParsedStmt hsc_env parsed_stmt
hscParsedStmt :: HscEnv
-> GhciLStmt RdrName -- ^ The parsed statement
-> IO ( Maybe (Either Reinterpret ([Id]
, ForeignHValue {- IO [HValue] -}
, FixityEnv)))
hscParsedStmt hsc_env stmt = runInteractiveHsc hsc_env $ do
-- Rename and typecheck it
eResult <- ioMsgMaybe $ tcRnStmt hsc_env stmt
case eResult of
Right (ids, tc_expr, fix_env) -> do
-- Desugar it
ds_expr <- ioMsgMaybe $ deSugarExpr hsc_env tc_expr
liftIO (lintInteractiveExpr "desugar expression" hsc_env ds_expr)
handleWarnings
-- Then code-gen, and link it
-- It's important NOT to have package 'interactive' as thisUnitId
-- for linking, else we try to link 'main' and can't find it.
-- Whereas the linker already knows to ignore 'interactive'
let src_span = srcLocSpan interactiveSrcLoc
-- TODO: Allow hooks to work
hval <- liftIO $ hscCompileCoreExpr hsc_env src_span ds_expr
return $ Just $ Right (ids, hval, fix_env)
Left reinterpret -> return $ Just $ Left reinterpret
-- | Compile a list of declarations.
hscDecls :: HscEnv
-> String -- ^ The statement
-> IO ([TyThing], InteractiveContext)
hscDecls hsc_env str = hscDeclsWithLocation hsc_env str "<interactive>" 1
-- | Compile a list of declarations.
hscDeclsWithLocation :: HscEnv
-> String -- ^ The statement
-> String -- ^ The source
-> Int -- ^ Starting line
-> IO ([TyThing], InteractiveContext)
hscDeclsWithLocation hsc_env0 str source linenumber =
runInteractiveHsc hsc_env0 $ do
L _ (HsModule{ hsmodDecls = decls }) <-
hscParseThingWithLocation source linenumber parseModule str
{- Rename and typecheck it -}
hsc_env <- getHscEnv
tc_gblenv <- ioMsgMaybe $ tcRnDeclsi hsc_env decls
{- Grab the new instances -}
-- We grab the whole environment because of the overlapping that may have
-- been done. See the notes at the definition of InteractiveContext
-- (ic_instances) for more details.
let defaults = tcg_default tc_gblenv
dflags = hsc_dflags hsc_env
{- Desugar it -}
-- We use a basically null location for iNTERACTIVE
let iNTERACTIVELoc = ModLocation{ ml_hs_file = Nothing,
ml_hi_file = panic "hsDeclsWithLocation:ml_hi_file",
ml_obj_file = panic "hsDeclsWithLocation:ml_hi_file"}
ds_result <- hscDesugar' iNTERACTIVELoc tc_gblenv
{- Simplify -}
simpl_mg <- liftIO $ hscSimplify hsc_env ds_result
-- simpl_mg <- liftIO $ do
-- plugins <- readIORef (tcg_th_coreplugins tc_gblenv)
-- hscSimplify hsc_env plugins ds_result
{- Tidy -}
(tidy_cg, mod_details) <- liftIO $ tidyProgram hsc_env simpl_mg
let !CgGuts{ cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs } = tidy_cg
!ModDetails { md_insts = cls_insts
, md_fam_insts = fam_insts } = mod_details
-- Get the *tidied* cls_insts and fam_insts
data_tycons = filter isDataTyCon tycons
{- Prepare For Code Generation -}
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
liftIO $ corePrepPgm hsc_env this_mod iNTERACTIVELoc core_binds data_tycons
(stg_binds, _cost_centre_info)
<- {-# SCC "CoreToStg" #-}
liftIO $ myCoreToStg dflags this_mod prepd_binds
let modClass = moduleJavaClass this_mod
modClasses <- liftIO $ codeGen hsc_env this_mod iNTERACTIVELoc data_tycons
stg_binds (panic "hpc_info") (lookupStubs modClass foreign_stubs)
let stubClasses = outputForeignStubs dflags foreign_stubs modClass
classes = modClasses ++ stubClasses
liftIO $ linkClasses hsc_env (forceClasses classes)
-- let src_span = srcLocSpan interactiveSrcLoc
let tcs = filterOut isImplicitTyCon (mg_tcs simpl_mg)
patsyns = mg_patsyns simpl_mg
ext_ids = [ id | id <- bindersOfBinds core_binds
, isExternalName (idName id)
, not (isDFunId id || isImplicitId id) ]
-- We only need to keep around the external bindings
-- (as decided by TidyPgm), since those are the only ones
-- that might later be looked up by name. But we can exclude
-- - DFunIds, which are in 'cls_insts' (see Note [ic_tythings] in HscTypes
-- - Implicit Ids, which are implicit in tcs
-- c.f. TcRnDriver.runTcInteractive, which reconstructs the TypeEnv
new_tythings = map AnId ext_ids ++ map ATyCon tcs ++ map (AConLike . PatSynCon) patsyns
ictxt = hsc_IC hsc_env
-- See Note [Fixity declarations in GHCi]
fix_env = tcg_fix_env tc_gblenv
new_ictxt = extendInteractiveContext ictxt new_tythings cls_insts
fam_insts defaults fix_env
return (new_tythings, new_ictxt)
{-
Note [Fixity declarations in GHCi]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To support fixity declarations on types defined within GHCi (as requested
in #10018) we record the fixity environment in InteractiveContext.
When we want to evaluate something TcRnDriver.runTcInteractive pulls out this
fixity environment and uses it to initialize the global typechecker environment.
After the typechecker has finished its business, an updated fixity environment
(reflecting whatever fixity declarations were present in the statements we
passed it) will be returned from hscParsedStmt. This is passed to
updateFixityEnv, which will stuff it back into InteractiveContext, to be
used in evaluating the next statement.
-}
hscImport :: HscEnv -> String -> IO (ImportDecl RdrName)
hscImport hsc_env str = runInteractiveHsc hsc_env $ do
(L _ (HsModule{hsmodImports=is})) <-
hscParseThing parseModule str
case is of
[L _ i] -> return i
_ -> liftIO $ throwOneError $
mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan $
text "parse error in import declaration"
-- | Typecheck an expression (but don't run it)
hscTcExpr :: HscEnv
-> String -- ^ The expression
-> IO Type
hscTcExpr hsc_env0 expr = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
parsed_expr <- hscParseExpr expr
ioMsgMaybe $ tcRnExpr hsc_env (noLoc parsed_expr)
-- | Find the kind of a type
-- Currently this does *not* generalise the kinds of the type
hscKcType
:: HscEnv
-> Bool -- ^ Normalise the type
-> String -- ^ The type as a string
-> IO (Type, Kind) -- ^ Resulting type (possibly normalised) and kind
hscKcType hsc_env0 normalise str = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ty <- hscParseType str
ioMsgMaybe $ tcRnType hsc_env normalise ty
hscParseExpr :: String -> Hsc (HsExpr RdrName)
hscParseExpr expr = do
hsc_env <- getHscEnv
maybe_stmt <- hscParseStmt expr
case maybe_stmt of
Just (L _ (BodyStmt expr _ _ _)) -> return (unLoc expr)
_ -> throwErrors $ unitBag $ mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan
(text "not an expression:" <+> quotes (text expr))
hscParseStmt :: String -> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmt = hscParseThing parseStmt
hscParseStmtWithLocation :: String -> Int -> String
-> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmtWithLocation source linenumber stmt =
hscParseThingWithLocation source linenumber parseStmt stmt
hscParseType :: String -> Hsc (LHsType RdrName)
hscParseType = hscParseThing parseType
#endif
hscParseIdentifier :: HscEnv -> String -> IO (Located RdrName)
hscParseIdentifier hsc_env str =
runInteractiveHsc hsc_env $ hscParseThing parseIdentifier str
hscParseThing :: (Outputable thing, Data thing)
=> Lexer.P thing -> String -> Hsc thing
hscParseThing = hscParseThingWithLocation "<interactive>" 1
hscParseThingWithLocation :: (Outputable thing, Data thing) => String -> Int
-> Lexer.P thing -> String -> Hsc thing
hscParseThingWithLocation source linenumber parser str
= {-# SCC "Parser" #-} do
-- withTiming getDynFlags
-- (text "Parser [source]")
-- (const ()) $
dflags <- getDynFlags
liftIO $ showPass dflags "Parser"
let buf = stringToStringBuffer str
loc = mkRealSrcLoc (fsLit source) linenumber 1
case unP parser (mkPState dflags buf loc) of
PFailed span err ->
liftIO $ throwOneError (mkPlainErrMsg dflags span err)
-- PFailed warnFn span err -> do
-- logWarningsReportErrors (warnFn dflags)
-- handleWarnings
-- let msg = mkPlainErrMsg dflags span err
-- throwErrors $ unitBag msg
POk pst thing -> do
logWarningsReportErrors (getMessages pst)
liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" (ppr thing)
return thing
-- POk pst thing -> do
-- logWarningsReportErrors (getMessages pst dflags)
-- liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" (ppr thing)
-- liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed_ast "Parser AST" $
-- showAstData NoBlankSrcSpan thing
-- return thing
hscCompileCore :: HscEnv -> Bool -> SafeHaskellMode -> ModSummary
-> CoreProgram -> FilePath -> IO ()
hscCompileCore hsc_env simplify safe_mode mod_summary binds output_filename
= runHsc hsc_env $ do
guts <- maybe_simplify (mkModGuts (ms_mod mod_summary) safe_mode binds)
(iface, changed, _details, cgguts) <- hscNormalIface' guts Nothing
liftIO $ hscWriteIface (hsc_dflags hsc_env) iface changed mod_summary
_ <- liftIO $ hscGenHardCode hsc_env cgguts mod_summary output_filename
return ()
where
maybe_simplify mod_guts | simplify = hscSimplify' mod_guts
| otherwise = return mod_guts
-- Makes a "vanilla" ModGuts.
mkModGuts :: Module -> SafeHaskellMode -> CoreProgram -> ModGuts
mkModGuts mod safe binds =
ModGuts {
mg_module = mod,
mg_hsc_src = HsSrcFile,
mg_exports = [],
mg_usages = [],
mg_deps = noDependencies,
mg_used_th = False,
mg_rdr_env = emptyGlobalRdrEnv,
mg_fix_env = emptyFixityEnv,
mg_tcs = [],
mg_insts = [],
mg_fam_insts = [],
mg_patsyns = [],
mg_rules = [],
mg_vect_decls = [],
mg_binds = binds,
mg_foreign = NoStubs,
mg_warns = NoWarnings,
mg_anns = [],
mg_hpc_info = emptyHpcInfo False,
mg_modBreaks = emptyModBreaks,
mg_vect_info = noVectInfo,
mg_inst_env = emptyInstEnv,
mg_fam_inst_env = emptyFamInstEnv,
mg_safe_haskell = safe,
mg_trust_pkg = False
}
{- **********************************************************************
%* *
Desugar, simplify, convert to bytecode, and link an expression
%* *
%********************************************************************* -}
#ifdef ETA_REPL
hscCompileCoreExpr :: HscEnv -> SrcSpan -> CoreExpr -> IO ForeignHValue
hscCompileCoreExpr hsc_env =
lookupHook hscCompileCoreExprHook hscCompileCoreExpr' (hsc_dflags hsc_env) hsc_env
hscCompileCoreExpr' :: HscEnv -> SrcSpan -> CoreExpr -> IO ForeignHValue
hscCompileCoreExpr' hsc_env _srcspan ds_expr
= do { let dflags = hsc_dflags hsc_env
{- Simplify it -}
; simpl_expr <- simplifyExpr dflags ds_expr
{- Tidy it (temporary, until coreSat does cloning) -}
; let tidy_expr = tidyExpr emptyTidyEnv simpl_expr
{- Prepare for codegen -}
; prepd_expr <- corePrepExpr dflags hsc_env tidy_expr
{- Lint if necessary -}
; lintInteractiveExpr "hscCompileExpr" hsc_env prepd_expr
; exprNo <- icExprCounterInc (hsc_IC hsc_env)
----------------- Convert to STG ------------------
; let exprFS = fsLit "$expr"
this_mod0 = icInteractiveModule (hsc_IC hsc_env)
this_mod =
this_mod0 { moduleName =
mkModuleName $ moduleNameString (moduleName this_mod0)
++ "_" ++ show exprNo }
exprName = mkExternalName (getUnique exprFS)
this_mod (mkVarOccFS exprFS)
noSrcSpan
-- Create an id to keep the codegen happy
exprId = mkVanillaGlobal exprName
(exprType prepd_expr)
prepd_binds = [NonRec exprId prepd_expr]
-- Stub location to keep the codegen happy
mod_location =
ModLocation { ml_hs_file = Just "Interactive.hs"
, ml_hi_file = "Interactive.hi"
, ml_obj_file = "Interactive.jar" }
; (stg_binds, _cost_centre_info)
<- {-# SCC "CoreToStg" #-}
myCoreToStg dflags this_mod prepd_binds
{- link it -}
; modClasses <- codeGen hsc_env this_mod mod_location
[] stg_binds (panic "hpcInfo") Nothing
; hval <- linkExpr hsc_env (T.unpack $ moduleJavaClass this_mod)
(T.unpack $ idNameText dflags exprId) modClasses
; return hval }
#endif
{- **********************************************************************
%* *
Statistics on reading interfaces
%* *
%********************************************************************* -}
dumpIfaceStats :: HscEnv -> IO ()
dumpIfaceStats hsc_env = do
eps <- readIORef (hsc_EPS hsc_env)
dumpIfSet dflags (dump_if_trace || dump_rn_stats)
"Interface statistics"
(ifaceStats eps)
where
dflags = hsc_dflags hsc_env
dump_rn_stats = dopt Opt_D_dump_rn_stats dflags
dump_if_trace = dopt Opt_D_dump_if_trace dflags
{- **********************************************************************
%* *
Progress Messages: Module i of n
%* *
%********************************************************************* -}
showModuleIndex :: (Int, Int) -> String
showModuleIndex (i,n) = "[" ++ padded ++ " of " ++ n_str ++ "] "
where
n_str = show n
i_str = show i
padded = replicate (length n_str - length i_str) ' ' ++ i_str
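-- Usage sketch (editor's illustration): the index is right-aligned to the
-- width of the total count, so progress messages line up:
--
-- > showModuleIndex (2, 10) == "[ 2 of 10] "
-- > showModuleIndex (7, 7)  == "[7 of 7] "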
| rahulmutt/ghcvm | compiler/Eta/Main/HscMain.hs | bsd-3-clause | 73,497 | 0 | 28 | 21,764 | 12,449 | 6,404 | 6,045 | 822 | 9 |
{-# LANGUAGE GADTs #-} -- needed for the GADT-style 'Statement' declaration below
module Language.StreamIt.Filter
( Statement (..)
, FilterT (..)
, Rate (..)
, Filter
, FilterDecl
, evalStmt
, execStmt
, rate
, push
, peek
, pop
, println
, work
, init'
) where
import Data.Char
import Data.List
import Data.Typeable
import Control.Monad.Trans
import Language.StreamIt.Core
data Statement where
Decl :: Elt a => Var a -> Statement
Assign :: Elt a => Var a -> Exp a -> Statement
Branch :: Exp Bool -> Statement -> Statement -> Statement
Loop :: Statement -> Exp Bool -> Statement -> Statement -> Statement
Sequence :: Statement -> Statement -> Statement
Work :: Rate -> Statement -> Statement
Init :: Statement -> Statement
Push :: Elt a => Exp a -> Statement
Pop :: Statement
Println :: Elt a => Exp a -> Statement
Null :: Statement
instance Eq (Statement) where (==) _ _ = True
-- | The Filter monad holds StreamIt filter statements.
newtype FilterT i o m a = FilterT {runFilterT :: ((Int, Statement) -> m (a, (Int, Statement)))}
type Filter a b = FilterT a b IO
instance (Typeable a, Typeable b) => Typeable1 (Filter a b) where
typeOf1 s = let
tyCon = mkTyCon3 "Language" "StreamIt" "Filter.Filter"
(a, b) = peel s
peel :: Filter a b m -> (a, b)
peel = undefined
in mkTyConApp tyCon [typeOf a, typeOf b]
instance (Monad m) => Monad (FilterT a b m) where
return a = FilterT $ \ s -> return (a, s)
(>>=) sf f = FilterT $ \ s -> do (a1, s1) <- runFilterT sf s
(a2, s2) <- runFilterT (f a1) s1
return (a2, s2)
instance MonadTrans (FilterT a b) where
lift m = FilterT $ \ s -> do
a <- m
return (a, s)
instance (MonadIO m) => MonadIO (FilterT a b m) where
liftIO = lift . liftIO
-- Returns the complete type (int->int) of a filter
instance (Typeable a, Typeable b, Typeable m) => Show (Filter a b m) where
show s = map toLower $ (head $ tail t) ++ "->" ++ (head $ tail $ tail t)
where
t = words $ (show . typeOf) s
statement :: (Monad m) => Statement -> FilterT a b m ()
statement a = FilterT $ \ (id, statement) -> return ((), (id, Sequence statement a))
evalStmt :: (Monad m) => Int -> FilterT a b m () -> m (Int, Statement)
evalStmt id (FilterT f) = do
(_, x) <- f (id, Null)
return x
execStmt :: (Monad m) => FilterT a b m () -> m Statement
execStmt f = do
(_, x) <- evalStmt 0 f
return x
get :: (Monad m) => FilterT a b m (Int, Statement)
get = FilterT $ \ a -> return (a, a)
put :: (Monad m) => (Int, Statement) -> FilterT a b m ()
put s = FilterT $ \ _ -> return ((), s)
-- FilterDecl = (Type, Name, Arguments, AST node)
type FilterDecl = (String, String, String, Statement)
instance CoreE (Filter a b) where
var init = do
(id, stmt) <- get
sym <- lift $ gensym init
put (id, Sequence stmt $ Decl sym)
return sym
float = var zero
float' = var
int = var zero
int' = var
bool = var zero
bool' = var
array _ size = var (Array size zero)
a <== b = statement $ Assign a b
ifelse cond onTrue onFalse = do
(id0, stmt) <- get
(id1, stmt1) <- lift $ evalStmt id0 (onTrue >> return ())
(id2, stmt2) <- lift $ evalStmt id1 (onFalse >> return ())
put (id2, stmt)
statement $ Branch cond stmt1 stmt2
if_ cond onTrue = do
(id0, stmt) <- get
(id1, stmt1) <- lift $ evalStmt id0 (onTrue >> return ())
put (id1, stmt)
statement $ Branch cond stmt1 Null
for_ (init, cond, inc) body = do
(id0, stmt) <- get
(id1, stmt1) <- lift $ evalStmt id0 (body >> return ())
ini <- lift $ execStmt init
inc <- lift $ execStmt inc
put (id1, stmt)
statement $ Loop ini cond inc stmt1
while_ cond body = do
(id0, stmt) <- get
(id1, stmt1) <- lift $ evalStmt id0 (body >> return ())
put (id1, stmt)
statement $ Loop Null cond Null stmt1
-- | Push
push :: (Elt a, Elt b) => Exp b -> Filter a b ()
push a = statement $ Push a
-- | Peek
peek :: Elt a => Exp Int -> Exp a
-- RRN: Shouldn't the type be:
-- peek :: (Elt a, Elt b) => Exp Int -> Filter a b (Exp b)
peek = PeekExp
-- | Pop
pop :: (Elt a,Elt b) => Filter a b (Exp a)
pop = do
(id, stmt) <- get
sym <- lift $ gensym zero
put (id, Sequence stmt $
Sequence (Decl sym) $
Assign sym PopExp )
return (Ref sym)
-- | Println
println :: (Elt a) => Exp a -> Filter b c ()
println f = statement $ Println f
-- | Rate declarations for work functions
data Rate = Rate {
pushRate :: Exp Int,
popRate :: Exp Int,
peekRate :: Exp Int
}
showFlowRate :: Rate -> String
showFlowRate Rate {pushRate=a, popRate=b, peekRate=c} =
intercalate " " $ zipWith showf ["push","pop","peek"] [a,b,c]
where
showf tag rate = case rate of
Const 0 -> ""
_ -> tag ++ " " ++ show rate
instance Show Rate where show = showFlowRate
rate :: Exp Int -> Exp Int -> Exp Int -> Rate
rate = Rate
-- | Initialization function
init' :: (Elt a, Elt b) => Filter a b () -> Filter a b ()
init' s = do
(id0, stmt) <- get
(id1, stmt1) <- lift $ evalStmt id0 s
put (id1, stmt)
statement $ Init stmt1
-- | Work
work :: (Elt a, Elt b) => Rate -> Filter a b () -> Filter a b ()
work rate s = do
(id0, stmt) <- get
(id1, stmt1) <- lift $ evalStmt id0 s
put (id1, stmt)
statement $ Work rate stmt1
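-- Editor's illustration (not part of the original module): a minimal
-- pass-through filter built from the combinators above. This assumes an
-- 'Elt Int' instance and a 'Num' instance for 'Exp Int' (both expected from
-- Language.StreamIt.Core), so that the literals 1 and 0 denote constant rates.
_exampleIdentity :: Filter Int Int ()
_exampleIdentity =
  work (rate 1 1 0) $ do
    x <- pop
    push x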
| adk9/haskell-streamit | Language/StreamIt/Filter.hs | bsd-3-clause | 5,342 | 0 | 14 | 1,471 | 2,376 | 1,234 | 1,142 | -1 | -1 |
-- {-# LANGUAGE #-}
{-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : Circat.ShowUtils
-- Copyright : (c) 2014 Tabula, Inc.
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Show-related utilities
----------------------------------------------------------------------
module Circat.ShowUtils (Show'(..)) where
-- Show for all type arguments
class Show' f where
show' :: f a -> String
showsPrec' :: Int -> f a -> ShowS
showsPrec' _ x s = show' x ++ s
show' x = showsPrec' 0 x ""
| capn-freako/circat | src/Circat/ShowUtils.hs | bsd-3-clause | 723 | 0 | 9 | 145 | 101 | 59 | 42 | 7 | 0 |
module Zipper (
BinTree(..),
Zipper,
fromTree,
toTree,
value,
left,
right,
up,
setValue,
setLeft,
setRight
) where
-- | A binary tree.
data BinTree a = BT {
btValue :: a -- ^ Value
, btLeft :: Maybe (BinTree a) -- ^ Left child
, btRight :: Maybe (BinTree a) -- ^ Right child
} deriving (Eq, Show)
-- | A zipper for a binary tree.
data Zipper a -- Complete this definition
-- | Get a zipper focussed on the root node.
fromTree :: BinTree a -> Zipper a
fromTree = undefined
-- | Get the complete tree from a zipper.
toTree :: Zipper a -> BinTree a
toTree = undefined
-- | Get the value of the focus node.
value :: Zipper a -> a
value = undefined
-- | Get the left child of the focus node, if any.
left :: Zipper a -> Maybe (Zipper a)
left = undefined
-- | Get the right child of the focus node, if any.
right :: Zipper a -> Maybe (Zipper a)
right = undefined
-- | Get the parent of the focus node, if any.
up :: Zipper a -> Maybe (Zipper a)
up = undefined
-- | Set the value of the focus node.
setValue :: a -> Zipper a -> Zipper a
setValue = undefined
-- | Replace a left child tree.
setLeft :: Maybe (BinTree a) -> Zipper a -> Zipper a
setLeft = undefined
-- | Replace a right child tree.
setRight :: Maybe (BinTree a) -> Zipper a -> Zipper a
setRight = undefined
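-- Intended usage, for illustration only (editor's note): once the definitions
-- above are completed, the zipper should compose like this:
--
-- > value (fromTree t)                          -- value at the root
-- > value <$> (left =<< right (fromTree t))     -- right child's left child
-- > toTree . setValue 42 <$> left (fromTree t)  -- rebuild with new left value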
| pminten/xhaskell | zipper/Zipper.hs | mit | 1,350 | 0 | 11 | 346 | 344 | 191 | 153 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE PatternSynonyms #-} -- needed for the 'Over' pattern synonym below
{- |
Module : Data.ML.Scalar
Description : Scalar models.
Copyright : (c) Paweł Nowak
License : MIT
Maintainer : [email protected]
Stability : experimental
This module exports pure scalar functions as models.
-}
module Data.ML.Scalar (
Scalar(..),
Over(..),
-- * Exponential and logarithm.
Exp(..),
Log(..),
-- * Trigonometric functions.
Sin(..),
Tan(..),
Cos(..),
Asin(..),
Atan(..),
Acos(..),
-- * Hyperbolic functions.
Sinh(..),
Tanh(..),
Cosh(..),
Asinh(..),
Atanh(..),
Acosh(..),
-- * Other functions.
Id(..),
Sqrt(..),
Sigmoid(..)
) where
import Data.Bytes.Serial
import Data.ML.Internal.Compose
import Data.ML.Internal.Scalar
import Data.ML.Internal.TH
import Data.ML.Model
import Linear
-- | Applies a scalar model m over a structure f.
newtype Over f m a = Over' (Compose f m a)
deriving (Functor, Applicative, Foldable, Traversable, Additive, Metric)
pattern Over m = Over' (Compose m)
instance (Serial1 f, Serial1 g) => Serial1 (Over f g) where
serializeWith f (Over' m) = serializeWith f m
deserializeWith f = Over' <$> deserializeWith f
instance (Applicative f, Model m, Input m ~ Scalar, Output m ~ Scalar)
=> Model (Over f m) where
type Input (Over f m) = f
type Output (Over f m) = f
predict x (Over m) = predict' <$> x <*> m
where predict' x' m' = getScalar (predict (Scalar x') m')
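-- For illustration (editor's note, not part of the original module): 'Over'
-- lifts a scalar model pointwise over a container. For example, assuming the
-- 'V2' functor re-exported by "Linear", a value of type @Over V2 Exp a@ wraps
-- a @V2 (Exp a)@, and its 'predict' applies the scalar exponential model to
-- each component of a 'V2' input independently.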
mkScalarModel [| exp |] "Exp"
mkScalarModel [| sqrt |] "Sqrt"
mkScalarModel [| log |] "Log"
mkScalarModel [| sin |] "Sin"
mkScalarModel [| tan |] "Tan"
mkScalarModel [| cos |] "Cos"
mkScalarModel [| asin |] "Asin"
mkScalarModel [| atan |] "Atan"
mkScalarModel [| acos |] "Acos"
mkScalarModel [| sinh |] "Sinh"
mkScalarModel [| tanh |] "Tanh"
mkScalarModel [| cosh |] "Cosh"
mkScalarModel [| asinh |] "Asinh"
mkScalarModel [| atanh |] "Atanh"
mkScalarModel [| acosh |] "Acosh"
mkScalarModel [| id |] "Id"
sigmoid :: Floating f => f -> f
sigmoid x = 1 / (1 + exp (-x))
mkScalarModel [| sigmoid |] "Sigmoid"
| bitemyapp/machine-learning | src/Data/ML/Scalar.hs | mit | 2,286 | 0 | 12 | 500 | 695 | 401 | 294 | 63 | 1 |
{-# LANGUAGE CPP #-}
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 1993-2004
--
-- The native code generator's monad.
--
-- -----------------------------------------------------------------------------
module NCGMonad (
NatM_State(..), mkNatM_State,
NatM, -- instance Monad
initNat,
addImportNat,
getUniqueNat,
mapAccumLNat,
setDeltaNat,
getDeltaNat,
getThisModuleNat,
getBlockIdNat,
getNewLabelNat,
getNewRegNat,
getNewRegPairNat,
getPicBaseMaybeNat,
getPicBaseNat,
getDynFlags
)
where
#include "HsVersions.h"
import Reg
import Size
import TargetReg
import BlockId
import CLabel ( CLabel, mkAsmTempLabel )
import UniqSupply
import Unique ( Unique )
import DynFlags
import Module
import Control.Monad ( liftM, ap )
import Control.Applicative ( Applicative(..) )
data NatM_State
= NatM_State {
natm_us :: UniqSupply,
natm_delta :: Int,
natm_imports :: [(CLabel)],
natm_pic :: Maybe Reg,
natm_dflags :: DynFlags,
natm_this_module :: Module
}
newtype NatM result = NatM (NatM_State -> (result, NatM_State))
unNat :: NatM a -> NatM_State -> (a, NatM_State)
unNat (NatM a) = a
mkNatM_State :: UniqSupply -> Int -> DynFlags -> Module -> NatM_State
mkNatM_State us delta dflags this_mod
= NatM_State us delta [] Nothing dflags this_mod
initNat :: NatM_State -> NatM a -> (a, NatM_State)
initNat init_st m
= case unNat m init_st of { (r,st) -> (r,st) }
instance Functor NatM where
fmap = liftM
instance Applicative NatM where
pure = return
(<*>) = ap
instance Monad NatM where
(>>=) = thenNat
return = returnNat
thenNat :: NatM a -> (a -> NatM b) -> NatM b
thenNat expr cont
= NatM $ \st -> case unNat expr st of
(result, st') -> unNat (cont result) st'
returnNat :: a -> NatM a
returnNat result
= NatM $ \st -> (result, st)
mapAccumLNat :: (acc -> x -> NatM (acc, y))
-> acc
-> [x]
-> NatM (acc, [y])
mapAccumLNat _ b []
= return (b, [])
mapAccumLNat f b (x:xs)
= do (b__2, x__2) <- f b x
(b__3, xs__2) <- mapAccumLNat f b__2 xs
return (b__3, x__2:xs__2)
getUniqueNat :: NatM Unique
getUniqueNat = NatM $ \ st ->
case takeUniqFromSupply $ natm_us st of
(uniq, us') -> (uniq, st {natm_us = us'})
instance HasDynFlags NatM where
getDynFlags = NatM $ \ st -> (natm_dflags st, st)
getDeltaNat :: NatM Int
getDeltaNat = NatM $ \ st -> (natm_delta st, st)
setDeltaNat :: Int -> NatM ()
setDeltaNat delta = NatM $ \ st -> ((), st {natm_delta = delta})
getThisModuleNat :: NatM Module
getThisModuleNat = NatM $ \ st -> (natm_this_module st, st)
addImportNat :: CLabel -> NatM ()
addImportNat imp
= NatM $ \ st -> ((), st {natm_imports = imp : natm_imports st})
getBlockIdNat :: NatM BlockId
getBlockIdNat
= do u <- getUniqueNat
return (mkBlockId u)
getNewLabelNat :: NatM CLabel
getNewLabelNat
= do u <- getUniqueNat
return (mkAsmTempLabel u)
getNewRegNat :: Size -> NatM Reg
getNewRegNat rep
= do u <- getUniqueNat
dflags <- getDynFlags
return (RegVirtual $ targetMkVirtualReg (targetPlatform dflags) u rep)
getNewRegPairNat :: Size -> NatM (Reg,Reg)
getNewRegPairNat rep
= do u <- getUniqueNat
dflags <- getDynFlags
let vLo = targetMkVirtualReg (targetPlatform dflags) u rep
let lo = RegVirtual $ targetMkVirtualReg (targetPlatform dflags) u rep
let hi = RegVirtual $ getHiVirtualRegFromLo vLo
return (lo, hi)
getPicBaseMaybeNat :: NatM (Maybe Reg)
getPicBaseMaybeNat
= NatM (\state -> (natm_pic state, state))
getPicBaseNat :: Size -> NatM Reg
getPicBaseNat rep
= do mbPicBase <- getPicBaseMaybeNat
case mbPicBase of
Just picBase -> return picBase
Nothing
-> do
reg <- getNewRegNat rep
NatM (\state -> (reg, state { natm_pic = Just reg }))
| holzensp/ghc | compiler/nativeGen/NCGMonad.hs | bsd-3-clause | 4,288 | 0 | 18 | 1,246 | 1,304 | 707 | 597 | 119 | 2 |
f = À
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/parser/unicode/T2302.hs | bsd-3-clause | 7 | 0 | 4 | 3 | 6 | 3 | 3 | 1 | 1 |
-- !!! Default export list isn't the same as (module M)
-- This should succeed, exporting only the local 'sort',
-- and not being confused by the 'sort' from 'List'.
-- (Hugs gets this wrong)
module M where
import Data.List as M
sort = "foo"
| urbanslug/ghc | testsuite/tests/module/mod154.hs | bsd-3-clause | 245 | 0 | 4 | 48 | 20 | 15 | 5 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
-- | Various utilities used in the scaffolded site.
module Yesod.Default.Util
( addStaticContentExternal
, globFile
, widgetFileNoReload
, widgetFileReload
, TemplateLanguage (..)
, defaultTemplateLanguages
, WidgetFileSettings
, wfsLanguages
, wfsHamletSettings
) where
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.Lazy as L
import Data.Text (Text, pack, unpack)
import Yesod.Core -- purposely using complete import so that Haddock will see addStaticContent
import Control.Monad (when, unless)
import System.Directory (doesFileExist, createDirectoryIfMissing)
import Language.Haskell.TH.Syntax
import Text.Lucius (luciusFile, luciusFileReload)
import Text.Julius (juliusFile, juliusFileReload)
import Text.Cassius (cassiusFile, cassiusFileReload)
import Text.Hamlet (HamletSettings, defaultHamletSettings)
import Data.Maybe (catMaybes)
import Data.Default (Default (def))
-- | An implementation of 'addStaticContent' which stores the contents in an
-- external file. Files are created in the given static folder with names based
-- on a hash of their content. This allows expiration dates to be set far in
-- the future without worry of users receiving stale content.
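--
-- A hypothetical wiring in a site's Yesod instance (a sketch; 'minifym',
-- 'base64md5' and the 'StaticR' route come from other packages\/the site
-- and are assumptions here):
--
-- > addStaticContent = addStaticContentExternal minifym base64md5 "static"
-- >     (StaticR . flip StaticRoute [])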
addStaticContentExternal
:: (L.ByteString -> Either a L.ByteString) -- ^ javascript minifier
-> (L.ByteString -> String) -- ^ hash function to determine file name
-> FilePath -- ^ location of static directory. files will be placed within a "tmp" subfolder
-> ([Text] -> Route master) -- ^ route constructor, taking a list of pieces
-> Text -- ^ filename extension
-> Text -- ^ mime type
-> L.ByteString -- ^ file contents
-> GHandler sub master (Maybe (Either Text (Route master, [(Text, Text)])))
addStaticContentExternal minify hash staticDir toRoute ext' _ content = do
liftIO $ createDirectoryIfMissing True statictmp
exists <- liftIO $ doesFileExist fn'
unless exists $ liftIO $ L.writeFile fn' content'
return $ Just $ Right (toRoute ["tmp", pack fn], [])
where
fn, statictmp, fn' :: FilePath
-- by basing the hash off of the un-minified content, we avoid a costly
-- minification if the file already exists
fn = hash content ++ '.' : unpack ext'
statictmp = staticDir ++ "/tmp/"
fn' = statictmp ++ fn
content' :: L.ByteString
content'
| ext' == "js" = either (const content) id $ minify content
| otherwise = content
-- | expects a file extension for each type, e.g: hamlet lucius julius
globFile :: String -> String -> FilePath
globFile kind x = "templates/" ++ x ++ "." ++ kind
data TemplateLanguage = TemplateLanguage
{ tlRequiresToWidget :: Bool
, tlExtension :: String
, tlNoReload :: FilePath -> Q Exp
, tlReload :: FilePath -> Q Exp
}
defaultTemplateLanguages :: HamletSettings -> [TemplateLanguage]
defaultTemplateLanguages hset =
[ TemplateLanguage False "hamlet" whamletFile' whamletFile'
, TemplateLanguage True "cassius" cassiusFile cassiusFileReload
, TemplateLanguage True "julius" juliusFile juliusFileReload
, TemplateLanguage True "lucius" luciusFile luciusFileReload
]
where
whamletFile' = whamletFileWithSettings hset
data WidgetFileSettings = WidgetFileSettings
{ wfsLanguages :: HamletSettings -> [TemplateLanguage]
, wfsHamletSettings :: HamletSettings
}
instance Default WidgetFileSettings where
def = WidgetFileSettings defaultTemplateLanguages defaultHamletSettings
widgetFileNoReload :: WidgetFileSettings -> FilePath -> Q Exp
widgetFileNoReload wfs x = combine "widgetFileNoReload" x False $ wfsLanguages wfs $ wfsHamletSettings wfs
widgetFileReload :: WidgetFileSettings -> FilePath -> Q Exp
widgetFileReload wfs x = combine "widgetFileReload" x True $ wfsLanguages wfs $ wfsHamletSettings wfs
combine :: String -> String -> Bool -> [TemplateLanguage] -> Q Exp
combine func file isReload tls = do
mexps <- qmexps
case catMaybes mexps of
[] -> error $ concat
[ "Called "
, func
, " on "
, show file
, ", but no template were found."
]
exps -> return $ DoE $ map NoBindS exps
where
qmexps :: Q [Maybe Exp]
qmexps = mapM go tls
go :: TemplateLanguage -> Q (Maybe Exp)
go tl = whenExists file (tlRequiresToWidget tl) (tlExtension tl) ((if isReload then tlReload else tlNoReload) tl)
whenExists :: String
-> Bool -- ^ requires toWidget wrap
-> String -> (FilePath -> Q Exp) -> Q (Maybe Exp)
whenExists = warnUnlessExists False
warnUnlessExists :: Bool
-> String
-> Bool -- ^ requires toWidget wrap
-> String -> (FilePath -> Q Exp) -> Q (Maybe Exp)
warnUnlessExists shouldWarn x wrap glob f = do
let fn = globFile glob x
e <- qRunIO $ doesFileExist fn
when (shouldWarn && not e) $ qRunIO $ putStrLn $ "widget file not found: " ++ fn
if e
then do
ex <- f fn
if wrap
then do
tw <- [|toWidget|]
return $ Just $ tw `AppE` ex
else return $ Just ex
else return Nothing
| piyush-kurur/yesod | yesod-default/Yesod/Default/Util.hs | mit | 5,317 | 0 | 19 | 1,255 | 1,259 | 672 | 587 | 107 | 3 |
module StateFileProcessing where
import Control.Monad.State
import Data.ByteString.Char8 as BS
import Prelude hiding (lines)
import Text.Regex.Posix
{- This file attempts to abstract the idea of filtering a file using the state monad.
The general pattern of a file search is: if a set of lines matches some predicate, those lines should be included in the result, and then the rest of the file can be processed in the same way.
In this case the state is the list of unprocessed lines of the file, and the result is a list of the line groups matched by the predicate.
-}
type Match = [ByteString]
type FileState = State [ByteString]
searchListOfFiles :: ([ByteString] -> (Match,[ByteString])) -> [(FilePath, ByteString)] -> [(FilePath, [Match])]
searchListOfFiles pred files = Prelude.map collectMatches files
where collectMatches (fp, bs) = (fp, (searchFile pred bs))
searchFile :: ([ByteString] -> (Match,[ByteString])) -> ByteString -> [Match]
searchFile pred file = let lns = lines file
in evalState (searchComp pred) lns
searchComp :: ([ByteString] -> (Match,[ByteString])) -> FileState [Match]
searchComp pred = do
rest <- get
case rest of
[] -> return []
lns -> do
let (match, rst) = pred lns
put rst
otherMatches <- searchComp pred
case match of
[] -> return otherMatches
_ -> return (match: otherMatches)
--The most basic predicate that matches an applicative instance declaration and grabs the next two lines
appInstancePred :: [ByteString] -> (Match,[ByteString])
appInstancePred (ln:lns) = if ln =~ (pack "instance Applicative")
then let match = ln : Prelude.take 2 lns in
(match, Prelude.drop 2 lns)
else ([], lns)
monadInstancePred :: [ByteString] -> (Match,[ByteString])
monadInstancePred (ln:lns) = if ln =~ (pack "instance Monad\\s")
then let match = ln : Prelude.take 2 lns in
(match, Prelude.drop 2 lns)
else ([], lns)
pureIsAp :: [ByteString] -> (Match,[ByteString])
pureIsAp (ln:lns) = if ln =~ (pack "instance Applicative")
then let (appLn, rst) = findAp lns in
if (BS.null appLn)
then ([], rst)
else ([ln,appLn], rst)
else ([], lns)
where
findAp :: [ByteString] -> (ByteString, [ByteString])
findAp [] = (BS.empty,[])
findAp (l:rst) = if l =~ (pack "\\(<\\*>\\)\\s*=\\s*ap")
then (l,rst)
else findAp rst
--"<\\*>.*=.*(`| )ap(`| )"
| SAdams601/ParRegexSearch | src/StateFileProcessing.hs | mit | 2,734 | 0 | 17 | 804 | 769 | 426 | 343 | 47 | 5 |
not False = True
not _ = False
| scravy/nodash | doc/Boolean/not.hs | mit | 35 | 0 | 4 | 12 | 19 | 8 | 11 | 2 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- SDL2
import qualified SDL
-- For defining 2D vectors;
-- e.g. (V2 x y) is used in cairo to represent a coordinate
import Linear.V2 (V2(..))
-- Used to bridge SDL and cairo
import SDL.Cairo (createCairoTexture)
-- The small drawing utilities are all defined here,
-- including a data structure representing a "dimension": Dim
-- Dim's constructor is D
-- D takes four parameters, meaning: X Y W H
import SDL.Cairo.Canvas
-- Needed for controlling the program logic
import Control.Concurrent (threadDelay)
import Control.Monad (unless)
import Control.Monad.Fix (fix)
--
-- An infinitely long list of empty strings
ctx :: [String]
ctx = "" : [ x | x <- ctx]
-- Player information
data Player = Player
{ hp :: Int, mp :: Int
, lX :: Int, lY :: Int
}
deriving (Eq,Show)
main :: IO ()
main = do
  -- Initialize SDL
SDL.initialize [SDL.InitEverything]
  -- Create a window from SDL (using winConfig as its settings)
  -- and create a renderer embedded in that window (using rdrConfig as its settings)
  -- P.S. the renderer here is something like a drawing engine, called the *SDL rendering device*
  -- P.S. winConfig and rdrConfig are defined at the bottom of this file
window <- SDL.createWindow "Hello!" winConfig
renderer <- SDL.createRenderer window (-1) rdrConfig
  -- Create a texture (essentially a canvas)
  -- and then attach that canvas to the renderer
texture <- createCairoTexture renderer (V2 640 480)
  -- Set up the initial player information
let player = Player 50 10 15 15
  -- The program's main loop is defined from here on
  -- mainLoop is an a -> a lambda function
  -- mainLoop defines what "one step" of the program logic does
let mainLoop = \loop -> do
threadDelay 20000
        -- Draw things onto the texture
        -- the three functions below are defined further down
        -- cairo's coordinates put (0,0) at the top left, +X to the right and +Y downward
showWindowBg texture
showPlayerInfo texture player
showConcole texture
        -- [Display] pass the texture's current contents to the renderer
SDL.copy renderer texture Nothing Nothing
        -- [Display] ask the renderer to present and update the screen
SDL.present renderer
        -- [Keyboard events] dump all pending events :: [Event]
events <- SDL.pollEvents
        -- [Keyboard events] check whether there *is* an *ESC pressed* event
        -- P.S. null :: [a] -> Bool reports whether a list is []
let qPressed = not $ null $ filter eventIsESCPress events
        -- Use the keyboard event to stop the program,
        -- i.e. use unless to break out of this mainLoop
unless qPressed loop
  -- Use fix :: (a -> a) -> a to run mainLoop indefinitely
fix $ mainLoop
--
-- ########################################################
-- withCanvas :: Texture -> Canvas a -> IO a
showWindowBg :: SDL.Texture -> IO ()
showWindowBg tx = withCanvas tx $ do
  -- Reset the background
background $ gray 220
  -- Draw the status bar background
fill $ blue 255 !@ 64
noStroke
  -- Draw a rectangle; D is Dim's constructor
  -- and its four parameters mean: X Y W H
rect $ D 0 0 640 100
--
showPlayerInfo :: SDL.Texture -> Player -> IO ()
showPlayerInfo tx p = withCanvas tx $ do
  -- Set the font: family, size, bold, italic
textFont $ Font "Hiragino Maru Gothic ProN" 20 False False
  -- Set the colour
fill $ gray 0
  -- Draw text. P.S. (V2 30 40) means the coordinate (30,40)
text ("HP:[" ++ (show $ hp p) ++ "] MP:["++ (show $ mp p) ++"]")
(V2 30 40)
text ("X/Y:[" ++ (show $ lX p) ++ "/"++ (show $ lY p) ++"]")
(V2 400 40)
--
-- Draw the central text display area
showConcole :: SDL.Texture -> IO ()
showConcole tx = withCanvas tx $ do
textFont $ Font "Hiragino Maru Gothic ProN" 20 False False
fill $ gray 0
let lctx = take 10 ctx
delta = 30
initY = 150
tab = 25
short = \x -> "> " ++ (take (28 `min` length x) x)
text (short $ lctx !! 0) (V2 tab (initY + 0 * delta))
text (short $ lctx !! 1) (V2 tab (initY + 1 * delta))
text (short $ lctx !! 2) (V2 tab (initY + 2 * delta))
text (short $ lctx !! 3) (V2 tab (initY + 3 * delta))
text (short $ lctx !! 4) (V2 tab (initY + 4 * delta))
text (short $ lctx !! 5) (V2 tab (initY + 5 * delta))
text (short $ lctx !! 6) (V2 tab (initY + 6 * delta))
text (short $ lctx !! 7) (V2 tab (initY + 7 * delta))
text (short $ lctx !! 8) (V2 tab (initY + 8 * delta))
text (short $ lctx !! 9) (V2 tab (initY + 9 * delta))
--
-- ########################################################
--
eventIsESCPress ::SDL.Event -> Bool
eventIsESCPress evt = pressKey $ SDL.eventPayload evt
--
-- Check whether a particular (exit) condition is satisfied
pressKey :: SDL.EventPayload -> Bool
pressKey (SDL.KeyboardEvent kEvtData) =
  -- Check whether the KeyboardEvent is a Pressed event
let kPressed = SDL.keyboardEventKeyMotion kEvtData == SDL.Pressed
  -- Determine exactly which key triggered the KeyboardEvent
in case (SDL.keysymKeycode $ SDL.keyboardEventKeysym kEvtData) of
-- SDL.KeycodeW -> True && kPressed
-- SDL.KeycodeS -> True && kPressed
-- SDL.KeycodeA -> True && kPressed
-- SDL.KeycodeD -> True && kPressed
        -- Pressing ESC returns True
SDL.KeycodeEscape -> True && kPressed
        -- Any other key or any other event returns False
_ -> False
pressKey _ = False
--
-- ########################################################
--
winConfig = SDL.defaultWindow
{ SDL.windowPosition = SDL.Centered
, SDL.windowInitialSize = V2 640 480
}
rdrConfig = SDL.RendererConfig
{ SDL.rendererType = SDL.AcceleratedVSyncRenderer
, SDL.rendererTargetTexture = True
}
| jaiyalas/creepy-waffle | src/Waffle.hs | mit | 5,727 | 11 | 17 | 1,251 | 1,358 | 703 | 655 | 80 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, ViewPatterns #-}
{-# LANGUAGE PatternGuards, RankNTypes #-}
{-# LANGUAGE ImpredicativeTypes #-}
module Network.Wai.Handler.Warp.Settings where
import Control.Exception
import Control.Monad (when, void)
import Control.Concurrent (forkIOWithUnmask)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Streaming.Network (HostPreference)
import GHC.IO.Exception (IOErrorType(..))
import qualified Network.HTTP.Types as H
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.Timeout
import Network.Wai.Handler.Warp.Types
import System.IO (stderr)
import System.IO.Error (ioeGetErrorType)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Version (showVersion)
import qualified Paths_warp
-- | Various Warp server settings. This is purposely kept as an abstract data
-- type so that new settings can be added without breaking backwards
-- compatibility. In order to create a 'Settings' value, use 'defaultSettings'
-- and the various \'set\' functions to modify individual fields. For example:
--
-- > setTimeout 20 defaultSettings
data Settings = Settings
{ settingsPort :: Int -- ^ Port to listen on. Default value: 3000
, settingsHost :: HostPreference -- ^ Default value: HostIPv4
    , settingsOnException :: Maybe Request -> SomeException -> IO () -- ^ What to do with exceptions thrown by either the application or server. Default: ignore server-generated exceptions (see 'InvalidRequest') and print application-generated exceptions to stderr.
, settingsOnExceptionResponse :: SomeException -> Response
-- ^ A function to create `Response` when an exception occurs.
--
-- Default: 500, text/plain, \"Something went wrong\"
--
-- Since 2.0.3
    , settingsOnOpen :: SockAddr -> IO Bool -- ^ What to do when a connection is opened. When 'False' is returned, the connection is closed immediately. Otherwise, the connection continues. Default: always returns 'True'.
    , settingsOnClose :: SockAddr -> IO ()  -- ^ What to do when a connection is closed. Default: do nothing.
, settingsTimeout :: Int -- ^ Timeout value in seconds. Default value: 30
, settingsManager :: Maybe Manager -- ^ Use an existing timeout manager instead of spawning a new one. If used, 'settingsTimeout' is ignored. Default is 'Nothing'
, settingsFdCacheDuration :: Int -- ^ Cache duration time of file descriptors in seconds. 0 means that the cache mechanism is not used. Default value: 0
, settingsBeforeMainLoop :: IO ()
-- ^ Code to run after the listening socket is ready but before entering
-- the main event loop. Useful for signaling to tests that they can start
-- running, or to drop permissions after binding to a restricted port.
--
-- Default: do nothing.
--
-- Since 1.3.6
, settingsFork :: ((forall a. IO a -> IO a) -> IO ()) -> IO ()
-- ^ Code to fork a new thread to accept a connection.
--
-- This may be useful if you need OS bound threads, or if
-- you wish to develop an alternative threading model.
--
-- Default: void . forkIOWithUnmask
--
-- Since 3.0.4
, settingsNoParsePath :: Bool
-- ^ Perform no parsing on the rawPathInfo.
--
-- This is useful for writing HTTP proxies.
--
-- Default: False
--
-- Since 2.0.3
, settingsInstallShutdownHandler :: IO () -> IO ()
, settingsServerName :: ByteString
-- ^ Default server name if application does not set one.
--
-- Since 3.0.2
, settingsMaximumBodyFlush :: Maybe Int
-- ^ See @setMaximumBodyFlush@.
--
-- Since 3.0.3
, settingsProxyProtocol :: ProxyProtocol
-- ^ Specify usage of the PROXY protocol.
--
-- Since 3.0.5.
}
-- | Specify usage of the PROXY protocol.
data ProxyProtocol = ProxyProtocolNone
-- ^ See @setProxyProtocolNone@.
| ProxyProtocolRequired
-- ^ See @setProxyProtocolRequired@.
| ProxyProtocolOptional
-- ^ See @setProxyProtocolOptional@.
-- | The default settings for the Warp server. See the individual settings for
-- the default value.
defaultSettings :: Settings
defaultSettings = Settings
{ settingsPort = 3000
, settingsHost = "*4"
, settingsOnException = defaultExceptionHandler
, settingsOnExceptionResponse = defaultExceptionResponse
, settingsOnOpen = const $ return True
, settingsOnClose = const $ return ()
, settingsTimeout = 30
, settingsManager = Nothing
, settingsFdCacheDuration = 0
, settingsBeforeMainLoop = return ()
, settingsFork = void . forkIOWithUnmask
, settingsNoParsePath = False
, settingsInstallShutdownHandler = const $ return ()
, settingsServerName = S8.pack $ "Warp/" ++ showVersion Paths_warp.version
, settingsMaximumBodyFlush = Just 8192
, settingsProxyProtocol = ProxyProtocolNone
}
-- | Apply the logic provided by 'defaultExceptionHandler' to determine if an
-- exception should be shown or not. The goal is to hide exceptions which occur
-- under the normal course of the web server running.
--
-- Since 2.1.3
defaultShouldDisplayException :: SomeException -> Bool
defaultShouldDisplayException se
| Just ThreadKilled <- fromException se = False
| Just (_ :: InvalidRequest) <- fromException se = False
| Just (ioeGetErrorType -> et) <- fromException se
, et == ResourceVanished || et == InvalidArgument = False
| Just TimeoutThread <- fromException se = False
| otherwise = True
defaultExceptionHandler :: Maybe Request -> SomeException -> IO ()
defaultExceptionHandler _ e =
when (defaultShouldDisplayException e)
$ TIO.hPutStrLn stderr $ T.pack $ show e
defaultExceptionResponse :: SomeException -> Response
defaultExceptionResponse _ = responseLBS H.internalServerError500 [(H.hContentType, "text/plain; charset=utf-8")] "Something went wrong"
-- | Default implementation of 'settingsOnExceptionResponse' for debugging purposes: 500, text/plain, the shown exception.
exceptionResponseForDebug :: SomeException -> Response
exceptionResponseForDebug e = responseLBS H.internalServerError500 [(H.hContentType, "text/plain; charset=utf-8")] (TLE.encodeUtf8 $ TL.pack $ "Exception: " ++ show e)
{-# DEPRECATED settingsPort "Use setPort instead" #-}
{-# DEPRECATED settingsHost "Use setHost instead" #-}
{-# DEPRECATED settingsOnException "Use setOnException instead" #-}
{-# DEPRECATED settingsOnExceptionResponse "Use setOnExceptionResponse instead" #-}
{-# DEPRECATED settingsOnOpen "Use setOnOpen instead" #-}
{-# DEPRECATED settingsOnClose "Use setOnClose instead" #-}
{-# DEPRECATED settingsTimeout "Use setTimeout instead" #-}
{-# DEPRECATED settingsManager "Use setManager instead" #-}
{-# DEPRECATED settingsFdCacheDuration "Use setFdCacheDuration instead" #-}
{-# DEPRECATED settingsBeforeMainLoop "Use setBeforeMainLoop instead" #-}
{-# DEPRECATED settingsNoParsePath "Use setNoParsePath instead" #-}
| jberryman/wai | warp/Network/Wai/Handler/Warp/Settings.hs | mit | 7,256 | 0 | 15 | 1,446 | 934 | 563 | 371 | 89 | 1 |
-- |
-- Utilities, which execute the parser.
module HTMLEntities.Decoder where
import HTMLEntities.Prelude
import qualified Data.Text.Lazy.Builder as TLB
import qualified Data.Attoparsec.Text as P
import qualified HTMLEntities.Parser as P
-- |
-- A decoder of a single entity.
--
-- >>> mapM_ Data.Text.IO.putStrLn $ htmlEntity "©"
-- ©
htmlEntity :: Text -> Either String Text
htmlEntity =
P.parseOnly $
P.htmlEntity <* P.endOfInput
-- |
-- A decoder of a single entity.
--
-- >>> mapM_ Data.Text.IO.putStrLn $ htmlEntityBody "#169"
-- ©
htmlEntityBody :: Text -> Either String Text
htmlEntityBody =
P.parseOnly $
P.htmlEntityBody <* P.endOfInput
-- |
-- A decoder of a text with entities.
--
-- Produces a text builder,
-- which you can then convert into a text or a lazy text,
-- using the <http://hackage.haskell.org/package/text "text"> or
-- <http://hackage.haskell.org/package/conversion-text "conversion-text"> library.
--
-- >>> Data.Text.Lazy.IO.putStrLn $ Data.Text.Lazy.Builder.toLazyText $ htmlEncodedText "€5 ¢2"
-- €5 ¢2
htmlEncodedText :: Text -> TLB.Builder
htmlEncodedText =
fmap (either (error "HTMLEntities.Decoder: impossible happened") id) $
P.parseOnly $
fmap fold $
many $
fmap TLB.fromText P.htmlEntity <|> fmap TLB.singleton P.anyChar
| nikita-volkov/html-entities | library/HTMLEntities/Decoder.hs | mit | 1,311 | 0 | 14 | 205 | 202 | 121 | 81 | 20 | 1 |
module Joy.Documentation (
LineNumber(..),
Located(..),
trim,
numberWidth,
leftPadToWidth,
rightPadToWidth,
englishList,
filePathFileComponent
)
where
import Data.Char
import Data.List
import Data.Word
type LineNumber = Word64
class Located a where
location :: a -> LineNumber
trim :: String -> String
trim string = dropWhile isSpace $ reverse $ dropWhile isSpace $ reverse string
numberWidth :: (RealFrac a, Floating a) => a -> Int
numberWidth number = ceiling $ logBase 10 $ 1 + number
leftPadToWidth :: Int -> Char -> String -> String
leftPadToWidth width filler string =
take (width - length string) (cycle [filler]) ++ string
rightPadToWidth :: Int -> Char -> String -> String
rightPadToWidth width filler string =
string ++ take (width - length string) (cycle [filler])
englishList :: [String] -> String
englishList [] = ""
englishList [item] = item
englishList (a:b:[]) = a ++ " and " ++ b
englishList items = (intercalate ", " $ reverse $ drop 1 $ reverse items)
++ ", and "
++ (head $ reverse items)
filePathFileComponent :: FilePath -> Maybe String
filePathFileComponent path =
let totalLength = length path
slashPoint = maybe 0 (\index -> totalLength - index)
$ elemIndex '/' $ reverse path
in if slashPoint == totalLength
then Nothing
else Just $ drop slashPoint path
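-- Small worked examples (a sketch):
--
-- > englishList ["ant", "bee", "cat"]     ==  "ant, bee, and cat"
-- > filePathFileComponent "/foo/bar.txt"  ==  Just "bar.txt"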
| IreneKnapp/Joy | Joy/Documentation.hs | mit | 1,641 | 0 | 14 | 566 | 481 | 251 | 230 | 40 | 2 |
module GHCJS.DOM.Gamepad (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/Gamepad.hs | mit | 37 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_gomoku (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/ashwinjeyaseelan/Documents/GitHub/gomoku/.stack-work/install/x86_64-osx/lts-9.4/8.0.2/bin"
libdir = "/Users/ashwinjeyaseelan/Documents/GitHub/gomoku/.stack-work/install/x86_64-osx/lts-9.4/8.0.2/lib/x86_64-osx-ghc-8.0.2/gomoku-0.1.0.0"
dynlibdir = "/Users/ashwinjeyaseelan/Documents/GitHub/gomoku/.stack-work/install/x86_64-osx/lts-9.4/8.0.2/lib/x86_64-osx-ghc-8.0.2"
datadir = "/Users/ashwinjeyaseelan/Documents/GitHub/gomoku/.stack-work/install/x86_64-osx/lts-9.4/8.0.2/share/x86_64-osx-ghc-8.0.2/gomoku-0.1.0.0"
libexecdir = "/Users/ashwinjeyaseelan/Documents/GitHub/gomoku/.stack-work/install/x86_64-osx/lts-9.4/8.0.2/libexec"
sysconfdir = "/Users/ashwinjeyaseelan/Documents/GitHub/gomoku/.stack-work/install/x86_64-osx/lts-9.4/8.0.2/etc"
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "gomoku_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "gomoku_libdir") (\_ -> return libdir)
getDynLibDir = catchIO (getEnv "gomoku_dynlibdir") (\_ -> return dynlibdir)
getDataDir = catchIO (getEnv "gomoku_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "gomoku_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "gomoku_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| 8Gitbrix/gomoku | .stack-work/dist/x86_64-osx/Cabal-1.24.2.0/build/autogen/Paths_gomoku.hs | mit | 2,228 | 0 | 10 | 239 | 410 | 238 | 172 | 33 | 1 |
import HarmLang.Types
import HarmLang.InitialBasis
import HarmLang.ChordProgressionDatabase
import HarmLang.HarmonyDistributionModel
import HarmLang.Priors
import HarmLang.Utility
import Data.List
import Data.Maybe
-- groups progressions in a CPD by artist, denoting the artist with a string
getByArtist :: ChordProgressionDatabase -> [(String, [TimedChordProgression])]
getByArtist cpd = (getProgressionsCategorizedByCriterion cpd "Artist")
-- extracts from a list of categories the n with the most progressions
getTopCategories :: Int -> [(String, [TimedChordProgression])] -> [(String, [TimedChordProgression])]
getTopCategories n = (take n) . reverse . sortGroupsBySize
artists :: [String]
--artists = ["Antonio-Carlos Jobim", "Duke Ellington", "Cole Porter", "Richard Rodgers", "George Gershwin", "Jerome Kern"]
artists = ["Antonio-Carlos Jobim", "Duke Ellington", "George Gershwin", "Jerome Kern"]
getTestArtists :: ChordProgressionDatabase -> [(String, [TimedChordProgression])]
getTestArtists db = filter (\ (name, cps) -> (elem name artists)) (getByArtist db)
splitTrainingTest :: Int -> [[ChordProgression]] -> ([(ChordProgression, Int)], [[ChordProgression]])
splitTrainingTest tSize db = (concat $ mapInd (\ l index -> map (\ q -> (q, index)) (take tSize l)) db, map (drop tSize) db)
-- build harmony distribution models, and some cool priors to boot.
makeHdms :: [[ChordProgression]] -> [[ChordProgression]] -> [HarmonyDistributionModel]
makeHdms allData hdmData =
let
k = 3
priorPrior = chordLimitedLaplacianPriorFromDb $ concat allData -- all trans between chords in db are equally likely
prior = hdmPrior $ buildHarmonyDistributionModelWithPrior k priorPrior 1.0 (concat hdmData) -- HDM of all data in db
in
map (\thisHdmData -> buildHarmonyDistributionModelWithPrior k prior 1.0 thisHdmData) hdmData
-- Gives the name of each category followed by a colon and the number of
-- progressions in that category. Each entry is separated by a newline.
summary :: [(String, [TimedChordProgression])] -> String
summary ([]) = ""
summary ((s,l):rest) = s ++ ": " ++ (show $ length l) ++ "\n" ++ (summary rest)
--summary (item:more) = (fst item) ++ ": " ++ (show $ (length . snd) item) ++ "\n" ++ (summary more)
probsToStr :: [Double] -> String
probsToStr [] = ""
probsToStr (a:[]) = show a
probsToStr (a:b) = (show a) ++ ", " ++ (probsToStr b)
main :: IO ()
main = do
putStrLn "Please enter the path to the database, or a newline for the default."
path <- getLine
print "got line"
cpd <- loadChordProgressionDatabase (if path == "" then "./res/progressions.txt" else path)
putStr "A"
putStr $ "\"" ++ (show cpd) ++ "\""
putStr "B"
let topClasses = (getTestArtists cpd)
putStrLn $ "Classes 0 through 3 and number of songs for each:\n" ++ (summary topClasses)
--let hdms = map (\ (name, progs) -> buildHarmonyDistributionModel 2 (map toUntimedProgression progs)) topClasses
--Has type ([(ChordProgression, Int)], [[ChordProgression]]) -- the Int is the index of the artist that the song belongs to
let (test, training) = splitTrainingTest 3 (map ((map toUntimedProgression) . snd) topClasses)
let hdms = makeHdms (map ((map toUntimedProgression) . snd) (getByArtist cpd)) training
putStrLn $ concat (map (\ (prog, classIndex) -> "Song belonging to class " ++ (show classIndex) ++ "; class " ++ (show classIndex) ++ (" ranked #" ++ (show $ (getRank (inferStyle hdms prog) classIndex) + 1)) ++ " most likely to generate:\n\t" ++ (show $ inferStyle hdms prog) ++ "\n") test )
--TODO robustness testing.
getRank :: (Ord n, Num n) => [n] -> Int -> Int
getRank l i = fromJust $ Data.List.elemIndex (l !! i) (reverse $ Data.List.sort l)
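-- A small worked example (a sketch): getRank [0.1, 0.7, 0.2] 1 == 0, i.e. the
-- value at index 1 is the largest, so its rank is 0 (before the "+ 1" applied
-- when printing above).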
| cyruscousins/HarmLang | examples/stylisticinference.hs | mit | 3,705 | 0 | 25 | 599 | 1,018 | 548 | 470 | 48 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Auth0.API.Authentification
( login
, logout
, passwordlessEmail
, passwordlessSMS
, passwordlessTouch
, userInfo
, tokenInfo
, linkAccounts
, unlinkAccounts
) where
import qualified Data.ByteString.Char8 as BS
import Data.Char
import Control.Lens ((^.), (.~), (&))
import Control.Monad
import Control.Monad.Except (ExceptT, liftIO, throwError)
import qualified Data.Aeson as AE
import Data.Aeson (ToJSON, FromJSON, (.=), (.:), (.:?))
import Auth0.API.Types
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Data.Monoid ((<>))
import Network.Wreq
--------------------------------------------------------------------------------
-- | Login with a username\/password combination;
-- in the case of SMS login it is a phone-number\/access_token combination
login :: T.Text -> T.Text -> Config -> ExceptT T.Text IO AccessToken
login user pass Config{..} = do
resp <- liftIO $ post (T.unpack $ basePath <> "/oauth/ro") requestParams
case AE.eitherDecode (resp ^. responseBody) of
(Left err) -> throwError $ T.pack $ show err
(Right tok) -> return tok
where
requestParams =
[ "client_id" := clientId
, "connection" := ("sms" :: T.Text)
, "grant_type" := ("password" :: T.Text)
, "username" := user
, "password" := pass
, "scope" := ("openid" :: T.Text) -- or "openid name email"
]
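-- A hypothetical usage sketch (the credentials and the Config value are
-- assumptions; runExceptT comes from Control.Monad.Except):
--
-- > result <- runExceptT (login "someUser" "somePassword" config)
-- > either print (print . getToken) result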
logout :: AccessToken -> ExceptT T.Text IO ()
logout token = undefined
--------------------------------------------------------------------------------
-- Passwordless
-- | Authentification via email
--
passwordlessEmail :: T.Text -> EmailType -> Config -> ExceptT T.Text IO RespEmail
passwordlessEmail email' type' Config{..} = do
resp <- liftIO $ post (T.unpack $ basePath <> "/passwordless/start") requestParams
case AE.eitherDecode (resp ^. responseBody ) of
(Left err) -> throwError $ T.pack $ show err
(Right tok) -> return tok
where
requestParams =
[ "client_id" := clientId
, "connection" := ("email" :: T.Text)
, "email" := email'
, "send" := sendType -- "link" or "code"
, "authParams" := (""::T.Text)
]
sendType = T.pack $ map toLower $ show type'
-- | Authentification via sms
--
passwordlessSMS :: T.Text -> Config -> ExceptT T.Text IO RespSMS
passwordlessSMS number Config{..} = do
resp <- liftIO $ post (T.unpack $ basePath <> "/passwordless/start") requestParams
case AE.eitherDecode (resp ^. responseBody) of
(Left err) -> throwError $ T.pack $ show err
(Right tok) -> return tok
where
requestParams =
[ "client_id" := clientId
, "connection" := ("sms" :: T.Text)
, "phone_number" := number
]
passwordlessTouch :: Config -> ExceptT T.Text IO T.Text
passwordlessTouch Config{..} = undefined
--------------------------------------------------------------------------------
-- | Returns the user information based on the Auth0 access token (obtained during login).
--
userInfo :: AccessToken -> Config -> ExceptT T.Text IO UserInfo
userInfo token Config{..} = do
resp <- liftIO $ getWith opts (T.unpack $ basePath <> "/userinfo")
case AE.eitherDecode (resp ^. responseBody) of
(Left err) -> throwError $ T.pack $ show err
(Right ui) -> return ui
where
auth = BS.pack $ T.unpack $ T.concat ["Bearer ", (getToken token)]
opts = defaults & header "Authorization" .~ [auth]
-- | Validates a JSON Web Token (signature and expiration)
-- and returns the user information associated with the user id (sub property) of the token.
tokenInfo :: AccessToken -> Config -> ExceptT T.Text IO UserInfo
tokenInfo token Config{..} = do
resp <- liftIO $ post (T.unpack $ basePath <> "/tokeninfo") requestParams
case AE.eitherDecode (resp ^. responseBody) of
(Left err) -> throwError $ T.pack $ show err
(Right ui) -> return ui
where
requestParams =
[ "id_token" := ((getIdToken token) :: T.Text)
]
-- |
--
linkAccounts :: IO ()
linkAccounts = undefined
-- |
--
unlinkAccounts :: IO ()
unlinkAccounts = undefined
| kelecorix/api-auth0 | src/Auth0/API/Authentification.hs | mit | 4,330 | 0 | 13 | 1,054 | 1,206 | 654 | 552 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-gamelift-fleet-ec2inboundpermission.html
module Stratosphere.ResourceProperties.GameLiftFleetIpPermission where
import Stratosphere.ResourceImports
-- | Full data type definition for GameLiftFleetIpPermission. See
-- 'gameLiftFleetIpPermission' for a more convenient constructor.
data GameLiftFleetIpPermission =
GameLiftFleetIpPermission
{ _gameLiftFleetIpPermissionFromPort :: Val Integer
, _gameLiftFleetIpPermissionIpRange :: Val Text
, _gameLiftFleetIpPermissionProtocol :: Val Text
, _gameLiftFleetIpPermissionToPort :: Val Integer
} deriving (Show, Eq)
instance ToJSON GameLiftFleetIpPermission where
toJSON GameLiftFleetIpPermission{..} =
object $
catMaybes
[ (Just . ("FromPort",) . toJSON) _gameLiftFleetIpPermissionFromPort
, (Just . ("IpRange",) . toJSON) _gameLiftFleetIpPermissionIpRange
, (Just . ("Protocol",) . toJSON) _gameLiftFleetIpPermissionProtocol
, (Just . ("ToPort",) . toJSON) _gameLiftFleetIpPermissionToPort
]
-- | Constructor for 'GameLiftFleetIpPermission' containing required fields as
-- arguments.
gameLiftFleetIpPermission
:: Val Integer -- ^ 'glfipFromPort'
-> Val Text -- ^ 'glfipIpRange'
-> Val Text -- ^ 'glfipProtocol'
-> Val Integer -- ^ 'glfipToPort'
-> GameLiftFleetIpPermission
gameLiftFleetIpPermission fromPortarg ipRangearg protocolarg toPortarg =
GameLiftFleetIpPermission
{ _gameLiftFleetIpPermissionFromPort = fromPortarg
, _gameLiftFleetIpPermissionIpRange = ipRangearg
, _gameLiftFleetIpPermissionProtocol = protocolarg
, _gameLiftFleetIpPermissionToPort = toPortarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-gamelift-fleet-ec2inboundpermission.html#cfn-gamelift-fleet-ec2inboundpermissions-fromport
glfipFromPort :: Lens' GameLiftFleetIpPermission (Val Integer)
glfipFromPort = lens _gameLiftFleetIpPermissionFromPort (\s a -> s { _gameLiftFleetIpPermissionFromPort = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-gamelift-fleet-ec2inboundpermission.html#cfn-gamelift-fleet-ec2inboundpermissions-iprange
glfipIpRange :: Lens' GameLiftFleetIpPermission (Val Text)
glfipIpRange = lens _gameLiftFleetIpPermissionIpRange (\s a -> s { _gameLiftFleetIpPermissionIpRange = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-gamelift-fleet-ec2inboundpermission.html#cfn-gamelift-fleet-ec2inboundpermissions-protocol
glfipProtocol :: Lens' GameLiftFleetIpPermission (Val Text)
glfipProtocol = lens _gameLiftFleetIpPermissionProtocol (\s a -> s { _gameLiftFleetIpPermissionProtocol = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-gamelift-fleet-ec2inboundpermission.html#cfn-gamelift-fleet-ec2inboundpermissions-toport
glfipToPort :: Lens' GameLiftFleetIpPermission (Val Integer)
glfipToPort = lens _gameLiftFleetIpPermissionToPort (\s a -> s { _gameLiftFleetIpPermissionToPort = a })
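-- A hypothetical usage sketch (the port numbers and CIDR range are assumptions):
--
-- > udpPermission :: GameLiftFleetIpPermission
-- > udpPermission = gameLiftFleetIpPermission
-- >   (Literal 33430) (Literal "0.0.0.0/0") (Literal "UDP") (Literal 33440)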
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/GameLiftFleetIpPermission.hs | mit | 3,171 | 0 | 13 | 332 | 447 | 253 | 194 | 41 | 1 |
module Parse where
import Prelude hiding (error)
import Control.Arrow hiding (loop)
import qualified Data.Set as S
import Data.List.Split
import Data.Char
import Data.Maybe
import Types
import ChanBoards
import Utility
{--
End options
/4 (st=4&to=4) Only one post
/4- (st=4) Starting at a certain post number
/-6 (to=6) Stopping at a certain post number
/4-6 (st=4to=4) Between two posts
/l10 (ls=10) Last n posts
/i (imode=true) mobile mode
/. (nofirst=false) keep the first post
/n (nofirst=true) remove the first post
--}
-- Going to split the string and get the server, board, and post number.
-- If these all exist and are correct, then we will check for the "end options".
-- If there are end options, we will parse them and return them in the data type.
-- If the board or server doesn't match the premade list, then everything is set to Nothing and an error is output.
--
-- Sample Header data:
-- /pele/hnews/123123123/l50&raw=0.0&sid=Monazilla/2.00:437576303V875807Q65482S5373415V0353657X819589B683935C83892l0684065u718984C13042Y073615439W33071V8555402N76303M0122748U5567915F128809I381065V6928103Q47334M0251341Y65808j5567915e7"
parseInput :: HeaderNew -> Input
parseInput input = if fst getServer && fst getBoard && fst getPost
then Input
(snd getServer)
(snd getBoard)
(snd getPost)
getRaw
getSID
(fst parseStartStop)
(snd parseStartStop)
getL
getKeepFirst
getRemoveFirst
Nothing
Nothing
else Input
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
Nothing
(Just True)
(Just "Server, board, or post were wrong")
where
-- Checks for the "HEAD" http flag.
checkIfHeadOnly
| length (snd $ requestQuery input) < 4 = Nothing
| take 4 (snd $ requestQuery input) == "HEAD" = Just True
| otherwise = Just False
checkIfItsGet
| length (snd $ requestQuery input) < 3 = Nothing
| take 3 (snd $ requestQuery input) == "GET" = Just True
| otherwise = Just False
-- parse the data for the "l50" at the end of the post
getL :: Maybe Int
getL = if 'l' `elem` separateOptions -- if 'l' exists in the opts
then parseForL separateOptions -- parseForL
else Nothing
where
parseForL :: String -> Maybe Int
parseForL opts = if null $ filterL opts
then Nothing
else Just . convertToInt . filterL $ opts
filterL :: String -> String
filterL = filter isNumber . concat . drop 1 . splitOn "l"
getSID :: Maybe String
getSID
| length splitByQuestionMark < 2 = Nothing -- Make sure there is a question mark first
| length sid /= 196 = Nothing -- Make sure the SID is 196 chars long including the "sid="
| take (length headerSID) sid /= headerSID = Nothing -- Make sure the first 19 chars are "sid=Monazilla/2.00:"
| otherwise = Just sid
where
headerSID :: String
headerSID = "sid=Monazilla/2.00:"
questioned :: String
questioned = if length splitByQuestionMark > 1
then concat $ drop 1 splitByQuestionMark
else ""
-- get the sid
sid :: String
sid = if getRaw == Nothing -- check if getRaw is empty
then questioned -- if its empty, just return the string split by question mark
else if length (splitOn "&" questioned) > 1 --otherwise check if a "&" exists
then concat . drop 1 $ splitOn "&" questioned -- then split by "&" and return the second unit
else "" -- else just return nothing
-- parse the data for the "raw=0.0" stuff (its useless now, but will be changed later)
getRaw :: Maybe String
getRaw
| not (elem '?' inputData) = Nothing -- make sure question mark exists before splitting
| length splitByQuestionMark < 2 = Nothing -- Make sure the list has at least 2 entries
| not (elem '&' (splitByQuestionMark !! 1)) = Nothing -- Makes sure an ampersand exists
| take 3 (splitByQuestionMark !! 1) == "raw" = Just (head . splitOn "&" $ splitByQuestionMark !! 1) -- check if the first three characters are 'raw', if so, then split it at the ampersand and return the head
| otherwise = Nothing
splitByAmp = splitOn "&" inputData
splitByQuestionMark = splitOn "?" inputData
-- Define whether or not we keep the first post based on getRemoveFirst's decision
getKeepFirst
| isNothing getRemoveFirst = Just True -- If getRemoveFirst is Nothing, then keep the first post
      | fromJust getRemoveFirst = Just False -- if getRemoveFirst is True, we don't keep the first post
| otherwise = Just True -- otherwise, we keep the first post
-- Check whether or not an 'n' is in the options
getRemoveFirst = Just $ 'n' `elem` separateOptions
-- Parse for the "start" and "stop" options, return in a tuple
parseStartStop :: (Maybe Int, Maybe Int)
parseStartStop
| isJust getL = (Nothing, Nothing) -- If getL is a Just, then we return nothing. If getL is nothing, we continue
| '-' `elem` separateOptions = checkForStartStop -- if options has a '-' then check for start and stop
| null (map isNumber (filter isNumber separateOptions)) = (Nothing, Nothing) -- If no numbers in options, then return nothing
| otherwise = (Just . convertToInt &&& Just . convertToInt) (filter isNumber separateOptions) -- return a single number
where
checkForStartStop = if length splitForDash == 2
then numberLogic splitForDash -- filter non numbers from the two elements
-- if the first number is "", then set it to Nothing
-- if the second number is "", then set it to Nothing
else (Nothing,Nothing)
-- Split on dash to help separate options
splitForDash :: [String]
splitForDash = map (filter isNumber) . take 2 $ splitOn "-" separateOptions -- take2 is okay, because we already confirmed that a '-' exists
-- numberlogic takes a list with two strings in it, checks if they are empty, converts them to integers, then outputs as a tuple
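        -- e.g. (a sketch): numberLogic ["4","6"] == (Just 4, Just 6),
        -- and numberLogic ["","6"] == (Nothing, Just 6)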
numberLogic :: [String] -> (Maybe Int, Maybe Int)
numberLogic n = (f &&& b) n
where
f n = if head n == "" -- Check the first number to see if it exists. return Nothing if it doesnt
then Nothing
else Just . convertToInt . head $ n
b n = if (head . drop 1) n == "" -- Check the second number to see if it exists. return Nothing if it doesnt
then Nothing
else Just . convertToInt . head . drop 1 $ n
separateOptions :: String
separateOptions = if length splitBySlashes < 4 -- Make sure there are enough things before doing the (!!)
then ""
else if '?' `elem` splitPiece -- Make sure a question mark exists
then head . splitOn "?" $ splitPiece
else ""
where
splitPiece = splitBySlashes !! 4
getServer :: (Bool, Maybe String)
getServer
| length splitBySlashes < 4 = (False, Nothing) -- Check if there are more than 4 Strings in the list
| (splitBySlashes !! 1) `S.member` listOfAllServers = (True, Just $ splitBySlashes !! 1)-- Check if the first in the list (ie pele) is in the server list
| otherwise = (False, Nothing) -- If not, then return with nothing
getBoard :: (Bool, Maybe String)
getBoard
| length splitBySlashes < 4 = (False, Nothing) -- Check if there are more than 4 Strings in the list
| (splitBySlashes !! 2) `S.member` listOfBoards = (True, Just $ splitBySlashes !! 2) -- check if second word is in the board list
| otherwise = (False, Nothing) -- If not, then return with nothing
getPost :: (Bool, Maybe String)
getPost
| length splitBySlashes < 4 = (False, Nothing)-- Check if there are more than 4 Strings in the list
| False `elem` map isNumber (splitBySlashes !! 3) = (False, Nothing) -- Check if d!!3 is a number
| otherwise = (True, Just (splitBySlashes !! 3))
splitBySlashes = splitOn "/" inputData
inputData = concat . take 1 . drop 1 . splitOn " " . snd . requestQuery $ input :: String
blankInput :: Input
blankInput = Input Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
| Cipherwraith/Rokka | Parse.hs | gpl-2.0 | 9,392 | 0 | 14 | 3,224 | 1,641 | 861 | 780 | -1 | -1 |
{-# language TemplateHaskell #-}
{-# language DeriveDataTypeable #-}
{-# language FlexibleInstances #-}
{-# language MultiParamTypeClasses #-}
module PL.Split.Term.Fixed where
import PL.Data
import PL.Reader
import PL.Tree
import PL.Signatur hiding ( signatur )
import qualified PL.Signatur
import PL.Struktur
import qualified PL.Interpretation as I
import PL.Semantik
import qualified PL.Roll.Syntax
import Challenger.Partial
import Autolib.ToDoc
import Autolib.Reporter
import Autolib.Set
import Autolib.FiniteMap
import Autolib.Size
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Util.Zufall
import qualified Data.Map as M
import qualified Data.Set as S
import Inter.Types
import Data.Typeable
data PL_Split_Term = PL_Split_Term
deriving ( Show, Read, Typeable )
instance OrderScore PL_Split_Term where
scoringOrder _ = Increasing
data Ord u => Instance u =
Instance { signatur :: Signatur
, a_struktur :: Struktur u
, b_struktur :: Struktur u
} deriving ( Typeable )
$(derives [ makeReader, makeToDoc ] [ ''Instance] )
instance Verify PL_Split_Term ( Instance Int ) where
verify _ i = do
when ( not $ has_nullary_functions $ signatur i )
$ reject $ text "Die Signatur enthält keine nullstelligen Funktionssymbole."
when ( not $ S.null $ freie_variablen $ signatur i )
$ reject $ text "Die Signatur enthält freie Variablen."
instance Partial PL_Split_Term ( Instance Int ) Term where
report _ i = do
inform $ vcat
[ text "Finden Sie einen Term zur Signatur"
, nest 4 $ toDoc ( signatur i )
, text ", der in der Struktur"
, nest 4 $ text "A" <+> equals <+> toDoc ( a_struktur i )
, text "eine anderen Wert hat"
, text "als in der Struktur"
, nest 4 $ text "B" <+> equals <+> toDoc ( b_struktur i )
]
initial _ i =
let ts = PL.Roll.Syntax.terms ( signatur i )
in last $ take 1000
$ takeWhile ( \ t -> size t < 10 )
$ concat ts
partial _ i t = do
inform $ text "Die Baumstruktur des Terms ist"
peng t
silent $ do
inform $ text "paßt der Term zur Signatur?"
PL.Signatur.check ( signatur i ) t
total _ i t = do
let int s = I.Interpretation
{ I.struktur = s , I.belegung = M.empty }
inform $ text "Wert des Terms in der Struktur A ..."
a_wert <-
PL.Semantik.compute ( int $ a_struktur i ) t
inform $ text "... ist" <+> toDoc a_wert
inform $ text "Wert des Terms in der Struktur B ..."
b_wert <-
PL.Semantik.compute ( int $ b_struktur i ) t
inform $ text "... ist" <+> toDoc b_wert
assert ( a_wert /= b_wert )
$ text "Werte sind verschieden?"
make :: Make
make = direct PL_Split_Term example_instance
example_instance :: Instance Int
example_instance =
Instance { signatur = PL.Signatur.signatur
( read "s(z())" :: Term )
, a_struktur = Struktur
{ universum = S.fromList [0,1]
, functions = M.fromList
$ read "[ (s,{(0,1),(1,0)}), (z,{0})]"
, predicates = M.fromList []
}
, b_struktur = Struktur
{ universum = S.fromList [0,1]
, functions = M.fromList
$ read "[ (s,{(0,1),(1,1)}), (z,{0})]"
, predicates = M.fromList []
}
}
| marcellussiegburg/autotool | collection/src/PL/Split/Term/Fixed.hs | gpl-2.0 | 3,862 | 0 | 15 | 1,420 | 944 | 491 | 453 | 93 | 1 |
--
-- Copyright (c) 2013 Bonelli Nicola <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module Config where
import Data.List
import Data.Char
import Control.Monad
import System.Directory
import System.FilePath ((</>))
import Util
import CGrep.Lang
cgreprc :: FilePath
cgreprc = "cgreprc"
version :: String
version = "6.5.5"
data Config = Config
{ configLanguages :: [Lang]
, configPruneDirs :: [String]
, configAutoColor :: Bool
} deriving (Show, Read)
dropComments :: String -> String
dropComments = unlines . filter notComment . lines
where notComment = (not . ("#" `isPrefixOf`)) . dropWhile isSpace
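-- e.g. (a sketch): dropComments "languages Haskell\n  # disabled\n" == "languages Haskell\n"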
getConfig :: IO Config
getConfig = do
home <- getHomeDirectory
confs <- filterM doesFileExist ["." ++ cgreprc, home </> "." ++ cgreprc, "/etc" </> cgreprc]
if notNull confs then liftM dropComments (readFile (head confs)) >>= \xs ->
return (prettyRead xs "Config error" :: Config)
else return $ Config [] [] False
| beni55/cgrep | src/Config.hs | gpl-2.0 | 1,717 | 0 | 13 | 368 | 303 | 176 | 127 | 27 | 2 |
{-
Copyright (c) 2011-2012, Sergey Astanin
All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- | Conversion of 'Pandoc' documents to FB2 (FictionBook2) format.
FictionBook is an XML-based e-book format. For more information see:
<http://www.fictionbook.org/index.php/Eng:XML_Schema_Fictionbook_2.1>
-}
module Text.Pandoc.Writers.FB2 (writeFB2) where
import Control.Monad.State (StateT, evalStateT, get, modify)
import Control.Monad.State (liftM, liftM2, liftIO)
import Data.ByteString.Base64 (encode)
import Data.Char (toUpper, toLower, isSpace, isAscii, isControl)
import Data.List (intersperse, intercalate, isPrefixOf)
import Data.Either (lefts, rights)
import Network.Browser (browse, request, setAllowRedirects, setOutHandler)
import Network.HTTP (catchIO_, getRequest, getHeaders, getResponseBody)
import Network.HTTP (lookupHeader, HeaderName(..), urlEncode)
import Network.URI (isURI, unEscapeString)
import System.FilePath (takeExtension)
import Text.XML.Light
import qualified Control.Exception as E
import qualified Data.ByteString as B
import qualified Text.XML.Light as X
import qualified Text.XML.Light.Cursor as XC
import Text.Pandoc.Definition
import Text.Pandoc.Options (WriterOptions(..), HTMLMathMethod(..), def)
import Text.Pandoc.Shared (orderedListMarkers, isHeaderBlock)
import Text.Pandoc.Walk
-- | Data to be written at the end of the document:
-- (foot)notes, URLs, references, images.
data FbRenderState = FbRenderState
{ footnotes :: [ (Int, String, [Content]) ] -- ^ #, ID, text
, imagesToFetch :: [ (String, String) ] -- ^ filename, URL or path
, parentListMarker :: String -- ^ list marker of the parent ordered list
, parentBulletLevel :: Int -- ^ nesting level of the unordered list
, writerOptions :: WriterOptions
} deriving (Show)
-- | FictionBook building monad.
type FBM = StateT FbRenderState IO
newFB :: FbRenderState
newFB = FbRenderState { footnotes = [], imagesToFetch = []
, parentListMarker = "", parentBulletLevel = 0
, writerOptions = def }
data ImageMode = NormalImage | InlineImage deriving (Eq)
instance Show ImageMode where
show NormalImage = "imageType"
show InlineImage = "inlineImageType"
-- | Produce an FB2 document from a 'Pandoc' document.
writeFB2 :: WriterOptions -- ^ conversion options
-> Pandoc -- ^ document to convert
-> IO String -- ^ FictionBook2 document (not encoded yet)
writeFB2 opts (Pandoc meta blocks) = flip evalStateT newFB $ do
modify (\s -> s { writerOptions = opts { writerStandalone = True } })
desc <- description meta
fp <- frontpage meta
secs <- renderSections 1 blocks
let body = el "body" $ fp ++ secs
notes <- renderFootnotes
(imgs,missing) <- liftM imagesToFetch get >>= \s -> liftIO (fetchImages s)
let body' = replaceImagesWithAlt missing body
let fb2_xml = el "FictionBook" (fb2_attrs, [desc, body'] ++ notes ++ imgs)
return $ xml_head ++ (showContent fb2_xml)
where
xml_head = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
fb2_attrs =
let xmlns = "http://www.gribuser.ru/xml/fictionbook/2.0"
xlink = "http://www.w3.org/1999/xlink"
in [ uattr "xmlns" xmlns
, attr ("xmlns", "l") xlink ]
--
frontpage :: Meta -> FBM [Content]
frontpage meta' = do
t <- cMapM toXml . docTitle $ meta'
return $
[ el "title" (el "p" t)
, el "annotation" (map (el "p" . cMap plain)
(docAuthors meta' ++ [docDate meta']))
]
description :: Meta -> FBM Content
description meta' = do
bt <- booktitle meta'
let as = authors meta'
dd <- docdate meta'
return $ el "description"
[ el "title-info" (bt ++ as ++ dd)
, el "document-info" [ el "program-used" "pandoc" ] -- FIXME: +version
]
booktitle :: Meta -> FBM [Content]
booktitle meta' = do
t <- cMapM toXml . docTitle $ meta'
return $ if null t
then []
else [ el "book-title" t ]
authors :: Meta -> [Content]
authors meta' = cMap author (docAuthors meta')
author :: [Inline] -> [Content]
author ss =
let ws = words . cMap plain $ ss
email = (el "email") `fmap` (take 1 $ filter ('@' `elem`) ws)
ws' = filter ('@' `notElem`) ws
names = case ws' of
(nickname:[]) -> [ el "nickname" nickname ]
(fname:lname:[]) -> [ el "first-name" fname
, el "last-name" lname ]
(fname:rest) -> [ el "first-name" fname
, el "middle-name" (concat . init $ rest)
, el "last-name" (last rest) ]
([]) -> []
in list $ el "author" (names ++ email)
docdate :: Meta -> FBM [Content]
docdate meta' = do
let ss = docDate meta'
d <- cMapM toXml ss
return $ if null d
then []
else [el "date" d]
-- | Divide the stream of blocks into sections and convert to XML
-- representation.
renderSections :: Int -> [Block] -> FBM [Content]
renderSections level blocks = do
let secs = splitSections level blocks
mapM (renderSection level) secs
renderSection :: Int -> ([Inline], [Block]) -> FBM Content
renderSection level (ttl, body) = do
title <- if null ttl
then return []
else return . list . el "title" . formatTitle $ ttl
content <- if (hasSubsections body)
then renderSections (level + 1) body
else cMapM blockToXml body
return $ el "section" (title ++ content)
where
hasSubsections = any isHeaderBlock
-- | Only <p> and <empty-line> are allowed within <title> in FB2.
formatTitle :: [Inline] -> [Content]
formatTitle inlines =
let lns = split isLineBreak inlines
lns' = map (el "p" . cMap plain) lns
in intersperse (el "empty-line" ()) lns'
split :: (a -> Bool) -> [a] -> [[a]]
split _ [] = []
split cond xs = let (b,a) = break cond xs
in (b:split cond (drop 1 a))
isLineBreak :: Inline -> Bool
isLineBreak LineBreak = True
isLineBreak _ = False
-- | Divide the stream of block elements into sections: [(title, blocks)].
splitSections :: Int -> [Block] -> [([Inline], [Block])]
splitSections level blocks = reverse $ revSplit (reverse blocks)
where
revSplit [] = []
revSplit rblocks =
let (lastsec, before) = break sameLevel rblocks
(header, prevblocks) =
case before of
((Header n _ title):prevblocks') ->
if n == level
then (title, prevblocks')
else ([], before)
_ -> ([], before)
in (header, reverse lastsec) : revSplit prevblocks
sameLevel (Header n _ _) = n == level
sameLevel _ = False
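-- For example (illustrative, attributes elided with _):
--   splitSections 1 [Header 1 _ [Str "A"], Para xs, Header 1 _ [Str "B"], Para ys]
--     == [([Str "A"], [Para xs]), ([Str "B"], [Para ys])]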
-- | Make another FictionBook body with footnotes.
renderFootnotes :: FBM [Content]
renderFootnotes = do
fns <- footnotes `liftM` get
if null fns
then return [] -- no footnotes
else return . list $
el "body" ([uattr "name" "notes"], map renderFN (reverse fns))
where
renderFN (n, idstr, cs) =
let fn_texts = (el "title" (el "p" (show n))) : cs
in el "section" ([uattr "id" idstr], fn_texts)
-- | Fetch images and encode them for the FictionBook XML.
-- Return image data and a list of hrefs of the missing images.
fetchImages :: [(String,String)] -> IO ([Content],[String])
fetchImages links = do
imgs <- mapM (uncurry fetchImage) links
return $ (rights imgs, lefts imgs)
-- | Fetch image data from disk or from network and make a <binary> XML section.
-- Return either (Left hrefOfMissingImage) or (Right xmlContent).
fetchImage :: String -> String -> IO (Either String Content)
fetchImage href link = do
mbimg <-
case (isURI link, readDataURI link) of
(True, Just (mime,_,True,base64)) ->
let mime' = map toLower mime
in if mime' == "image/png" || mime' == "image/jpeg"
then return (Just (mime',base64))
else return Nothing
(True, Just _) -> return Nothing -- not base64-encoded
(True, Nothing) -> fetchURL link
(False, _) -> do
d <- nothingOnError $ B.readFile (unEscapeString link)
let t = case map toLower (takeExtension link) of
".png" -> Just "image/png"
".jpg" -> Just "image/jpeg"
".jpeg" -> Just "image/jpeg"
".jpe" -> Just "image/jpeg"
_ -> Nothing -- only PNG and JPEG are supported in FB2
return $ liftM2 (,) t (liftM (toStr . encode) d)
case mbimg of
Just (imgtype, imgdata) -> do
return . Right $ el "binary"
( [uattr "id" href
, uattr "content-type" imgtype]
, txt imgdata )
_ -> return (Left ('#':href))
where
nothingOnError :: (IO B.ByteString) -> (IO (Maybe B.ByteString))
nothingOnError action = liftM Just action `E.catch` omnihandler
omnihandler :: E.SomeException -> IO (Maybe B.ByteString)
omnihandler _ = return Nothing
-- | Extract mime type and encoded data from the Data URI.
readDataURI :: String -- ^ URI
-> Maybe (String,String,Bool,String)
-- ^ Maybe (mime,charset,isBase64,data)
readDataURI uri =
let prefix = "data:"
in if not (prefix `isPrefixOf` uri)
then Nothing
else
let rest = drop (length prefix) uri
meta = takeWhile (/= ',') rest -- without trailing ','
uridata = drop (length meta + 1) rest
parts = split (== ';') meta
(mime,cs,enc)=foldr upd ("text/plain","US-ASCII",False) parts
in Just (mime,cs,enc,uridata)
where
upd str m@(mime,cs,enc)
| isMimeType str = (str,cs,enc)
| "charset=" `isPrefixOf` str = (mime,drop (length "charset=") str,enc)
| str == "base64" = (mime,cs,True)
| otherwise = m
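-- For example (illustrative):
--   readDataURI "data:image/png;base64,iVBORw0KGgo="
--     == Just ("image/png","US-ASCII",True,"iVBORw0KGgo=")
-- The charset defaults to US-ASCII when the URI does not specify one.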
-- Without parameters like ;charset=...; see RFC 2045, 5.1
isMimeType :: String -> Bool
isMimeType s =
case split (=='/') s of
[mtype,msubtype] ->
((map toLower mtype) `elem` types
|| "x-" `isPrefixOf` (map toLower mtype))
&& all valid mtype
&& all valid msubtype
_ -> False
where
types = ["text","image","audio","video","application","message","multipart"]
valid c = isAscii c && not (isControl c) && not (isSpace c) &&
c `notElem` "()<>@,;:\\\"/[]?="
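-- For example, isMimeType "image/png" and isMimeType "x-foo/bar" are True,
-- while isMimeType "notamimetype" is False (no subtype part).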
-- | Fetch URL, return its Content-Type and binary data on success.
fetchURL :: String -> IO (Maybe (String, String))
fetchURL url = do
flip catchIO_ (return Nothing) $ do
r <- browse $ do
setOutHandler (const (return ()))
setAllowRedirects True
liftM snd . request . getRequest $ url
let content_type = lookupHeader HdrContentType (getHeaders r)
content <- liftM (Just . toStr . encode . toBS) . getResponseBody $ Right r
return $ liftM2 (,) content_type content
where
toBS :: String -> B.ByteString
toBS = B.pack . map (toEnum . fromEnum)
toStr :: B.ByteString -> String
toStr = map (toEnum . fromEnum) . B.unpack
footnoteID :: Int -> String
footnoteID i = "n" ++ (show i)
linkID :: Int -> String
linkID i = "l" ++ (show i)
-- | Convert a block-level Pandoc's element to FictionBook XML representation.
blockToXml :: Block -> FBM [Content]
blockToXml (Plain ss) = cMapM toXml ss -- FIXME: can lead to malformed FB2
blockToXml (Para [Math DisplayMath formula]) = insertMath NormalImage formula
-- title beginning with fig: indicates that the image is a figure
blockToXml (Para [Image alt (src,'f':'i':'g':':':tit)]) =
insertImage NormalImage (Image alt (src,tit))
blockToXml (Para ss) = liftM (list . el "p") $ cMapM toXml ss
blockToXml (CodeBlock _ s) = return . spaceBeforeAfter .
map (el "p" . el "code") . lines $ s
blockToXml (RawBlock _ s) = return . spaceBeforeAfter .
map (el "p" . el "code") . lines $ s
blockToXml (Div _ bs) = cMapM blockToXml bs
blockToXml (BlockQuote bs) = liftM (list . el "cite") $ cMapM blockToXml bs
blockToXml (OrderedList a bss) = do
state <- get
let pmrk = parentListMarker state
let markers = map ((pmrk ++ " ") ++) $ orderedListMarkers a
let mkitem mrk bs = do
modify (\s -> s { parentListMarker = mrk })
itemtext <- cMapM blockToXml . paraToPlain $ bs
modify (\s -> s { parentListMarker = pmrk }) -- old parent marker
return . el "p" $ [ txt mrk, txt " " ] ++ itemtext
mapM (uncurry mkitem) (zip markers bss)
blockToXml (BulletList bss) = do
state <- get
let level = parentBulletLevel state
let pmrk = parentListMarker state
let prefix = replicate (length pmrk) ' '
let bullets = ["\x2022", "\x25e6", "*", "\x2043", "\x2023"]
let mrk = prefix ++ bullets !! (level `mod` (length bullets))
let mkitem bs = do
modify (\s -> s { parentBulletLevel = (level+1) })
itemtext <- cMapM blockToXml . paraToPlain $ bs
modify (\s -> s { parentBulletLevel = level }) -- restore bullet level
return $ el "p" $ [ txt (mrk ++ " ") ] ++ itemtext
mapM mkitem bss
blockToXml (DefinitionList defs) =
cMapM mkdef defs
where
mkdef (term, bss) = do
def' <- cMapM (cMapM blockToXml . sep . paraToPlain . map indent) bss
t <- wrap "strong" term
return [ el "p" t, el "p" def' ]
sep blocks =
if all needsBreak blocks then
blocks ++ [Plain [LineBreak]]
else
blocks
needsBreak (Para _) = False
needsBreak (Plain ins) = LineBreak `notElem` ins
needsBreak _ = True
blockToXml (Header _ _ _) = -- should never happen, see renderSections
error "unexpected header in section text"
blockToXml HorizontalRule = return
[ el "empty-line" ()
, el "p" (txt (replicate 10 '—'))
, el "empty-line" () ]
blockToXml (Table caption aligns _ headers rows) = do
hd <- mkrow "th" headers aligns
bd <- mapM (\r -> mkrow "td" r aligns) rows
c <- return . el "emphasis" =<< cMapM toXml caption
return [el "table" (hd : bd), el "p" c]
where
mkrow :: String -> [TableCell] -> [Alignment] -> FBM Content
mkrow tag cells aligns' =
(el "tr") `liftM` (mapM (mkcell tag) (zip cells aligns'))
--
mkcell :: String -> (TableCell, Alignment) -> FBM Content
mkcell tag (cell, align) = do
cblocks <- cMapM blockToXml cell
return $ el tag ([align_attr align], cblocks)
--
align_attr a = Attr (QName "align" Nothing Nothing) (align_str a)
align_str AlignLeft = "left"
align_str AlignCenter = "center"
align_str AlignRight = "right"
align_str AlignDefault = "left"
blockToXml Null = return []
-- Replace paragraphs with plain text and line break.
-- Necessary to simulate multi-paragraph lists in FB2.
paraToPlain :: [Block] -> [Block]
paraToPlain [] = []
paraToPlain (Para inlines : rest) =
let p = (Plain (inlines ++ [LineBreak]))
in p : paraToPlain rest
paraToPlain (p:rest) = p : paraToPlain rest
-- Simulate increased indentation level. Will not really work
-- for multi-line paragraphs.
indent :: Block -> Block
indent = indentBlock
where
-- indentation space
spacer :: String
spacer = replicate 4 ' '
--
indentBlock (Plain ins) = Plain ((Str spacer):ins)
indentBlock (Para ins) = Para ((Str spacer):ins)
indentBlock (CodeBlock a s) =
let s' = unlines . map (spacer++) . lines $ s
in CodeBlock a s'
indentBlock (BlockQuote bs) = BlockQuote (map indent bs)
indentBlock (Header l attr' ins) = Header l attr' (indentLines ins)
indentBlock everythingElse = everythingElse
-- indent every (explicit) line
indentLines :: [Inline] -> [Inline]
indentLines ins = let lns = split isLineBreak ins :: [[Inline]]
in intercalate [LineBreak] $ map ((Str spacer):) lns
capitalize :: Inline -> Inline
capitalize (Str xs) = Str $ map toUpper xs
capitalize x = x
-- | Convert a Pandoc's Inline element to FictionBook XML representation.
toXml :: Inline -> FBM [Content]
toXml (Str s) = return [txt s]
toXml (Span _ ils) = cMapM toXml ils
toXml (Emph ss) = list `liftM` wrap "emphasis" ss
toXml (Strong ss) = list `liftM` wrap "strong" ss
toXml (Strikeout ss) = list `liftM` wrap "strikethrough" ss
toXml (Superscript ss) = list `liftM` wrap "sup" ss
toXml (Subscript ss) = list `liftM` wrap "sub" ss
toXml (SmallCaps ss) = cMapM toXml $ walk capitalize ss
toXml (Quoted SingleQuote ss) = do -- FIXME: should be language-specific
inner <- cMapM toXml ss
return $ [txt "‘"] ++ inner ++ [txt "’"]
toXml (Quoted DoubleQuote ss) = do
inner <- cMapM toXml ss
return $ [txt "“"] ++ inner ++ [txt "”"]
toXml (Cite _ ss) = cMapM toXml ss -- FIXME: support citation styles
toXml (Code _ s) = return [el "code" s]
toXml Space = return [txt " "]
toXml LineBreak = return [el "empty-line" ()]
toXml (Math _ formula) = insertMath InlineImage formula
toXml (RawInline _ _) = return [] -- raw TeX and raw HTML are suppressed
toXml (Link text (url,ttl)) = do
fns <- footnotes `liftM` get
let n = 1 + length fns
let ln_id = linkID n
let ln_ref = list . el "sup" . txt $ "[" ++ show n ++ "]"
ln_text <- cMapM toXml text
let ln_desc =
let ttl' = dropWhile isSpace ttl
in if null ttl'
then list . el "p" $ el "code" url
else list . el "p" $ [ txt (ttl' ++ ": "), el "code" url ]
modify (\s -> s { footnotes = (n, ln_id, ln_desc) : fns })
return $ ln_text ++
[ el "a"
( [ attr ("l","href") ('#':ln_id)
, uattr "type" "note" ]
, ln_ref) ]
toXml img@(Image _ _) = insertImage InlineImage img
toXml (Note bs) = do
fns <- footnotes `liftM` get
let n = 1 + length fns
let fn_id = footnoteID n
fn_desc <- cMapM blockToXml bs
modify (\s -> s { footnotes = (n, fn_id, fn_desc) : fns })
let fn_ref = el "sup" . txt $ "[" ++ show n ++ "]"
return . list $ el "a" ( [ attr ("l","href") ('#':fn_id)
, uattr "type" "note" ]
, fn_ref )
insertMath :: ImageMode -> String -> FBM [Content]
insertMath immode formula = do
htmlMath <- return . writerHTMLMathMethod . writerOptions =<< get
case htmlMath of
WebTeX url -> do
let alt = [Code nullAttr formula]
let imgurl = url ++ urlEncode formula
let img = Image alt (imgurl, "")
insertImage immode img
_ -> return [el "code" formula]
insertImage :: ImageMode -> Inline -> FBM [Content]
insertImage immode (Image alt (url,ttl)) = do
images <- imagesToFetch `liftM` get
let n = 1 + length images
let fname = "image" ++ show n
modify (\s -> s { imagesToFetch = (fname, url) : images })
let ttlattr = case (immode, null ttl) of
(NormalImage, False) -> [ uattr "title" ttl ]
_ -> []
return . list $
el "image" $
[ attr ("l","href") ('#':fname)
, attr ("l","type") (show immode)
, uattr "alt" (cMap plain alt) ]
++ ttlattr
insertImage _ _ = error "unexpected inline instead of image"
replaceImagesWithAlt :: [String] -> Content -> Content
replaceImagesWithAlt missingHrefs body =
let cur = XC.fromContent body
cur' = replaceAll cur
in XC.toTree . XC.root $ cur'
where
--
replaceAll :: XC.Cursor -> XC.Cursor
replaceAll c =
let n = XC.current c
c' = if isImage n && isMissing n
then XC.modifyContent replaceNode c
else c
in case XC.nextDF c' of
(Just cnext) -> replaceAll cnext
Nothing -> c' -- end of document
--
isImage :: Content -> Bool
isImage (Elem e) = (elName e) == (uname "image")
isImage _ = False
--
isMissing (Elem img@(Element _ _ _ _)) =
let imgAttrs = elAttribs img
badAttrs = map (attr ("l","href")) missingHrefs
in any (`elem` imgAttrs) badAttrs
isMissing _ = False
--
replaceNode :: Content -> Content
replaceNode n@(Elem img@(Element _ _ _ _)) =
let attrs = elAttribs img
alt = getAttrVal attrs (uname "alt")
imtype = getAttrVal attrs (qname "l" "type")
in case (alt, imtype) of
(Just alt', Just imtype') ->
if imtype' == show NormalImage
then el "p" alt'
else txt alt'
(Just alt', Nothing) -> txt alt' -- no type attribute
_ -> n -- don't replace if alt text is not found
replaceNode n = n
--
getAttrVal :: [X.Attr] -> QName -> Maybe String
getAttrVal attrs name =
case filter ((name ==) . attrKey) attrs of
(a:_) -> Just (attrVal a)
_ -> Nothing
-- | Wrap all inlines with an XML tag (given its unqualified name).
wrap :: String -> [Inline] -> FBM Content
wrap tagname inlines = el tagname `liftM` cMapM toXml inlines
-- " Create a singleton list.
list :: a -> [a]
list = (:[])
-- | Convert an 'Inline' to plaintext.
plain :: Inline -> String
plain (Str s) = s
plain (Emph ss) = concat (map plain ss)
plain (Span _ ss) = concat (map plain ss)
plain (Strong ss) = concat (map plain ss)
plain (Strikeout ss) = concat (map plain ss)
plain (Superscript ss) = concat (map plain ss)
plain (Subscript ss) = concat (map plain ss)
plain (SmallCaps ss) = concat (map plain ss)
plain (Quoted _ ss) = concat (map plain ss)
plain (Cite _ ss) = concat (map plain ss) -- FIXME
plain (Code _ s) = s
plain Space = " "
plain LineBreak = "\n"
plain (Math _ s) = s
plain (RawInline _ s) = s
plain (Link text (url,_)) = concat (map plain text ++ [" <", url, ">"])
plain (Image alt _) = concat (map plain alt)
plain (Note _) = "" -- FIXME
-- | Create an XML element.
el :: (Node t)
=> String -- ^ unqualified element name
-> t -- ^ node contents
-> Content -- ^ XML content
el name cs = Elem $ unode name cs
-- | Put empty lines around content
spaceBeforeAfter :: [Content] -> [Content]
spaceBeforeAfter cs =
let emptyline = el "empty-line" ()
in [emptyline] ++ cs ++ [emptyline]
-- | Create a plain-text XML content.
txt :: String -> Content
txt s = Text $ CData CDataText s Nothing
-- | Create an XML attribute with an unqualified name.
uattr :: String -> String -> Text.XML.Light.Attr
uattr name val = Attr (uname name) val
-- | Create an XML attribute with a qualified name from given namespace.
attr :: (String, String) -> String -> Text.XML.Light.Attr
attr (ns, name) val = Attr (qname ns name) val
-- | Unqualified name
uname :: String -> QName
uname name = QName name Nothing Nothing
-- | Qualified name
qname :: String -> String -> QName
qname ns name = QName name Nothing (Just ns)
-- | Abbreviation for 'concatMap'.
cMap :: (a -> [b]) -> [a] -> [b]
cMap = concatMap
-- | Monadic equivalent of 'concatMap'.
cMapM :: (Monad m) => (a -> m [b]) -> [a] -> m [b]
cMapM f xs = concat `liftM` mapM f xs
| nickbart1980/pandoc | src/Text/Pandoc/Writers/FB2.hs | gpl-2.0 | 23,982 | 0 | 19 | 6,589 | 8,040 | 4,156 | 3,884 | 491 | 10 |
----------------------------------------------------------------------------
-- |
-- Module : Text.XML.Schema.Structure
-- Copyright : (c) Simon Foster 2004
-- License : GPL version 2 (see COPYING)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (ghc >= 6 only)
--
-- Data-types required for parsing an XML Schema.
--
-- @This file is part of HAIFA.@
--
-- @HAIFA is free software; you can redistribute it and\/or modify it under the terms of the
-- GNU General Public License as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.@
--
-- @HAIFA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
-- even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.@
--
-- @You should have received a copy of the GNU General Public License along with HAIFA; if not,
-- write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA@
----------------------------------------------------------------------------
module Text.XML.Schema.Structure where
import Control.Monad
import Control.Monad.Error
import Constants
import Governor
import Utils
import Text.XML.DOMParser hiding (trace)
import Network.URI
import qualified Char
import List
import Maybe
import Data.FiniteMap
import Text.Regex
import Data.PackedString
--import SimpleType
import Text.XML.Schema.BasicTypes
import Text.XML.Schema.Typer
--import Debug.Trace
--trace' x = trace x x
{-
Please note the naming scheme for record-syntax XSD data-type attributes: xy_elementName, where x is the
first letter of the data-type name and y is either a (attribute) or e (element), depending on whether the
field holds an attribute or an element of the data-type.
-}
{- |
A representation of an XSD Element.
See Section 3.3 of W3C xmlschema-1.
NB:
- Although final can only contain Extension or Restriction, I have used the ERS data-type, for reasons of
simplification.
-}
data KeyData =
Unique (Maybe ID) (Maybe NCName) Selector [Field] |
Key (Maybe ID) (Maybe NCName) Selector [Field] |
KeyRef (Maybe ID) (Maybe NCName) (Maybe QName) Selector [Field] deriving Show -- FIXME : Finish
data Selector = Selector (Maybe ID) String deriving Show
data Field = Field (Maybe ID) String deriving Show
{- |
The definition of an XML attribute in XSD.
Section 3.2 of W3C xmlschema-1.
-}
data Attribute =
Attr { aa_default::Maybe String, aa_fixed::Maybe String,
aa_form::IsQualified, aa_id::Maybe ID,
aa_name::Maybe NCName, aa_ref::Maybe QName,
aa_type::Maybe QName, aa_use::Use,
ae_simpleType::Maybe SimpleType } deriving Show
data AorAG = A_Attr Attribute | A_AG AttributeGroup deriving Show
data AttributeGroup =
AttrGroup { aga_id::Maybe ID, aga_name::Maybe NCName, aga_ref::Maybe QName, age_attr::[AorAG], age_anyAttr::Maybe AnyAttribute } deriving Show
data ProcessCont = P_Lax | P_Skip | P_Strict deriving Show
data AnyAttribute = AnyAttr { aaa_id::Maybe ID, aaa_namespace::Selection2 NSRange, aaa_processContents::ProcessCont } deriving Show
data Element =
Elem { ea_abstract::Bool, ea_block::Selection ERS,
ea_default::Maybe PackedString, ea_final::Selection ERS,
ea_fixed::Maybe PackedString, ea_form::IsQualified, ea_id::Maybe ID,
ea_maxOccurs::Int, ea_minOccurs::Int,
ea_name::Maybe NCName, ea_nillable::Bool,
ea_ref::Maybe QName, ea_substitutionGroup::Maybe QName, ea_type::Maybe QName,
ee_cont::Maybe (Either SimpleType ComplexType), ee_keyData::Maybe [KeyData]
} deriving Show
data ComplexType =
Comp { ca_abstract::Bool, ca_block::Selection ERS,
ca_final::Selection ERS, ca_id::Maybe ID, ca_mixed::Bool,
ca_name::Maybe NCName, ce_cont::Maybe ComplexTypeCont } deriving Show
data ComplexTypeCont = CT_Simp SimpleContent | CT_Comp ComplexContent | CT_Struct {cte_struct::Maybe Struct, cte_attr::[AorAG], cte_anyAttr::Maybe AnyAttribute} deriving Show
--data ComplexTypeStruct = CompStruct {cse_struct::Struct, cse_attr::[AorAG], cse_anyAttr::Maybe AnyAttribute}
data ComplexContent = ComplexContent { cca_id::Maybe ID, cca_mixed::Bool, cce_cont::ComplexContentCont } deriving Show
data ComplexContentCont = C_Ext Extension | C_Res Restriction deriving Show
{-
data Group u = Empty | Seq [Group] u | Choice Group Group u | Inter [Group] u | Rep Int Int Group u
data Element = Elem QName QName [Attribute]
data Attribute = Attr QName QName
-}
data SimpVariety =
S_Restr Restriction |
S_List { la_id::Maybe ID, la_itemType::Maybe QName, le_simpleType::Maybe SimpleType } |
S_Union { ua_id::Maybe ID, ua_memberTypes::[QName], ue_simpleTypes::[SimpleType] } deriving Show
data Restriction =
R_SimpRestr { ra_base::Maybe QName, ra_id::Maybe ID,
re_simpleType::Maybe SimpleType, re_restrParam::[RestrParam],
re_attr::[AorAG], re_anyAttr::Maybe AnyAttribute } |
R_CompRestr { ra_base::Maybe QName, ra_id::Maybe ID, re_struct::Maybe Struct,
re_attr::[AorAG], re_anyAttr::Maybe AnyAttribute} deriving Show
{- |
Possible restrictions on a SimpleType, mostly simple numerical or lexical checks.
See section 4.3 of W3C xmlschema-2.
-}
data RestrParam =
MinExclusive OID String Bool | MinInclusive OID String Bool | MaxExclusive OID String Bool |
MaxInclusive OID String Bool | TotalDigits OID Int Bool | FractionDigits OID Int Bool |
Length OID Int Bool | MinLength OID Int Bool | MaxLength OID Int Bool |
Enumeration OID String | WhiteSpace OID (Maybe WSParam) Bool | Pattern OID String deriving Show
data WSParam = Collapse | Preserve | Replace deriving Show
data Extension =
E_SimpExt { exa_base::Maybe QName, exa_id::Maybe ID, exe_attr::[AorAG], exe_anyAttr::Maybe AnyAttribute } |
E_CompExt { exa_base::Maybe QName, exa_id::Maybe ID, exe_struct::Maybe Struct,
exe_attr::[AorAG], exe_anyAttr::Maybe AnyAttribute} deriving Show
{- |
The definition of an XML simple type in XSD.
Section 3.14 of W3C xmlschema-1.
-}
data SimpleType =
Simp { sa_final::Selection LUR, sa_id::Maybe ID,
sa_name::Maybe NCName, se_cont::Maybe SimpVariety } deriving Show
data SimpleContent = SCont { sca_id::Maybe ID, sce_cont::Either Extension Restriction } deriving Show
data Group = Group {ga_name::Maybe NCName, ge_cont::GroupStruct} deriving Show
data GroupStruct = G_Inter Inter | G_Choice Choice | G_Seq Seq deriving Show
data Inter = Inter {ia_id::Maybe ID, ia_minOccurs::Int, ia_maxOccurs::Maybe Int, ie_cont::[Element]} deriving Show
data Struct = S_Element Element | S_Group Group | S_Choice Choice | S_Seq Seq | S_Inter Inter | S_Any Any deriving Show
data Choice = Choice {cha_id::Maybe ID, cha_minOccurs::Int, cha_maxOccurs::Maybe Int, che_cont::[Struct]} deriving Show
data Seq = Seq {sqa_id::Maybe ID, sqa_minOccurs::Int, sqa_maxOccurs::Maybe Int, sqe_cont::[Struct]} deriving Show
data Any = Any { ana_id::Maybe ID, ana_maxOccurs::Maybe Int, ana_minOccurs::Int, ana_namespace::Selection2 NSRange, ana_processContents::ProcessCont } deriving Show
data External = E_Include ID URI |
E_Import ID URI URI |
E_Redefine deriving Show {- FIXME: Add proper data-types for this -}
data Notation = Notation deriving Show -- FIXME : Finish this
data XSGroup = X_Group Group | X_AttrGroup AttributeGroup | X_Elem Element | X_Attr Attribute | X_Notation Notation deriving Show
data XSchema = XS{xa_attributeFormDefault::IsQualified, xa_blockDefault::Selection ERS,
xa_elementFormDefault::IsQualified, xa_finalDefault::Selection ERS,
xa_id::Maybe ID, xa_targetNamespace::Maybe URI,
xa_version::Maybe String, xa_lang::Maybe String,
xe_externals::[External], types::[Type], xe_groups::[Group], xe_elements::[Element], xe_attr::[AorAG]} deriving Show
data Type = T_Simp SimpleType | T_Comp ComplexType deriving Show
| twopoint718/haifa | src/Text/XML/Schema/Structure.hs | gpl-2.0 | 9,032 | 0 | 11 | 2,371 | 1,771 | 1,054 | 717 | 91 | 0 |
-- |
-- Module : Network.ScrapeChanges
-- Copyright : (C) 2015-16 Matthias Herrmann
-- License : GPL-3
-- Maintainer : [email protected]
--
-- Main module, reexports everything you need to use "scrape-changes". Full working example:
--
-- @
--
-- {-\# LANGUAGE OverloadedStrings \#-}
--
-- import Data.ByteString (isInfixOf)
-- import Data.ByteString.Lazy (ByteString, toStrict)
-- import Data.Text.Lazy.Encoding (decodeUtf8With)
-- import Data.Foldable (find)
-- import Data.Maybe (fromMaybe)
-- import Text.HTML.TagSoup (Tag(..), (~==), (~/=), parseTags, fromAttrib)
-- import Data.List.NonEmpty (NonEmpty ((:|)))
-- import qualified System.Log.Logger as Logger
-- import qualified System.Log.Handler.Syslog as Syslog
-- import Data.Monoid ((<>))
-- import Control.Monad (forever)
-- import Network.ScrapeChanges
--
-- main :: IO ()
-- main = do
-- _ <- configureLogging
-- _ <- (Logger.errorM thisLogger . show) `either` id $ scrapeChangesJobs
-- putStrLn "scrape-changes examples executable. Just look at the example source code."
-- -- |Simplest way to block the main thread forever. Good enough for the use cases of 'scrape-changes'
-- _ <- forever getLine
-- -- |Will never be executed in this case
-- clearAllScrapeConfigs
--
-- -- |Google logo scrape function using the tagsoup library
-- scrapeGoogleLogo :: ByteString -> Text
-- scrapeGoogleLogo byteString =
-- let tags = parseTags byteString
-- divWithBackgroundUrl = find (~/= TagClose ("div" :: ByteString)) $
-- dropWhile (not . isDivWithBackgroundUrl) tags
-- resultMaybe = decodeUtf8Lenient . styleAttribContent <$> divWithBackgroundUrl
-- in fromMaybe "" resultMaybe
-- where decodeUtf8Lenient = decodeUtf8With $ const . const . Just $ '?'
-- isDivWithBackgroundUrl t =
-- let containsBackgroundUrl = isInfixOf "background:url" . toStrict
-- in t ~== TagOpen ("div" :: ByteString) [] && containsBackgroundUrl (styleAttribContent t)
-- styleAttribContent = fromAttrib "style"
--
-- scrapeChangesJobs :: Either [(Url, [ValidationError])] (IO ())
-- scrapeChangesJobs = repeatScrapeAll [
-- -- Checks each minute for changes and sends a mail if there are any
-- ScrapeSchedule {
-- _scrapeScheduleCron = "* * * * *" -- std cron format
-- , _scrapeScheduleConfig = mailScrapeConfig "http://www.google.co.uk" -- to scrape
-- (MailAddr Nothing "[email protected]") -- from
-- (MailAddr Nothing "[email protected]" :| []) -- to
-- , _scrapeScheduleScraper = scrapeGoogleLogo --scrape function
-- }
-- -- Checks each minute for changes and notifies to syslog if there are any
-- , ScrapeSchedule {
-- _scrapeScheduleCron = "* * * * *"
-- , _scrapeScheduleConfig = otherScrapeConfig "http://www.google.co.uk"
-- (\text -> Logger.infoM thisLogger . show $
-- "Change detected: " <> text)
-- , _scrapeScheduleScraper = scrapeGoogleLogo
-- }
-- ]
--
-- configureLogging :: IO ()
-- configureLogging = do
-- syslogHandler <- Syslog.openlog thisLogger [] Syslog.DAEMON Logger.DEBUG
-- let logConfig = flip Logger.updateGlobalLogger (Logger.addHandler syslogHandler . Logger.setLevel Logger.DEBUG)
-- sequence_ $ logConfig <$> ["Network.ScrapeChanges", thisLogger]
--
-- thisLogger :: String
-- thisLogger = "scrape-changes-examples"
-- @
module Network.ScrapeChanges(
scrape
, repeatScrape
, repeatScrapeAll
, scrapeAll
, mailScrapeConfig
, otherScrapeConfig
, clearScrapeConfig
, clearAllScrapeConfigs
, module Domain
) where
import Network.ScrapeChanges.Internal as Internal
import Network.ScrapeChanges.Domain as Domain
import qualified Data.Validation as Validation
import qualified Data.Tuple as TU
import qualified System.Cron.Schedule as CronSchedule
import Control.Lens
import qualified Network.Wreq as Http
import qualified Control.Concurrent.Async as Async
import qualified System.Log.Logger as Log
import qualified Data.Foldable as Foldable
import qualified Data.Maybe as Maybe
import qualified Data.Traversable as Traversable
import qualified Control.Monad as Monad
import qualified Control.Exception as Exception
-- |The basic scrape function. It fires a GET request against the url defined within the provided
-- 'ScrapeConfig'. The body is passed to the provided 'Scraper'. The result 'Data.Text.Lazy.Text' of the
-- latter is used to determine whether something has changed on the respective website. If so, the callback
-- configured in 'ScrapeConfig' is executed and 'CallbackCalled' is returned. Otherwise 'CallbackNotCalled' is
-- returned.
scrape :: ScrapeConfig -> Scraper -> Either [ValidationError] (IO ScrapeResult)
scrape sc s = let result = scrapeOrchestration <$ validateScrapeConfig sc
in result ^. Validation._Either
where scrapeOrchestration =
let responseBody = (^. Http.responseBody)
urlToRequest = sc ^. scrapeInfoUrl
requestLog = Log.infoM loggerName $ "Requesting " ++ urlToRequest
request = (s . responseBody <$>) . Http.get
response = (request urlToRequest <* requestLog) `Exception.catch` httpExceptionHandler sc
in do (response', latestHashedResponse) <- Async.concurrently response (readLatestHash sc)
let currentHashedResponse = hash' response'
let executeCallbackConfig' = executeCallbackConfig sc response'
let saveHash'' = saveHash' currentHashedResponse urlToRequest
let saveHashAndExecuteCallbackConfig = Async.concurrently saveHash'' executeCallbackConfig'
if hashesAreDifferent latestHashedResponse currentHashedResponse
then CallbackCalled <$ saveHashAndExecuteCallbackConfig
else pure CallbackNotCalled
hashesAreDifferent :: LatestHash -> CurrentHash -> Bool
hashesAreDifferent latestHash currentHash =
Maybe.isNothing latestHash || Foldable.or ((/= currentHash) <$> latestHash)
saveHash' :: Hash -> Url -> IO ()
saveHash' h url = let saveHashMsg = "Saved new hash for url '" ++ url ++ "'"
saveHashLog = Log.infoM loggerName saveHashMsg
in saveHash sc h <* saveHashLog
type LatestHash = Maybe String
type CurrentHash = String
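-- A hypothetical way to run 'scrape' (handleErrors and handleResult are
-- placeholder handlers, not part of this library):
--
-- > case scrape cfg scraper of
-- >   Left validationErrors -> handleErrors validationErrors
-- >   Right scrapeAction    -> scrapeAction >>= handleResult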
-- |Repeat executing 'scrape' by providing a 'CronScheduleString'. The returned
-- IO action is non-blocking.
repeatScrape :: CronScheduleString -> ScrapeConfig -> Scraper -> Either [ValidationError] (IO ())
repeatScrape cs sc s = let result = repeatScrapeAll [ScrapeSchedule cs sc s]
resultErrorMapped = (snd . head <$> (result ^. swapped)) ^. swapped
in resultErrorMapped
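-- For example (hypothetical configuration), @repeatScrape "*/5 * * * *" cfg scraper@
-- would run 'scrape' every five minutes, with @cfg@ and @scraper@ built as in the
-- module header example.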
-- |Execute a list of 'ScrapeSchedule' in parallel. If validation of any 'ScrapeSchedule' fails,
-- 'Left' containing 'ValidationError' indexed by the corresponding 'Url' is returned.
repeatScrapeAll :: [ScrapeSchedule] -> Either [(Url, [ValidationError])] (IO ())
repeatScrapeAll scrapeSchedules =
let cronSchedules = Traversable.for scrapeSchedules $ \(ScrapeSchedule cronSchedule scrapeConfig scraper) ->
let scrapeConfigUrl = scrapeConfig ^. scrapeInfoUrl
cronScheduleValidation = validateCronSchedule cronSchedule
resultValidation = scrape scrapeConfig scraper ^. Validation._AccValidation
result = toCronSchedule <$> resultValidation <*> cronScheduleValidation
in ((\x -> [(scrapeConfigUrl, x)]) <$> (result ^. swapped)) ^. swapped
in (Monad.void . CronSchedule.execSchedule . Foldable.sequenceA_) <$> cronSchedules ^. Validation._Either
where toCronSchedule :: IO t -> CronScheduleString -> CronSchedule.Schedule ()
toCronSchedule scrapeAction = CronSchedule.addJob (Monad.void scrapeAction)
-- |Execute a list of 'ScrapeConfig' in sequence using 'scrape' and collect
-- the results in an association list keyed by the respective 'Url'.
scrapeAll :: [(ScrapeConfig, Scraper)] -> [(Url, Either [ValidationError] (IO ScrapeResult))]
scrapeAll infos = let responses = TU.uncurry scrape <$> infos
urls = (^. scrapeInfoUrl) <$> (fst <$> infos)
in urls `zip` responses
-- |Clear all mutable state associated with the provided 'ScrapeConfig'
clearScrapeConfig :: ScrapeConfig -> IO ()
clearScrapeConfig = removeHash
-- |Clear all mutable state ever used by "scrape-changes"
clearAllScrapeConfigs :: IO ()
clearAllScrapeConfigs = removeHashes
| 2chilled/scrape-changes | scrape-changes/src/Network/ScrapeChanges.hs | gpl-3.0 | 8,768 | 0 | 19 | 1,931 | 1,097 | 641 | 456 | 73 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module HLinear.Hook.LeftTransformation.Basic
where
import qualified Prelude as P
import HLinear.Utility.Prelude hiding ( one )
import qualified Data.Vector as V
import qualified Math.Structure as MS
import HLinear.Matrix.Definition ( Matrix(..), IsMatrix(..) )
import qualified HLinear.Matrix.Basic as M
import HLinear.Matrix.Block ()
import HLinear.Hook.LeftTransformation.Column hiding ( one, isOne )
import qualified HLinear.Hook.LeftTransformation.Column as LTC
import HLinear.Hook.LeftTransformation.Definition
import HLinear.Utility.RPermute
--------------------------------------------------------------------------------
-- resizing
--------------------------------------------------------------------------------
minimizeSize :: ( DecidableZero a, DecidableOne a )
=> LeftTransformation a -> LeftTransformation a
minimizeSize (LeftTransformation nrs cs) =
if null cs'
then LeftTransformation 0 V.empty
else LeftTransformation nrs' cs'
where
cs' = V.dropWhile LTC.isOne cs
nrs' = nrs - (V.length cs - V.length cs')
minimizeSize lt@(LeftTransformationMatrix m) = lt
fitSize :: Ring a => Int -> LeftTransformation a -> LeftTransformation a
fitSize n lt@(LeftTransformation nrs cs)
| nrs >= n = lt
| otherwise =
let szDiff = n-nrs
cs' = fmap (LTC.adjustOffset (+szDiff)) cs
cszero = V.generate szDiff (LTC.one n)
in LeftTransformation n $ cszero <> cs'
fitSize n lt@(LeftTransformationMatrix m)
| nrs >= n = lt
| otherwise = LeftTransformationMatrix $ M.one (n-nrs) <> m
where
nrs = nmbRows m
--------------------------------------------------------------------------------
-- Eq, Show, and NFData instances
--------------------------------------------------------------------------------
deriving instance Show a => Show (LeftTransformation a)
instance ( Eq a, Ring a, DecidableZero a, DecidableOne a )
=> Eq (LeftTransformation a) where
-- this is equality in the injective limit of left transformations
-- with respect to adding identity matrices to the top left
lt@(LeftTransformation _ _) == lt'@(LeftTransformation _ _) =
let LeftTransformation nrs cs = minimizeSize lt
LeftTransformation nrs' cs' = minimizeSize lt'
ncs = V.length cs
ncs' = V.length cs'
in nrs == nrs'
&& V.and (V.zipWith (==) cs cs')
&& case compare ncs ncs' of
EQ -> True
GT -> V.all LTC.isOne $ V.drop ncs' cs
LT -> V.all LTC.isOne $ V.drop ncs cs'
lt == lt' =
let nrsmax = max (nmbRows lt) (nmbRows lt')
m = toMatrix (fitSize nrsmax lt) :: Matrix a
m' = toMatrix (fitSize nrsmax lt') :: Matrix a
in m == m'
instance NFData a => NFData (LeftTransformation a) where
rnf (LeftTransformation nrs cs) = seq (rnf nrs) $ seq (rnf cs) ()
rnf (LeftTransformationMatrix m) = seq (rnf m) ()
--------------------------------------------------------------------------------
-- rows and columns
--------------------------------------------------------------------------------
instance HasNmbRows (LeftTransformation a) where
nmbRows (LeftTransformationMatrix m) = nmbRows m
nmbRows (LeftTransformation nrs _) = nrs
instance HasNmbCols (LeftTransformation a) where
nmbCols (LeftTransformationMatrix m) = nmbCols m
nmbCols (LeftTransformation nrs _) = nrs
--------------------------------------------------------------------------------
-- container
--------------------------------------------------------------------------------
instance Functor LeftTransformation where
fmap = fmapDefault
instance Foldable LeftTransformation where
foldMap = foldMapDefault
instance Traversable LeftTransformation where
traverse f (LeftTransformation nrs rs) =
LeftTransformation nrs <$> traverse (traverse f) rs
--------------------------------------------------------------------------------
-- creation
--------------------------------------------------------------------------------
one :: Int -> LeftTransformation a
one nrs
| nrs >= 0 = LeftTransformation nrs V.empty
| nrs < 0 = error "LeftTransformation.one: negative nrs"
diagonal :: AdditiveMonoid a => Vector (Unit a) -> LeftTransformation a
diagonal ds =
let nrs = V.length ds
in LeftTransformation nrs $ (`V.imap` ds) $ \ix d ->
LeftTransformationColumn ix d $
V.replicate (nrs-ix-1) zero
singleton ::
Unit a -> Vector a -> LeftTransformation a
singleton a v =
LeftTransformation (1 + V.length v) $
V.singleton $ LeftTransformationColumn 0 a v
singletonAdditive ::
Rig a
=> Vector a -> LeftTransformation a
singletonAdditive = singleton MS.one
singletonMultiplicative ::
(AdditiveMonoid a, DecidableZero a)
=> Unit a -> Int -> LeftTransformation a
singletonMultiplicative a nrs_pred
| nrs_pred >= 0 = singleton a $ V.replicate nrs_pred zero
| nrs_pred < 0 = error "LeftTransformation.singletonMultiplicative: negative nrs_pred"
fromVector ::
DecidableUnit a
=> Vector a -> LeftTransformation a
fromVector v = singleton (toUnit $ V.head v) (V.tail v)
--------------------------------------------------------------------------------
-- conversion
--------------------------------------------------------------------------------
instance Ring a => IsMatrix (LeftTransformation a) a where
toMatrix (LeftTransformationMatrix m) = m
toMatrix (LeftTransformation nrs cs) =
Matrix nrs nrs $
V.generate nrs $ \ix ->
V.generate nrs $ \jx ->
let a = maybe MS.one LTC.head $ cs V.!? jx
in
case compare ix jx of
LT -> zero
EQ -> a
GT -> maybe zero ((*a) . (!ix)) $ cs V.!? jx
--------------------------------------------------------------------------------
-- subtransformations
--------------------------------------------------------------------------------
splitAt :: Int -> LeftTransformation a
-> (LeftTransformation a, LeftTransformation a)
splitAt ix lt@(LeftTransformation nrs cs)
| ix >= ncs = (lt, one nrs')
| otherwise =
let (csLeft, csRight) = V.splitAt ix cs
in ( LeftTransformation nrs csLeft
, LeftTransformation nrs' $ fmap (LTC.adjustOffset (+(nrs'-nrs))) csRight
)
where
ncs = V.length cs
nrs' = max 0 $ min nrs (nrs-ix)
drop :: Int -> LeftTransformation a -> LeftTransformation a
drop ix lt@(LeftTransformation nrs cs)
| ix < 0 = lt
| otherwise = LeftTransformation nrs' $
fmap (LTC.adjustOffset (subtract ix)) $ V.drop ix cs
where
nrs' = nrs - ix
| martinra/hlinear | src/HLinear/Hook/LeftTransformation/Basic.hs | gpl-3.0 | 6,584 | 0 | 20 | 1,255 | 1,914 | 970 | 944 | -1 | -1 |
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module FunctionalSpec (spec) where
import Test.Hspec
import Language.Mulang.Parsers.Haskell
import Language.Mulang.Inspector.Generic
import Language.Mulang.Inspector.Functional
spec :: Spec
spec = do
describe "usesGuards" $ do
describe "detects guards when" $ do
it "is present" $ do
usesGuards (hs "f x | c x = 2\n\
\ | otherwise = 4") `shouldBe` True
it "is present" $ do
usesGuards (hs "f x = c x == 2") `shouldBe` False
describe "lambda analyzer" $ do
describe "detects lambdas when" $ do
it "is present" $ do
usesLambda (hs "f x = \\y -> 4") `shouldBe` True
it "is present" $ do
usesLambda (hs "f x = 4") `shouldBe` False
describe "usesAnonymousVariable" $ do
it "is True if _ is present in paramenters" $ do
usesAnonymousVariable (hs "foo _ = 1") `shouldBe` True
it "is True if _ is present in nested list patterns" $ do
usesAnonymousVariable (hs "foo [3, _] = 1") `shouldBe` True
it "is True if _ is present in nested infix application patterns" $ do
usesAnonymousVariable (hs "foo (x:_) = 1") `shouldBe` True
it "is True if _ is present in nested application patterns" $ do
usesAnonymousVariable (hs "foo (F _ 1) = 1") `shouldBe` True
it "is True if _ is present in nested tuple patterns" $ do
usesAnonymousVariable (hs "foo (_, 1) = 1") `shouldBe` True
it "is True if _ is present in nested at patterns" $ do
usesAnonymousVariable (hs "foo x@(_, 1) = 1") `shouldBe` True
it "is False if _ is not present in parameters" $ do
usesAnonymousVariable (hs "foo x = 1") `shouldBe` False
it "is False if _ is present only in seccond equation" $ do
let code = hs . unlines $ ["foo False bool = bool", "foo True _ = True"]
usesAnonymousVariable code `shouldBe` True
it "is False if there is no _ but a comment" $ do
usesAnonymousVariable (hs "foo x = 1\n--") `shouldBe` False
it "is False if there is only a comment" $ do
usesAnonymousVariable (hs "--") `shouldBe` False
describe "usesForComprehension" $ do
it "is True when list comprehension exists" $ do
usesForComprehension (hs "x = [m|m<-t]") `shouldBe` True
it "is False when comprehension doesnt exists" $ do
usesForComprehension (hs "x = []") `shouldBe` False
it "is True when do syntax is used" $ do
usesForComprehension (hs "y = do { x <- xs; return x }") `shouldBe` True
describe "usesComprehension" $ do
it "is True when list comprehension exists" $ do
usesComprehension (hs "x = [m|m<-t]") `shouldBe` True
it "is False when comprehension doesnt exists" $ do
usesComprehension (hs "x = []") `shouldBe` False
| mumuki/mulang | spec/FunctionalSpec.hs | gpl-3.0 | 2,834 | 0 | 19 | 747 | 665 | 315 | 350 | 55 | 1 |
module Hadolint.Rule.DL3028 (rule) where
import qualified Data.Text as Text
import Hadolint.Rule
import Hadolint.Shell (ParsedShell)
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax (Instruction (..), RunArgs (..))
rule :: Rule ParsedShell
rule = simpleRule code severity message check
where
code = "DL3028"
severity = DLWarningC
message =
"Pin versions in gem install. Instead of `gem install <gem>` use `gem \
\install <gem>:<version>`"
check (Run (RunArgs args _)) = foldArguments (all versionFixed . gems) args
check _ = True
versionFixed package = ":" `Text.isInfixOf` package
{-# INLINEABLE rule #-}
gems :: Shell.ParsedShell -> [Text.Text]
gems shell =
[ arg
| cmd <- Shell.presentCommands shell,
Shell.cmdHasArgs "gem" ["install", "i"] cmd,
not (Shell.cmdHasArgs "gem" ["-v"] cmd),
not (Shell.cmdHasArgs "gem" ["--version"] cmd),
not (Shell.cmdHasPrefixArg "gem" "--version=" cmd),
arg <- Shell.getArgsNoFlags cmd,
arg /= "install",
arg /= "i",
arg /= "--"
]
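-- Illustrative Dockerfile lines (not part of the rule itself):
--   RUN gem install rails          -- fails DL3028: no version pinned
--   RUN gem install rails:6.1.4    -- passes: version pinned with ':'
--   RUN gem install rails -v 6.1.4 -- passes: commands using -v/--version are skipped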
| lukasmartinelli/hadolint | src/Hadolint/Rule/DL3028.hs | gpl-3.0 | 1,088 | 0 | 11 | 234 | 325 | 177 | 148 | 27 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.InstanceGroupManagers.RecreateInstances
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Schedules a group action to recreate the specified instances in the
-- managed instance group. The instances are deleted and recreated using
-- the current instance template for the managed instance group. This
-- operation is marked as DONE when the action is scheduled even if the
-- instances have not yet been recreated. You must separately verify the
-- status of the recreating action with the listmanagedinstances method.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instanceGroupManagers.recreateInstances@.
module Network.Google.Resource.Compute.InstanceGroupManagers.RecreateInstances
(
-- * REST Resource
InstanceGroupManagersRecreateInstancesResource
-- * Creating a Request
, instanceGroupManagersRecreateInstances
, InstanceGroupManagersRecreateInstances
-- * Request Lenses
, igmriProject
, igmriInstanceGroupManager
, igmriZone
, igmriPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instanceGroupManagers.recreateInstances@ method which the
-- 'InstanceGroupManagersRecreateInstances' request conforms to.
type InstanceGroupManagersRecreateInstancesResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"instanceGroupManagers" :>
Capture "instanceGroupManager" Text :>
"recreateInstances" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
InstanceGroupManagersRecreateInstancesRequest
:> Post '[JSON] Operation
-- | Schedules a group action to recreate the specified instances in the
-- managed instance group. The instances are deleted and recreated using
-- the current instance template for the managed instance group. This
-- operation is marked as DONE when the action is scheduled even if the
-- instances have not yet been recreated. You must separately verify the
-- status of the recreating action with the listmanagedinstances method.
--
-- /See:/ 'instanceGroupManagersRecreateInstances' smart constructor.
data InstanceGroupManagersRecreateInstances = InstanceGroupManagersRecreateInstances'
{ _igmriProject :: !Text
, _igmriInstanceGroupManager :: !Text
, _igmriZone :: !Text
, _igmriPayload :: !InstanceGroupManagersRecreateInstancesRequest
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'InstanceGroupManagersRecreateInstances' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'igmriProject'
--
-- * 'igmriInstanceGroupManager'
--
-- * 'igmriZone'
--
-- * 'igmriPayload'
instanceGroupManagersRecreateInstances
:: Text -- ^ 'igmriProject'
-> Text -- ^ 'igmriInstanceGroupManager'
-> Text -- ^ 'igmriZone'
-> InstanceGroupManagersRecreateInstancesRequest -- ^ 'igmriPayload'
-> InstanceGroupManagersRecreateInstances
instanceGroupManagersRecreateInstances pIgmriProject_ pIgmriInstanceGroupManager_ pIgmriZone_ pIgmriPayload_ =
InstanceGroupManagersRecreateInstances'
{ _igmriProject = pIgmriProject_
, _igmriInstanceGroupManager = pIgmriInstanceGroupManager_
, _igmriZone = pIgmriZone_
, _igmriPayload = pIgmriPayload_
}
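-- A hypothetical usage sketch (project, zone, group name and payload are
-- placeholders, not values defined in this module):
--
-- > instanceGroupManagersRecreateInstances "my-project" "example-group"
-- >   "us-central1-a" somePayload
--
-- Note the positional order: project, instance group manager, zone, payload.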
-- | Project ID for this request.
igmriProject :: Lens' InstanceGroupManagersRecreateInstances Text
igmriProject
= lens _igmriProject (\ s a -> s{_igmriProject = a})
-- | The name of the managed instance group.
igmriInstanceGroupManager :: Lens' InstanceGroupManagersRecreateInstances Text
igmriInstanceGroupManager
= lens _igmriInstanceGroupManager
(\ s a -> s{_igmriInstanceGroupManager = a})
-- | The name of the zone where the managed instance group is located.
igmriZone :: Lens' InstanceGroupManagersRecreateInstances Text
igmriZone
= lens _igmriZone (\ s a -> s{_igmriZone = a})
-- | Multipart request metadata.
igmriPayload :: Lens' InstanceGroupManagersRecreateInstances InstanceGroupManagersRecreateInstancesRequest
igmriPayload
= lens _igmriPayload (\ s a -> s{_igmriPayload = a})
instance GoogleRequest
InstanceGroupManagersRecreateInstances where
type Rs InstanceGroupManagersRecreateInstances =
Operation
type Scopes InstanceGroupManagersRecreateInstances =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient
InstanceGroupManagersRecreateInstances'{..}
= go _igmriProject _igmriZone
_igmriInstanceGroupManager
(Just AltJSON)
_igmriPayload
computeService
where go
= buildClient
(Proxy ::
Proxy InstanceGroupManagersRecreateInstancesResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/InstanceGroupManagers/RecreateInstances.hs | mpl-2.0 | 5,929 | 0 | 18 | 1,303 | 557 | 334 | 223 | 95 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.CloudKMS.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.CloudKMS.Types.Product where
import Network.Google.CloudKMS.Types.Sum
import Network.Google.Prelude
-- | Response message for KeyManagementService.AsymmetricDecrypt.
--
-- /See:/ 'asymmetricDecryptResponse' smart constructor.
data AsymmetricDecryptResponse =
AsymmetricDecryptResponse'
{ _adrPlaintextCrc32c :: !(Maybe (Textual Int64))
, _adrPlaintext :: !(Maybe Bytes)
, _adrProtectionLevel :: !(Maybe AsymmetricDecryptResponseProtectionLevel)
, _adrVerifiedCiphertextCrc32c :: !(Maybe Bool)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AsymmetricDecryptResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'adrPlaintextCrc32c'
--
-- * 'adrPlaintext'
--
-- * 'adrProtectionLevel'
--
-- * 'adrVerifiedCiphertextCrc32c'
asymmetricDecryptResponse
:: AsymmetricDecryptResponse
asymmetricDecryptResponse =
AsymmetricDecryptResponse'
{ _adrPlaintextCrc32c = Nothing
, _adrPlaintext = Nothing
, _adrProtectionLevel = Nothing
, _adrVerifiedCiphertextCrc32c = Nothing
}
-- | Integrity verification field. A CRC32C checksum of the returned
-- AsymmetricDecryptResponse.plaintext. An integrity check of
-- AsymmetricDecryptResponse.plaintext can be performed by computing the
-- CRC32C checksum of AsymmetricDecryptResponse.plaintext and comparing
-- your results to this field. Discard the response in case of non-matching
-- checksum values, and perform a limited number of retries. A persistent
-- mismatch may indicate an issue in your computation of the CRC32C
-- checksum. Note: This field is defined as int64 for reasons of
-- compatibility across different languages. However, it is a non-negative
-- integer, which will never exceed 2^32-1, and can be safely downconverted
-- to uint32 in languages that support this type.
adrPlaintextCrc32c :: Lens' AsymmetricDecryptResponse (Maybe Int64)
adrPlaintextCrc32c
= lens _adrPlaintextCrc32c
(\ s a -> s{_adrPlaintextCrc32c = a})
. mapping _Coerce
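-- A sketch of the client-side check described above (hypothetical; assumes some
-- @crc32c :: ByteString -> Word32@ function, which this library does not provide):
--
-- > plaintextVerifies resp crc32c =
-- >   case (resp ^. adrPlaintext, resp ^. adrPlaintextCrc32c) of
-- >     (Just pt, Just expected) -> fromIntegral (crc32c pt) == expected
-- >     _                        -> False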
-- | The decrypted data originally encrypted with the matching public key.
adrPlaintext :: Lens' AsymmetricDecryptResponse (Maybe ByteString)
adrPlaintext
= lens _adrPlaintext (\ s a -> s{_adrPlaintext = a})
. mapping _Bytes
-- | The ProtectionLevel of the CryptoKeyVersion used in decryption.
adrProtectionLevel :: Lens' AsymmetricDecryptResponse (Maybe AsymmetricDecryptResponseProtectionLevel)
adrProtectionLevel
= lens _adrProtectionLevel
(\ s a -> s{_adrProtectionLevel = a})
-- | Integrity verification field. A flag indicating whether
-- AsymmetricDecryptRequest.ciphertext_crc32c was received by
-- KeyManagementService and used for the integrity verification of the
-- ciphertext. A false value of this field indicates either that
-- AsymmetricDecryptRequest.ciphertext_crc32c was left unset or that it was
-- not delivered to KeyManagementService. If you\'ve set
-- AsymmetricDecryptRequest.ciphertext_crc32c but this field is still
-- false, discard the response and perform a limited number of retries.
adrVerifiedCiphertextCrc32c :: Lens' AsymmetricDecryptResponse (Maybe Bool)
adrVerifiedCiphertextCrc32c
= lens _adrVerifiedCiphertextCrc32c
(\ s a -> s{_adrVerifiedCiphertextCrc32c = a})
instance FromJSON AsymmetricDecryptResponse where
parseJSON
= withObject "AsymmetricDecryptResponse"
(\ o ->
AsymmetricDecryptResponse' <$>
(o .:? "plaintextCrc32c") <*> (o .:? "plaintext") <*>
(o .:? "protectionLevel")
<*> (o .:? "verifiedCiphertextCrc32c"))
instance ToJSON AsymmetricDecryptResponse where
toJSON AsymmetricDecryptResponse'{..}
= object
(catMaybes
[("plaintextCrc32c" .=) <$> _adrPlaintextCrc32c,
("plaintext" .=) <$> _adrPlaintext,
("protectionLevel" .=) <$> _adrProtectionLevel,
("verifiedCiphertextCrc32c" .=) <$>
_adrVerifiedCiphertextCrc32c])
-- | Response message for KeyManagementService.Encrypt.
--
-- /See:/ 'encryptResponse' smart constructor.
data EncryptResponse =
EncryptResponse'
{ _erVerifiedAdditionalAuthenticatedDataCrc32c :: !(Maybe Bool)
, _erVerifiedPlaintextCrc32c :: !(Maybe Bool)
, _erName :: !(Maybe Text)
, _erProtectionLevel :: !(Maybe EncryptResponseProtectionLevel)
, _erCiphertext :: !(Maybe Bytes)
, _erCiphertextCrc32c :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'EncryptResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'erVerifiedAdditionalAuthenticatedDataCrc32c'
--
-- * 'erVerifiedPlaintextCrc32c'
--
-- * 'erName'
--
-- * 'erProtectionLevel'
--
-- * 'erCiphertext'
--
-- * 'erCiphertextCrc32c'
encryptResponse
:: EncryptResponse
encryptResponse =
EncryptResponse'
{ _erVerifiedAdditionalAuthenticatedDataCrc32c = Nothing
, _erVerifiedPlaintextCrc32c = Nothing
, _erName = Nothing
, _erProtectionLevel = Nothing
, _erCiphertext = Nothing
, _erCiphertextCrc32c = Nothing
}
-- | Integrity verification field. A flag indicating whether
-- EncryptRequest.additional_authenticated_data_crc32c was received by
-- KeyManagementService and used for the integrity verification of the AAD.
-- A false value of this field indicates either that
-- EncryptRequest.additional_authenticated_data_crc32c was left unset or
-- that it was not delivered to KeyManagementService. If you\'ve set
-- EncryptRequest.additional_authenticated_data_crc32c but this field is
-- still false, discard the response and perform a limited number of
-- retries.
erVerifiedAdditionalAuthenticatedDataCrc32c :: Lens' EncryptResponse (Maybe Bool)
erVerifiedAdditionalAuthenticatedDataCrc32c
= lens _erVerifiedAdditionalAuthenticatedDataCrc32c
(\ s a ->
s{_erVerifiedAdditionalAuthenticatedDataCrc32c = a})
-- | Integrity verification field. A flag indicating whether
-- EncryptRequest.plaintext_crc32c was received by KeyManagementService and
-- used for the integrity verification of the plaintext. A false value of
-- this field indicates either that EncryptRequest.plaintext_crc32c was
-- left unset or that it was not delivered to KeyManagementService. If
-- you\'ve set EncryptRequest.plaintext_crc32c but this field is still
-- false, discard the response and perform a limited number of retries.
erVerifiedPlaintextCrc32c :: Lens' EncryptResponse (Maybe Bool)
erVerifiedPlaintextCrc32c
= lens _erVerifiedPlaintextCrc32c
(\ s a -> s{_erVerifiedPlaintextCrc32c = a})
-- | The resource name of the CryptoKeyVersion used in encryption. Check this
-- field to verify that the intended resource was used for encryption.
erName :: Lens' EncryptResponse (Maybe Text)
erName = lens _erName (\ s a -> s{_erName = a})
-- | The ProtectionLevel of the CryptoKeyVersion used in encryption.
erProtectionLevel :: Lens' EncryptResponse (Maybe EncryptResponseProtectionLevel)
erProtectionLevel
= lens _erProtectionLevel
(\ s a -> s{_erProtectionLevel = a})
-- | The encrypted data.
erCiphertext :: Lens' EncryptResponse (Maybe ByteString)
erCiphertext
= lens _erCiphertext (\ s a -> s{_erCiphertext = a})
. mapping _Bytes
-- | Integrity verification field. A CRC32C checksum of the returned
-- EncryptResponse.ciphertext. An integrity check of
-- EncryptResponse.ciphertext can be performed by computing the CRC32C
-- checksum of EncryptResponse.ciphertext and comparing your results to
-- this field. Discard the response in case of non-matching checksum
-- values, and perform a limited number of retries. A persistent mismatch
-- may indicate an issue in your computation of the CRC32C checksum. Note:
-- This field is defined as int64 for reasons of compatibility across
-- different languages. However, it is a non-negative integer, which will
-- never exceed 2^32-1, and can be safely downconverted to uint32 in
-- languages that support this type.
erCiphertextCrc32c :: Lens' EncryptResponse (Maybe Int64)
erCiphertextCrc32c
= lens _erCiphertextCrc32c
(\ s a -> s{_erCiphertextCrc32c = a})
. mapping _Coerce
instance FromJSON EncryptResponse where
parseJSON
= withObject "EncryptResponse"
(\ o ->
EncryptResponse' <$>
(o .:? "verifiedAdditionalAuthenticatedDataCrc32c")
<*> (o .:? "verifiedPlaintextCrc32c")
<*> (o .:? "name")
<*> (o .:? "protectionLevel")
<*> (o .:? "ciphertext")
<*> (o .:? "ciphertextCrc32c"))
instance ToJSON EncryptResponse where
toJSON EncryptResponse'{..}
= object
(catMaybes
[("verifiedAdditionalAuthenticatedDataCrc32c" .=) <$>
_erVerifiedAdditionalAuthenticatedDataCrc32c,
("verifiedPlaintextCrc32c" .=) <$>
_erVerifiedPlaintextCrc32c,
("name" .=) <$> _erName,
("protectionLevel" .=) <$> _erProtectionLevel,
("ciphertext" .=) <$> _erCiphertext,
("ciphertextCrc32c" .=) <$> _erCiphertextCrc32c])
-- | Service-specific metadata. For example the available capacity at the
-- given location.
--
-- /See:/ 'locationSchema' smart constructor.
newtype LocationSchema =
LocationSchema'
{ _lsAddtional :: HashMap Text JSONValue
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LocationSchema' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsAddtional'
locationSchema
:: HashMap Text JSONValue -- ^ 'lsAddtional'
-> LocationSchema
locationSchema pLsAddtional_ =
LocationSchema' {_lsAddtional = _Coerce # pLsAddtional_}
-- | Properties of the object. Contains field \'type with type URL.
lsAddtional :: Lens' LocationSchema (HashMap Text JSONValue)
lsAddtional
= lens _lsAddtional (\ s a -> s{_lsAddtional = a}) .
_Coerce
instance FromJSON LocationSchema where
parseJSON
= withObject "LocationSchema"
(\ o -> LocationSchema' <$> (parseJSONObject o))
instance ToJSON LocationSchema where
toJSON = toJSON . _lsAddtional
-- | Specifies the audit configuration for a service. The configuration
-- determines which permission types are logged, and what identities, if
-- any, are exempted from logging. An AuditConfig must have one or more
-- AuditLogConfigs. If there are AuditConfigs for both \`allServices\` and
-- a specific service, the union of the two AuditConfigs is used for that
-- service: the log_types specified in each AuditConfig are enabled, and
-- the exempted_members in each AuditLogConfig are exempted. Example Policy
-- with multiple AuditConfigs: { \"audit_configs\": [ { \"service\":
-- \"allServices\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\",
-- \"exempted_members\": [ \"user:jose\'example.com\" ] }, { \"log_type\":
-- \"DATA_WRITE\" }, { \"log_type\": \"ADMIN_READ\" } ] }, { \"service\":
-- \"sampleservice.googleapis.com\", \"audit_log_configs\": [ {
-- \"log_type\": \"DATA_READ\" }, { \"log_type\": \"DATA_WRITE\",
-- \"exempted_members\": [ \"user:aliya\'example.com\" ] } ] } ] } For
-- sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ
-- logging. It also exempts jose\'example.com from DATA_READ logging, and
-- aliya\'example.com from DATA_WRITE logging.
--
-- /See:/ 'auditConfig' smart constructor.
data AuditConfig =
AuditConfig'
{ _acService :: !(Maybe Text)
, _acAuditLogConfigs :: !(Maybe [AuditLogConfig])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AuditConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acService'
--
-- * 'acAuditLogConfigs'
auditConfig
:: AuditConfig
auditConfig = AuditConfig' {_acService = Nothing, _acAuditLogConfigs = Nothing}
-- | Specifies a service that will be enabled for audit logging. For example,
-- \`storage.googleapis.com\`, \`cloudsql.googleapis.com\`. \`allServices\`
-- is a special value that covers all services.
acService :: Lens' AuditConfig (Maybe Text)
acService
= lens _acService (\ s a -> s{_acService = a})
-- | The configuration for logging of each type of permission.
acAuditLogConfigs :: Lens' AuditConfig [AuditLogConfig]
acAuditLogConfigs
= lens _acAuditLogConfigs
(\ s a -> s{_acAuditLogConfigs = a})
. _Default
. _Coerce
instance FromJSON AuditConfig where
parseJSON
= withObject "AuditConfig"
(\ o ->
AuditConfig' <$>
(o .:? "service") <*>
(o .:? "auditLogConfigs" .!= mempty))
instance ToJSON AuditConfig where
toJSON AuditConfig'{..}
= object
(catMaybes
[("service" .=) <$> _acService,
("auditLogConfigs" .=) <$> _acAuditLogConfigs])
-- | Represents a textual expression in the Common Expression Language (CEL)
-- syntax. CEL is a C-like expression language. The syntax and semantics of
-- CEL are documented at https:\/\/github.com\/google\/cel-spec. Example
-- (Comparison): title: \"Summary size limit\" description: \"Determines if
-- a summary is less than 100 chars\" expression: \"document.summary.size()
-- \< 100\" Example (Equality): title: \"Requestor is owner\" description:
-- \"Determines if requestor is the document owner\" expression:
-- \"document.owner == request.auth.claims.email\" Example (Logic): title:
-- \"Public documents\" description: \"Determine whether the document
-- should be publicly visible\" expression: \"document.type != \'private\'
-- && document.type != \'internal\'\" Example (Data Manipulation): title:
-- \"Notification string\" description: \"Create a notification string with
-- a timestamp.\" expression: \"\'New message received at \' +
-- string(document.create_time)\" The exact variables and functions that
-- may be referenced within an expression are determined by the service
-- that evaluates it. See the service documentation for additional
-- information.
--
-- /See:/ 'expr' smart constructor.
data Expr =
Expr'
{ _eLocation :: !(Maybe Text)
, _eExpression :: !(Maybe Text)
, _eTitle :: !(Maybe Text)
, _eDescription :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Expr' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eLocation'
--
-- * 'eExpression'
--
-- * 'eTitle'
--
-- * 'eDescription'
expr
:: Expr
expr =
Expr'
{ _eLocation = Nothing
, _eExpression = Nothing
, _eTitle = Nothing
, _eDescription = Nothing
}
-- | Optional. String indicating the location of the expression for error
-- reporting, e.g. a file name and a position in the file.
eLocation :: Lens' Expr (Maybe Text)
eLocation
= lens _eLocation (\ s a -> s{_eLocation = a})
-- | Textual representation of an expression in Common Expression Language
-- syntax.
eExpression :: Lens' Expr (Maybe Text)
eExpression
= lens _eExpression (\ s a -> s{_eExpression = a})
-- | Optional. Title for the expression, i.e. a short string describing its
-- purpose. This can be used, e.g., in UIs that allow entering the
-- expression.
eTitle :: Lens' Expr (Maybe Text)
eTitle = lens _eTitle (\ s a -> s{_eTitle = a})
-- | Optional. Description of the expression. This is a longer text which
-- describes the expression, e.g. when hovered over it in a UI.
eDescription :: Lens' Expr (Maybe Text)
eDescription
= lens _eDescription (\ s a -> s{_eDescription = a})
instance FromJSON Expr where
parseJSON
= withObject "Expr"
(\ o ->
Expr' <$>
(o .:? "location") <*> (o .:? "expression") <*>
(o .:? "title")
<*> (o .:? "description"))
instance ToJSON Expr where
toJSON Expr'{..}
= object
(catMaybes
[("location" .=) <$> _eLocation,
("expression" .=) <$> _eExpression,
("title" .=) <$> _eTitle,
("description" .=) <$> _eDescription])
-- | The response message for Locations.ListLocations.
--
-- /See:/ 'listLocationsResponse' smart constructor.
data ListLocationsResponse =
ListLocationsResponse'
{ _llrNextPageToken :: !(Maybe Text)
, _llrLocations :: !(Maybe [Location])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListLocationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llrNextPageToken'
--
-- * 'llrLocations'
listLocationsResponse
:: ListLocationsResponse
listLocationsResponse =
ListLocationsResponse' {_llrNextPageToken = Nothing, _llrLocations = Nothing}
-- | The standard List next-page token.
llrNextPageToken :: Lens' ListLocationsResponse (Maybe Text)
llrNextPageToken
= lens _llrNextPageToken
(\ s a -> s{_llrNextPageToken = a})
-- | A list of locations that matches the specified filter in the request.
llrLocations :: Lens' ListLocationsResponse [Location]
llrLocations
= lens _llrLocations (\ s a -> s{_llrLocations = a})
. _Default
. _Coerce
instance FromJSON ListLocationsResponse where
parseJSON
= withObject "ListLocationsResponse"
(\ o ->
ListLocationsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "locations" .!= mempty))
instance ToJSON ListLocationsResponse where
toJSON ListLocationsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _llrNextPageToken,
("locations" .=) <$> _llrLocations])
-- | Response message for KeyManagementService.ListKeyRings.
--
-- /See:/ 'listKeyRingsResponse' smart constructor.
data ListKeyRingsResponse =
ListKeyRingsResponse'
{ _lkrrNextPageToken :: !(Maybe Text)
, _lkrrTotalSize :: !(Maybe (Textual Int32))
, _lkrrKeyRings :: !(Maybe [KeyRing])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListKeyRingsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lkrrNextPageToken'
--
-- * 'lkrrTotalSize'
--
-- * 'lkrrKeyRings'
listKeyRingsResponse
:: ListKeyRingsResponse
listKeyRingsResponse =
ListKeyRingsResponse'
{ _lkrrNextPageToken = Nothing
, _lkrrTotalSize = Nothing
, _lkrrKeyRings = Nothing
}
-- | A token to retrieve next page of results. Pass this value in
-- ListKeyRingsRequest.page_token to retrieve the next page of results.
lkrrNextPageToken :: Lens' ListKeyRingsResponse (Maybe Text)
lkrrNextPageToken
= lens _lkrrNextPageToken
(\ s a -> s{_lkrrNextPageToken = a})
-- | The total number of KeyRings that matched the query.
lkrrTotalSize :: Lens' ListKeyRingsResponse (Maybe Int32)
lkrrTotalSize
= lens _lkrrTotalSize
(\ s a -> s{_lkrrTotalSize = a})
. mapping _Coerce
-- | The list of KeyRings.
lkrrKeyRings :: Lens' ListKeyRingsResponse [KeyRing]
lkrrKeyRings
= lens _lkrrKeyRings (\ s a -> s{_lkrrKeyRings = a})
. _Default
. _Coerce
instance FromJSON ListKeyRingsResponse where
parseJSON
= withObject "ListKeyRingsResponse"
(\ o ->
ListKeyRingsResponse' <$>
(o .:? "nextPageToken") <*> (o .:? "totalSize") <*>
(o .:? "keyRings" .!= mempty))
instance ToJSON ListKeyRingsResponse where
toJSON ListKeyRingsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lkrrNextPageToken,
("totalSize" .=) <$> _lkrrTotalSize,
("keyRings" .=) <$> _lkrrKeyRings])
-- | Response message for KeyManagementService.AsymmetricSign.
--
-- /See:/ 'asymmetricSignResponse' smart constructor.
data AsymmetricSignResponse =
AsymmetricSignResponse'
{ _asrSignature :: !(Maybe Bytes)
, _asrSignatureCrc32c :: !(Maybe (Textual Int64))
, _asrName :: !(Maybe Text)
, _asrProtectionLevel :: !(Maybe AsymmetricSignResponseProtectionLevel)
, _asrVerifiedDigestCrc32c :: !(Maybe Bool)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AsymmetricSignResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asrSignature'
--
-- * 'asrSignatureCrc32c'
--
-- * 'asrName'
--
-- * 'asrProtectionLevel'
--
-- * 'asrVerifiedDigestCrc32c'
asymmetricSignResponse
:: AsymmetricSignResponse
asymmetricSignResponse =
AsymmetricSignResponse'
{ _asrSignature = Nothing
, _asrSignatureCrc32c = Nothing
, _asrName = Nothing
, _asrProtectionLevel = Nothing
, _asrVerifiedDigestCrc32c = Nothing
}
-- | The created signature.
asrSignature :: Lens' AsymmetricSignResponse (Maybe ByteString)
asrSignature
= lens _asrSignature (\ s a -> s{_asrSignature = a})
. mapping _Bytes
-- | Integrity verification field. A CRC32C checksum of the returned
-- AsymmetricSignResponse.signature. An integrity check of
-- AsymmetricSignResponse.signature can be performed by computing the
-- CRC32C checksum of AsymmetricSignResponse.signature and comparing your
-- results to this field. Discard the response in case of non-matching
-- checksum values, and perform a limited number of retries. A persistent
-- mismatch may indicate an issue in your computation of the CRC32C
-- checksum. Note: This field is defined as int64 for reasons of
-- compatibility across different languages. However, it is a non-negative
-- integer, which will never exceed 2^32-1, and can be safely downconverted
-- to uint32 in languages that support this type.
asrSignatureCrc32c :: Lens' AsymmetricSignResponse (Maybe Int64)
asrSignatureCrc32c
= lens _asrSignatureCrc32c
(\ s a -> s{_asrSignatureCrc32c = a})
. mapping _Coerce
-- | The resource name of the CryptoKeyVersion used for signing. Check this
-- field to verify that the intended resource was used for signing.
asrName :: Lens' AsymmetricSignResponse (Maybe Text)
asrName = lens _asrName (\ s a -> s{_asrName = a})
-- | The ProtectionLevel of the CryptoKeyVersion used for signing.
asrProtectionLevel :: Lens' AsymmetricSignResponse (Maybe AsymmetricSignResponseProtectionLevel)
asrProtectionLevel
= lens _asrProtectionLevel
(\ s a -> s{_asrProtectionLevel = a})
-- | Integrity verification field. A flag indicating whether
-- AsymmetricSignRequest.digest_crc32c was received by KeyManagementService
-- and used for the integrity verification of the digest. A false value of
-- this field indicates either that AsymmetricSignRequest.digest_crc32c was
-- left unset or that it was not delivered to KeyManagementService. If
-- you\'ve set AsymmetricSignRequest.digest_crc32c but this field is still
-- false, discard the response and perform a limited number of retries.
asrVerifiedDigestCrc32c :: Lens' AsymmetricSignResponse (Maybe Bool)
asrVerifiedDigestCrc32c
= lens _asrVerifiedDigestCrc32c
(\ s a -> s{_asrVerifiedDigestCrc32c = a})
instance FromJSON AsymmetricSignResponse where
parseJSON
= withObject "AsymmetricSignResponse"
(\ o ->
AsymmetricSignResponse' <$>
(o .:? "signature") <*> (o .:? "signatureCrc32c") <*>
(o .:? "name")
<*> (o .:? "protectionLevel")
<*> (o .:? "verifiedDigestCrc32c"))
instance ToJSON AsymmetricSignResponse where
toJSON AsymmetricSignResponse'{..}
= object
(catMaybes
[("signature" .=) <$> _asrSignature,
("signatureCrc32c" .=) <$> _asrSignatureCrc32c,
("name" .=) <$> _asrName,
("protectionLevel" .=) <$> _asrProtectionLevel,
("verifiedDigestCrc32c" .=) <$>
_asrVerifiedDigestCrc32c])
-- | The public key component of the wrapping key. For details of the type of
-- key this public key corresponds to, see the ImportMethod.
--
-- /See:/ 'wrAppingPublicKey' smart constructor.
newtype WrAppingPublicKey =
WrAppingPublicKey'
{ _wapkPem :: Maybe Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'WrAppingPublicKey' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wapkPem'
wrAppingPublicKey
:: WrAppingPublicKey
wrAppingPublicKey = WrAppingPublicKey' {_wapkPem = Nothing}
-- | The public key, encoded in PEM format. For more information, see the
-- [RFC 7468](https:\/\/tools.ietf.org\/html\/rfc7468) sections for
-- [General
-- Considerations](https:\/\/tools.ietf.org\/html\/rfc7468#section-2) and
-- [Textual Encoding of Subject Public Key Info]
-- (https:\/\/tools.ietf.org\/html\/rfc7468#section-13).
wapkPem :: Lens' WrAppingPublicKey (Maybe Text)
wapkPem = lens _wapkPem (\ s a -> s{_wapkPem = a})
instance FromJSON WrAppingPublicKey where
parseJSON
= withObject "WrAppingPublicKey"
(\ o -> WrAppingPublicKey' <$> (o .:? "pem"))
instance ToJSON WrAppingPublicKey where
toJSON WrAppingPublicKey'{..}
= object (catMaybes [("pem" .=) <$> _wapkPem])
-- | A KeyRing is a top-level logical grouping of CryptoKeys.
--
-- /See:/ 'keyRing' smart constructor.
data KeyRing =
KeyRing'
{ _krName :: !(Maybe Text)
, _krCreateTime :: !(Maybe DateTime')
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'KeyRing' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'krName'
--
-- * 'krCreateTime'
keyRing
:: KeyRing
keyRing = KeyRing' {_krName = Nothing, _krCreateTime = Nothing}
-- | Output only. The resource name for the KeyRing in the format
-- \`projects\/*\/locations\/*\/keyRings\/*\`.
krName :: Lens' KeyRing (Maybe Text)
krName = lens _krName (\ s a -> s{_krName = a})
-- | Output only. The time at which this KeyRing was created.
krCreateTime :: Lens' KeyRing (Maybe UTCTime)
krCreateTime
= lens _krCreateTime (\ s a -> s{_krCreateTime = a})
. mapping _DateTime
instance FromJSON KeyRing where
parseJSON
= withObject "KeyRing"
(\ o ->
KeyRing' <$> (o .:? "name") <*> (o .:? "createTime"))
instance ToJSON KeyRing where
toJSON KeyRing'{..}
= object
(catMaybes
[("name" .=) <$> _krName,
("createTime" .=) <$> _krCreateTime])
-- | Request message for KeyManagementService.DestroyCryptoKeyVersion.
--
-- /See:/ 'destroyCryptoKeyVersionRequest' smart constructor.
data DestroyCryptoKeyVersionRequest =
DestroyCryptoKeyVersionRequest'
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DestroyCryptoKeyVersionRequest' with the minimum fields required to make a request.
--
destroyCryptoKeyVersionRequest
:: DestroyCryptoKeyVersionRequest
destroyCryptoKeyVersionRequest = DestroyCryptoKeyVersionRequest'
instance FromJSON DestroyCryptoKeyVersionRequest
where
parseJSON
= withObject "DestroyCryptoKeyVersionRequest"
(\ o -> pure DestroyCryptoKeyVersionRequest')
instance ToJSON DestroyCryptoKeyVersionRequest where
toJSON = const emptyObject
-- | A resource that represents Google Cloud Platform location.
--
-- /See:/ 'location' smart constructor.
data Location =
Location'
{ _lName :: !(Maybe Text)
, _lMetadata :: !(Maybe LocationSchema)
, _lDisplayName :: !(Maybe Text)
, _lLabels :: !(Maybe LocationLabels)
, _lLocationId :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Location' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lName'
--
-- * 'lMetadata'
--
-- * 'lDisplayName'
--
-- * 'lLabels'
--
-- * 'lLocationId'
location
:: Location
location =
Location'
{ _lName = Nothing
, _lMetadata = Nothing
, _lDisplayName = Nothing
, _lLabels = Nothing
, _lLocationId = Nothing
}
-- | Resource name for the location, which may vary between implementations.
-- For example: \`\"projects\/example-project\/locations\/us-east1\"\`
lName :: Lens' Location (Maybe Text)
lName = lens _lName (\ s a -> s{_lName = a})
-- | Service-specific metadata. For example the available capacity at the
-- given location.
lMetadata :: Lens' Location (Maybe LocationSchema)
lMetadata
= lens _lMetadata (\ s a -> s{_lMetadata = a})
-- | The friendly name for this location, typically a nearby city name. For
-- example, \"Tokyo\".
lDisplayName :: Lens' Location (Maybe Text)
lDisplayName
= lens _lDisplayName (\ s a -> s{_lDisplayName = a})
-- | Cross-service attributes for the location. For example
-- {\"cloud.googleapis.com\/region\": \"us-east1\"}
lLabels :: Lens' Location (Maybe LocationLabels)
lLabels = lens _lLabels (\ s a -> s{_lLabels = a})
-- | The canonical id for this location. For example: \`\"us-east1\"\`.
lLocationId :: Lens' Location (Maybe Text)
lLocationId
= lens _lLocationId (\ s a -> s{_lLocationId = a})
instance FromJSON Location where
parseJSON
= withObject "Location"
(\ o ->
Location' <$>
(o .:? "name") <*> (o .:? "metadata") <*>
(o .:? "displayName")
<*> (o .:? "labels")
<*> (o .:? "locationId"))
instance ToJSON Location where
toJSON Location'{..}
= object
(catMaybes
[("name" .=) <$> _lName,
("metadata" .=) <$> _lMetadata,
("displayName" .=) <$> _lDisplayName,
("labels" .=) <$> _lLabels,
("locationId" .=) <$> _lLocationId])
-- | Request message for KeyManagementService.AsymmetricSign.
--
-- /See:/ 'asymmetricSignRequest' smart constructor.
data AsymmetricSignRequest =
AsymmetricSignRequest'
{ _asrDigest :: !(Maybe Digest)
, _asrDigestCrc32c :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AsymmetricSignRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asrDigest'
--
-- * 'asrDigestCrc32c'
asymmetricSignRequest
:: AsymmetricSignRequest
asymmetricSignRequest =
AsymmetricSignRequest' {_asrDigest = Nothing, _asrDigestCrc32c = Nothing}
-- | Optional. The digest of the data to sign. The digest must be produced
-- with the same digest algorithm as specified by the key version\'s
-- algorithm.
asrDigest :: Lens' AsymmetricSignRequest (Maybe Digest)
asrDigest
= lens _asrDigest (\ s a -> s{_asrDigest = a})
-- | Optional. An optional CRC32C checksum of the
-- AsymmetricSignRequest.digest. If specified, KeyManagementService will
-- verify the integrity of the received AsymmetricSignRequest.digest using
-- this checksum. KeyManagementService will report an error if the checksum
-- verification fails. If you receive a checksum error, your client should
-- verify that CRC32C(AsymmetricSignRequest.digest) is equal to
-- AsymmetricSignRequest.digest_crc32c, and if so, perform a limited number
-- of retries. A persistent mismatch may indicate an issue in your
-- computation of the CRC32C checksum. Note: This field is defined as int64
-- for reasons of compatibility across different languages. However, it is
-- a non-negative integer, which will never exceed 2^32-1, and can be
-- safely downconverted to uint32 in languages that support this type.
asrDigestCrc32c :: Lens' AsymmetricSignRequest (Maybe Int64)
asrDigestCrc32c
= lens _asrDigestCrc32c
(\ s a -> s{_asrDigestCrc32c = a})
. mapping _Coerce
instance FromJSON AsymmetricSignRequest where
parseJSON
= withObject "AsymmetricSignRequest"
(\ o ->
AsymmetricSignRequest' <$>
(o .:? "digest") <*> (o .:? "digestCrc32c"))
instance ToJSON AsymmetricSignRequest where
toJSON AsymmetricSignRequest'{..}
= object
(catMaybes
[("digest" .=) <$> _asrDigest,
("digestCrc32c" .=) <$> _asrDigestCrc32c])
-- | Response message for KeyManagementService.ListImportJobs.
--
-- /See:/ 'listImportJobsResponse' smart constructor.
data ListImportJobsResponse =
ListImportJobsResponse'
{ _lijrNextPageToken :: !(Maybe Text)
, _lijrImportJobs :: !(Maybe [ImportJob])
, _lijrTotalSize :: !(Maybe (Textual Int32))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListImportJobsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lijrNextPageToken'
--
-- * 'lijrImportJobs'
--
-- * 'lijrTotalSize'
listImportJobsResponse
:: ListImportJobsResponse
listImportJobsResponse =
ListImportJobsResponse'
{ _lijrNextPageToken = Nothing
, _lijrImportJobs = Nothing
, _lijrTotalSize = Nothing
}
-- | A token to retrieve next page of results. Pass this value in
-- ListImportJobsRequest.page_token to retrieve the next page of results.
lijrNextPageToken :: Lens' ListImportJobsResponse (Maybe Text)
lijrNextPageToken
= lens _lijrNextPageToken
(\ s a -> s{_lijrNextPageToken = a})
-- | The list of ImportJobs.
lijrImportJobs :: Lens' ListImportJobsResponse [ImportJob]
lijrImportJobs
= lens _lijrImportJobs
(\ s a -> s{_lijrImportJobs = a})
. _Default
. _Coerce
-- | The total number of ImportJobs that matched the query.
lijrTotalSize :: Lens' ListImportJobsResponse (Maybe Int32)
lijrTotalSize
= lens _lijrTotalSize
(\ s a -> s{_lijrTotalSize = a})
. mapping _Coerce
instance FromJSON ListImportJobsResponse where
parseJSON
= withObject "ListImportJobsResponse"
(\ o ->
ListImportJobsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "importJobs" .!= mempty)
<*> (o .:? "totalSize"))
instance ToJSON ListImportJobsResponse where
toJSON ListImportJobsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lijrNextPageToken,
("importJobs" .=) <$> _lijrImportJobs,
("totalSize" .=) <$> _lijrTotalSize])
-- | Certificate chains needed to verify the attestation. Certificates in
-- chains are PEM-encoded and are ordered based on
-- https:\/\/tools.ietf.org\/html\/rfc5246#section-7.4.2.
--
-- /See:/ 'certificateChains' smart constructor.
data CertificateChains =
CertificateChains'
{ _ccGooglePartitionCerts :: !(Maybe [Text])
, _ccGoogleCardCerts :: !(Maybe [Text])
, _ccCaviumCerts :: !(Maybe [Text])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CertificateChains' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccGooglePartitionCerts'
--
-- * 'ccGoogleCardCerts'
--
-- * 'ccCaviumCerts'
certificateChains
:: CertificateChains
certificateChains =
CertificateChains'
{ _ccGooglePartitionCerts = Nothing
, _ccGoogleCardCerts = Nothing
, _ccCaviumCerts = Nothing
}
-- | Google partition certificate chain corresponding to the attestation.
ccGooglePartitionCerts :: Lens' CertificateChains [Text]
ccGooglePartitionCerts
= lens _ccGooglePartitionCerts
(\ s a -> s{_ccGooglePartitionCerts = a})
. _Default
. _Coerce
-- | Google card certificate chain corresponding to the attestation.
ccGoogleCardCerts :: Lens' CertificateChains [Text]
ccGoogleCardCerts
= lens _ccGoogleCardCerts
(\ s a -> s{_ccGoogleCardCerts = a})
. _Default
. _Coerce
-- | Cavium certificate chain corresponding to the attestation.
ccCaviumCerts :: Lens' CertificateChains [Text]
ccCaviumCerts
= lens _ccCaviumCerts
(\ s a -> s{_ccCaviumCerts = a})
. _Default
. _Coerce
instance FromJSON CertificateChains where
parseJSON
= withObject "CertificateChains"
(\ o ->
CertificateChains' <$>
(o .:? "googlePartitionCerts" .!= mempty) <*>
(o .:? "googleCardCerts" .!= mempty)
<*> (o .:? "caviumCerts" .!= mempty))
instance ToJSON CertificateChains where
toJSON CertificateChains'{..}
= object
(catMaybes
[("googlePartitionCerts" .=) <$>
_ccGooglePartitionCerts,
("googleCardCerts" .=) <$> _ccGoogleCardCerts,
("caviumCerts" .=) <$> _ccCaviumCerts])
-- | The public key for a given CryptoKeyVersion. Obtained via GetPublicKey.
--
-- /See:/ 'publicKey' smart constructor.
data PublicKey =
PublicKey'
{ _pkPem :: !(Maybe Text)
, _pkPemCrc32c :: !(Maybe (Textual Int64))
, _pkName :: !(Maybe Text)
, _pkAlgorithm :: !(Maybe PublicKeyAlgorithm)
, _pkProtectionLevel :: !(Maybe PublicKeyProtectionLevel)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PublicKey' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pkPem'
--
-- * 'pkPemCrc32c'
--
-- * 'pkName'
--
-- * 'pkAlgorithm'
--
-- * 'pkProtectionLevel'
publicKey
:: PublicKey
publicKey =
PublicKey'
{ _pkPem = Nothing
, _pkPemCrc32c = Nothing
, _pkName = Nothing
, _pkAlgorithm = Nothing
, _pkProtectionLevel = Nothing
}
-- | The public key, encoded in PEM format. For more information, see the
-- [RFC 7468](https:\/\/tools.ietf.org\/html\/rfc7468) sections for
-- [General
-- Considerations](https:\/\/tools.ietf.org\/html\/rfc7468#section-2) and
-- [Textual Encoding of Subject Public Key Info]
-- (https:\/\/tools.ietf.org\/html\/rfc7468#section-13).
pkPem :: Lens' PublicKey (Maybe Text)
pkPem = lens _pkPem (\ s a -> s{_pkPem = a})
-- | Integrity verification field. A CRC32C checksum of the returned
-- PublicKey.pem. An integrity check of PublicKey.pem can be performed by
-- computing the CRC32C checksum of PublicKey.pem and comparing your
-- results to this field. Discard the response in case of non-matching
-- checksum values, and perform a limited number of retries. A persistent
-- mismatch may indicate an issue in your computation of the CRC32C
-- checksum. Note: This field is defined as int64 for reasons of
-- compatibility across different languages. However, it is a non-negative
-- integer, which will never exceed 2^32-1, and can be safely downconverted
-- to uint32 in languages that support this type. NOTE: This field is in
-- Beta.
pkPemCrc32c :: Lens' PublicKey (Maybe Int64)
pkPemCrc32c
= lens _pkPemCrc32c (\ s a -> s{_pkPemCrc32c = a}) .
mapping _Coerce
-- | The name of the CryptoKeyVersion public key. Provided here for
-- verification. NOTE: This field is in Beta.
pkName :: Lens' PublicKey (Maybe Text)
pkName = lens _pkName (\ s a -> s{_pkName = a})
-- | The Algorithm associated with this key.
pkAlgorithm :: Lens' PublicKey (Maybe PublicKeyAlgorithm)
pkAlgorithm
= lens _pkAlgorithm (\ s a -> s{_pkAlgorithm = a})
-- | The ProtectionLevel of the CryptoKeyVersion public key.
pkProtectionLevel :: Lens' PublicKey (Maybe PublicKeyProtectionLevel)
pkProtectionLevel
= lens _pkProtectionLevel
(\ s a -> s{_pkProtectionLevel = a})
instance FromJSON PublicKey where
parseJSON
= withObject "PublicKey"
(\ o ->
PublicKey' <$>
(o .:? "pem") <*> (o .:? "pemCrc32c") <*>
(o .:? "name")
<*> (o .:? "algorithm")
<*> (o .:? "protectionLevel"))
instance ToJSON PublicKey where
toJSON PublicKey'{..}
= object
(catMaybes
[("pem" .=) <$> _pkPem,
("pemCrc32c" .=) <$> _pkPemCrc32c,
("name" .=) <$> _pkName,
("algorithm" .=) <$> _pkAlgorithm,
("protectionLevel" .=) <$> _pkProtectionLevel])
-- | Response message for KeyManagementService.Decrypt.
--
-- /See:/ 'decryptResponse' smart constructor.
data DecryptResponse =
DecryptResponse'
{ _drUsedPrimary :: !(Maybe Bool)
, _drPlaintextCrc32c :: !(Maybe (Textual Int64))
, _drPlaintext :: !(Maybe Bytes)
, _drProtectionLevel :: !(Maybe DecryptResponseProtectionLevel)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DecryptResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'drUsedPrimary'
--
-- * 'drPlaintextCrc32c'
--
-- * 'drPlaintext'
--
-- * 'drProtectionLevel'
decryptResponse
:: DecryptResponse
decryptResponse =
DecryptResponse'
{ _drUsedPrimary = Nothing
, _drPlaintextCrc32c = Nothing
, _drPlaintext = Nothing
, _drProtectionLevel = Nothing
}
-- | Whether the Decryption was performed using the primary key version.
drUsedPrimary :: Lens' DecryptResponse (Maybe Bool)
drUsedPrimary
= lens _drUsedPrimary
(\ s a -> s{_drUsedPrimary = a})
-- | Integrity verification field. A CRC32C checksum of the returned
-- DecryptResponse.plaintext. An integrity check of
-- DecryptResponse.plaintext can be performed by computing the CRC32C
-- checksum of DecryptResponse.plaintext and comparing your results to this
-- field. Discard the response in case of non-matching checksum values, and
-- perform a limited number of retries. A persistent mismatch may indicate
-- an issue in your computation of the CRC32C checksum. Note: receiving
-- this response message indicates that KeyManagementService is able to
-- successfully decrypt the ciphertext. Note: This field is defined as
-- int64 for reasons of compatibility across different languages. However,
-- it is a non-negative integer, which will never exceed 2^32-1, and can be
-- safely downconverted to uint32 in languages that support this type.
drPlaintextCrc32c :: Lens' DecryptResponse (Maybe Int64)
drPlaintextCrc32c
= lens _drPlaintextCrc32c
(\ s a -> s{_drPlaintextCrc32c = a})
. mapping _Coerce
-- | The decrypted data originally supplied in EncryptRequest.plaintext.
drPlaintext :: Lens' DecryptResponse (Maybe ByteString)
drPlaintext
= lens _drPlaintext (\ s a -> s{_drPlaintext = a}) .
mapping _Bytes
-- | The ProtectionLevel of the CryptoKeyVersion used in decryption.
drProtectionLevel :: Lens' DecryptResponse (Maybe DecryptResponseProtectionLevel)
drProtectionLevel
= lens _drProtectionLevel
(\ s a -> s{_drProtectionLevel = a})
instance FromJSON DecryptResponse where
parseJSON
= withObject "DecryptResponse"
(\ o ->
DecryptResponse' <$>
(o .:? "usedPrimary") <*> (o .:? "plaintextCrc32c")
<*> (o .:? "plaintext")
<*> (o .:? "protectionLevel"))
instance ToJSON DecryptResponse where
toJSON DecryptResponse'{..}
= object
(catMaybes
[("usedPrimary" .=) <$> _drUsedPrimary,
("plaintextCrc32c" .=) <$> _drPlaintextCrc32c,
("plaintext" .=) <$> _drPlaintext,
("protectionLevel" .=) <$> _drProtectionLevel])
-- | A CryptoKeyVersionTemplate specifies the properties to use when creating
-- a new CryptoKeyVersion, either manually with CreateCryptoKeyVersion or
-- automatically as a result of auto-rotation.
--
-- /See:/ 'cryptoKeyVersionTemplate' smart constructor.
data CryptoKeyVersionTemplate =
CryptoKeyVersionTemplate'
{ _ckvtAlgorithm :: !(Maybe CryptoKeyVersionTemplateAlgorithm)
, _ckvtProtectionLevel :: !(Maybe CryptoKeyVersionTemplateProtectionLevel)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CryptoKeyVersionTemplate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ckvtAlgorithm'
--
-- * 'ckvtProtectionLevel'
cryptoKeyVersionTemplate
:: CryptoKeyVersionTemplate
cryptoKeyVersionTemplate =
CryptoKeyVersionTemplate'
{_ckvtAlgorithm = Nothing, _ckvtProtectionLevel = Nothing}
-- | Required. Algorithm to use when creating a CryptoKeyVersion based on
-- this template. For backwards compatibility, GOOGLE_SYMMETRIC_ENCRYPTION
-- is implied if both this field is omitted and CryptoKey.purpose is
-- ENCRYPT_DECRYPT.
ckvtAlgorithm :: Lens' CryptoKeyVersionTemplate (Maybe CryptoKeyVersionTemplateAlgorithm)
ckvtAlgorithm
= lens _ckvtAlgorithm
(\ s a -> s{_ckvtAlgorithm = a})
-- | ProtectionLevel to use when creating a CryptoKeyVersion based on this
-- template. Immutable. Defaults to SOFTWARE.
ckvtProtectionLevel :: Lens' CryptoKeyVersionTemplate (Maybe CryptoKeyVersionTemplateProtectionLevel)
ckvtProtectionLevel
= lens _ckvtProtectionLevel
(\ s a -> s{_ckvtProtectionLevel = a})
instance FromJSON CryptoKeyVersionTemplate where
parseJSON
= withObject "CryptoKeyVersionTemplate"
(\ o ->
CryptoKeyVersionTemplate' <$>
(o .:? "algorithm") <*> (o .:? "protectionLevel"))
instance ToJSON CryptoKeyVersionTemplate where
toJSON CryptoKeyVersionTemplate'{..}
= object
(catMaybes
[("algorithm" .=) <$> _ckvtAlgorithm,
("protectionLevel" .=) <$> _ckvtProtectionLevel])
-- | Request message for \`SetIamPolicy\` method.
--
-- /See:/ 'setIAMPolicyRequest' smart constructor.
data SetIAMPolicyRequest =
SetIAMPolicyRequest'
{ _siprUpdateMask :: !(Maybe GFieldMask)
, _siprPolicy :: !(Maybe Policy)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SetIAMPolicyRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'siprUpdateMask'
--
-- * 'siprPolicy'
setIAMPolicyRequest
:: SetIAMPolicyRequest
setIAMPolicyRequest =
SetIAMPolicyRequest' {_siprUpdateMask = Nothing, _siprPolicy = Nothing}
-- | OPTIONAL: A FieldMask specifying which fields of the policy to modify.
-- Only the fields in the mask will be modified. If no mask is provided,
-- the following default mask is used: \`paths: \"bindings, etag\"\`
siprUpdateMask :: Lens' SetIAMPolicyRequest (Maybe GFieldMask)
siprUpdateMask
= lens _siprUpdateMask
(\ s a -> s{_siprUpdateMask = a})
-- | REQUIRED: The complete policy to be applied to the \`resource\`. The
-- size of the policy is limited to a few 10s of KB. An empty policy is a
-- valid policy but certain Cloud Platform services (such as Projects)
-- might reject them.
siprPolicy :: Lens' SetIAMPolicyRequest (Maybe Policy)
siprPolicy
= lens _siprPolicy (\ s a -> s{_siprPolicy = a})
instance FromJSON SetIAMPolicyRequest where
parseJSON
= withObject "SetIAMPolicyRequest"
(\ o ->
SetIAMPolicyRequest' <$>
(o .:? "updateMask") <*> (o .:? "policy"))
instance ToJSON SetIAMPolicyRequest where
toJSON SetIAMPolicyRequest'{..}
= object
(catMaybes
[("updateMask" .=) <$> _siprUpdateMask,
("policy" .=) <$> _siprPolicy])
-- | Labels with user-defined metadata. For more information, see [Labeling
-- Keys](https:\/\/cloud.google.com\/kms\/docs\/labeling-keys).
--
-- /See:/ 'cryptoKeyLabels' smart constructor.
newtype CryptoKeyLabels =
CryptoKeyLabels'
{ _cklAddtional :: HashMap Text Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CryptoKeyLabels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cklAddtional'
cryptoKeyLabels
:: HashMap Text Text -- ^ 'cklAddtional'
-> CryptoKeyLabels
cryptoKeyLabels pCklAddtional_ =
CryptoKeyLabels' {_cklAddtional = _Coerce # pCklAddtional_}
-- | The user-defined label keys and values attached to the CryptoKey.
cklAddtional :: Lens' CryptoKeyLabels (HashMap Text Text)
cklAddtional
= lens _cklAddtional (\ s a -> s{_cklAddtional = a})
. _Coerce
instance FromJSON CryptoKeyLabels where
parseJSON
= withObject "CryptoKeyLabels"
(\ o -> CryptoKeyLabels' <$> (parseJSONObject o))
instance ToJSON CryptoKeyLabels where
toJSON = toJSON . _cklAddtional
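-- Example (editor's sketch): building a CryptoKeyLabels value from a list
-- of key/value pairs. The qualified name 'HashMap.fromList' assumes an
-- import of Data.HashMap.Strict as HashMap, which is not shown in this
-- module; only the HashMap type itself is guaranteed to be in scope here.
exampleCryptoKeyLabels :: CryptoKeyLabels
exampleCryptoKeyLabels =
  cryptoKeyLabels (HashMap.fromList [("team", "alpha"), ("env", "prod")])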
-- | A CryptoKey represents a logical key that can be used for cryptographic
-- operations. A CryptoKey is made up of zero or more versions, which
-- represent the actual key material used in cryptographic operations.
--
-- /See:/ 'cryptoKey' smart constructor.
data CryptoKey =
CryptoKey'
{ _ckVersionTemplate :: !(Maybe CryptoKeyVersionTemplate)
, _ckPurpose :: !(Maybe CryptoKeyPurpose)
, _ckRotationPeriod :: !(Maybe GDuration)
, _ckPrimary :: !(Maybe CryptoKeyVersion)
, _ckName :: !(Maybe Text)
, _ckLabels :: !(Maybe CryptoKeyLabels)
, _ckNextRotationTime :: !(Maybe DateTime')
, _ckCreateTime :: !(Maybe DateTime')
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CryptoKey' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ckVersionTemplate'
--
-- * 'ckPurpose'
--
-- * 'ckRotationPeriod'
--
-- * 'ckPrimary'
--
-- * 'ckName'
--
-- * 'ckLabels'
--
-- * 'ckNextRotationTime'
--
-- * 'ckCreateTime'
cryptoKey
:: CryptoKey
cryptoKey =
CryptoKey'
{ _ckVersionTemplate = Nothing
, _ckPurpose = Nothing
, _ckRotationPeriod = Nothing
, _ckPrimary = Nothing
, _ckName = Nothing
, _ckLabels = Nothing
, _ckNextRotationTime = Nothing
, _ckCreateTime = Nothing
}
-- | A template describing settings for new CryptoKeyVersion instances. The
-- properties of new CryptoKeyVersion instances created by either
-- CreateCryptoKeyVersion or auto-rotation are controlled by this template.
ckVersionTemplate :: Lens' CryptoKey (Maybe CryptoKeyVersionTemplate)
ckVersionTemplate
= lens _ckVersionTemplate
(\ s a -> s{_ckVersionTemplate = a})
-- | Immutable. The immutable purpose of this CryptoKey.
ckPurpose :: Lens' CryptoKey (Maybe CryptoKeyPurpose)
ckPurpose
= lens _ckPurpose (\ s a -> s{_ckPurpose = a})
-- | next_rotation_time will be advanced by this period when the service
-- automatically rotates a key. Must be at least 24 hours and at most
-- 876,000 hours. If rotation_period is set, next_rotation_time must also
-- be set. Keys with purpose ENCRYPT_DECRYPT support automatic rotation.
-- For other keys, this field must be omitted.
ckRotationPeriod :: Lens' CryptoKey (Maybe Scientific)
ckRotationPeriod
= lens _ckRotationPeriod
(\ s a -> s{_ckRotationPeriod = a})
. mapping _GDuration
-- | Output only. A copy of the \"primary\" CryptoKeyVersion that will be
-- used by Encrypt when this CryptoKey is given in EncryptRequest.name. The
-- CryptoKey\'s primary version can be updated via
-- UpdateCryptoKeyPrimaryVersion. Keys with purpose ENCRYPT_DECRYPT may
-- have a primary. For other keys, this field will be omitted.
ckPrimary :: Lens' CryptoKey (Maybe CryptoKeyVersion)
ckPrimary
= lens _ckPrimary (\ s a -> s{_ckPrimary = a})
-- | Output only. The resource name for this CryptoKey in the format
-- \`projects\/*\/locations\/*\/keyRings\/*\/cryptoKeys\/*\`.
ckName :: Lens' CryptoKey (Maybe Text)
ckName = lens _ckName (\ s a -> s{_ckName = a})
-- | Labels with user-defined metadata. For more information, see [Labeling
-- Keys](https:\/\/cloud.google.com\/kms\/docs\/labeling-keys).
ckLabels :: Lens' CryptoKey (Maybe CryptoKeyLabels)
ckLabels = lens _ckLabels (\ s a -> s{_ckLabels = a})
-- | At next_rotation_time, the Key Management Service will automatically: 1.
-- Create a new version of this CryptoKey. 2. Mark the new version as
-- primary. Key rotations performed manually via CreateCryptoKeyVersion and
-- UpdateCryptoKeyPrimaryVersion do not affect next_rotation_time. Keys
-- with purpose ENCRYPT_DECRYPT support automatic rotation. For other keys,
-- this field must be omitted.
ckNextRotationTime :: Lens' CryptoKey (Maybe UTCTime)
ckNextRotationTime
= lens _ckNextRotationTime
(\ s a -> s{_ckNextRotationTime = a})
. mapping _DateTime
-- | Output only. The time at which this CryptoKey was created.
ckCreateTime :: Lens' CryptoKey (Maybe UTCTime)
ckCreateTime
= lens _ckCreateTime (\ s a -> s{_ckCreateTime = a})
. mapping _DateTime
instance FromJSON CryptoKey where
parseJSON
= withObject "CryptoKey"
(\ o ->
CryptoKey' <$>
(o .:? "versionTemplate") <*> (o .:? "purpose") <*>
(o .:? "rotationPeriod")
<*> (o .:? "primary")
<*> (o .:? "name")
<*> (o .:? "labels")
<*> (o .:? "nextRotationTime")
<*> (o .:? "createTime"))
instance ToJSON CryptoKey where
toJSON CryptoKey'{..}
= object
(catMaybes
[("versionTemplate" .=) <$> _ckVersionTemplate,
("purpose" .=) <$> _ckPurpose,
("rotationPeriod" .=) <$> _ckRotationPeriod,
("primary" .=) <$> _ckPrimary,
("name" .=) <$> _ckName, ("labels" .=) <$> _ckLabels,
("nextRotationTime" .=) <$> _ckNextRotationTime,
("createTime" .=) <$> _ckCreateTime])
-- | Request message for KeyManagementService.Decrypt.
--
-- /See:/ 'decryptRequest' smart constructor.
data DecryptRequest =
DecryptRequest'
{ _drAdditionalAuthenticatedData :: !(Maybe Bytes)
, _drAdditionalAuthenticatedDataCrc32c :: !(Maybe (Textual Int64))
, _drCiphertext :: !(Maybe Bytes)
, _drCiphertextCrc32c :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DecryptRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'drAdditionalAuthenticatedData'
--
-- * 'drAdditionalAuthenticatedDataCrc32c'
--
-- * 'drCiphertext'
--
-- * 'drCiphertextCrc32c'
decryptRequest
:: DecryptRequest
decryptRequest =
DecryptRequest'
{ _drAdditionalAuthenticatedData = Nothing
, _drAdditionalAuthenticatedDataCrc32c = Nothing
, _drCiphertext = Nothing
, _drCiphertextCrc32c = Nothing
}
-- | Optional. Optional data that must match the data originally supplied in
-- EncryptRequest.additional_authenticated_data.
drAdditionalAuthenticatedData :: Lens' DecryptRequest (Maybe ByteString)
drAdditionalAuthenticatedData
= lens _drAdditionalAuthenticatedData
(\ s a -> s{_drAdditionalAuthenticatedData = a})
. mapping _Bytes
-- | Optional. An optional CRC32C checksum of the
-- DecryptRequest.additional_authenticated_data. If specified,
-- KeyManagementService will verify the integrity of the received
-- DecryptRequest.additional_authenticated_data using this checksum.
-- KeyManagementService will report an error if the checksum verification
-- fails. If you receive a checksum error, your client should verify that
-- CRC32C(DecryptRequest.additional_authenticated_data) is equal to
-- DecryptRequest.additional_authenticated_data_crc32c, and if so, perform
-- a limited number of retries. A persistent mismatch may indicate an issue
-- in your computation of the CRC32C checksum. Note: This field is defined
-- as int64 for reasons of compatibility across different languages.
-- However, it is a non-negative integer, which will never exceed 2^32-1,
-- and can be safely downconverted to uint32 in languages that support this
-- type.
drAdditionalAuthenticatedDataCrc32c :: Lens' DecryptRequest (Maybe Int64)
drAdditionalAuthenticatedDataCrc32c
= lens _drAdditionalAuthenticatedDataCrc32c
(\ s a ->
s{_drAdditionalAuthenticatedDataCrc32c = a})
. mapping _Coerce
-- | Required. The encrypted data originally returned in
-- EncryptResponse.ciphertext.
drCiphertext :: Lens' DecryptRequest (Maybe ByteString)
drCiphertext
= lens _drCiphertext (\ s a -> s{_drCiphertext = a})
. mapping _Bytes
-- | Optional. An optional CRC32C checksum of the DecryptRequest.ciphertext.
-- If specified, KeyManagementService will verify the integrity of the
-- received DecryptRequest.ciphertext using this checksum.
-- KeyManagementService will report an error if the checksum verification
-- fails. If you receive a checksum error, your client should verify that
-- CRC32C(DecryptRequest.ciphertext) is equal to
-- DecryptRequest.ciphertext_crc32c, and if so, perform a limited number of
-- retries. A persistent mismatch may indicate an issue in your computation
-- of the CRC32C checksum. Note: This field is defined as int64 for reasons
-- of compatibility across different languages. However, it is a
-- non-negative integer, which will never exceed 2^32-1, and can be safely
-- downconverted to uint32 in languages that support this type.
drCiphertextCrc32c :: Lens' DecryptRequest (Maybe Int64)
drCiphertextCrc32c
= lens _drCiphertextCrc32c
(\ s a -> s{_drCiphertextCrc32c = a})
. mapping _Coerce
instance FromJSON DecryptRequest where
parseJSON
= withObject "DecryptRequest"
(\ o ->
DecryptRequest' <$>
(o .:? "additionalAuthenticatedData") <*>
(o .:? "additionalAuthenticatedDataCrc32c")
<*> (o .:? "ciphertext")
<*> (o .:? "ciphertextCrc32c"))
instance ToJSON DecryptRequest where
toJSON DecryptRequest'{..}
= object
(catMaybes
[("additionalAuthenticatedData" .=) <$>
_drAdditionalAuthenticatedData,
("additionalAuthenticatedDataCrc32c" .=) <$>
_drAdditionalAuthenticatedDataCrc32c,
("ciphertext" .=) <$> _drCiphertext,
("ciphertextCrc32c" .=) <$> _drCiphertextCrc32c])
-- | Contains an HSM-generated attestation about a key operation. For more
-- information, see [Verifying attestations]
-- (https:\/\/cloud.google.com\/kms\/docs\/attest-key).
--
-- /See:/ 'keyOperationAttestation' smart constructor.
data KeyOperationAttestation =
KeyOperationAttestation'
{ _koaFormat :: !(Maybe KeyOperationAttestationFormat)
, _koaContent :: !(Maybe Bytes)
, _koaCertChains :: !(Maybe CertificateChains)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'KeyOperationAttestation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'koaFormat'
--
-- * 'koaContent'
--
-- * 'koaCertChains'
keyOperationAttestation
:: KeyOperationAttestation
keyOperationAttestation =
KeyOperationAttestation'
{_koaFormat = Nothing, _koaContent = Nothing, _koaCertChains = Nothing}
-- | Output only. The format of the attestation data.
koaFormat :: Lens' KeyOperationAttestation (Maybe KeyOperationAttestationFormat)
koaFormat
= lens _koaFormat (\ s a -> s{_koaFormat = a})
-- | Output only. The attestation data provided by the HSM when the key
-- operation was performed.
koaContent :: Lens' KeyOperationAttestation (Maybe ByteString)
koaContent
= lens _koaContent (\ s a -> s{_koaContent = a}) .
mapping _Bytes
-- | Output only. The certificate chains needed to validate the attestation.
koaCertChains :: Lens' KeyOperationAttestation (Maybe CertificateChains)
koaCertChains
= lens _koaCertChains
(\ s a -> s{_koaCertChains = a})
instance FromJSON KeyOperationAttestation where
parseJSON
= withObject "KeyOperationAttestation"
(\ o ->
KeyOperationAttestation' <$>
(o .:? "format") <*> (o .:? "content") <*>
(o .:? "certChains"))
instance ToJSON KeyOperationAttestation where
toJSON KeyOperationAttestation'{..}
= object
(catMaybes
[("format" .=) <$> _koaFormat,
("content" .=) <$> _koaContent,
("certChains" .=) <$> _koaCertChains])
-- | Response message for KeyManagementService.ListCryptoKeyVersions.
--
-- /See:/ 'listCryptoKeyVersionsResponse' smart constructor.
data ListCryptoKeyVersionsResponse =
ListCryptoKeyVersionsResponse'
{ _lckvrNextPageToken :: !(Maybe Text)
, _lckvrTotalSize :: !(Maybe (Textual Int32))
, _lckvrCryptoKeyVersions :: !(Maybe [CryptoKeyVersion])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListCryptoKeyVersionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lckvrNextPageToken'
--
-- * 'lckvrTotalSize'
--
-- * 'lckvrCryptoKeyVersions'
listCryptoKeyVersionsResponse
:: ListCryptoKeyVersionsResponse
listCryptoKeyVersionsResponse =
ListCryptoKeyVersionsResponse'
{ _lckvrNextPageToken = Nothing
, _lckvrTotalSize = Nothing
, _lckvrCryptoKeyVersions = Nothing
}
-- | A token to retrieve next page of results. Pass this value in
-- ListCryptoKeyVersionsRequest.page_token to retrieve the next page of
-- results.
lckvrNextPageToken :: Lens' ListCryptoKeyVersionsResponse (Maybe Text)
lckvrNextPageToken
= lens _lckvrNextPageToken
(\ s a -> s{_lckvrNextPageToken = a})
-- | The total number of CryptoKeyVersions that matched the query.
lckvrTotalSize :: Lens' ListCryptoKeyVersionsResponse (Maybe Int32)
lckvrTotalSize
= lens _lckvrTotalSize
(\ s a -> s{_lckvrTotalSize = a})
. mapping _Coerce
-- | The list of CryptoKeyVersions.
lckvrCryptoKeyVersions :: Lens' ListCryptoKeyVersionsResponse [CryptoKeyVersion]
lckvrCryptoKeyVersions
= lens _lckvrCryptoKeyVersions
(\ s a -> s{_lckvrCryptoKeyVersions = a})
. _Default
. _Coerce
instance FromJSON ListCryptoKeyVersionsResponse where
parseJSON
= withObject "ListCryptoKeyVersionsResponse"
(\ o ->
ListCryptoKeyVersionsResponse' <$>
(o .:? "nextPageToken") <*> (o .:? "totalSize") <*>
(o .:? "cryptoKeyVersions" .!= mempty))
instance ToJSON ListCryptoKeyVersionsResponse where
toJSON ListCryptoKeyVersionsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lckvrNextPageToken,
("totalSize" .=) <$> _lckvrTotalSize,
("cryptoKeyVersions" .=) <$>
_lckvrCryptoKeyVersions])
-- | Request message for KeyManagementService.RestoreCryptoKeyVersion.
--
-- /See:/ 'restoreCryptoKeyVersionRequest' smart constructor.
data RestoreCryptoKeyVersionRequest =
RestoreCryptoKeyVersionRequest'
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RestoreCryptoKeyVersionRequest' with the minimum fields required to make a request.
--
restoreCryptoKeyVersionRequest
:: RestoreCryptoKeyVersionRequest
restoreCryptoKeyVersionRequest = RestoreCryptoKeyVersionRequest'
instance FromJSON RestoreCryptoKeyVersionRequest
where
parseJSON
= withObject "RestoreCryptoKeyVersionRequest"
(\ o -> pure RestoreCryptoKeyVersionRequest')
instance ToJSON RestoreCryptoKeyVersionRequest where
toJSON = const emptyObject
-- | Request message for KeyManagementService.UpdateCryptoKeyPrimaryVersion.
--
-- /See:/ 'updateCryptoKeyPrimaryVersionRequest' smart constructor.
newtype UpdateCryptoKeyPrimaryVersionRequest =
UpdateCryptoKeyPrimaryVersionRequest'
{ _uckpvrCryptoKeyVersionId :: Maybe Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UpdateCryptoKeyPrimaryVersionRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uckpvrCryptoKeyVersionId'
updateCryptoKeyPrimaryVersionRequest
:: UpdateCryptoKeyPrimaryVersionRequest
updateCryptoKeyPrimaryVersionRequest =
UpdateCryptoKeyPrimaryVersionRequest' {_uckpvrCryptoKeyVersionId = Nothing}
-- | Required. The id of the child CryptoKeyVersion to use as primary.
uckpvrCryptoKeyVersionId :: Lens' UpdateCryptoKeyPrimaryVersionRequest (Maybe Text)
uckpvrCryptoKeyVersionId
= lens _uckpvrCryptoKeyVersionId
(\ s a -> s{_uckpvrCryptoKeyVersionId = a})
instance FromJSON
UpdateCryptoKeyPrimaryVersionRequest
where
parseJSON
= withObject "UpdateCryptoKeyPrimaryVersionRequest"
(\ o ->
UpdateCryptoKeyPrimaryVersionRequest' <$>
(o .:? "cryptoKeyVersionId"))
instance ToJSON UpdateCryptoKeyPrimaryVersionRequest
where
toJSON UpdateCryptoKeyPrimaryVersionRequest'{..}
= object
(catMaybes
[("cryptoKeyVersionId" .=) <$>
_uckpvrCryptoKeyVersionId])
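-- Example (editor's sketch): requesting that a particular CryptoKeyVersion
-- id become the primary version. Assumes (&) and (?~) are in scope.
makeVersionPrimary :: Text -> UpdateCryptoKeyPrimaryVersionRequest
makeVersionPrimary versionId =
  updateCryptoKeyPrimaryVersionRequest
    & uckpvrCryptoKeyVersionId ?~ versionId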
-- | An ImportJob can be used to create CryptoKeys and CryptoKeyVersions
-- using pre-existing key material, generated outside of Cloud KMS. When an
-- ImportJob is created, Cloud KMS will generate a \"wrapping key\", which
-- is a public\/private key pair. You use the wrapping key to encrypt (also
-- known as wrap) the pre-existing key material to protect it during the
-- import process. The nature of the wrapping key depends on the choice of
-- import_method. When the wrapping key generation is complete, the state
-- will be set to ACTIVE and the public_key can be fetched. The fetched
-- public key can then be used to wrap your pre-existing key material. Once
-- the key material is wrapped, it can be imported into a new
-- CryptoKeyVersion in an existing CryptoKey by calling
-- ImportCryptoKeyVersion. Multiple CryptoKeyVersions can be imported with
-- a single ImportJob. Cloud KMS uses the private key portion of the
-- wrapping key to unwrap the key material. Only Cloud KMS has access to
-- the private key. An ImportJob expires 3 days after it is created. Once
-- expired, Cloud KMS will no longer be able to import or unwrap any key
-- material that was wrapped with the ImportJob\'s public key. For more
-- information, see [Importing a
-- key](https:\/\/cloud.google.com\/kms\/docs\/importing-a-key).
--
-- /See:/ 'importJob' smart constructor.
data ImportJob =
ImportJob'
{ _ijState :: !(Maybe ImportJobState)
, _ijImportMethod :: !(Maybe ImportJobImportMethod)
, _ijAttestation :: !(Maybe KeyOperationAttestation)
, _ijPublicKey :: !(Maybe WrAppingPublicKey)
, _ijGenerateTime :: !(Maybe DateTime')
, _ijName :: !(Maybe Text)
, _ijExpireEventTime :: !(Maybe DateTime')
, _ijProtectionLevel :: !(Maybe ImportJobProtectionLevel)
, _ijExpireTime :: !(Maybe DateTime')
, _ijCreateTime :: !(Maybe DateTime')
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ImportJob' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ijState'
--
-- * 'ijImportMethod'
--
-- * 'ijAttestation'
--
-- * 'ijPublicKey'
--
-- * 'ijGenerateTime'
--
-- * 'ijName'
--
-- * 'ijExpireEventTime'
--
-- * 'ijProtectionLevel'
--
-- * 'ijExpireTime'
--
-- * 'ijCreateTime'
importJob
:: ImportJob
importJob =
ImportJob'
{ _ijState = Nothing
, _ijImportMethod = Nothing
, _ijAttestation = Nothing
, _ijPublicKey = Nothing
, _ijGenerateTime = Nothing
, _ijName = Nothing
, _ijExpireEventTime = Nothing
, _ijProtectionLevel = Nothing
, _ijExpireTime = Nothing
, _ijCreateTime = Nothing
}
-- | Output only. The current state of the ImportJob, indicating if it can be
-- used.
ijState :: Lens' ImportJob (Maybe ImportJobState)
ijState = lens _ijState (\ s a -> s{_ijState = a})
-- | Required. Immutable. The wrapping method to be used for incoming key
-- material.
ijImportMethod :: Lens' ImportJob (Maybe ImportJobImportMethod)
ijImportMethod
= lens _ijImportMethod
(\ s a -> s{_ijImportMethod = a})
-- | Output only. Statement that was generated and signed by the key creator
-- (for example, an HSM) at key creation time. Use this statement to verify
-- attributes of the key as stored on the HSM, independently of Google.
-- Only present if the chosen ImportMethod is one with a protection level
-- of HSM.
ijAttestation :: Lens' ImportJob (Maybe KeyOperationAttestation)
ijAttestation
= lens _ijAttestation
(\ s a -> s{_ijAttestation = a})
-- | Output only. The public key with which to wrap key material prior to
-- import. Only returned if state is ACTIVE.
ijPublicKey :: Lens' ImportJob (Maybe WrAppingPublicKey)
ijPublicKey
= lens _ijPublicKey (\ s a -> s{_ijPublicKey = a})
-- | Output only. The time this ImportJob\'s key material was generated.
ijGenerateTime :: Lens' ImportJob (Maybe UTCTime)
ijGenerateTime
= lens _ijGenerateTime
(\ s a -> s{_ijGenerateTime = a})
. mapping _DateTime
-- | Output only. The resource name for this ImportJob in the format
-- \`projects\/*\/locations\/*\/keyRings\/*\/importJobs\/*\`.
ijName :: Lens' ImportJob (Maybe Text)
ijName = lens _ijName (\ s a -> s{_ijName = a})
-- | Output only. The time this ImportJob expired. Only present if state is
-- EXPIRED.
ijExpireEventTime :: Lens' ImportJob (Maybe UTCTime)
ijExpireEventTime
= lens _ijExpireEventTime
(\ s a -> s{_ijExpireEventTime = a})
. mapping _DateTime
-- | Required. Immutable. The protection level of the ImportJob. This must
-- match the protection_level of the version_template on the CryptoKey you
-- attempt to import into.
ijProtectionLevel :: Lens' ImportJob (Maybe ImportJobProtectionLevel)
ijProtectionLevel
= lens _ijProtectionLevel
(\ s a -> s{_ijProtectionLevel = a})
-- | Output only. The time at which this ImportJob is scheduled for
-- expiration and can no longer be used to import key material.
ijExpireTime :: Lens' ImportJob (Maybe UTCTime)
ijExpireTime
= lens _ijExpireTime (\ s a -> s{_ijExpireTime = a})
. mapping _DateTime
-- | Output only. The time at which this ImportJob was created.
ijCreateTime :: Lens' ImportJob (Maybe UTCTime)
ijCreateTime
= lens _ijCreateTime (\ s a -> s{_ijCreateTime = a})
. mapping _DateTime
instance FromJSON ImportJob where
parseJSON
= withObject "ImportJob"
(\ o ->
ImportJob' <$>
(o .:? "state") <*> (o .:? "importMethod") <*>
(o .:? "attestation")
<*> (o .:? "publicKey")
<*> (o .:? "generateTime")
<*> (o .:? "name")
<*> (o .:? "expireEventTime")
<*> (o .:? "protectionLevel")
<*> (o .:? "expireTime")
<*> (o .:? "createTime"))
instance ToJSON ImportJob where
toJSON ImportJob'{..}
= object
(catMaybes
[("state" .=) <$> _ijState,
("importMethod" .=) <$> _ijImportMethod,
("attestation" .=) <$> _ijAttestation,
("publicKey" .=) <$> _ijPublicKey,
("generateTime" .=) <$> _ijGenerateTime,
("name" .=) <$> _ijName,
("expireEventTime" .=) <$> _ijExpireEventTime,
("protectionLevel" .=) <$> _ijProtectionLevel,
("expireTime" .=) <$> _ijExpireTime,
("createTime" .=) <$> _ijCreateTime])
-- | Request message for KeyManagementService.ImportCryptoKeyVersion.
--
-- /See:/ 'importCryptoKeyVersionRequest' smart constructor.
data ImportCryptoKeyVersionRequest =
ImportCryptoKeyVersionRequest'
{ _ickvrRsaAESWrAppedKey :: !(Maybe Bytes)
, _ickvrAlgorithm :: !(Maybe ImportCryptoKeyVersionRequestAlgorithm)
, _ickvrImportJob :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ImportCryptoKeyVersionRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ickvrRsaAESWrAppedKey'
--
-- * 'ickvrAlgorithm'
--
-- * 'ickvrImportJob'
importCryptoKeyVersionRequest
:: ImportCryptoKeyVersionRequest
importCryptoKeyVersionRequest =
ImportCryptoKeyVersionRequest'
{ _ickvrRsaAESWrAppedKey = Nothing
, _ickvrAlgorithm = Nothing
, _ickvrImportJob = Nothing
}
-- | Wrapped key material produced with RSA_OAEP_3072_SHA1_AES_256 or
-- RSA_OAEP_4096_SHA1_AES_256. This field contains the concatenation of two
-- wrapped keys: 1. An ephemeral AES-256 wrapping key wrapped with the
-- public_key using RSAES-OAEP with SHA-1, MGF1 with SHA-1, and an empty
-- label. 2. The key to be imported, wrapped with the ephemeral AES-256 key
-- using AES-KWP (RFC 5649). If importing symmetric key material, it is
-- expected that the unwrapped key contains plain bytes. If importing
-- asymmetric key material, it is expected that the unwrapped key is in
-- PKCS#8-encoded DER format (the PrivateKeyInfo structure from RFC 5208).
-- This format is the same as the format produced by PKCS#11 mechanism
-- CKM_RSA_AES_KEY_WRAP.
ickvrRsaAESWrAppedKey :: Lens' ImportCryptoKeyVersionRequest (Maybe ByteString)
ickvrRsaAESWrAppedKey
= lens _ickvrRsaAESWrAppedKey
(\ s a -> s{_ickvrRsaAESWrAppedKey = a})
. mapping _Bytes
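-- A hedged usage sketch (not part of the generated source): building an
-- ImportCryptoKeyVersionRequest from the smart constructor. It assumes the
-- lens operators '&' and '?~' (e.g. from Control.Lens), OverloadedStrings,
-- and placeholder values @algo :: ImportCryptoKeyVersionRequestAlgorithm@
-- and @wrapped :: ByteString@ produced by an external RSA-OAEP + AES-KWP
-- wrapping step that is not shown here.
--
-- > importCryptoKeyVersionRequest
-- >   & ickvrAlgorithm        ?~ algo
-- >   & ickvrImportJob        ?~ "projects/p/locations/l/keyRings/r/importJobs/j"
-- >   & ickvrRsaAESWrAppedKey ?~ wrapped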
-- | Required. The algorithm of the key being imported. This does not need to
-- match the version_template of the CryptoKey this version imports into.
ickvrAlgorithm :: Lens' ImportCryptoKeyVersionRequest (Maybe ImportCryptoKeyVersionRequestAlgorithm)
ickvrAlgorithm
= lens _ickvrAlgorithm
(\ s a -> s{_ickvrAlgorithm = a})
-- | Required. The name of the ImportJob that was used to wrap this key
-- material.
ickvrImportJob :: Lens' ImportCryptoKeyVersionRequest (Maybe Text)
ickvrImportJob
= lens _ickvrImportJob
(\ s a -> s{_ickvrImportJob = a})
instance FromJSON ImportCryptoKeyVersionRequest where
parseJSON
= withObject "ImportCryptoKeyVersionRequest"
(\ o ->
ImportCryptoKeyVersionRequest' <$>
(o .:? "rsaAesWrappedKey") <*> (o .:? "algorithm")
<*> (o .:? "importJob"))
instance ToJSON ImportCryptoKeyVersionRequest where
toJSON ImportCryptoKeyVersionRequest'{..}
= object
(catMaybes
[("rsaAesWrappedKey" .=) <$> _ickvrRsaAESWrAppedKey,
("algorithm" .=) <$> _ickvrAlgorithm,
("importJob" .=) <$> _ickvrImportJob])
-- | Request message for \`TestIamPermissions\` method.
--
-- /See:/ 'testIAMPermissionsRequest' smart constructor.
newtype TestIAMPermissionsRequest =
TestIAMPermissionsRequest'
{ _tiprPermissions :: Maybe [Text]
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TestIAMPermissionsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiprPermissions'
testIAMPermissionsRequest
:: TestIAMPermissionsRequest
testIAMPermissionsRequest =
TestIAMPermissionsRequest' {_tiprPermissions = Nothing}
-- | The set of permissions to check for the \`resource\`. Permissions with
-- wildcards (such as \'*\' or \'storage.*\') are not allowed. For more
-- information see [IAM
-- Overview](https:\/\/cloud.google.com\/iam\/docs\/overview#permissions).
tiprPermissions :: Lens' TestIAMPermissionsRequest [Text]
tiprPermissions
= lens _tiprPermissions
(\ s a -> s{_tiprPermissions = a})
. _Default
. _Coerce
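-- A hedged example (not part of the generated source): a request checking
-- two concrete, non-wildcard permissions. The permission strings are
-- illustrative only; '&' and '.~' are assumed from Control.Lens, with
-- OverloadedStrings for the literals.
--
-- > testIAMPermissionsRequest
-- >   & tiprPermissions .~ [ "cloudkms.cryptoKeys.get"
-- >                        , "cloudkms.cryptoKeys.list" ]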
instance FromJSON TestIAMPermissionsRequest where
parseJSON
= withObject "TestIAMPermissionsRequest"
(\ o ->
TestIAMPermissionsRequest' <$>
(o .:? "permissions" .!= mempty))
instance ToJSON TestIAMPermissionsRequest where
toJSON TestIAMPermissionsRequest'{..}
= object
(catMaybes [("permissions" .=) <$> _tiprPermissions])
-- | ExternalProtectionLevelOptions stores a group of additional fields for
-- configuring a CryptoKeyVersion that are specific to the EXTERNAL
-- protection level.
--
-- /See:/ 'externalProtectionLevelOptions' smart constructor.
newtype ExternalProtectionLevelOptions =
ExternalProtectionLevelOptions'
{ _eploExternalKeyURI :: Maybe Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ExternalProtectionLevelOptions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eploExternalKeyURI'
externalProtectionLevelOptions
:: ExternalProtectionLevelOptions
externalProtectionLevelOptions =
ExternalProtectionLevelOptions' {_eploExternalKeyURI = Nothing}
-- | The URI for an external resource that this CryptoKeyVersion represents.
eploExternalKeyURI :: Lens' ExternalProtectionLevelOptions (Maybe Text)
eploExternalKeyURI
= lens _eploExternalKeyURI
(\ s a -> s{_eploExternalKeyURI = a})
instance FromJSON ExternalProtectionLevelOptions
where
parseJSON
= withObject "ExternalProtectionLevelOptions"
(\ o ->
ExternalProtectionLevelOptions' <$>
(o .:? "externalKeyUri"))
instance ToJSON ExternalProtectionLevelOptions where
toJSON ExternalProtectionLevelOptions'{..}
= object
(catMaybes
[("externalKeyUri" .=) <$> _eploExternalKeyURI])
-- | Response message for \`TestIamPermissions\` method.
--
-- /See:/ 'testIAMPermissionsResponse' smart constructor.
newtype TestIAMPermissionsResponse =
TestIAMPermissionsResponse'
{ _tiamprPermissions :: Maybe [Text]
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TestIAMPermissionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiamprPermissions'
testIAMPermissionsResponse
:: TestIAMPermissionsResponse
testIAMPermissionsResponse =
TestIAMPermissionsResponse' {_tiamprPermissions = Nothing}
-- | A subset of \`TestPermissionsRequest.permissions\` that the caller is
-- allowed.
tiamprPermissions :: Lens' TestIAMPermissionsResponse [Text]
tiamprPermissions
= lens _tiamprPermissions
(\ s a -> s{_tiamprPermissions = a})
. _Default
. _Coerce
instance FromJSON TestIAMPermissionsResponse where
parseJSON
= withObject "TestIAMPermissionsResponse"
(\ o ->
TestIAMPermissionsResponse' <$>
(o .:? "permissions" .!= mempty))
instance ToJSON TestIAMPermissionsResponse where
toJSON TestIAMPermissionsResponse'{..}
= object
(catMaybes
[("permissions" .=) <$> _tiamprPermissions])
-- | A Digest holds a cryptographic message digest.
--
-- /See:/ 'digest' smart constructor.
data Digest =
Digest'
{ _dSha512 :: !(Maybe Bytes)
, _dSha384 :: !(Maybe Bytes)
, _dSha256 :: !(Maybe Bytes)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Digest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dSha512'
--
-- * 'dSha384'
--
-- * 'dSha256'
digest
:: Digest
digest = Digest' {_dSha512 = Nothing, _dSha384 = Nothing, _dSha256 = Nothing}
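-- A hedged sketch (not part of the generated source): wrapping a
-- precomputed SHA-256 digest. @sha256Bytes :: ByteString@ stands for a
-- 32-byte digest computed elsewhere with a hashing library; '&' and '?~'
-- are assumed from Control.Lens.
--
-- > digest & dSha256 ?~ sha256Bytes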
-- | A message digest produced with the SHA-512 algorithm.
dSha512 :: Lens' Digest (Maybe ByteString)
dSha512
= lens _dSha512 (\ s a -> s{_dSha512 = a}) .
mapping _Bytes
-- | A message digest produced with the SHA-384 algorithm.
dSha384 :: Lens' Digest (Maybe ByteString)
dSha384
= lens _dSha384 (\ s a -> s{_dSha384 = a}) .
mapping _Bytes
-- | A message digest produced with the SHA-256 algorithm.
dSha256 :: Lens' Digest (Maybe ByteString)
dSha256
= lens _dSha256 (\ s a -> s{_dSha256 = a}) .
mapping _Bytes
instance FromJSON Digest where
parseJSON
= withObject "Digest"
(\ o ->
Digest' <$>
(o .:? "sha512") <*> (o .:? "sha384") <*>
(o .:? "sha256"))
instance ToJSON Digest where
toJSON Digest'{..}
= object
(catMaybes
[("sha512" .=) <$> _dSha512,
("sha384" .=) <$> _dSha384,
("sha256" .=) <$> _dSha256])
-- | An Identity and Access Management (IAM) policy, which specifies access
-- controls for Google Cloud resources. A \`Policy\` is a collection of
-- \`bindings\`. A \`binding\` binds one or more \`members\` to a single
-- \`role\`. Members can be user accounts, service accounts, Google groups,
-- and domains (such as G Suite). A \`role\` is a named list of
-- permissions; each \`role\` can be an IAM predefined role or a
-- user-created custom role. For some types of Google Cloud resources, a
-- \`binding\` can also specify a \`condition\`, which is a logical
-- expression that allows access to a resource only if the expression
-- evaluates to \`true\`. A condition can add constraints based on
-- attributes of the request, the resource, or both. To learn which
-- resources support conditions in their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
-- **JSON example:** { \"bindings\": [ { \"role\":
-- \"roles\/resourcemanager.organizationAdmin\", \"members\": [
-- \"user:mike\'example.com\", \"group:admins\'example.com\",
-- \"domain:google.com\",
-- \"serviceAccount:my-project-id\'appspot.gserviceaccount.com\" ] }, {
-- \"role\": \"roles\/resourcemanager.organizationViewer\", \"members\": [
-- \"user:eve\'example.com\" ], \"condition\": { \"title\": \"expirable
-- access\", \"description\": \"Does not grant access after Sep 2020\",
-- \"expression\": \"request.time \<
-- timestamp(\'2020-10-01T00:00:00.000Z\')\", } } ], \"etag\":
-- \"BwWWja0YfJA=\", \"version\": 3 } **YAML example:** bindings: -
-- members: - user:mike\'example.com - group:admins\'example.com -
-- domain:google.com -
-- serviceAccount:my-project-id\'appspot.gserviceaccount.com role:
-- roles\/resourcemanager.organizationAdmin - members: -
-- user:eve\'example.com role: roles\/resourcemanager.organizationViewer
-- condition: title: expirable access description: Does not grant access
-- after Sep 2020 expression: request.time \<
-- timestamp(\'2020-10-01T00:00:00.000Z\') - etag: BwWWja0YfJA= - version:
-- 3 For a description of IAM and its features, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/docs\/).
--
-- /See:/ 'policy' smart constructor.
data Policy =
Policy'
{ _pAuditConfigs :: !(Maybe [AuditConfig])
, _pEtag :: !(Maybe Bytes)
, _pVersion :: !(Maybe (Textual Int32))
, _pBindings :: !(Maybe [Binding])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Policy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pAuditConfigs'
--
-- * 'pEtag'
--
-- * 'pVersion'
--
-- * 'pBindings'
policy
:: Policy
policy =
Policy'
{ _pAuditConfigs = Nothing
, _pEtag = Nothing
, _pVersion = Nothing
, _pBindings = Nothing
}
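-- A hedged construction example (not part of the generated source),
-- mirroring the JSON example above: a version 3 policy with a single
-- binding. @adminBinding :: Binding@ is assumed to be built separately;
-- '&', '?~' and '.~' come from Control.Lens.
--
-- > policy
-- >   & pVersion  ?~ 3
-- >   & pBindings .~ [adminBinding]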
-- | Specifies cloud audit logging configuration for this policy.
pAuditConfigs :: Lens' Policy [AuditConfig]
pAuditConfigs
= lens _pAuditConfigs
(\ s a -> s{_pAuditConfigs = a})
. _Default
. _Coerce
-- | \`etag\` is used for optimistic concurrency control as a way to help
-- prevent simultaneous updates of a policy from overwriting each other. It
-- is strongly suggested that systems make use of the \`etag\` in the
-- read-modify-write cycle to perform policy updates in order to avoid race
-- conditions: An \`etag\` is returned in the response to \`getIamPolicy\`,
-- and systems are expected to put that etag in the request to
-- \`setIamPolicy\` to ensure that their change will be applied to the same
-- version of the policy. **Important:** If you use IAM Conditions, you
-- must include the \`etag\` field whenever you call \`setIamPolicy\`. If
-- you omit this field, then IAM allows you to overwrite a version \`3\`
-- policy with a version \`1\` policy, and all of the conditions in the
-- version \`3\` policy are lost.
pEtag :: Lens' Policy (Maybe ByteString)
pEtag
= lens _pEtag (\ s a -> s{_pEtag = a}) .
mapping _Bytes
-- | Specifies the format of the policy. Valid values are \`0\`, \`1\`, and
-- \`3\`. Requests that specify an invalid value are rejected. Any
-- operation that affects conditional role bindings must specify version
-- \`3\`. This requirement applies to the following operations: * Getting a
-- policy that includes a conditional role binding * Adding a conditional
-- role binding to a policy * Changing a conditional role binding in a
-- policy * Removing any role binding, with or without a condition, from a
-- policy that includes conditions **Important:** If you use IAM
-- Conditions, you must include the \`etag\` field whenever you call
-- \`setIamPolicy\`. If you omit this field, then IAM allows you to
-- overwrite a version \`3\` policy with a version \`1\` policy, and all of
-- the conditions in the version \`3\` policy are lost. If a policy does
-- not include any conditions, operations on that policy may specify any
-- valid version or leave the field unset. To learn which resources support
-- conditions in their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
pVersion :: Lens' Policy (Maybe Int32)
pVersion
= lens _pVersion (\ s a -> s{_pVersion = a}) .
mapping _Coerce
-- | Associates a list of \`members\` to a \`role\`. Optionally, may specify
-- a \`condition\` that determines how and when the \`bindings\` are
-- applied. Each of the \`bindings\` must contain at least one member.
pBindings :: Lens' Policy [Binding]
pBindings
= lens _pBindings (\ s a -> s{_pBindings = a}) .
_Default
. _Coerce
instance FromJSON Policy where
parseJSON
= withObject "Policy"
(\ o ->
Policy' <$>
(o .:? "auditConfigs" .!= mempty) <*> (o .:? "etag")
<*> (o .:? "version")
<*> (o .:? "bindings" .!= mempty))
instance ToJSON Policy where
toJSON Policy'{..}
= object
(catMaybes
[("auditConfigs" .=) <$> _pAuditConfigs,
("etag" .=) <$> _pEtag, ("version" .=) <$> _pVersion,
("bindings" .=) <$> _pBindings])
-- | Cross-service attributes for the location. For example
-- {\"cloud.googleapis.com\/region\": \"us-east1\"}
--
-- /See:/ 'locationLabels' smart constructor.
newtype LocationLabels =
LocationLabels'
{ _llAddtional :: HashMap Text Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LocationLabels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llAddtional'
locationLabels
:: HashMap Text Text -- ^ 'llAddtional'
-> LocationLabels
locationLabels pLlAddtional_ =
LocationLabels' {_llAddtional = _Coerce # pLlAddtional_}
llAddtional :: Lens' LocationLabels (HashMap Text Text)
llAddtional
= lens _llAddtional (\ s a -> s{_llAddtional = a}) .
_Coerce
instance FromJSON LocationLabels where
parseJSON
= withObject "LocationLabels"
(\ o -> LocationLabels' <$> (parseJSONObject o))
instance ToJSON LocationLabels where
toJSON = toJSON . _llAddtional
-- | Cloud KMS metadata for the given google.cloud.location.Location.
--
-- /See:/ 'locationMetadata' smart constructor.
data LocationMetadata =
LocationMetadata'
{ _lmHSMAvailable :: !(Maybe Bool)
, _lmEkmAvailable :: !(Maybe Bool)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LocationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lmHSMAvailable'
--
-- * 'lmEkmAvailable'
locationMetadata
:: LocationMetadata
locationMetadata =
LocationMetadata' {_lmHSMAvailable = Nothing, _lmEkmAvailable = Nothing}
-- | Indicates whether CryptoKeys with protection_level HSM can be created in
-- this location.
lmHSMAvailable :: Lens' LocationMetadata (Maybe Bool)
lmHSMAvailable
= lens _lmHSMAvailable
(\ s a -> s{_lmHSMAvailable = a})
-- | Indicates whether CryptoKeys with protection_level EXTERNAL can be
-- created in this location.
lmEkmAvailable :: Lens' LocationMetadata (Maybe Bool)
lmEkmAvailable
= lens _lmEkmAvailable
(\ s a -> s{_lmEkmAvailable = a})
instance FromJSON LocationMetadata where
parseJSON
= withObject "LocationMetadata"
(\ o ->
LocationMetadata' <$>
(o .:? "hsmAvailable") <*> (o .:? "ekmAvailable"))
instance ToJSON LocationMetadata where
toJSON LocationMetadata'{..}
= object
(catMaybes
[("hsmAvailable" .=) <$> _lmHSMAvailable,
("ekmAvailable" .=) <$> _lmEkmAvailable])
-- | Provides the configuration for logging a type of permissions. Example: {
-- \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\",
-- \"exempted_members\": [ \"user:jose\'example.com\" ] }, { \"log_type\":
-- \"DATA_WRITE\" } ] } This enables \'DATA_READ\' and \'DATA_WRITE\'
-- logging, while exempting jose\'example.com from DATA_READ logging.
--
-- /See:/ 'auditLogConfig' smart constructor.
data AuditLogConfig =
AuditLogConfig'
{ _alcLogType :: !(Maybe AuditLogConfigLogType)
, _alcExemptedMembers :: !(Maybe [Text])
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AuditLogConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alcLogType'
--
-- * 'alcExemptedMembers'
auditLogConfig
:: AuditLogConfig
auditLogConfig =
AuditLogConfig' {_alcLogType = Nothing, _alcExemptedMembers = Nothing}
-- | The log type that this config enables.
alcLogType :: Lens' AuditLogConfig (Maybe AuditLogConfigLogType)
alcLogType
= lens _alcLogType (\ s a -> s{_alcLogType = a})
-- | Specifies the identities that do not cause logging for this type of
-- permission. Follows the same format of Binding.members.
alcExemptedMembers :: Lens' AuditLogConfig [Text]
alcExemptedMembers
= lens _alcExemptedMembers
(\ s a -> s{_alcExemptedMembers = a})
. _Default
. _Coerce
instance FromJSON AuditLogConfig where
parseJSON
= withObject "AuditLogConfig"
(\ o ->
AuditLogConfig' <$>
(o .:? "logType") <*>
(o .:? "exemptedMembers" .!= mempty))
instance ToJSON AuditLogConfig where
toJSON AuditLogConfig'{..}
= object
(catMaybes
[("logType" .=) <$> _alcLogType,
("exemptedMembers" .=) <$> _alcExemptedMembers])
-- | Response message for KeyManagementService.ListCryptoKeys.
--
-- /See:/ 'listCryptoKeysResponse' smart constructor.
data ListCryptoKeysResponse =
ListCryptoKeysResponse'
{ _lckrCryptoKeys :: !(Maybe [CryptoKey])
, _lckrNextPageToken :: !(Maybe Text)
, _lckrTotalSize :: !(Maybe (Textual Int32))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListCryptoKeysResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lckrCryptoKeys'
--
-- * 'lckrNextPageToken'
--
-- * 'lckrTotalSize'
listCryptoKeysResponse
:: ListCryptoKeysResponse
listCryptoKeysResponse =
ListCryptoKeysResponse'
{ _lckrCryptoKeys = Nothing
, _lckrNextPageToken = Nothing
, _lckrTotalSize = Nothing
}
-- | The list of CryptoKeys.
lckrCryptoKeys :: Lens' ListCryptoKeysResponse [CryptoKey]
lckrCryptoKeys
= lens _lckrCryptoKeys
(\ s a -> s{_lckrCryptoKeys = a})
. _Default
. _Coerce
-- | A token to retrieve next page of results. Pass this value in
-- ListCryptoKeysRequest.page_token to retrieve the next page of results.
lckrNextPageToken :: Lens' ListCryptoKeysResponse (Maybe Text)
lckrNextPageToken
= lens _lckrNextPageToken
(\ s a -> s{_lckrNextPageToken = a})
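-- A hedged paging sketch (not part of the generated source): a non-empty
-- token is fed back into the next list call. @fetchNextPage@, @allKeys@ and
-- the request's page-token lens are stand-ins for whatever the generated
-- ListCryptoKeys resource module exposes; '^.' is assumed from Control.Lens.
--
-- > case resp ^. lckrNextPageToken of
-- >   Nothing  -> pure allKeys
-- >   Just tok -> fetchNextPage tok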
-- | The total number of CryptoKeys that matched the query.
lckrTotalSize :: Lens' ListCryptoKeysResponse (Maybe Int32)
lckrTotalSize
= lens _lckrTotalSize
(\ s a -> s{_lckrTotalSize = a})
. mapping _Coerce
instance FromJSON ListCryptoKeysResponse where
parseJSON
= withObject "ListCryptoKeysResponse"
(\ o ->
ListCryptoKeysResponse' <$>
(o .:? "cryptoKeys" .!= mempty) <*>
(o .:? "nextPageToken")
<*> (o .:? "totalSize"))
instance ToJSON ListCryptoKeysResponse where
toJSON ListCryptoKeysResponse'{..}
= object
(catMaybes
[("cryptoKeys" .=) <$> _lckrCryptoKeys,
("nextPageToken" .=) <$> _lckrNextPageToken,
("totalSize" .=) <$> _lckrTotalSize])
-- | Request message for KeyManagementService.AsymmetricDecrypt.
--
-- /See:/ 'asymmetricDecryptRequest' smart constructor.
data AsymmetricDecryptRequest =
AsymmetricDecryptRequest'
{ _adrCiphertext :: !(Maybe Bytes)
, _adrCiphertextCrc32c :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AsymmetricDecryptRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'adrCiphertext'
--
-- * 'adrCiphertextCrc32c'
asymmetricDecryptRequest
:: AsymmetricDecryptRequest
asymmetricDecryptRequest =
AsymmetricDecryptRequest'
{_adrCiphertext = Nothing, _adrCiphertextCrc32c = Nothing}
-- | Required. The data encrypted with the named CryptoKeyVersion\'s public
-- key using OAEP.
adrCiphertext :: Lens' AsymmetricDecryptRequest (Maybe ByteString)
adrCiphertext
= lens _adrCiphertext
(\ s a -> s{_adrCiphertext = a})
. mapping _Bytes
-- | Optional. An optional CRC32C checksum of the
-- AsymmetricDecryptRequest.ciphertext. If specified, KeyManagementService
-- will verify the integrity of the received
-- AsymmetricDecryptRequest.ciphertext using this checksum.
-- KeyManagementService will report an error if the checksum verification
-- fails. If you receive a checksum error, your client should verify that
-- CRC32C(AsymmetricDecryptRequest.ciphertext) is equal to
-- AsymmetricDecryptRequest.ciphertext_crc32c, and if so, perform a limited
-- number of retries. A persistent mismatch may indicate an issue in your
-- computation of the CRC32C checksum. Note: This field is defined as int64
-- for reasons of compatibility across different languages. However, it is
-- a non-negative integer, which will never exceed 2^32-1, and can be
-- safely downconverted to uint32 in languages that support this type.
adrCiphertextCrc32c :: Lens' AsymmetricDecryptRequest (Maybe Int64)
adrCiphertextCrc32c
= lens _adrCiphertextCrc32c
(\ s a -> s{_adrCiphertextCrc32c = a})
. mapping _Coerce
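-- A hedged sketch of the client-side flow described above (not part of the
-- generated source). It assumes a @crc32c :: ByteString -> Word32@ helper
-- from an external CRC32C library, a ciphertext @ct :: ByteString@, and the
-- '&'/'?~' operators from Control.Lens.
--
-- > asymmetricDecryptRequest
-- >   & adrCiphertext       ?~ ct
-- >   & adrCiphertextCrc32c ?~ fromIntegral (crc32c ct)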
instance FromJSON AsymmetricDecryptRequest where
parseJSON
= withObject "AsymmetricDecryptRequest"
(\ o ->
AsymmetricDecryptRequest' <$>
(o .:? "ciphertext") <*> (o .:? "ciphertextCrc32c"))
instance ToJSON AsymmetricDecryptRequest where
toJSON AsymmetricDecryptRequest'{..}
= object
(catMaybes
[("ciphertext" .=) <$> _adrCiphertext,
("ciphertextCrc32c" .=) <$> _adrCiphertextCrc32c])
-- | A CryptoKeyVersion represents an individual cryptographic key, and the
-- associated key material. An ENABLED version can be used for
-- cryptographic operations. For security reasons, the raw cryptographic
-- key material represented by a CryptoKeyVersion can never be viewed or
-- exported. It can only be used to encrypt, decrypt, or sign data when an
-- authorized user or application invokes Cloud KMS.
--
-- /See:/ 'cryptoKeyVersion' smart constructor.
data CryptoKeyVersion =
CryptoKeyVersion'
{ _ckvState :: !(Maybe CryptoKeyVersionState)
, _ckvAttestation :: !(Maybe KeyOperationAttestation)
, _ckvGenerateTime :: !(Maybe DateTime')
, _ckvImportFailureReason :: !(Maybe Text)
, _ckvName :: !(Maybe Text)
, _ckvAlgorithm :: !(Maybe CryptoKeyVersionAlgorithm)
, _ckvDestroyTime :: !(Maybe DateTime')
, _ckvImportJob :: !(Maybe Text)
, _ckvProtectionLevel :: !(Maybe CryptoKeyVersionProtectionLevel)
, _ckvImportTime :: !(Maybe DateTime')
, _ckvExternalProtectionLevelOptions :: !(Maybe ExternalProtectionLevelOptions)
, _ckvDestroyEventTime :: !(Maybe DateTime')
, _ckvCreateTime :: !(Maybe DateTime')
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CryptoKeyVersion' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ckvState'
--
-- * 'ckvAttestation'
--
-- * 'ckvGenerateTime'
--
-- * 'ckvImportFailureReason'
--
-- * 'ckvName'
--
-- * 'ckvAlgorithm'
--
-- * 'ckvDestroyTime'
--
-- * 'ckvImportJob'
--
-- * 'ckvProtectionLevel'
--
-- * 'ckvImportTime'
--
-- * 'ckvExternalProtectionLevelOptions'
--
-- * 'ckvDestroyEventTime'
--
-- * 'ckvCreateTime'
cryptoKeyVersion
:: CryptoKeyVersion
cryptoKeyVersion =
CryptoKeyVersion'
{ _ckvState = Nothing
, _ckvAttestation = Nothing
, _ckvGenerateTime = Nothing
, _ckvImportFailureReason = Nothing
, _ckvName = Nothing
, _ckvAlgorithm = Nothing
, _ckvDestroyTime = Nothing
, _ckvImportJob = Nothing
, _ckvProtectionLevel = Nothing
, _ckvImportTime = Nothing
, _ckvExternalProtectionLevelOptions = Nothing
, _ckvDestroyEventTime = Nothing
, _ckvCreateTime = Nothing
}
-- | The current state of the CryptoKeyVersion.
ckvState :: Lens' CryptoKeyVersion (Maybe CryptoKeyVersionState)
ckvState = lens _ckvState (\ s a -> s{_ckvState = a})
-- | Output only. Statement that was generated and signed by the HSM at key
-- creation time. Use this statement to verify attributes of the key as
-- stored on the HSM, independently of Google. Only provided for key
-- versions with protection_level HSM.
ckvAttestation :: Lens' CryptoKeyVersion (Maybe KeyOperationAttestation)
ckvAttestation
= lens _ckvAttestation
(\ s a -> s{_ckvAttestation = a})
-- | Output only. The time this CryptoKeyVersion\'s key material was
-- generated.
ckvGenerateTime :: Lens' CryptoKeyVersion (Maybe UTCTime)
ckvGenerateTime
= lens _ckvGenerateTime
(\ s a -> s{_ckvGenerateTime = a})
. mapping _DateTime
-- | Output only. The root cause of an import failure. Only present if state
-- is IMPORT_FAILED.
ckvImportFailureReason :: Lens' CryptoKeyVersion (Maybe Text)
ckvImportFailureReason
= lens _ckvImportFailureReason
(\ s a -> s{_ckvImportFailureReason = a})
-- | Output only. The resource name for this CryptoKeyVersion in the format
-- \`projects\/*\/locations\/*\/keyRings\/*\/cryptoKeys\/*\/cryptoKeyVersions\/*\`.
ckvName :: Lens' CryptoKeyVersion (Maybe Text)
ckvName = lens _ckvName (\ s a -> s{_ckvName = a})
-- | Output only. The CryptoKeyVersionAlgorithm that this CryptoKeyVersion
-- supports.
ckvAlgorithm :: Lens' CryptoKeyVersion (Maybe CryptoKeyVersionAlgorithm)
ckvAlgorithm
= lens _ckvAlgorithm (\ s a -> s{_ckvAlgorithm = a})
-- | Output only. The time this CryptoKeyVersion\'s key material is scheduled
-- for destruction. Only present if state is DESTROY_SCHEDULED.
ckvDestroyTime :: Lens' CryptoKeyVersion (Maybe UTCTime)
ckvDestroyTime
= lens _ckvDestroyTime
(\ s a -> s{_ckvDestroyTime = a})
. mapping _DateTime
-- | Output only. The name of the ImportJob used to import this
-- CryptoKeyVersion. Only present if the underlying key material was
-- imported.
ckvImportJob :: Lens' CryptoKeyVersion (Maybe Text)
ckvImportJob
= lens _ckvImportJob (\ s a -> s{_ckvImportJob = a})
-- | Output only. The ProtectionLevel describing how crypto operations are
-- performed with this CryptoKeyVersion.
ckvProtectionLevel :: Lens' CryptoKeyVersion (Maybe CryptoKeyVersionProtectionLevel)
ckvProtectionLevel
= lens _ckvProtectionLevel
(\ s a -> s{_ckvProtectionLevel = a})
-- | Output only. The time at which this CryptoKeyVersion\'s key material was
-- imported.
ckvImportTime :: Lens' CryptoKeyVersion (Maybe UTCTime)
ckvImportTime
= lens _ckvImportTime
(\ s a -> s{_ckvImportTime = a})
. mapping _DateTime
-- | ExternalProtectionLevelOptions stores a group of additional fields for
-- configuring a CryptoKeyVersion that are specific to the EXTERNAL
-- protection level.
ckvExternalProtectionLevelOptions :: Lens' CryptoKeyVersion (Maybe ExternalProtectionLevelOptions)
ckvExternalProtectionLevelOptions
= lens _ckvExternalProtectionLevelOptions
(\ s a -> s{_ckvExternalProtectionLevelOptions = a})
-- | Output only. The time this CryptoKeyVersion\'s key material was
-- destroyed. Only present if state is DESTROYED.
ckvDestroyEventTime :: Lens' CryptoKeyVersion (Maybe UTCTime)
ckvDestroyEventTime
= lens _ckvDestroyEventTime
(\ s a -> s{_ckvDestroyEventTime = a})
. mapping _DateTime
-- | Output only. The time at which this CryptoKeyVersion was created.
ckvCreateTime :: Lens' CryptoKeyVersion (Maybe UTCTime)
ckvCreateTime
= lens _ckvCreateTime
(\ s a -> s{_ckvCreateTime = a})
. mapping _DateTime
instance FromJSON CryptoKeyVersion where
parseJSON
= withObject "CryptoKeyVersion"
(\ o ->
CryptoKeyVersion' <$>
(o .:? "state") <*> (o .:? "attestation") <*>
(o .:? "generateTime")
<*> (o .:? "importFailureReason")
<*> (o .:? "name")
<*> (o .:? "algorithm")
<*> (o .:? "destroyTime")
<*> (o .:? "importJob")
<*> (o .:? "protectionLevel")
<*> (o .:? "importTime")
<*> (o .:? "externalProtectionLevelOptions")
<*> (o .:? "destroyEventTime")
<*> (o .:? "createTime"))
instance ToJSON CryptoKeyVersion where
toJSON CryptoKeyVersion'{..}
= object
(catMaybes
[("state" .=) <$> _ckvState,
("attestation" .=) <$> _ckvAttestation,
("generateTime" .=) <$> _ckvGenerateTime,
("importFailureReason" .=) <$>
_ckvImportFailureReason,
("name" .=) <$> _ckvName,
("algorithm" .=) <$> _ckvAlgorithm,
("destroyTime" .=) <$> _ckvDestroyTime,
("importJob" .=) <$> _ckvImportJob,
("protectionLevel" .=) <$> _ckvProtectionLevel,
("importTime" .=) <$> _ckvImportTime,
("externalProtectionLevelOptions" .=) <$>
_ckvExternalProtectionLevelOptions,
("destroyEventTime" .=) <$> _ckvDestroyEventTime,
("createTime" .=) <$> _ckvCreateTime])
-- | Request message for KeyManagementService.Encrypt.
--
-- /See:/ 'encryptRequest' smart constructor.
data EncryptRequest =
EncryptRequest'
{ _erAdditionalAuthenticatedData :: !(Maybe Bytes)
, _erAdditionalAuthenticatedDataCrc32c :: !(Maybe (Textual Int64))
, _erPlaintextCrc32c :: !(Maybe (Textual Int64))
, _erPlaintext :: !(Maybe Bytes)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'EncryptRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'erAdditionalAuthenticatedData'
--
-- * 'erAdditionalAuthenticatedDataCrc32c'
--
-- * 'erPlaintextCrc32c'
--
-- * 'erPlaintext'
encryptRequest
:: EncryptRequest
encryptRequest =
EncryptRequest'
{ _erAdditionalAuthenticatedData = Nothing
, _erAdditionalAuthenticatedDataCrc32c = Nothing
, _erPlaintextCrc32c = Nothing
, _erPlaintext = Nothing
}
-- | Optional. Optional data that, if specified, must also be provided during
-- decryption through DecryptRequest.additional_authenticated_data. The
-- maximum size depends on the key version\'s protection_level. For
-- SOFTWARE keys, the AAD must be no larger than 64KiB. For HSM keys, the
-- combined length of the plaintext and additional_authenticated_data
-- fields must be no larger than 8KiB.
erAdditionalAuthenticatedData :: Lens' EncryptRequest (Maybe ByteString)
erAdditionalAuthenticatedData
= lens _erAdditionalAuthenticatedData
(\ s a -> s{_erAdditionalAuthenticatedData = a})
. mapping _Bytes
-- | Optional. An optional CRC32C checksum of the
-- EncryptRequest.additional_authenticated_data. If specified,
-- KeyManagementService will verify the integrity of the received
-- EncryptRequest.additional_authenticated_data using this checksum.
-- KeyManagementService will report an error if the checksum verification
-- fails. If you receive a checksum error, your client should verify that
-- CRC32C(EncryptRequest.additional_authenticated_data) is equal to
-- EncryptRequest.additional_authenticated_data_crc32c, and if so, perform
-- a limited number of retries. A persistent mismatch may indicate an issue
-- in your computation of the CRC32C checksum. Note: This field is defined
-- as int64 for reasons of compatibility across different languages.
-- However, it is a non-negative integer, which will never exceed 2^32-1,
-- and can be safely downconverted to uint32 in languages that support this
-- type.
erAdditionalAuthenticatedDataCrc32c :: Lens' EncryptRequest (Maybe Int64)
erAdditionalAuthenticatedDataCrc32c
= lens _erAdditionalAuthenticatedDataCrc32c
(\ s a ->
s{_erAdditionalAuthenticatedDataCrc32c = a})
. mapping _Coerce
-- | Optional. An optional CRC32C checksum of the EncryptRequest.plaintext.
-- If specified, KeyManagementService will verify the integrity of the
-- received EncryptRequest.plaintext using this checksum.
-- KeyManagementService will report an error if the checksum verification
-- fails. If you receive a checksum error, your client should verify that
-- CRC32C(EncryptRequest.plaintext) is equal to
-- EncryptRequest.plaintext_crc32c, and if so, perform a limited number of
-- retries. A persistent mismatch may indicate an issue in your computation
-- of the CRC32C checksum. Note: This field is defined as int64 for reasons
-- of compatibility across different languages. However, it is a
-- non-negative integer, which will never exceed 2^32-1, and can be safely
-- downconverted to uint32 in languages that support this type.
erPlaintextCrc32c :: Lens' EncryptRequest (Maybe Int64)
erPlaintextCrc32c
= lens _erPlaintextCrc32c
(\ s a -> s{_erPlaintextCrc32c = a})
. mapping _Coerce
-- | Required. The data to encrypt. Must be no larger than 64KiB. The maximum
-- size depends on the key version\'s protection_level. For SOFTWARE keys,
-- the plaintext must be no larger than 64KiB. For HSM keys, the combined
-- length of the plaintext and additional_authenticated_data fields must be
-- no larger than 8KiB.
erPlaintext :: Lens' EncryptRequest (Maybe ByteString)
erPlaintext
= lens _erPlaintext (\ s a -> s{_erPlaintext = a}) .
mapping _Bytes
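-- A hedged example (not part of the generated source): plaintext plus
-- additional authenticated data, staying well under the 64KiB / 8KiB limits
-- described above. '&' and '?~' are assumed from Control.Lens, with
-- OverloadedStrings for the ByteString literals.
--
-- > encryptRequest
-- >   & erPlaintext                   ?~ "top secret"
-- >   & erAdditionalAuthenticatedData ?~ "tenant-42"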
instance FromJSON EncryptRequest where
parseJSON
= withObject "EncryptRequest"
(\ o ->
EncryptRequest' <$>
(o .:? "additionalAuthenticatedData") <*>
(o .:? "additionalAuthenticatedDataCrc32c")
<*> (o .:? "plaintextCrc32c")
<*> (o .:? "plaintext"))
instance ToJSON EncryptRequest where
toJSON EncryptRequest'{..}
= object
(catMaybes
[("additionalAuthenticatedData" .=) <$>
_erAdditionalAuthenticatedData,
("additionalAuthenticatedDataCrc32c" .=) <$>
_erAdditionalAuthenticatedDataCrc32c,
("plaintextCrc32c" .=) <$> _erPlaintextCrc32c,
("plaintext" .=) <$> _erPlaintext])
-- | Associates \`members\` with a \`role\`.
--
-- /See:/ 'binding' smart constructor.
data Binding =
Binding'
{ _bMembers :: !(Maybe [Text])
, _bRole :: !(Maybe Text)
, _bCondition :: !(Maybe Expr)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Binding' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bMembers'
--
-- * 'bRole'
--
-- * 'bCondition'
binding
:: Binding
binding =
Binding' {_bMembers = Nothing, _bRole = Nothing, _bCondition = Nothing}
-- | Specifies the identities requesting access for a Cloud Platform
-- resource. \`members\` can have the following values: * \`allUsers\`: A
-- special identifier that represents anyone who is on the internet; with
-- or without a Google account. * \`allAuthenticatedUsers\`: A special
-- identifier that represents anyone who is authenticated with a Google
-- account or a service account. * \`user:{emailid}\`: An email address
-- that represents a specific Google account. For example,
-- \`alice\'example.com\` . * \`serviceAccount:{emailid}\`: An email
-- address that represents a service account. For example,
-- \`my-other-app\'appspot.gserviceaccount.com\`. * \`group:{emailid}\`: An
-- email address that represents a Google group. For example,
-- \`admins\'example.com\`. * \`deleted:user:{emailid}?uid={uniqueid}\`: An
-- email address (plus unique identifier) representing a user that has been
-- recently deleted. For example,
-- \`alice\'example.com?uid=123456789012345678901\`. If the user is
-- recovered, this value reverts to \`user:{emailid}\` and the recovered
-- user retains the role in the binding. *
-- \`deleted:serviceAccount:{emailid}?uid={uniqueid}\`: An email address
-- (plus unique identifier) representing a service account that has been
-- recently deleted. For example,
-- \`my-other-app\'appspot.gserviceaccount.com?uid=123456789012345678901\`.
-- If the service account is undeleted, this value reverts to
-- \`serviceAccount:{emailid}\` and the undeleted service account retains
-- the role in the binding. * \`deleted:group:{emailid}?uid={uniqueid}\`:
-- An email address (plus unique identifier) representing a Google group
-- that has been recently deleted. For example,
-- \`admins\'example.com?uid=123456789012345678901\`. If the group is
-- recovered, this value reverts to \`group:{emailid}\` and the recovered
-- group retains the role in the binding. * \`domain:{domain}\`: The G
-- Suite domain (primary) that represents all the users of that domain. For
-- example, \`google.com\` or \`example.com\`.
bMembers :: Lens' Binding [Text]
bMembers
= lens _bMembers (\ s a -> s{_bMembers = a}) .
_Default
. _Coerce
-- | Role that is assigned to \`members\`. For example, \`roles\/viewer\`,
-- \`roles\/editor\`, or \`roles\/owner\`.
bRole :: Lens' Binding (Maybe Text)
bRole = lens _bRole (\ s a -> s{_bRole = a})
-- | The condition that is associated with this binding. If the condition
-- evaluates to \`true\`, then this binding applies to the current request.
-- If the condition evaluates to \`false\`, then this binding does not
-- apply to the current request. However, a different role binding might
-- grant the same role to one or more of the members in this binding. To
-- learn which resources support conditions in their IAM policies, see the
-- [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
bCondition :: Lens' Binding (Maybe Expr)
bCondition
= lens _bCondition (\ s a -> s{_bCondition = a})
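-- A hedged example (not part of the generated source): a binding granting
-- one role to a user and a service account, in the member formats listed
-- above. The role and member strings are illustrative; '&', '?~' and '.~'
-- are assumed from Control.Lens, with OverloadedStrings.
--
-- > binding
-- >   & bRole    ?~ "roles/cloudkms.viewer"
-- >   & bMembers .~ [ "user:[email protected]"
-- >                 , "serviceAccount:[email protected]" ]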
instance FromJSON Binding where
parseJSON
= withObject "Binding"
(\ o ->
Binding' <$>
(o .:? "members" .!= mempty) <*> (o .:? "role") <*>
(o .:? "condition"))
instance ToJSON Binding where
toJSON Binding'{..}
= object
(catMaybes
[("members" .=) <$> _bMembers,
("role" .=) <$> _bRole,
("condition" .=) <$> _bCondition])
| brendanhay/gogol | gogol-cloudkms/gen/Network/Google/CloudKMS/Types/Product.hs | mpl-2.0 | 114,554 | 0 | 23 | 24,642 | 16,712 | 9,711 | 7,001 | 1,874 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.Metros.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of metros.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.metros.list@.
module Network.Google.Resource.DFAReporting.Metros.List
(
-- * REST Resource
MetrosListResource
-- * Creating a Request
, metrosList
, MetrosList
-- * Request Lenses
, mlProFileId
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.metros.list@ method which the
-- 'MetrosList' request conforms to.
type MetrosListResource =
"dfareporting" :>
"v2.7" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"metros" :>
QueryParam "alt" AltJSON :>
Get '[JSON] MetrosListResponse
-- | Retrieves a list of metros.
--
-- /See:/ 'metrosList' smart constructor.
newtype MetrosList = MetrosList'
{ _mlProFileId :: Textual Int64
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MetrosList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mlProFileId'
metrosList
:: Int64 -- ^ 'mlProFileId'
-> MetrosList
metrosList pMlProFileId_ =
MetrosList'
{ _mlProFileId = _Coerce # pMlProFileId_
}
-- | User profile ID associated with this request.
mlProFileId :: Lens' MetrosList Int64
mlProFileId
= lens _mlProFileId (\ s a -> s{_mlProFileId = a}) .
_Coerce
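-- A hedged end-to-end sketch (not part of the generated module): sending
-- the request with the gogol runtime. 'newEnv', 'runResourceT', 'runGoogle'
-- and 'send' are assumed from the gogol core library (Network.Google); the
-- credential and scope configuration is omitted, and the profile ID is
-- illustrative.
--
-- > env <- newEnv
-- > runResourceT . runGoogle env $ send (metrosList 1234567)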
instance GoogleRequest MetrosList where
type Rs MetrosList = MetrosListResponse
type Scopes MetrosList =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient MetrosList'{..}
= go _mlProFileId (Just AltJSON) dFAReportingService
where go
= buildClient (Proxy :: Proxy MetrosListResource)
mempty
| rueshyna/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/Metros/List.hs | mpl-2.0 | 2,748 | 0 | 13 | 638 | 320 | 194 | 126 | 50 | 1 |
{-# Language CPP #-}
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import ClassyPrelude.Yesod
import qualified Control.Exception as Exception
import Data.Aeson (Result (..), fromJSON, withObject, (.!=),
(.:?))
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Database.Persist.Postgresql (PostgresConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Config2 (applyEnvValue, configSettingsYml)
import Yesod.Default.Util (WidgetFileSettings, widgetFileNoReload,
widgetFileReload)
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
{ appStaticDir :: String
-- ^ Directory from which to serve static files.
, appDatabaseConf :: PostgresConf
-- ^ Configuration settings for accessing the database.
, appRoot :: Maybe Text
-- ^ Base for all generated URLs. If @Nothing@, determined
-- from the request headers.
, appHost :: HostPreference
-- ^ Host/interface the server should bind to.
, appPort :: Int
-- ^ Port to listen on
, appIpFromHeader :: Bool
-- ^ Get the IP address from the header when logging. Useful when sitting
-- behind a reverse proxy.
, appDetailedRequestLogging :: Bool
-- ^ Use detailed request logging system
, appShouldLogAll :: Bool
-- ^ Should all log messages be displayed?
, appReloadTemplates :: Bool
-- ^ Use the reload version of templates
, appMutableStatic :: Bool
-- ^ Assume that files in the static dir may change after compilation
, appSkipCombining :: Bool
-- ^ Perform no stylesheet/script combining
-- Example app-specific configuration values.
, appCopyright :: Text
-- ^ Copyright text to appear in the footer of the page
, appAnalytics :: Maybe Text
-- ^ Google Analytics code
, appStaticRoot :: Maybe Text
-- ^ Static asset serving site.
}
instance FromJSON AppSettings where
parseJSON = withObject "AppSettings" $ \o -> do
let defaultDev =
#if DEVELOPMENT
True
#else
False
#endif
appStaticDir' <- o .: "static-dir"
appDatabaseConf' <- o .: "database"
appRoot' <- o .:? "approot"
appHost' <- fromString <$> o .: "host"
appPort' <- o .: "port"
appIpFromHeader' <- o .: "ip-from-header"
appDetailedRequestLogging' <- o .:? "detailed-logging" .!= defaultDev
appShouldLogAll' <- o .:? "should-log-all" .!= defaultDev
appReloadTemplates' <- o .:? "reload-templates" .!= defaultDev
appMutableStatic' <- o .:? "mutable-static" .!= defaultDev
appSkipCombining' <- o .:? "skip-combining" .!= defaultDev
appCopyright' <- o .: "copyright"
appAnalytics' <- o .:? "analytics"
appStaticRoot' <- o .:? "static-root"
return $ AppSettings appStaticDir'
appDatabaseConf'
appRoot'
appHost'
appPort'
appIpFromHeader'
appDetailedRequestLogging'
appShouldLogAll'
appReloadTemplates'
appMutableStatic'
appSkipCombining'
appCopyright'
appAnalytics'
appStaticRoot'
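-- A hedged example of loading these settings at runtime instead of at
-- compile time. It assumes 'loadYamlSettings' and 'useEnv' from
-- Yesod.Default.Config2, which this module does not currently import
-- (only 'applyEnvValue' and 'configSettingsYml' are imported above):
--
-- > settings <- loadYamlSettings [configSettingsYml] [] useEnv :: IO AppSettings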
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = (if appReloadTemplates compileTimeAppSettings
then widgetFileReload
else widgetFileNoReload)
widgetFileSettings
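-- A hedged usage note: in a scaffolded handler this splice picks up
-- templates/homepage.hamlet (plus any matching lucius/julius files). The
-- handler and template names are illustrative, and 'Handler'/'Html' come
-- from the application's Foundation, not from this module:
--
-- > getHomeR :: Handler Html
-- > getHomeR = defaultLayout $(widgetFile "homepage")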
-- | Raw bytes at compile time of @config/settings.yml@
configSettingsYmlBS :: ByteString
configSettingsYmlBS = $(embedFile configSettingsYml)
-- | @config/settings.yml@, parsed to a @Value@.
configSettingsYmlValue :: Value
configSettingsYmlValue = either Exception.throw id $ decodeEither' configSettingsYmlBS
-- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
compileTimeAppSettings :: AppSettings
compileTimeAppSettings =
case fromJSON $ applyEnvValue False mempty configSettingsYmlValue of
Error e -> error e
Success settings -> settings
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets'
(appSkipCombining compileTimeAppSettings)
combineSettings
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts'
(appSkipCombining compileTimeAppSettings)
combineSettings
| sajith/betty-web | Settings.hs | agpl-3.0 | 6,212 | 0 | 12 | 1,899 | 752 | 429 | 323 | -1 | -1 |
module Blockchain.Node.AccountSpec where
import Control.Concurrent.STM.TVar (readTVarIO)
import qualified Data.Map.Strict as Map
import Data.Text (Text)
import Data.Time (getCurrentTime)
import Test.Hspec
import Blockchain.Node.Account ((<+|), Account)
import qualified Blockchain.Node.Account as Acc
import Blockchain.Node.Config (BlockchainConfig(..), defaultConfig)
import qualified Blockchain.Node.Core as Core
import qualified Blockchain.Node.MemPool as MemPool
import Blockchain.Node.Transaction (Transaction, Operation(..))
import qualified Blockchain.Node.Transaction as T
import qualified Blockchain.Node.Signature as Signature
testConfig :: BlockchainConfig
testConfig = defaultConfig { miningDifficulty = 1 }
newTransfer :: Signature.KeyPair -> Text -> Text -> Int -> IO Transaction
newTransfer (pubKey, privKey) sender recipient amount = do
currentTime <- getCurrentTime
let operation = Transfer sender recipient amount currentTime
signature <- T.sign privKey operation
return $ T.Transaction
pubKey
signature
operation
spec :: Spec
spec = do
describe "Transaction validation" $ do
it "creates new mining reward, new transaction and validates mempool" $ do
currentTime <- getCurrentTime
nodeState <- Core.newNodeState testConfig
_ <- Core.runApp nodeState $ do
reward <- Core.getMiningReward currentTime
_ <- Core.addTransaction reward -- add to mempool
Core.newTransaction "randomguy123" 1 currentTime
memPool <- readTVarIO $ Core.memPool nodeState
let memPoolTransactions = MemPool.unMemPool memPool
(length memPoolTransactions) `shouldBe` 2
describe "Addition operator <+| test" $ do
it "creates empty account for sender and recipient, and inserts transaction" $ do
let senderId = "sender"
recipientId = "recipient"
senderAccount = Acc.getOrCreate senderId Map.empty
recipientAccount = Acc.getOrCreate recipientId Map.empty
keyPair <- Signature.newKeyPair
transaction <- newTransfer keyPair senderId recipientId 1
let accountMap = Acc.toAccountsByAddress [transaction]
Acc.isNotGeneratingNegativeBalance transaction Map.empty `shouldBe` False
-- recipient has positive balance
Acc.isValid (recipientAccount <+| transaction) `shouldBe` True
Acc.balance (recipientAccount <+| transaction) `shouldBe` 1
-- sender has negative balance
Acc.isValid (senderAccount <+| transaction) `shouldBe` False
Acc.balance (senderAccount <+| transaction) `shouldBe` -1
it "creates empty account for sender and recipient (which are the same), \
\and inserts transaction" $ do
let senderId = "sender"
recipientId = senderId
senderAccount = Acc.getOrCreate senderId Map.empty
recipientAccount = Acc.getOrCreate recipientId Map.empty
keyPair <- Signature.newKeyPair
transaction <- newTransfer keyPair senderId recipientId 1
let accountMap = Acc.toAccountsByAddress [transaction]
Acc.isNotGeneratingNegativeBalance transaction Map.empty `shouldBe` True
-- recipient has positive balance
Acc.isValid (recipientAccount <+| transaction) `shouldBe` True
Acc.balance (recipientAccount <+| transaction) `shouldBe` 0
-- sender has negative balance
Acc.isValid (senderAccount <+| transaction) `shouldBe` True
Acc.balance (senderAccount <+| transaction) `shouldBe` 0
| carbolymer/blockchain | blockchain-node/test/Blockchain/Node/AccountSpec.hs | apache-2.0 | 3,524 | 0 | 19 | 731 | 824 | 433 | 391 | 66 | 1 |
main = do
putStrLn "What's your name?"
name <- getLine
putStrLn ("Nice to meet you, " ++ name ++ ".")
| Oscarzhao/haskell | learnyouahaskell/hello.hs | apache-2.0 | 108 | 0 | 10 | 26 | 36 | 16 | 20 | 4 | 1 |
module Import
( module Import
) where
import Prelude as Import hiding (head, init, last,
readFile, tail, writeFile)
import Yesod as Import hiding (Route (..))
import Control.Applicative as Import (pure, (<$>), (<*>))
import Data.Text as Import (Text)
import Foundation as Import
import Model as Import
import Settings as Import
import Settings.Development as Import
import Settings.StaticFiles as Import
#if __GLASGOW_HASKELL__ >= 704
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat),
(<>))
#else
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat))
infixr 5 <>
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
import Network.HTTP.Types as Import
( status200
, status201
, status400
, status403
, status404
)
| burz/sonada | Import.hs | apache-2.0 | 1,180 | 0 | 6 | 550 | 167 | 121 | 46 | -1 | -1 |
module Staircase.A282574 (a282574) where
import Helpers.Stairs (finalStaircaseState)
a282574 n = position where
(_, position, _) = finalStaircaseState n
| peterokagey/haskellOEIS | src/Staircase/A282574.hs | apache-2.0 | 156 | 0 | 7 | 21 | 49 | 28 | 21 | 4 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module      : Graphics.UI.MainUI
-- Copyright   :
-- License     :
--
-- Maintainer  :
-- Stability   : unstable
-- Portability : portable
--
-----------------------------------------------------------------------------
module Graphics.UI.MainUI(
loadUI
) where
import System.Directory
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Builder
data MainUI = MainUI { mainWindow :: Window }
-- |Loads the main ui file for the gui.
loadUI :: FilePath -> IO ()
loadUI gladepath = do
initGUI
builder <- builderNew
builderAddFromFile builder gladepath
ui <- buildUI builder
widgetShowAll $ mainWindow ui
mainGUI
buildUI :: Builder -> IO MainUI
buildUI builder = do
mw <- builderGetObject builder castToWindow "mainWindow"
onDestroy mw mainQuit
return $ MainUI mw
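-- A minimal hedged usage sketch: the Glade file path is illustrative, and
-- the file is expected to define a toplevel object named "mainWindow" for
-- 'buildUI' to look up.
--
-- > main :: IO ()
-- > main = loadUI "data/mainWindow.glade"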
| creichert/hsgui | src/Graphics/UI/MainUI.hs | bsd-2-clause | 933 | 0 | 8 | 212 | 172 | 91 | 81 | 19 | 1 |
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Data.Time
import Database.Persist.Quasi
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist sqlSettings, mkMigrate "migrateAll"]
$(persistFileWith lowerCaseSettings "config/models")
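-- A hedged illustration of what an entry in config/models might look like
-- (the entity and field names are examples only, not taken from this
-- project; 'lowerCaseSettings' above controls how they map to SQL names):
--
-- > Person
-- >     name Text
-- >     age Int Maybe
-- >     deriving Show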
| seizans/ppl | Model.hs | bsd-2-clause | 428 | 0 | 8 | 59 | 64 | 37 | 27 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-} -- Why?
-----------------------------------------------------------------------------
-- |
-- Module      :  Parsimony.Stream
-- Copyright : (c) Iavor S. Diatchki 2009
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : provisional
--
-- A generic way to extract tokens from a stream.
--
-----------------------------------------------------------------------------
module Parsimony.Stream (Token(..), Stream(..)) where
import Parsimony.Prim
import Parsimony.Pos
import Parsimony.Error
import qualified Data.ByteString as Strict (ByteString,uncons)
import qualified Data.ByteString.Lazy as Lazy (ByteString,uncons)
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Data.Word (Word8)
import Numeric (showHex)
-- | A class describing useful token operations.
class Token token where
-- | How tokens affect file positions.
updatePos :: token -> SourcePos -> SourcePos
-- | How to display tokens.
showToken :: token -> String
instance Token Char where
updatePos c p = updatePosChar p c
showToken = show
instance Token Word8 where
updatePos _ p = incSourceColumn p 1
showToken b = "0x" ++ showHex b ""
-- We have the fun. dep. here because otherwise multiple
-- reads from a stream could give potentially different types of
-- tokens, which would lead to ambiguities.
-- | Streams of tokens.
class Token token => Stream stream token | stream -> token where
getToken :: PrimParser stream token
eof_err :: SourcePos -> Reply s a
eof_err p = Error $ newErrorMessage (UnExpect "end of input") p
{-# INLINE genToken #-}
genToken :: Token t => (i -> Maybe (t,i)) -> PrimParser i t
genToken unc (State i p) =
case unc i of
Nothing -> eof_err p
Just (t,ts) -> Ok t State { stateInput = ts
, statePos = updatePos t p
}
instance Token a => Stream [a] a where
getToken = genToken (\xs -> case xs of
[] -> Nothing
c : cs -> Just (c,cs))
instance Stream Strict.ByteString Word8 where
getToken = genToken Strict.uncons
instance Stream Lazy.ByteString Word8 where
getToken = genToken Lazy.uncons
instance Stream T.Text Char where
getToken = genToken T.uncons
instance Stream LT.Text Char where
getToken = genToken LT.uncons
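-- A hedged sketch of one more instance written in the same style via
-- 'genToken' (which is not exported, so such an instance would have to live
-- in this module). 'Seq', 'viewl', 'EmptyL' and ':<' are from
-- Data.Sequence, which is not imported above:
--
-- > instance Token a => Stream (Seq a) a where
-- >   getToken = genToken (\s -> case viewl s of
-- >                                EmptyL  -> Nothing
-- >                                t :< ts -> Just (t, ts))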
| yav/parsimony | src/Parsimony/Stream.hs | bsd-2-clause | 2,614 | 0 | 13 | 660 | 564 | 314 | 250 | 45 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TupleSections #-}
module Language.Haskell.Liquid.Bare.OfType (
ofBareType
, ofMeaSort
, ofBSort
, ofBPVar
, mkSpecType
, mkSpecType'
) where
import Prelude hiding (error)
import BasicTypes
import Name
import TyCon hiding (synTyConRhs_maybe)
import Type (expandTypeSynonyms)
import TysWiredIn
import Control.Monad.Reader hiding (forM)
import Control.Monad.State hiding (forM)
import Data.Maybe (fromMaybe)
import Data.Traversable (forM)
import Text.Parsec.Pos
import Text.Printf
import qualified Control.Exception as Ex
import qualified Data.HashMap.Strict as M
import Language.Fixpoint.Types (Expr(..), Reftable, Symbol, meet, mkSubst, subst, symbol, mkEApp)
import Language.Haskell.Liquid.GHC.Misc
import Language.Haskell.Liquid.Misc (secondM)
import Language.Haskell.Liquid.Types.RefType
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.Types.Bounds
import Language.Haskell.Liquid.Bare.Env
import Language.Haskell.Liquid.Bare.Expand
import Language.Haskell.Liquid.Bare.Lookup
import Language.Haskell.Liquid.Bare.Resolve
-- import Language.Haskell.Liquid.Bare.RefToLogic
--------------------------------------------------------------------------------
ofBareType :: SourcePos -> BareType -> BareM SpecType
ofBareType l
= ofBRType expandRTAliasApp (resolve l <=< expandReft)
ofMeaSort :: BareType -> BareM SpecType
ofMeaSort
= ofBRType failRTAliasApp return
ofBSort :: BSort -> BareM RSort
ofBSort
= ofBRType failRTAliasApp return
--------------------------------------------------------------------------------
ofBPVar :: BPVar -> BareM RPVar
ofBPVar
= mapMPvar ofBSort
mapMPvar :: (Monad m) => (a -> m b) -> PVar a -> m (PVar b)
mapMPvar f (PV x t v txys)
= do t' <- forM t f
txys' <- mapM (\(t, x, y) -> liftM (, x, y) (f t)) txys
return $ PV x t' v txys'
--------------------------------------------------------------------------------
mkSpecType :: SourcePos -> BareType -> BareM SpecType
mkSpecType l t
= mkSpecType' l (ty_preds $ toRTypeRep t) t
mkSpecType' :: SourcePos -> [PVar BSort] -> BareType -> BareM SpecType
mkSpecType' l πs t
= ofBRType expandRTAliasApp resolveReft t
where
resolveReft
= (resolve l <=< expandReft) . txParam subvUReft (uPVar <$> πs) t
txParam f πs t = f (txPvar (predMap πs t))
txPvar :: M.HashMap Symbol UsedPVar -> UsedPVar -> UsedPVar
txPvar m π = π { pargs = args' }
where args' | not (null (pargs π)) = zipWith (\(_,x ,_) (t,_,y) -> (t, x, y)) (pargs π') (pargs π)
| otherwise = pargs π'
π' = fromMaybe (panic Nothing err) $ M.lookup (pname π) m
err = "Bare.replaceParams Unbound Predicate Variable: " ++ show π
predMap πs t = M.fromList [(pname π, π) | π <- πs ++ rtypePredBinds t]
rtypePredBinds = map uPVar . ty_preds . toRTypeRep
--------------------------------------------------------------------------------
ofBRType :: (PPrint r, UReftable r)
=> (SourcePos -> RTAlias RTyVar SpecType -> [BRType r] -> r -> BareM (RRType r))
-> (r -> BareM r)
-> BRType r
-> BareM (RRType r)
ofBRType appRTAlias resolveReft
= go
where
go t@(RApp _ _ _ _)
= do aliases <- (typeAliases . rtEnv) <$> get
goRApp aliases t
go (RAppTy t1 t2 r)
= RAppTy <$> go t1 <*> go t2 <*> resolveReft r
go (RFun x t1 t2 r)
= do env <- get
goRFun (bounds env) x t1 t2 r
go (RVar a r)
= RVar (symbolRTyVar a) <$> resolveReft r
go (RAllT a t)
= RAllT (symbolRTyVar a) <$> go t
go (RAllP a t)
= RAllP <$> ofBPVar a <*> go t
go (RAllS x t)
= RAllS x <$> go t
go (RAllE x t1 t2)
= RAllE x <$> go t1 <*> go t2
go (REx x t1 t2)
= REx x <$> go t1 <*> go t2
go (RRTy e r o t)
= RRTy <$> mapM (secondM go) e <*> resolveReft r <*> pure o <*> go t
go (RHole r)
= RHole <$> resolveReft r
go (RExprArg (Loc l l' e))
= RExprArg . Loc l l' <$> resolve l e
go_ref (RProp ss (RHole r))
= rPropP <$> mapM go_syms ss <*> resolveReft r
go_ref (RProp ss t)
= RProp <$> mapM go_syms ss <*> go t
go_syms
= secondM ofBSort
goRFun bounds _ (RApp c ps' _ _) t _ | Just bnd <- M.lookup c bounds
= do let (ts', ps) = splitAt (length $ tyvars bnd) ps'
ts <- mapM go ts'
makeBound bnd ts [x | RVar x _ <- ps] <$> go t
goRFun _ x t1 t2 r
= RFun x <$> go t1 <*> go t2 <*> resolveReft r
goRApp aliases (RApp (Loc l _ c) ts _ r) | Just rta <- M.lookup c aliases
= appRTAlias l rta ts =<< resolveReft r
goRApp _ (RApp lc ts rs r)
= do let l = loc lc
r' <- resolveReft r
lc' <- Loc l l <$> matchTyCon lc (length ts)
rs' <- mapM go_ref rs
ts' <- mapM go ts
bareTCApp r' lc' rs' ts'
goRApp _ _ = impossible Nothing "goRApp failed through to final case"
matchTyCon :: LocSymbol -> Int -> BareM TyCon
matchTyCon lc@(Loc _ _ c) arity
| isList c && arity == 1
= return listTyCon
| isTuple c
= return $ tupleTyCon BoxedTuple arity
| otherwise
= lookupGhcTyCon lc
--------------------------------------------------------------------------------
failRTAliasApp :: SourcePos -> RTAlias RTyVar SpecType -> [BRType r] -> r -> BareM (RRType r)
failRTAliasApp l rta _ _
= Ex.throw err
where
err :: Error
err = ErrIllegalAliasApp (sourcePosSrcSpan l) (pprint $ rtName rta) (sourcePosSrcSpan $ rtPos rta)
expandRTAliasApp :: SourcePos -> RTAlias RTyVar SpecType -> [BareType] -> RReft -> BareM SpecType
expandRTAliasApp l rta args r
| length args == length αs + length εs
= do ts <- mapM (ofBareType l) $ take (length αs) args
es <- mapM (resolve l . exprArg (show err)) $ drop (length αs) args
let tsu = zipWith (\α t -> (α, toRSort t, t)) αs ts
let esu = mkSubst $ zip (symbol <$> εs) es
return $ subst esu . (`strengthen` r) . subsTyVars_meet tsu $ rtBody rta
| otherwise
= Ex.throw err
where
αs = rtTArgs rta
εs = rtVArgs rta
err :: Error
err = ErrAliasApp (sourcePosSrcSpan l) (length args) (pprint $ rtName rta) (sourcePosSrcSpan $ rtPos rta) (length αs + length εs)
-- | exprArg converts a tyVar to an exprVar because the parser cannot tell them apart.
-- HORRIBLE HACK to allow treating upper-case X as a value variable X,
-- e.g. type Matrix a Row Col = List (List a Row) Col
exprArg _ (RExprArg e)
= val e
exprArg _ (RVar x _)
= EVar (symbol x)
exprArg _ (RApp x [] [] _)
= EVar (symbol x)
exprArg msg (RApp f ts [] _)
= mkEApp (symbol <$> f) (exprArg msg <$> ts)
exprArg msg (RAppTy (RVar f _) t _)
= mkEApp (dummyLoc $ symbol f) [exprArg msg t]
exprArg msg z
= panic Nothing $ printf "Unexpected expression parameter: %s in %s" (show z) msg
--------------------------------------------------------------------------------
bareTCApp r (Loc l _ c) rs ts | Just rhs <- synTyConRhs_maybe c
= do when (realTcArity c < length ts) (Ex.throw err)
return $ tyApp (subsTyVars_meet su $ ofType rhs) (drop nts ts) rs r
where
tvs = tyConTyVarsDef c
su = zipWith (\a t -> (rTyVar a, toRSort t, t)) tvs ts
nts = length tvs
err :: Error
err = ErrAliasApp (sourcePosSrcSpan l) (length ts) (pprint c) (getSrcSpan c) (realTcArity c)
-- TODO expandTypeSynonyms here too
bareTCApp r (Loc _ _ c) rs ts | isFamilyTyCon c && isTrivial t
= return $ expandRTypeSynonyms $ t `strengthen` r
where t = rApp c ts rs mempty
bareTCApp r (Loc _ _ c) rs ts
= return $ rApp c ts rs r
tyApp (RApp c ts rs r) ts' rs' r' = RApp c (ts ++ ts') (rs ++ rs') (r `meet` r')
tyApp t [] [] r = t `strengthen` r
tyApp _ _ _ _ = panic Nothing $ "Bare.Type.tyApp on invalid inputs"
expandRTypeSynonyms :: (PPrint r, Reftable r) => RRType r -> RRType r
expandRTypeSynonyms = ofType . expandTypeSynonyms . toType
| ssaavedra/liquidhaskell | src/Language/Haskell/Liquid/Bare/OfType.hs | bsd-3-clause | 8,063 | 0 | 16 | 2,008 | 3,081 | 1,547 | 1,534 | 179 | 16 |
{-# LANGUAGE ScopedTypeVariables #-}
module Language.Haskell.Refact.HaRe
(
-- * Data Structures
RefactSettings(..)
, VerboseLevel (..)
, defaultSettings
, SimpPos
-- * Refactorings
-- |Note: the 'Cradle' in the type signatures is the one from ghc-mod
, ifToCase
{-
, duplicateDef
, liftToTopLevel
, liftOneLevel
, demote
, rename
, swapArgs
-}
, roundTrip
)
where
import Language.Haskell.Refact.Refactoring.Case
-- import Language.Haskell.Refact.Refactoring.DupDef
-- import Language.Haskell.Refact.Refactoring.MoveDef
-- import Language.Haskell.Refact.Refactoring.Renaming
-- import Language.Haskell.Refact.Refactoring.SwapArgs
import Language.Haskell.Refact.Refactoring.RoundTrip
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.Types
| mpickering/HaRe | src/Language/Haskell/Refact/HaRe.hs | bsd-3-clause | 792 | 0 | 5 | 99 | 77 | 58 | 19 | 13 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.System
-- Copyright : Duncan Coutts 2007-2008
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Cabal often needs to do slightly different things on specific platforms. You
-- probably know about the 'System.Info.os' however using that is very
-- inconvenient because it is a string and different Haskell implementations
-- do not agree on using the same strings for the same platforms! (In
-- particular see the controversy over \"windows\" vs \"mingw32\"). So to make it
-- more consistent and easy to use we have an 'OS' enumeration.
--
module Distribution.System (
-- * Operating System
OS(..),
buildOS,
-- * Machine Architecture
Arch(..),
buildArch,
-- * Platform is a pair of arch and OS
Platform(..),
buildPlatform,
platformFromTriple
) where
import qualified System.Info (os, arch)
import qualified Data.Char as Char (toLower, isAlphaNum)
import Data.Data (Data)
import Data.Typeable (Typeable)
import Data.Maybe (fromMaybe, listToMaybe)
import Distribution.Text (Text(..), display)
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ((<>))
-- | How strict to be when classifying strings into the 'OS' and 'Arch' enums.
--
-- The reason we have multiple ways to do the classification is because there
-- are two situations where we need to do it.
--
-- For parsing os and arch names in .cabal files we really want everyone to be
-- referring to the same os or arch by the same name. Variety is not a virtue
-- in this case. We don't mind about case though.
--
-- For the System.Info.os\/arch different Haskell implementations use different
-- names for the same os\/arch. Also they tend to distinguish versions of an
-- os\/arch which we just don't care about.
--
-- The 'Compat' classification allows us to recognise aliases that are already
-- in common use but it allows us to distinguish them from the canonical name
-- which enables us to warn about such deprecated aliases.
--
data ClassificationStrictness = Permissive | Compat | Strict
-- ------------------------------------------------------------
-- * Operating System
-- ------------------------------------------------------------
data OS = Linux | Windows | OSX -- tier 1 desktop OSs
| FreeBSD | OpenBSD | NetBSD -- other free unix OSs
| Solaris | AIX | HPUX | IRIX -- ageing Unix OSs
| HaLVM -- bare metal / VMs / hypervisors
| IOS -- iOS
| OtherOS String
deriving (Eq, Ord, Show, Read, Typeable, Data)
--TODO: decide how to handle Android and iOS.
-- They are like Linux and OSX but with some differences.
-- Should they be separate from linux/osx, or a subtype?
-- e.g. should we have os(linux) && os(android) true simultaneously?
knownOSs :: [OS]
knownOSs = [Linux, Windows, OSX
,FreeBSD, OpenBSD, NetBSD
,Solaris, AIX, HPUX, IRIX
,HaLVM
,IOS]
osAliases :: ClassificationStrictness -> OS -> [String]
osAliases Permissive Windows = ["mingw32", "cygwin32"]
osAliases Compat Windows = ["mingw32", "win32"]
osAliases _ OSX = ["darwin"]
osAliases _ IOS = ["ios"]
osAliases Permissive FreeBSD = ["kfreebsdgnu"]
osAliases Permissive Solaris = ["solaris2"]
osAliases _ _ = []
instance Text OS where
disp (OtherOS name) = Disp.text name
disp other = Disp.text (lowercase (show other))
parse = fmap (classifyOS Compat) ident
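-- | Classify an OS name using the aliases permitted by the given strictness;
-- for example @classifyOS Permissive "mingw32"@ and @classifyOS Compat "mingw32"@
-- both give 'Windows', whereas @classifyOS Strict "mingw32"@ falls through to
-- @OtherOS "mingw32"@.  Matching is case-insensitive.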
classifyOS :: ClassificationStrictness -> String -> OS
classifyOS strictness s =
fromMaybe (OtherOS s) $ lookup (lowercase s) osMap
where
osMap = [ (name, os)
| os <- knownOSs
, name <- display os : osAliases strictness os ]
buildOS :: OS
buildOS = classifyOS Permissive System.Info.os
-- ------------------------------------------------------------
-- * Machine Architecture
-- ------------------------------------------------------------
data Arch = I386 | X86_64 | PPC | PPC64 | Sparc
| Arm | Mips | SH
| IA64 | S390
| Alpha | Hppa | Rs6000
| M68k | Vax
| OtherArch String
deriving (Eq, Ord, Show, Read, Typeable, Data)
knownArches :: [Arch]
knownArches = [I386, X86_64, PPC, PPC64, Sparc
,Arm, Mips, SH
,IA64, S390
,Alpha, Hppa, Rs6000
,M68k, Vax]
archAliases :: ClassificationStrictness -> Arch -> [String]
archAliases Strict _ = []
archAliases Compat _ = []
archAliases _ PPC = ["powerpc"]
archAliases _ PPC64 = ["powerpc64"]
archAliases _ Sparc = ["sparc64", "sun4"]
archAliases _ Mips = ["mipsel", "mipseb"]
archAliases _ Arm = ["armeb", "armel"]
archAliases _ _ = []
instance Text Arch where
disp (OtherArch name) = Disp.text name
disp other = Disp.text (lowercase (show other))
parse = fmap (classifyArch Strict) ident
classifyArch :: ClassificationStrictness -> String -> Arch
classifyArch strictness s =
fromMaybe (OtherArch s) $ lookup (lowercase s) archMap
where
archMap = [ (name, arch)
| arch <- knownArches
, name <- display arch : archAliases strictness arch ]
buildArch :: Arch
buildArch = classifyArch Permissive System.Info.arch
-- ------------------------------------------------------------
-- * Platform
-- ------------------------------------------------------------
data Platform = Platform Arch OS
deriving (Eq, Ord, Show, Read, Typeable, Data)
instance Text Platform where
disp (Platform arch os) = disp arch <> Disp.char '-' <> disp os
parse = do
arch <- parse
_ <- Parse.char '-'
os <- parse
return (Platform arch os)
-- | The platform Cabal was compiled on. In most cases,
-- @LocalBuildInfo.hostPlatform@ should be used instead (the platform we're
-- targeting).
buildPlatform :: Platform
buildPlatform = Platform buildArch buildOS
-- Utils:
ident :: Parse.ReadP r String
ident = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-')
--TODO: probably should disallow starting with a number
lowercase :: String -> String
lowercase = map Char.toLower
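-- | Parse a GNU target triple such as @"x86_64-unknown-linux"@ into a
-- 'Platform' (for that input, @Just (Platform X86_64 Linux)@); the vendor
-- component is parsed but otherwise ignored.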
platformFromTriple :: String -> Maybe Platform
platformFromTriple triple =
fmap fst (listToMaybe $ Parse.readP_to_S parseTriple triple)
where parseWord = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_')
parseTriple = do
arch <- fmap (classifyArch Strict) parseWord
_ <- Parse.char '-'
_ <- parseWord -- Skip vendor
_ <- Parse.char '-'
os <- fmap (classifyOS Compat) ident -- OS may have hyphens, like
-- 'nto-qnx'
return $ Platform arch os
| fpco/cabal | Cabal/Distribution/System.hs | bsd-3-clause | 6,920 | 0 | 13 | 1,616 | 1,444 | 816 | 628 | 112 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
{-|
Module : Numeric.AERN.Misc.QuickCheck
Description : miscellaneous utilities for QuickCheck
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
miscellaneous utilities for QuickCheck
-}
module Numeric.AERN.Misc.QuickCheck where
import qualified Data.List as List
import Test.QuickCheck
import Test.QuickCheck.Gen (unGen)
import qualified System.Random as R
import qualified Test.QuickCheck.Random as QCR
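-- | Produce a list of samples from the given generator, one for each even
-- size 0, 2, .., 2*n (i.e. @n+1@ samples of increasing size), each drawn from
-- its own split of a fresh random seed.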
sampleOfLength ::
(Gen a) ->
Int ->
IO [a]
sampleOfLength gen n =
do
rnd0 <- QCR.newQCGen
return [(m r n) | (r,n) <- rnds rnd0 `zip` [0,2..2*n] ]
where
m = unGen gen
rnds rnd = rnd1 : rnds rnd2
where (rnd1,rnd2) = R.split rnd
{-| Run the generator with size increased by 1 (useful for avoiding
too narrow selection at size 0 - in particular Double "randomly" generates
0 with probability 1 at size 0.
-}
incrSize :: Gen t -> Gen t
incrSize gen = sized (\size -> resize (size + 1) gen)
{-|
Probability of True is @n/m@. Precondition: @0<n<m@
-}
arbitraryBoolRatio :: Int {-^ @n@ -} -> Int {-^ @m@ -} -> Gen Bool
arbitraryBoolRatio n m | 0 < n && n < m =
frequency [(n, return True), (m - n, return False)]
arbitraryOrder :: (Ord t) => [t] -> Gen [t]
arbitraryOrder elems =
do
nums <- mapM (const arbitrary) elems
return $ permuteBy (nums :: [Int]) elems
where
permuteBy nums elems =
map snd $ List.sort $ zip nums elems
{-|
Have a fairly long and hairy sequence of elements of increasing complexity
pre-generated and fixed and then pick from it randomly.
This deals with the problem that the random generation takes a long time
    when the elements' construction is expensive, e.g. when functions are built
using a fairly large sequence of multiplications and additions.
-}
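-- Note that the quantity function is cumulative: @fixedRandSeqQuantityOfSize s@
-- gives the total number of elements produced for sizes @0..s@, so it should
-- be non-decreasing.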
fixedRandSeq ::
(Int -> Int) -> Gen a -> [a]
fixedRandSeq fixedRandSeqQuantityOfSize gen =
aux 0 0
where
aux prevQuantity size
= newSeqPortion ++ (aux currQuantity (size + 1))
where
newSeqPortion
=
take (currQuantity - prevQuantity) $
map (\g -> unGen gen g size)
randomGens
currQuantity = fixedRandSeqQuantityOfSize size
randomGens
= map snd $ drop 13 $ iterate (R.next . snd) (0,g)
g = QCR.mkQCGen 754657854089 -- no magic, just bashed at the keyboard at random
class ArbitraryWithParam t param
where
arbitraryWithParam :: param -> Gen t
instance
(ArbitraryWithParam t param,
Show t,
Testable prop)
=>
Testable (param, t -> prop)
where
property (param, fn) =
forAll (arbitraryWithParam param) fn
| michalkonecny/aern | aern-order/src/Numeric/AERN/Misc/QuickCheck.hs | bsd-3-clause | 2,881 | 0 | 13 | 779 | 681 | 361 | 320 | 54 | 1 |
module System.Build.Access.Bottom where
class Bottom r where
bottom ::
Maybe String
-> r
-> r
getBottom ::
r
-> Maybe String
| tonymorris/lastik | System/Build/Access/Bottom.hs | bsd-3-clause | 156 | 0 | 8 | 52 | 45 | 24 | 21 | 9 | 0 |
-- | Grover is a simple example program that shows how to write a
-- parser for commands with multiple modes. You build such parsers
-- using "Multiarg.Mode". It provides an example for the
-- documentation, and it also provides fodder for the QuickCheck
-- tests. You will want to look at the source code.
--
-- Grover has three modes: @int@, @string@, and @maybe@. Each of
-- these modes has three options: @-z@ or @--zero@, which takes no
-- arguments; @-s@ or @--single@, which takes one argument; @-d@ or
-- @--double@, which takes two arguments; and @-t@ or @--triple@,
-- which takes three arguments. The type of the argument depends on
-- the mode. For @int@, the argument or arguments must be an integer;
-- for @string@ the arguments can be any string; and for @maybe@ the
-- arguments must be a Maybe Int, such as @Nothing@ or @Just 5@.
--
-- Each mode also accepts any number of positional arguments, which
-- can be any string.
--
-- Grover handles simple errors right inside the parser by using the
-- @Either@ type as a return value.
module Multiarg.Examples.Grover where
import Control.Applicative
import Multiarg.Mode
import Text.Read (readMaybe)
-- | Grover's global options.
data Global
= Help
| Verbose Int
-- ^ The Int would indicate, for example, the desired level of
-- verbosity.
| Version
deriving (Eq, Ord, Show)
-- | Handles all options and positional arguments for any Grover mode.
data GroverOpt a
= Zero
| Single a
| Double a a
| Triple a a a
| PosArg String
deriving (Eq, Ord, Show)
instance Functor GroverOpt where
fmap f g = case g of
Zero -> Zero
Single a -> Single (f a)
Double a b -> Double (f a) (f b)
Triple a b c -> Triple (f a) (f b) (f c)
PosArg a -> PosArg a
-- | All of Grover's global options. The 'OptSpec' is parameterized
-- on an 'Either' to allow for error handling. If the user enters a
-- non-integer argument for the @--verbose@ option, a @Left@ with an
-- error message is returned.
globalOptSpecs :: [OptSpec (Either String Global)]
globalOptSpecs =
[ optSpec "h" ["help"] . ZeroArg . return $ Help
, optSpec "v" ["verbose"] . OneArg $ \s ->
Verbose <$> readErr s
, optSpec "" ["version"] . ZeroArg . return $ Version
]
-- | A list of 'OptSpec' that works for any 'Mode'.
modeOptSpecs :: Read a => [OptSpec (Either String (GroverOpt a))]
modeOptSpecs =
[ optSpec "z" ["zero"] . ZeroArg . Right $ Zero
, optSpec "s" ["single"] . OneArg $ \s -> Single <$> readErr s
, optSpec "d" ["double"] . TwoArg $ \s1 s2 ->
Double <$> readErr s1 <*> readErr s2
, optSpec "t" ["triple"] . ThreeArg $ \s1 s2 s3 ->
Triple <$> readErr s1 <*> readErr s2 <*> readErr s3
]
-- | Holds the results of parsing Grover's modes.
data Result
= Ints [Either String (GroverOpt Int)]
| Strings [Either String (GroverOpt String)]
| Maybes [Either String (GroverOpt (Maybe Int))]
deriving (Eq, Ord, Show)
-- | All Grover modes.
modes :: [Mode Result]
modes =
[ mode "int" modeOptSpecs (return . PosArg) Ints
, mode "string" modeOptSpecs (return . PosArg) Strings
, mode "maybe" modeOptSpecs (return . PosArg) Maybes
]
-- | Reads a value. If it cannot be read, returns an error message.
readErr :: Read a => String -> Either String a
readErr s = case readMaybe s of
Nothing -> Left $ "could not read value: " ++ s
Just a -> Right a
-- | Parses all of Grover's options and modes.
parseGrover
:: [String]
-- ^ Command line arguments, presumably from 'getArgs'
-> Either (String, [String])
(ModeResult (Either String Global) Result)
-- ^ Returns a 'Left' if there are errors, or a 'Right' if there are
-- no errors. (In an actual application, further processing of a
-- 'Right' would be necessary to determine whether all entered
-- arguments were valid.)
parseGrover = parseModeLine globalOptSpecs modes
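-- A usage sketch (wiring invented for illustration, with @getArgs@ from
-- "System.Environment"):
--
-- > main = getArgs >>= either (putStrLn . fst) handleResult . parseGrover
--
-- where @handleResult@ inspects the 'ModeResult' for global options and the
-- selected mode's 'Result'.  For instance, an invocation like
-- @grover int -s 3 foo@ selects the @int@ mode and yields a @Single 3@ option
-- plus a @PosArg "foo"@ positional argument (each wrapped in 'Right', since
-- parsing an argument may fail).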
| massysett/multiarg | lib/Multiarg/Examples/Grover.hs | bsd-3-clause | 3,871 | 0 | 12 | 820 | 818 | 441 | 377 | 56 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Generics.EMGM.Data.Maybe
-- Copyright : (c) 2008, 2009 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Summary: Generic representation and instances for 'Maybe'.
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverlappingInstances #-}
{-# OPTIONS -fno-warn-orphans #-}
{- OPTIONS -ddump-splices -}
module Generics.EMGM.Data.Maybe (
MaybeS,
conNothing,
conJust,
repMaybe,
frepMaybe,
frep2Maybe,
frep3Maybe,
bifrep2Maybe,
) where
import Control.Applicative (Alternative, pure)
import Generics.EMGM.Base
import Generics.EMGM.Functions.Collect
import Generics.EMGM.Functions.Everywhere
import Generics.EMGM.Functions.Meta
-----------------------------------------------------------------------------
-- Embedding-projection pair
-----------------------------------------------------------------------------
-- | Structure representation type for 'Maybe'.
type MaybeS a = Unit :+: a
epMaybe :: EP (Maybe a) (MaybeS a)
epMaybe = EP fromMaybe toMaybe
where
fromMaybe Nothing = L Unit
fromMaybe (Just a) = R a
toMaybe (L Unit) = Nothing
toMaybe (R a) = Just a
instance HasEP (Maybe a) (MaybeS a) where
epOf _ = epMaybe
-----------------------------------------------------------------------------
-- Representation values
-----------------------------------------------------------------------------
-- | Constructor description for 'Nothing'.
conNothing :: ConDescr
conNothing = ConDescr "Nothing" 0 False Prefix
-- | Constructor description for 'Just'.
conJust :: ConDescr
conJust = ConDescr "Just" 1 False Prefix
-- | Representation of 'Maybe' for 'frep'.
frepMaybe :: (Generic g) => g a -> g (Maybe a)
frepMaybe ra =
rtype
epMaybe
(rcon conNothing runit `rsum` rcon conJust ra)
-- | Representation of 'Maybe' for 'rep'.
repMaybe :: (Generic g, Rep g a) => g (Maybe a)
repMaybe =
rtype
epMaybe
(rcon conNothing rep `rsum` rcon conJust rep)
-- | Representation of 'Maybe' for 'frep2'.
frep2Maybe :: (Generic2 g) => g a b -> g (Maybe a) (Maybe b)
frep2Maybe ra =
rtype2
epMaybe epMaybe
(rcon2 conNothing runit2 `rsum2` rcon2 conJust ra)
-- | Representation of 'Maybe' for 'bifrep2'.
bifrep2Maybe :: (Generic2 g) => g a b -> g (Maybe a) (Maybe b)
bifrep2Maybe =
frep2Maybe
-- | Representation of 'Maybe' for 'frep3'.
frep3Maybe :: (Generic3 g) => g a b c -> g (Maybe a) (Maybe b) (Maybe c)
frep3Maybe ra =
rtype3
epMaybe epMaybe epMaybe
(rcon3 conNothing runit3 `rsum3` rcon3 conJust ra)
-----------------------------------------------------------------------------
-- Instance declarations
-----------------------------------------------------------------------------
instance (Generic g, Rep g a) => Rep g (Maybe a) where
rep = repMaybe
instance (Generic g) => FRep g Maybe where
frep = frepMaybe
instance (Generic2 g) => FRep2 g Maybe where
frep2 = frep2Maybe
instance (Generic3 g) => FRep3 g Maybe where
frep3 = frep3Maybe
instance (Alternative f) => Rep (Collect f (Maybe a)) (Maybe a) where
rep = Collect pure
instance (Rep (Everywhere (Maybe a)) a) => Rep (Everywhere (Maybe a)) (Maybe a) where
rep = Everywhere app
where
app f x =
case x of
Nothing -> f Nothing
Just v1 -> f (Just (selEverywhere rep f v1))
instance Rep (Everywhere' (Maybe a)) (Maybe a) where
rep = Everywhere' ($)
| spl/emgm | src/Generics/EMGM/Data/Maybe.hs | bsd-3-clause | 3,842 | 0 | 15 | 705 | 919 | 499 | 420 | 76 | 3 |
module Sigym4.Geometry.QuadTree (
QuadTree
, Box
, Quadrant (..)
, QtError (..)
, Node (..)
, Level (Level)
, generate
, grow
, growToInclude
, empty
, lookupByPoint
, traceRay
, traceRay2
, qtExtent
, qtLevel
, qtMinBox
) where
import Sigym4.Geometry.QuadTree.Internal.Types
import Sigym4.Geometry.QuadTree.Internal.Algorithms
| meteogrid/sigym4-geometry | src/Sigym4/Geometry/QuadTree.hs | bsd-3-clause | 366 | 0 | 5 | 82 | 89 | 62 | 27 | 21 | 0 |
----------------------------------------------------------------------------
-- |
-- Module : Text.RawString.QQ.Text
-- Copyright : (c) Sergey Vinokurov 2016
-- License : BSD3-style (see LICENSE)
-- Maintainer : [email protected]
-- Created : Friday, 23 September 2016
----------------------------------------------------------------------------
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Text.RawString.QQ.Text (text, textLazy) where
import qualified Data.List as L
import Data.Text as T
import Data.Text.Lazy as TL
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
text :: QuasiQuoter
text = mkQQ "text" $ \str -> [e| T.pack $(liftString $ stripPrefixes str) |]
textLazy :: QuasiQuoter
textLazy = mkQQ "textLazy" $ \str -> [e| TL.pack $(liftString $ stripPrefixes str) |]
mkQQ :: String -> (String -> Q Exp) -> QuasiQuoter
mkQQ qqName mkExp = QuasiQuoter
{ quoteExp = mkExp
, quotePat = \_ -> fail $ "Cannot use " ++ qqName ++ " in patterns"
, quoteType = \_ -> fail $ "Cannot use " ++ qqName ++ " in type"
, quoteDec = \_ -> fail $ "Cannot use " ++ qqName ++ " in declarations"
}
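-- | Drop everything up to and including the first @|@ on each line; for
-- example @stripPrefixes "a|foo\nb|bar\n"@ yields @"foo\nbar\n"@, and a line
-- containing no @|@ collapses to the empty string.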
stripPrefixes :: String -> String
stripPrefixes = L.unlines . L.map (L.drop 1 . L.dropWhile (/= '|')) . L.lines
| sergv/tags-server | tests/Text/RawString/QQ/Text.hs | bsd-3-clause | 1,285 | 0 | 11 | 228 | 277 | 168 | 109 | 20 | 1 |
--------------------------------------------------------------------------------
-- Copyright © 2011 National Institute of Aerospace / Galois, Inc.
--------------------------------------------------------------------------------
-- | Let expressions.
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE ExistentialQuantification #-}
module Copilot.Core.Locals
( Loc (..)
, locals
) where
import Copilot.Core
import Data.DList (DList, empty, singleton, append, concat, toList)
import Data.List (nubBy)
import Prelude hiding (concat, foldr)
--------------------------------------------------------------------------------
data Loc = forall a . Loc
{ localName :: Name
, localType :: Type a }
instance Show Loc where
show Loc { localName = name } = name
--------------------------------------------------------------------------------
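-- | Collect all let-bound local variables (every 'Local' node) occurring in
-- the specification's streams, triggers and observers, with duplicates (by
-- name) removed.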
locals :: Spec -> [Loc]
locals
Spec
{ specStreams = streams
, specTriggers = triggers
, specObservers = observers
} = nubBy eqLoc . toList $
concat (fmap locsStream streams) `append`
concat (fmap locsTrigger triggers) `append`
concat (fmap locsObserver observers)
where
eqLoc :: Loc -> Loc -> Bool
eqLoc Loc { localName = name1 } Loc { localName = name2 } =
name1 == name2
--------------------------------------------------------------------------------
locsStream :: Stream -> DList Loc
locsStream Stream { streamExpr = e } = locsExpr e
--------------------------------------------------------------------------------
locsTrigger :: Trigger -> DList Loc
locsTrigger Trigger { triggerGuard = e, triggerArgs = args } =
locsExpr e `append` concat (fmap locsUExpr args)
where
locsUExpr :: UExpr -> DList Loc
locsUExpr (UExpr _ e1) = locsExpr e1
--------------------------------------------------------------------------------
locsObserver :: Observer -> DList Loc
locsObserver Observer { observerExpr = e } = locsExpr e
--------------------------------------------------------------------------------
locsExpr :: Expr a -> DList Loc
locsExpr e0 = case e0 of
Const _ _ -> empty
Drop _ _ _ -> empty
Local t _ name e1 e2 -> singleton (Loc name t)
`append` locsExpr e1
`append` locsExpr e2
Var _ _ -> empty
ExternVar _ _ _ -> empty
ExternFun _ _ _ _ _ -> empty
ExternArray _ _ _ _ _ _ _ -> empty
Op1 _ e -> locsExpr e
Op2 _ e1 e2 -> locsExpr e1 `append` locsExpr e2
Op3 _ e1 e2 e3 -> locsExpr e1 `append` locsExpr e2
`append` locsExpr e3
--------------------------------------------------------------------------------
| leepike/copilot-core | src/Copilot/Core/Locals.hs | bsd-3-clause | 2,797 | 0 | 12 | 686 | 668 | 355 | 313 | 51 | 10 |
module ETests.Pretty.Type
( testPrettyTypeSignature
, testPrettyType
, prettyTypeSignatureSpecs
, prettyTypeSpecs
) where
import Control.Applicative
import Text.Parsec
import Text.PrettyPrint.HughesPJ
import Language.TheExperiment.Parser
import Language.TheExperiment.Parser.Type
import Language.TheExperiment.Pretty.Type
import ETests.Utils
import Test.Hspec
import Test.Hspec.HUnit()
testPrettyTypeSignature :: IO Specs
testPrettyTypeSignature = hspec prettyTypeSignatureSpecs
prettyTypeSignatureSpecs :: Specs
prettyTypeSignatureSpecs = describe "prettyTypeSignature" $
prettyTypeTestCases prettyFrom ++
[ it "pretty prints a type variable with constraints" $
"a : Int32 | UInt32 => a" `prettyFrom` "a : Int32 | UInt32 => a"
, it "pretty prints a function type with constraints" $
"a : Float | Double => a, a -> a" `prettyFrom`
"a : Float | Double => a, a -> a"
, it "pretty prints a function type with multiple constraints" $
"a : Float | Double, b : Int32 | UInt32 => a, a -> b" `prettyFrom`
"a : Float | Double, b : Int32 | UInt32 => a, a -> b"
]
where
prettyFrom expected input =
case runEParser "tests" input (aTypeSignature <* eof) of
Right result -> eTestAssertEqual "prettyTypeSignature"
expected
(render $ prettyTypeSignature result)
Left e -> error $ "fix your stupid test: " ++ show e
testPrettyType :: IO Specs
testPrettyType = hspec prettyTypeSpecs
prettyTypeTestCases :: (String -> String -> IO ()) -> [Specs]
prettyTypeTestCases prettyFrom =
[ it "pretty prints a type variable" $
"a" `prettyFrom` "a"
, it "pretty prints a type name" $
"Foo" `prettyFrom` "Foo"
, it "pretty prints a type function" $
"a, X, c -> c" `prettyFrom` "a, X, c -> c"
, it "pretty prints a function with no parameters" $
"-> Foo" `prettyFrom` "-> Foo"
, it "strips off unnecessary parens" $
"-> Foo" `prettyFrom` "(-> Foo)"
, it "doesn't strip off necessary parens" $
"a, (-> Int32) -> b" `prettyFrom` "a, (-> Int32) -> b"
, it "doesn't strip off necessary parens in function type" $
"a, Foo (f a) Int32 -> b" `prettyFrom` "a, Foo (f a) Int32 -> b"
, it "doesn't strip off necessary parens in function type" $
"a, Foo (a -> b) Int32 -> b" `prettyFrom` "a, Foo (a -> b) Int32 -> b"
, it "doesn't strip off necessary parens in type call" $
"Foo (Boo a) Int32" `prettyFrom` "Foo (Boo a) Int32"
, it "works in weird cases" $
"a -> (-> b)" `prettyFrom` "a -> (-> b)"
]
prettyTypeSpecs :: Specs
prettyTypeSpecs = describe "prettyType" $ prettyTypeTestCases prettyFrom
where
prettyFrom expected input =
case runEParser "tests" input (aType <* eof) of
Right result -> eTestAssertEqual "prettyType"
expected
(render $ prettyType result)
Left e -> error $ "fix your stupid test: " ++ show e
| jvranish/TheExperiment | test/ETests/Pretty/Type.hs | bsd-3-clause | 3,010 | 0 | 13 | 764 | 548 | 296 | 252 | 65 | 2 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
[lq| type Range Lo Hi = {v:Int | Lo <= v && v < Hi} |]
[lq| bow :: Range 0 100 |]
bow :: Int
bow = 12
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/tyExpr.hs | bsd-3-clause | 158 | 0 | 4 | 41 | 29 | 19 | 10 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Prelude
import CompareForm
import Web.Scotty.Trans
import qualified Arch
import Common
import Control.Monad.Trans.Reader
import System.Environment (getArgs)
import System.Directory (getCurrentDirectory)
import Routes
getBaseUrlArg :: IO String
getBaseUrlArg = do
args <- getArgs
case args of
(arg:[]) -> return arg
_ -> error "Base url arg not passed"
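-- | Read the base URL from the command line, load the pre-computed package
-- statistics from @packageStatistics.json@, and serve the comparison pages
-- with Scotty on port 3000.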
main :: IO ()
main = do
baseURl <- getBaseUrlArg
getCurrentDirectory >>= print
statisticsStore <- Arch.getPackagesStats "packageStatistics.json"
case statisticsStore of
Right y -> do
let readState = ArchCompareReadState baseURl y
scottyT 3000 (\x -> runReaderT x readState) $ do
get (literal "/comparePackage") $ comparePackageHandler
get "/comparePackage" $ comparePackageFormHandler
get "" $ comparePackageFormHandler
get (comparePackageRouteMatcher "/comparePackage") $ comparePackageHandler
Left e -> error $ "Unable to find data store: " ++ e
| chrissound/ArchPackageCompareStats | src/Main.hs | bsd-3-clause | 1,062 | 0 | 18 | 225 | 273 | 136 | 137 | 31 | 2 |
markup = <div>
<h1>Songs</h1>
<table>
<% do ss <- getViewDataValue_u "songs" :: View [String]
mapM genRow ss %>
</table>
</div>
genRow s = <tr>
<td>
<% show s %>
</td>
</tr>
| alsonkemp/turbinado-website | App/Views/Test/Index.hs | bsd-3-clause | 301 | 22 | 17 | 158 | 118 | 58 | 60 | -1 | -1 |
module BowlingKata.Day3Spec (spec) where
import Test.Hspec
import BowlingKata.Day3 (score)
spec :: Spec
spec = do
it "is a gutter game"
((score . replicate 20 $ 0) == 0)
it "rolls all ones"
((score . replicate 20 $ 1) == 20)
it "rolls one spare"
((score $ 5:5:3:(replicate 17 $ 0)) == 16)
it "rolls one strike"
((score $ 10:4:3:(replicate 16 $ 0)) == 24)
it "is a perfect game"
((score . replicate 12 $ 10) == 300)
| Alex-Diez/haskell-tdd-kata | old-katas/test/BowlingKata/Day3Spec.hs | bsd-3-clause | 537 | 0 | 14 | 199 | 211 | 108 | 103 | 15 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Diagrams.Backend.OpenSCad where
import Graphics.OpenSCAD as O
import Diagrams.Core.Transform
import Diagrams.Core.Types
import Diagrams.Prelude as D
import Diagrams.ThreeD
import Control.Lens
import Data.Tree
import Data.Typeable
#if __GLASGOW_HASKELL__ < 710
import Data.Foldable (foldMap)
#endif
import qualified Text.PrettyPrint.HughesPJ as PP
data OpenSCad = OpenSCad
deriving (Eq,Ord,Read,Show,Typeable)
type instance V OpenSCad = V3
type instance N OpenSCad = Double
instance Monoid (Render OpenSCad V3 Double) where
mempty = Osc mempty
(Osc i1) `mappend` (Osc i2) = Osc (i1 <> i2)
instance Backend OpenSCad V3 Double where
data Render OpenSCad V3 Double = Osc Model3d
type Result OpenSCad V3 Double = String
data Options OpenSCad V3 Double = OscOptions
renderRTree _ _ rt = O.render . go $ rt where
go :: RTree OpenSCad V3 Double a -> Model3d
go (Node (RPrim p) _) = unOsc $ D.render OpenSCad p
go (Node (RStyle s) ts) = setColor s $ foldMap go ts
go (Node _ ts) = foldMap go ts
unOsc :: Render OpenSCad V3 Double -> Model3d
unOsc (Osc is) = is
model3d :: (Renderable t OpenSCad, V t ~ V3, N t ~ Double) => t -> Model3d
model3d = unOsc . D.render OpenSCad
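-- A usage sketch (not part of this module): a diagram built from the
-- primitives below can be written out as OpenSCAD source with, e.g.
--
-- > writeFile "out.scad" (renderDia OpenSCad OscOptions myDiagram)
--
-- assuming a suitable @myDiagram@; the 'Result' of this backend is simply the
-- generated source as a 'String'.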
instance Renderable (Ellipsoid Double) OpenSCad where
render _ (Ellipsoid t) = Osc . multMatrix (asMatrix t) $ O.sphere 1 (fs 0.1)
instance Renderable (Box Double) OpenSCad where
render _ (Box t) = Osc . multMatrix (asMatrix t) $ box 1 1 1
instance Renderable (Frustum Double) OpenSCad where
render _ (Frustum r0 r1 t) = Osc . multMatrix (asMatrix t) $ obCylinder r0 1 r1 (fs 0.1)
instance Renderable (CSG Double) OpenSCad where
render _ (CsgEllipsoid p) = D.render OpenSCad p
render _ (CsgBox p) = D.render OpenSCad p
render _ (CsgFrustum p) = D.render OpenSCad p
render _ (CsgUnion csgs) = Osc . O.union . map model3d $ csgs
render _ (CsgIntersection csgs) = Osc . O.intersection . map model3d $ csgs
render _ (CsgDifference a b) = Osc $ O.difference (model3d a) (model3d b)
-- null instances so that the same Diagram can be rendered in image and geometry backends
instance Renderable (Camera l Double) OpenSCad where
render _ _ = mempty
instance Renderable (ParallelLight Double) OpenSCad where
render _ _ = mempty
instance Renderable (PointLight Double) OpenSCad where
render _ _ = mempty
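-- | Convert a diagrams 3D transformation into the homogeneous 4x4 matrix that
-- 'multMatrix' expects: the images of the basis vectors (from 'onBasis') form
-- the first three columns and the translation component fills the last one.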
asMatrix :: T3 Double -> TransMatrix
asMatrix tr = ((c1^._x, c2^._x, c3^._x, t^._x),
(c1^._y, c2^._y, c3^._y, t^._y),
(c1^._z, c2^._z, c3^._z, t^._z),
(0,0,0,1)
) where
([c1, c2, c3], t) = onBasis tr
setColor :: Style V3 Double -> Model3d -> Model3d
setColor sty m = case sty ^. _sc of
Nothing -> m
Just c -> O.color c m
| bergey/diagrams-openscad | src/Diagrams/Backend/OpenSCad.hs | bsd-3-clause | 3,185 | 0 | 12 | 830 | 1,136 | 591 | 545 | 67 | 2 |
-- |
{-# LANGUAGE TemplateHaskell,
FlexibleContexts, FlexibleInstances, TypeSynonymInstances
#-}
module Insomnia.ToF.Env (
Insomnia.Common.FreshName.withFreshName
, Insomnia.Common.FreshName.withFreshNames
, Env(..)
, tyConEnv
, sigEnv
, modEnv
, toplevelEnv
, tyVarEnv
, valConEnv
, valEnv
, TermVarProvenance(..)
, emptyToFEnv
, ToFM
, ToF
, runToFM
, followUserPathAnything
, Control.Monad.Except.throwError
) where
import Control.Lens
import Control.Monad.Reader
import Control.Monad.Except (MonadError(..), ExceptT, runExceptT)
import qualified Data.List as List
import qualified Data.Map as M
import Data.Monoid
import qualified Unbound.Generics.LocallyNameless as U
import Unbound.Generics.LocallyNameless (LFresh)
import Insomnia.Identifier
import Insomnia.Types
import Insomnia.Expr (Var)
import Insomnia.ValueConstructor (ValConName)
import Insomnia.Common.FreshName
import qualified FOmega.Syntax as F
import qualified FOmega.SemanticSig as F
-- | when we translate insomnia term variables, we keep track of
-- whether they came from a local binding or from a previous
-- definition in the current module.
data TermVarProvenance = LocalTermVar
| StructureTermVar !F.SemanticSig
deriving (Show)
data Env = Env { _tyConEnv :: M.Map TyConName F.SemanticSig
, _sigEnv :: M.Map SigIdentifier F.AbstractSig
, _modEnv :: M.Map Identifier (F.SemanticSig, F.Var)
, _toplevelEnv :: M.Map TopRef (F.SemanticSig, F.Var)
, _tyVarEnv :: M.Map TyVar (F.TyVar, F.Kind)
, _valConEnv :: M.Map ValConName (F.Var, F.Field)
, _valEnv :: M.Map Var (F.Var, TermVarProvenance)
}
$(makeLenses ''Env)
emptyToFEnv :: Env
emptyToFEnv = Env initialTyConEnv mempty mempty mempty mempty mempty mempty
initialTyConEnv :: M.Map TyConName F.SemanticSig
initialTyConEnv = M.fromList [(U.s2n "->",
F.TypeSem arrowLam ([F.KType, F.KType] `F.kArrs` F.KType))
, (U.s2n "Dist", F.TypeSem distLam (F.KType `F.KArr` F.KType))
, (U.s2n "Real", F.TypeSem (F.TV $ U.s2n "Real") F.KType)
, (U.s2n "Int", F.TypeSem (F.TV $ U.s2n "Int") F.KType)
]
where
arrowLam = F.TLam $ U.bind (alpha, U.embed F.KType) $
F.TLam $ U.bind (beta, U.embed F.KType) $
F.TArr (F.TV alpha) (F.TV beta)
distLam = F.TLam $ U.bind (alpha, U.embed F.KType) $ F.TDist (F.TV alpha)
alpha = U.s2n "α"
beta = U.s2n "β"
class (Functor m, LFresh m, MonadReader Env m, MonadError String m, MonadPlus m) => ToF m
type ToFM = ExceptT String (ReaderT Env U.LFreshM)
instance ToF ToFM
runToFM :: ToFM a -> a
runToFM m =
case U.runLFreshM (runReaderT (runExceptT m) emptyToFEnv) of
Left s -> error $ "unexpected failure in ToF.runToFM: “" ++ s ++ "”"
Right a -> a
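-- | Resolve an Insomnia 'Path' to its semantic signature together with the
-- FOmega term denoting it, given a way to look up the root 'Identifier'.
-- Each 'ProjP' step becomes a record projection on the translated module, and
-- a 'TopRefP' root is looked up in the 'toplevelEnv'.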
followUserPathAnything :: (MonadError String m, MonadReader Env m) =>
(Identifier -> m (F.SemanticSig, F.Term))
-> Path -> m (F.SemanticSig, F.Term)
followUserPathAnything rootLookup (IdP ident) = rootLookup ident
followUserPathAnything rootLookup (ProjP path f) = do
(mod1, m) <- followUserPathAnything rootLookup path
case mod1 of
(F.ModSem flds) -> do
let p (F.FUser f', _) | f == f' = True
p _ = False
case List.find p flds of
Just (_, mod2) -> return (mod2, F.Proj m (F.FUser f))
Nothing -> throwError ("unexpected failure in followUserPathAnything: field "
++ show f ++ " not found in " ++ show path)
_ -> throwError "unexpected failure in followUserPathAnything: not a module record"
followUserPathAnything _rootLookup (TopRefP topref) = do
m <- view (toplevelEnv . at topref)
case m of
Just (sig, x) -> return (sig, F.V x)
Nothing -> throwError ("unexpected failure in followUserPathAnything: toplevel "
++ show topref ++ " not in environment")
| lambdageek/insomnia | src/Insomnia/ToF/Env.hs | bsd-3-clause | 4,136 | 0 | 20 | 1,086 | 1,265 | 682 | 583 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Syncthing.Types.UsageReport
( UsageReport(..)
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (MonadPlus (mzero))
import Data.Aeson (FromJSON, Value (..), parseJSON, (.:))
import Data.Text (Text)
-- | Information about the data sent in the anonymous usage report.
data UsageReport = UsageReport {
getFolderMaxFiles :: Integer
, getFolderMaxMiB :: Integer
, getLongVersionR :: Text
, getMemorySize :: Integer
, getMemoryUsageMiB :: Integer
, getNumDevices :: Int
, getNumFolders :: Int
, getPlatform :: Text
, getSHA256Perf :: Double
, getTotFiles :: Integer
, getTotMiB :: Integer
, getUniqueId :: Text
, getVersionR :: Text
} deriving (Eq, Show)
instance FromJSON UsageReport where
parseJSON (Object v) =
UsageReport <$> (v .: "folderMaxFiles")
<*> (v .: "folderMaxMiB")
<*> (v .: "longVersion")
<*> (v .: "memorySize")
<*> (v .: "memoryUsageMiB")
<*> (v .: "numDevices")
<*> (v .: "numFolders")
<*> (v .: "platform")
<*> (v .: "sha256Perf")
<*> (v .: "totFiles")
<*> (v .: "totMiB")
<*> (v .: "uniqueID")
<*> (v .: "version")
parseJSON _ = mzero
| jetho/syncthing-hs | Network/Syncthing/Types/UsageReport.hs | bsd-3-clause | 1,681 | 0 | 20 | 725 | 356 | 214 | 142 | 38 | 0 |
{-|
Module : Pipes.KeyValueCsv
Copyright : (c) Marcin Mrotek, 2015
License : BSD3
Maintainer : [email protected]
Stability : experimental
Parse CSV files with key-value headers.
-}
{-# LANGUAGE
DataKinds
, ExplicitForAll
, PolyKinds
, TypeOperators
#-}
module Pipes.KeyValueCsv
( breakLines
, parseKeyValueCsv
, module Pipes.KeyValueCsv.Csv
, module Pipes.KeyValueCsv.KeyValue
, module Pipes.KeyValueCsv.Types
, Record (..)
-- * Re-exports
, Validation(..)
) where
import Prelude hiding (lines)
import Pipes.KeyValueCsv.Internal
import Pipes.KeyValueCsv.Internal.Types
import Pipes.KeyValueCsv.Common
import Pipes.KeyValueCsv.Csv
import Pipes.KeyValueCsv.KeyValue
import Pipes.KeyValueCsv.Types
import Control.Lens
import Data.Validation
import Data.Vinyl
import Data.Vinyl.Functor
import Data.Vinyl.Utils.Proxy
breakLines
:: Monad m
=> (Text -> Bool)
-> Lines m r
-> Lines m (Lines m r)
{-^
Break a stream of lines into two parts, on a line that satisfies the given predicate.
All input lines up to the breaking one will be fully read, and each (not including the breaking one) will be re-'yield'ed.
-}
breakLines p (Lines l) = Lines $ Lines <$> breakLines' p l
parseKeyValueCsv
:: forall (m :: * -> *) (f :: k -> *) (g :: j -> *) (hs :: [k]) (rs :: [j]) (r :: *)
. ( Monad m
, Record hs
)
=> Options m f g hs rs
-> Producer Text m r
-> m
( Rec (WithKeyValueError :. f) hs
, Producer (Rec (WithCsvError :. g) rs) m r
)
-- ^Read a CSV file preceded by key-value pairs.
parseKeyValueCsv options producer = useDelimiter (options^.delimiter) $ do
(hdr, remaining)
<- parseKeyValues (options^.kvOptions)
. breakLines (options^.predicate)
$ lines producer
pure (hdr, parseCsv (options^.csvOptions) remaining)
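-- A rough usage outline (sketch only; constructing the 'Options' value for
-- the header fields and CSV columns is omitted): run
-- @parseKeyValueCsv options producer@ to obtain the parsed header record and
-- a 'Producer' of parsed rows, then drain that producer with the usual
-- @pipes@ machinery, e.g. @runEffect (rows >-> consumer)@.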
| marcinmrotek/pipes-key-value-csv | src/Pipes/KeyValueCsv.hs | bsd-3-clause | 1,832 | 0 | 15 | 373 | 447 | 257 | 190 | 46 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Buildsome.BuildMaps
( TargetRep(..), computeTargetRep
, TargetDesc(..), descOfTarget
, DirectoryBuildMap(..)
, BuildMaps(..)
, make
, TargetKind(..)
, find
, findDirectory
) where
import qualified Buildsome.Print as Print
import Control.Monad
import qualified Data.ByteString.Char8 as BS8
import Data.List (nub)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>))
import Lib.FilePath (FilePath, takeDirectory)
import Lib.Makefile (Makefile(..), TargetType(..), Target, Pattern)
import qualified Lib.Makefile as Makefile
import qualified Lib.StringPattern as StringPattern
import Control.DeepSeq (NFData (..))
import Control.DeepSeq.Generics (genericRnf)
import GHC.Generics
import Prelude.Compat hiding (FilePath)
-- | Unique identifier of the target.
newtype TargetRep = TargetRep { targetRepPath :: FilePath } -- We use the minimum output path as the
-- target key/representative. It's ok to
                                                            -- do this because targets' output sets
-- can't overlap
deriving (Eq, Ord, Show, Generic)
instance NFData TargetRep where
rnf = genericRnf
computeTargetRep :: Target -> TargetRep
computeTargetRep = TargetRep . minimum . targetOutputs
data TargetDesc = TargetDesc
{ tdRep :: TargetRep
, tdTarget :: Target
} deriving (Show, Generic)
instance NFData TargetDesc where
rnf = genericRnf
descOfTarget :: Target -> TargetDesc
descOfTarget target = TargetDesc (computeTargetRep target) target
data DirectoryBuildMap = DirectoryBuildMap
{ dbmTargets :: [TargetDesc]
, dbmPatterns :: [Pattern]
} deriving (Show, Generic)
instance NFData DirectoryBuildMap where
rnf = genericRnf
instance Monoid DirectoryBuildMap where
mempty = DirectoryBuildMap mempty mempty
mappend (DirectoryBuildMap x0 x1) (DirectoryBuildMap y0 y1) =
DirectoryBuildMap (mappend x0 y0) (mappend x1 y1)
data BuildMaps = BuildMaps
{ _bmBuildMap :: Map FilePath TargetDesc -- output paths -> min(representative) path and original spec
, _bmChildrenMap :: Map FilePath DirectoryBuildMap
} deriving Generic
instance NFData BuildMaps where
rnf = genericRnf
data TargetKind = TargetPattern | TargetSimple
deriving (Eq)
find :: BuildMaps -> FilePath -> Maybe (TargetKind, TargetDesc)
find (BuildMaps buildMap childrenMap) path =
-- Allow specific/simple matches to override pattern matches
((,) TargetSimple <$> simpleMatch) `mplus`
((,) TargetPattern <$> patternMatch)
where
simpleMatch = path `M.lookup` buildMap
patterns = dbmPatterns $ M.findWithDefault mempty (takeDirectory path) childrenMap
instantiate pattern = (,) pattern <$> Makefile.instantiatePatternByOutput path pattern
patternMatch =
case mapMaybe instantiate patterns of
[] -> Nothing
[(_, target)] -> Just $ descOfTarget target
targets ->
error $ BS8.unpack $ mconcat
[ "Multiple matching patterns for: ", BS8.pack (show path), "\n"
, BS8.unlines $
map (showPattern . fst) targets
]
showPattern pattern =
Print.posText (targetPos pattern) <> showPatternOutputs pattern
showPatternOutputs pattern =
BS8.unwords $
map (StringPattern.toString . Makefile.filePatternFile) $
targetOutputs pattern
findDirectory :: BuildMaps -> FilePath -> DirectoryBuildMap
findDirectory (BuildMaps _ childrenMap) path =
M.findWithDefault mempty path childrenMap
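-- | Build the 'BuildMaps' for a makefile: every target is indexed under each
-- of its output paths (raising an error when two targets claim the same
-- output), and targets and patterns are additionally grouped by the directory
-- their outputs live in.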
make :: Makefile -> BuildMaps
make makefile = BuildMaps buildMap childrenMap
where
outputs =
[ (outputPath, descOfTarget target)
| target <- makefileTargets makefile
, outputPath <- targetOutputs target
]
childrenMap =
M.fromListWith mappend $
[ (takeDirectory outputPath, mempty { dbmTargets = [targetDesc] })
| (outputPath, targetDesc) <- outputs
] ++
[ (outPatDir, mempty { dbmPatterns = [targetPattern] })
| targetPattern <- makefilePatterns makefile
, outPatDir <- nub (map Makefile.filePatternDirectory (targetOutputs targetPattern))
]
overlappingOutputs path (TargetDesc _ a) (TargetDesc _ b) =
error $ "Overlapping output paths for: " ++ show path ++ " at:\n" ++
show (targetPos a) ++ "vs.\n" ++ show (targetPos b)
buildMap =
M.fromListWithKey overlappingOutputs
[ (outputPath, targetDesc)
| (outputPath, targetDesc) <- outputs ]
| da-x/buildsome-tst | app/Buildsome/BuildMaps.hs | bsd-3-clause | 4,788 | 0 | 16 | 1,158 | 1,184 | 664 | 520 | 106 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Main where
import IsInstance
import Data.Proxy
writeTryCxt ''Show
f x = ifHasInstance (Proxy :: Proxy Show) "NO SHOW" show x
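-- 'f' should show its argument whenever the argument's type has a 'Show'
-- instance and otherwise fall back to the "NO SHOW" string: below, the pair
-- containing a function prints the fallback, while the 'Char'/'String' pair
-- is shown normally.  (Behaviour inferred from the IsInstance API; treat as a
-- sketch.)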
main = do
putStrLn (f ((+1), "y"))
putStrLn (f ('x', "y"))
| aavogt/IsInstance | test/test2.hs | bsd-3-clause | 438 | 0 | 11 | 71 | 98 | 55 | 43 | 15 | 1 |
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.PackageIndex
-- Copyright : (c) David Himmelstrup 2005,
-- Bjorn Bringert 2007,
-- Duncan Coutts 2008-2009
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- An index of packages whose primary key is 'UnitId'. Public libraries
-- are additionally indexed by 'PackageName' and 'Version'.
-- Technically, these are an index of *units* (so we should eventually
-- rename it to 'UnitIndex'); but in the absence of internal libraries
-- or Backpack each unit is equivalent to a package.
--
-- 'PackageIndex' is parametric over what it actually records, and it
-- is used in two ways:
--
-- * The 'InstalledPackageIndex' (defined here) contains a graph of
-- 'InstalledPackageInfo's representing the packages in a
-- package database stack. It is used in a variety of ways:
--
-- * The primary use to let Cabal access the same installed
-- package database which is used by GHC during compilation.
-- For example, this data structure is used by 'ghc-pkg'
-- and 'Cabal' to do consistency checks on the database
-- (are the references closed).
--
-- * Given a set of dependencies, we can compute the transitive
-- closure of dependencies. This is to check if the versions
-- of packages are consistent, and also needed by multiple
-- tools (Haddock must be explicitly told about the every
-- transitive package to do cross-package linking;
-- preprocessors must know about the include paths of all
-- transitive dependencies.)
--
-- * The 'PlanIndex' (defined in 'Distribution.Client.InstallPlan'),
-- contains a graph of 'GenericPlanPackage'. Ignoring its type
-- parameters for a moment, a 'PlanIndex' is an extension of the
-- 'InstalledPackageIndex' to also record nodes for packages
-- which are *planned* to be installed, but not actually
-- installed yet. A 'PlanIndex' containing only 'PreExisting'
-- packages is essentially a 'PackageIndex'.
--
-- 'PlanIndex'es actually require some auxiliary information, so
-- most users interact with a 'GenericInstallPlan'. This type is
-- specialized as an 'ElaboratedInstallPlan' (for @cabal
-- new-build@) or an 'InstallPlan' (for @cabal install@).
--
-- This 'PackageIndex' is NOT to be confused with
-- 'Distribution.Client.PackageIndex', which indexes packages only by
-- 'PackageName' (this makes it suitable for indexing source packages,
-- for which we don't know 'UnitId's.)
--
module Distribution.Simple.PackageIndex (
-- * Package index data type
InstalledPackageIndex,
PackageIndex,
-- * Creating an index
fromList,
-- * Updates
merge,
insert,
deleteUnitId,
deleteSourcePackageId,
deletePackageName,
-- deleteDependency,
-- * Queries
-- ** Precise lookups
lookupUnitId,
lookupComponentId,
lookupSourcePackageId,
lookupPackageId,
lookupPackageName,
lookupDependency,
-- ** Case-insensitive searches
searchByName,
SearchResult(..),
searchByNameSubstring,
-- ** Bulk queries
allPackages,
allPackagesByName,
allPackagesBySourcePackageId,
-- ** Special queries
brokenPackages,
dependencyClosure,
reverseDependencyClosure,
topologicalOrder,
reverseTopologicalOrder,
dependencyInconsistencies,
dependencyCycles,
dependencyGraph,
moduleNameIndex,
-- * Backwards compatibility
deleteInstalledPackageId,
lookupInstalledPackageId,
) where
import Prelude ()
import Distribution.Compat.Prelude hiding (lookup)
import Distribution.Package
import Distribution.ModuleName
import qualified Distribution.InstalledPackageInfo as IPI
import Distribution.Version
import Distribution.Simple.Utils
import Control.Exception (assert)
import Data.Array ((!))
import qualified Data.Array as Array
import qualified Data.Graph as Graph
import Data.List as List ( groupBy, deleteBy, deleteFirstsBy )
import qualified Data.Map as Map
import qualified Data.Tree as Tree
-- | The collection of information about packages from one or more 'PackageDB's.
-- These packages generally should have an instance of 'PackageInstalled'
--
-- Packages are uniquely identified in by their 'UnitId', they can
-- also be efficiently looked up by package name or by name and version.
--
data PackageIndex a = PackageIndex {
-- The primary index. Each InstalledPackageInfo record is uniquely identified
-- by its UnitId.
--
unitIdIndex :: !(Map UnitId a),
-- This auxiliary index maps package names (case-sensitively) to all the
-- versions and instances of that package. This allows us to find all
-- versions satisfying a dependency.
--
-- It is a three-level index. The first level is the package name,
-- the second is the package version and the final level is instances
-- of the same package version. These are unique by UnitId
-- and are kept in preference order.
--
-- FIXME: Clarify what "preference order" means. Check that this invariant is
-- preserved. See #1463 for discussion.
packageIdIndex :: !(Map PackageName (Map Version [a]))
} deriving (Eq, Generic, Show, Read)
instance Binary a => Binary (PackageIndex a)
-- | The default package index which contains 'InstalledPackageInfo'. Normally
-- use this.
type InstalledPackageIndex = PackageIndex IPI.InstalledPackageInfo
instance HasUnitId a => Monoid (PackageIndex a) where
mempty = PackageIndex Map.empty Map.empty
mappend = (<>)
--save one mappend with empty in the common case:
mconcat [] = mempty
mconcat xs = foldr1 mappend xs
instance HasUnitId a => Semigroup (PackageIndex a) where
(<>) = merge
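-- | Internal sanity check: the 'unitIdIndex' and the 'packageIdIndex' must
-- describe exactly the same set of packages, every instance must be filed
-- under its own package name and version, and no 'UnitId' may occur twice.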
invariant :: HasUnitId a => PackageIndex a -> Bool
invariant (PackageIndex pids pnames) =
map installedUnitId (Map.elems pids)
== sort
[ assert pinstOk (installedUnitId pinst)
| (pname, pvers) <- Map.toList pnames
, let pversOk = not (Map.null pvers)
, (pver, pinsts) <- assert pversOk $ Map.toList pvers
, let pinsts' = sortBy (comparing installedUnitId) pinsts
pinstsOk = all (\g -> length g == 1)
(groupBy (equating installedUnitId) pinsts')
, pinst <- assert pinstsOk $ pinsts'
, let pinstOk = packageName pinst == pname
&& packageVersion pinst == pver
]
--
-- * Internal helpers
--
mkPackageIndex :: HasUnitId a
=> Map UnitId a
-> Map PackageName (Map Version [a])
-> PackageIndex a
mkPackageIndex pids pnames = assert (invariant index) index
where index = PackageIndex pids pnames
--
-- * Construction
--
-- | Build an index out of a bunch of packages.
--
-- If there are duplicates by 'UnitId' then later ones mask earlier
-- ones.
--
fromList :: HasUnitId a => [a] -> PackageIndex a
fromList pkgs = mkPackageIndex pids pnames
where
pids = Map.fromList [ (installedUnitId pkg, pkg) | pkg <- pkgs ]
pnames =
Map.fromList
[ (packageName (head pkgsN), pvers)
| pkgsN <- groupBy (equating packageName)
. sortBy (comparing packageId)
$ pkgs
, let pvers =
Map.fromList
[ (packageVersion (head pkgsNV),
nubBy (equating installedUnitId) (reverse pkgsNV))
| pkgsNV <- groupBy (equating packageVersion) pkgsN
]
]
--
-- * Updates
--
-- | Merge two indexes.
--
-- Packages from the second mask packages from the first if they have the exact
-- same 'UnitId'.
--
-- For packages with the same source 'PackageId', packages from the second are
-- \"preferred\" over those from the first. Being preferred means they are top
-- result when we do a lookup by source 'PackageId'. This is the mechanism we
-- use to prefer user packages over global packages.
--
merge :: HasUnitId a => PackageIndex a -> PackageIndex a
-> PackageIndex a
merge (PackageIndex pids1 pnames1) (PackageIndex pids2 pnames2) =
mkPackageIndex (Map.unionWith (\_ y -> y) pids1 pids2)
(Map.unionWith (Map.unionWith mergeBuckets) pnames1 pnames2)
where
-- Packages in the second list mask those in the first, however preferred
-- packages go first in the list.
mergeBuckets xs ys = ys ++ (xs \\ ys)
(\\) = deleteFirstsBy (equating installedUnitId)
-- | Inserts a single package into the index.
--
-- This is equivalent to (but slightly quicker than) using 'mappend' or
-- 'merge' with a singleton index.
--
insert :: HasUnitId a => a -> PackageIndex a -> PackageIndex a
insert pkg (PackageIndex pids pnames) =
mkPackageIndex pids' pnames'
where
pids' = Map.insert (installedUnitId pkg) pkg pids
pnames' = insertPackageName pnames
insertPackageName =
Map.insertWith' (\_ -> insertPackageVersion)
(packageName pkg)
(Map.singleton (packageVersion pkg) [pkg])
insertPackageVersion =
Map.insertWith' (\_ -> insertPackageInstance)
(packageVersion pkg) [pkg]
insertPackageInstance pkgs =
pkg : deleteBy (equating installedUnitId) pkg pkgs
-- | Removes a single installed package from the index.
--
deleteUnitId :: HasUnitId a
=> UnitId -> PackageIndex a
-> PackageIndex a
deleteUnitId ipkgid original@(PackageIndex pids pnames) =
case Map.updateLookupWithKey (\_ _ -> Nothing) ipkgid pids of
(Nothing, _) -> original
(Just spkgid, pids') -> mkPackageIndex pids'
(deletePkgName spkgid pnames)
where
deletePkgName spkgid =
Map.update (deletePkgVersion spkgid) (packageName spkgid)
deletePkgVersion spkgid =
(\m -> if Map.null m then Nothing else Just m)
. Map.update deletePkgInstance (packageVersion spkgid)
deletePkgInstance =
(\xs -> if null xs then Nothing else Just xs)
. List.deleteBy (\_ pkg -> installedUnitId pkg == ipkgid) undefined
-- | Backwards compatibility wrapper for Cabal pre-1.24.
{-# DEPRECATED deleteInstalledPackageId "Use deleteUnitId instead" #-}
deleteInstalledPackageId :: HasUnitId a
=> UnitId -> PackageIndex a
-> PackageIndex a
deleteInstalledPackageId = deleteUnitId
-- | Removes all packages with this source 'PackageId' from the index.
--
deleteSourcePackageId :: HasUnitId a => PackageId -> PackageIndex a
-> PackageIndex a
deleteSourcePackageId pkgid original@(PackageIndex pids pnames) =
case Map.lookup (packageName pkgid) pnames of
Nothing -> original
Just pvers -> case Map.lookup (packageVersion pkgid) pvers of
Nothing -> original
Just pkgs -> mkPackageIndex
(foldl' (flip (Map.delete . installedUnitId)) pids pkgs)
(deletePkgName pnames)
where
deletePkgName =
Map.update deletePkgVersion (packageName pkgid)
deletePkgVersion =
(\m -> if Map.null m then Nothing else Just m)
. Map.delete (packageVersion pkgid)
-- | Removes all packages with this (case-sensitive) name from the index.
--
deletePackageName :: HasUnitId a => PackageName -> PackageIndex a
-> PackageIndex a
deletePackageName name original@(PackageIndex pids pnames) =
case Map.lookup name pnames of
Nothing -> original
Just pvers -> mkPackageIndex
(foldl' (flip (Map.delete . installedUnitId)) pids
(concat (Map.elems pvers)))
(Map.delete name pnames)
{-
-- | Removes all packages satisfying this dependency from the index.
--
deleteDependency :: Dependency -> PackageIndex -> PackageIndex
deleteDependency (Dependency name verstionRange) =
delete' name (\pkg -> packageVersion pkg `withinRange` verstionRange)
-}
--
-- * Bulk queries
--
-- | Get all the packages from the index.
--
allPackages :: PackageIndex a -> [a]
allPackages = Map.elems . unitIdIndex
-- | Get all the packages from the index.
--
-- They are grouped by package name (case-sensitively).
--
allPackagesByName :: PackageIndex a -> [(PackageName, [a])]
allPackagesByName index =
[ (pkgname, concat (Map.elems pvers))
| (pkgname, pvers) <- Map.toList (packageIdIndex index) ]
-- | Get all the packages from the index.
--
-- They are grouped by source package id (package name and version).
--
allPackagesBySourcePackageId :: HasUnitId a => PackageIndex a
-> [(PackageId, [a])]
allPackagesBySourcePackageId index =
[ (packageId ipkg, ipkgs)
| pvers <- Map.elems (packageIdIndex index)
, ipkgs@(ipkg:_) <- Map.elems pvers ]
--
-- * Lookups
--
-- | Does a lookup by unit identifier.
--
-- Since multiple package DBs mask each other by 'UnitId',
-- then we get back at most one package.
--
lookupUnitId :: PackageIndex a -> UnitId
-> Maybe a
lookupUnitId index uid = Map.lookup uid (unitIdIndex index)
-- | Does a lookup by component identifier. In the absence
-- of Backpack, this is just a 'lookupUnitId'.
--
lookupComponentId :: PackageIndex a -> ComponentId
-> Maybe a
lookupComponentId index uid = Map.lookup (SimpleUnitId uid) (unitIdIndex index)
-- | Backwards compatibility for Cabal pre-1.24.
{-# DEPRECATED lookupInstalledPackageId "Use lookupUnitId instead" #-}
lookupInstalledPackageId :: PackageIndex a -> UnitId
-> Maybe a
lookupInstalledPackageId = lookupUnitId
-- | Does a lookup by source package id (name & version).
--
-- There can be multiple installed packages with the same source 'PackageId'
-- but different 'UnitId'. They are returned in order of
-- preference, with the most preferred first.
--
lookupSourcePackageId :: PackageIndex a -> PackageId -> [a]
lookupSourcePackageId index pkgid =
case Map.lookup (packageName pkgid) (packageIdIndex index) of
Nothing -> []
Just pvers -> case Map.lookup (packageVersion pkgid) pvers of
Nothing -> []
Just pkgs -> pkgs -- in preference order
-- | Convenient alias of 'lookupSourcePackageId', but assuming only
-- one package per package ID.
lookupPackageId :: PackageIndex a -> PackageId -> Maybe a
lookupPackageId index pkgid = case lookupSourcePackageId index pkgid of
[] -> Nothing
[pkg] -> Just pkg
_ -> error "Distribution.Simple.PackageIndex: multiple matches found"
-- | Does a lookup by source package name.
--
lookupPackageName :: PackageIndex a -> PackageName
-> [(Version, [a])]
lookupPackageName index name =
case Map.lookup name (packageIdIndex index) of
Nothing -> []
Just pvers -> Map.toList pvers
-- | Does a lookup by source package name and a range of versions.
--
-- We get back any number of versions of the specified package name, all
-- satisfying the version range constraint.
--
lookupDependency :: PackageIndex a -> Dependency
-> [(Version, [a])]
lookupDependency index (Dependency name versionRange) =
case Map.lookup name (packageIdIndex index) of
Nothing -> []
Just pvers -> [ entry
| entry@(ver, _) <- Map.toList pvers
, ver `withinRange` versionRange ]
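-- An illustrative sketch (added for clarity, not part of the original
-- module): picking the highest version that satisfies a dependency.
-- 'maximumBy' comes from "Data.List" and 'comparing' from "Data.Ord";
-- both are assumptions of this sketch rather than imports of this module.
--
-- > latestSatisfying :: PackageIndex a -> Dependency -> Maybe (Version, [a])
-- > latestSatisfying index dep =
-- >   case lookupDependency index dep of
-- >     []      -> Nothing
-- >     matches -> Just (maximumBy (comparing fst) matches)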
--
-- * Case insensitive name lookups
--
-- | Does a case-insensitive search by package name.
--
-- If there is only one package that compares case-insensitively to this name
-- then the search is unambiguous and we get back all versions of that package.
-- If several match case-insensitively but one matches exactly then it is also
-- unambiguous.
--
-- If however several match case-insensitively and none match exactly then we
-- have an ambiguous result, and we get back all the versions of all the
-- packages. The list of ambiguous results is split by exact package name. So
-- it is a non-empty list of non-empty lists.
--
searchByName :: PackageIndex a -> String -> SearchResult [a]
searchByName index name =
case [ pkgs | pkgs@(PackageName name',_) <- Map.toList (packageIdIndex index)
, lowercase name' == lname ] of
[] -> None
[(_,pvers)] -> Unambiguous (concat (Map.elems pvers))
pkgss -> case find ((PackageName name==) . fst) pkgss of
Just (_,pvers) -> Unambiguous (concat (Map.elems pvers))
Nothing -> Ambiguous (map (concat . Map.elems . snd) pkgss)
where lname = lowercase name
data SearchResult a = None | Unambiguous a | Ambiguous [a]
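-- An illustrative sketch (added, not part of the original module): callers
-- typically collapse a 'SearchResult' back into a flat list when they do
-- not care about ambiguity.
--
-- > flattenSearch :: SearchResult [a] -> [a]
-- > flattenSearch None             = []
-- > flattenSearch (Unambiguous xs) = xs
-- > flattenSearch (Ambiguous xss)  = concat xss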
-- | Does a case-insensitive substring search by package name.
--
-- That is, all packages that contain the given string in their name.
--
searchByNameSubstring :: PackageIndex a -> String -> [a]
searchByNameSubstring index searchterm =
[ pkg
| (PackageName name, pvers) <- Map.toList (packageIdIndex index)
, lsearchterm `isInfixOf` lowercase name
, pkgs <- Map.elems pvers
, pkg <- pkgs ]
where lsearchterm = lowercase searchterm
--
-- * Special queries
--
-- None of the stuff below depends on the internal representation of the index.
--
-- | Find if there are any cycles in the dependency graph. If there are no
-- cycles the result is @[]@.
--
-- This actually computes the strongly connected components. So it gives us a
-- list of groups of packages where within each group they all depend on each
-- other, directly or indirectly.
--
dependencyCycles :: PackageInstalled a => PackageIndex a -> [[a]]
dependencyCycles index =
[ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ]
where
adjacencyList = [ (pkg, installedUnitId pkg, installedDepends pkg)
| pkg <- allPackages index ]
-- | All packages that have immediate dependencies that are not in the index.
--
-- Returns such packages along with the dependencies that they're missing.
--
brokenPackages :: PackageInstalled a => PackageIndex a
-> [(a, [UnitId])]
brokenPackages index =
[ (pkg, missing)
| pkg <- allPackages index
, let missing = [ pkg' | pkg' <- installedDepends pkg
, isNothing (lookupUnitId index pkg') ]
, not (null missing) ]
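-- An illustrative sketch (added, not part of the original module): a quick
-- health check that combines 'brokenPackages' with 'dependencyCycles'.
--
-- > indexLooksHealthy :: PackageInstalled a => PackageIndex a -> Bool
-- > indexLooksHealthy index =
-- >   null (brokenPackages index) && null (dependencyCycles index)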
-- | Tries to take the transitive closure of the package dependencies.
--
-- If the transitive closure is complete then it returns that subset of the
-- index. Otherwise it returns the broken packages as in 'brokenPackages'.
--
-- * Note that if the result is @Right []@ it is because at least one of
-- the original given 'PackageId's does not occur in the index.
--
dependencyClosure :: PackageInstalled a => PackageIndex a
-> [UnitId]
-> Either (PackageIndex a)
[(a, [UnitId])]
dependencyClosure index pkgids0 = case closure mempty [] pkgids0 of
(completed, []) -> Left completed
(completed, _) -> Right (brokenPackages completed)
where
closure completed failed [] = (completed, failed)
closure completed failed (pkgid:pkgids) = case lookupUnitId index pkgid of
Nothing -> closure completed (pkgid:failed) pkgids
Just pkg -> case lookupUnitId completed (installedUnitId pkg) of
Just _ -> closure completed failed pkgids
Nothing -> closure completed' failed pkgids'
where completed' = insert pkg completed
pkgids' = installedDepends pkg ++ pkgids
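-- An illustrative sketch (added, not part of the original module):
-- restricting an index to a single package and its transitive dependencies,
-- turning broken dependencies into a hard error.
--
-- > closureOrFail :: PackageInstalled a
-- >               => PackageIndex a -> UnitId -> PackageIndex a
-- > closureOrFail index uid =
-- >   case dependencyClosure index [uid] of
-- >     Left subIndex -> subIndex
-- >     Right broken  -> error ("missing dependencies: "
-- >                             ++ show (concatMap snd broken))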
-- | Takes the transitive closure of the packages reverse dependencies.
--
-- * The given 'PackageId's must be in the index.
--
reverseDependencyClosure :: PackageInstalled a => PackageIndex a
-> [UnitId]
-> [a]
reverseDependencyClosure index =
map vertexToPkg
. concatMap Tree.flatten
. Graph.dfs reverseDepGraph
. map (fromMaybe noSuchPkgId . pkgIdToVertex)
where
(depGraph, vertexToPkg, pkgIdToVertex) = dependencyGraph index
reverseDepGraph = Graph.transposeG depGraph
noSuchPkgId = error "reverseDependencyClosure: package is not in the graph"
topologicalOrder :: PackageInstalled a => PackageIndex a -> [a]
topologicalOrder index = map toPkgId
. Graph.topSort
$ graph
where (graph, toPkgId, _) = dependencyGraph index
reverseTopologicalOrder :: PackageInstalled a => PackageIndex a -> [a]
reverseTopologicalOrder index = map toPkgId
. Graph.topSort
. Graph.transposeG
$ graph
where (graph, toPkgId, _) = dependencyGraph index
-- | Builds a graph of the package dependencies.
--
-- Dependencies on other packages that are not in the index are discarded.
-- You can check if there are any such dependencies with 'brokenPackages'.
--
dependencyGraph :: PackageInstalled a => PackageIndex a
-> (Graph.Graph,
Graph.Vertex -> a,
UnitId -> Maybe Graph.Vertex)
dependencyGraph index = (graph, vertex_to_pkg, id_to_vertex)
where
graph = Array.listArray bounds
[ [ v | Just v <- map id_to_vertex (installedDepends pkg) ]
| pkg <- pkgs ]
pkgs = sortBy (comparing packageId) (allPackages index)
vertices = zip (map installedUnitId pkgs) [0..]
vertex_map = Map.fromList vertices
id_to_vertex pid = Map.lookup pid vertex_map
vertex_to_pkg vertex = pkgTable ! vertex
pkgTable = Array.listArray bounds pkgs
topBound = length pkgs - 1
bounds = (0, topBound)
-- | Given a package index where we assume we want to use all the packages
-- (use 'dependencyClosure' if you need to get such an index subset) find out
-- if the dependencies within it use consistent versions of each package.
-- Return all cases where multiple packages depend on different versions of
-- some other package.
--
-- Each element in the result is a package name along with the packages that
-- depend on it and the versions they require. These are guaranteed to be
-- distinct.
--
dependencyInconsistencies :: PackageInstalled a => PackageIndex a
-> [(PackageName, [(PackageId, Version)])]
dependencyInconsistencies index =
[ (name, [ (pid,packageVersion dep) | (dep,pids) <- uses, pid <- pids])
| (name, ipid_map) <- Map.toList inverseIndex
, let uses = Map.elems ipid_map
, reallyIsInconsistent (map fst uses) ]
where -- for each PackageName,
-- for each package with that name,
-- the InstalledPackageInfo and the package Ids of packages
-- that depend on it.
inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b')))
[ (packageName dep,
Map.fromList [(ipid,(dep,[packageId pkg]))])
| pkg <- allPackages index
, ipid <- installedDepends pkg
, Just dep <- [lookupUnitId index ipid]
]
-- Added in 991e52a474e2b8280432257c1771dc474a320a30,
-- this is a special case to handle the base 3 compatibility
-- package which shipped with GHC 6.10 and GHC 6.12
-- (it was removed in GHC 7.0). Remove this when GHC 6.12
-- goes out of our support window.
reallyIsInconsistent :: PackageInstalled a => [a] -> Bool
reallyIsInconsistent [] = False
reallyIsInconsistent [_p] = False
reallyIsInconsistent [p1, p2] =
let pid1 = installedUnitId p1
pid2 = installedUnitId p2
in pid1 `notElem` installedDepends p2
&& pid2 `notElem` installedDepends p1
reallyIsInconsistent _ = True
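-- An illustrative sketch (added, not part of the original module): turning
-- the inconsistency report into one line per offending package name.
--
-- > reportInconsistencies :: PackageInstalled a => PackageIndex a -> [String]
-- > reportInconsistencies index =
-- >   [ show name ++ " is required at versions " ++ show (map snd uses)
-- >   | (name, uses) <- dependencyInconsistencies index ]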
-- | A rough approximation of GHC's module finder, takes a
-- 'InstalledPackageIndex' and turns it into a map from module names to their
-- source packages. It's used to initialize the @build-deps@ field in @cabal
-- init@.
moduleNameIndex :: InstalledPackageIndex -> Map ModuleName [IPI.InstalledPackageInfo]
moduleNameIndex index =
Map.fromListWith (++) $ do
pkg <- allPackages index
IPI.ExposedModule m reexport <- IPI.exposedModules pkg
case reexport of
Nothing -> return (m, [pkg])
Just (Module _ m') | m == m' -> []
| otherwise -> return (m', [pkg])
-- The heuristic is this: we want to prefer the original package
-- which originally exported a module. However, if a reexport
-- also *renamed* the module (m /= m'), then we have to use the
-- downstream package, since the upstream package has the wrong
-- module name!
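-- An illustrative sketch (added, not part of the original module): looking
-- up the candidate providers of a single module.
--
-- > providersOf :: InstalledPackageIndex -> ModuleName
-- >             -> [IPI.InstalledPackageInfo]
-- > providersOf index m = Map.findWithDefault [] m (moduleNameIndex index)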
| sopvop/cabal | Cabal/Distribution/Simple/PackageIndex.hs | bsd-3-clause | 24,614 | 0 | 19 | 6,000 | 4,586 | 2,488 | 2,098 | 328 | 5 |
module Tests where
import AST
import Render
-- t0 = Config { cTypes = [ MTApp (MTyCon "PING") [PVar (V "x")]
-- , MTApp (MTyCon "PONG") [PVar (V "x")]
-- ]
-- , cSets = []
-- , cProcs = [(PConc 0,
-- [ Choose (V "pi") (S "ps")
-- [Send (PVar (V "pi")) [(MTApp (MTyCon "PING") [PConc 0],
-- [Recv [(MTApp (MTyCon "PONG") [PVar (V "x")], [])] ()])] ()] ()]),
-- (PAbs (V "p") (S "ps"),
-- [Loop (V "X")
-- [Recv [(MTApp (MTyCon "PING") [PVar (V "x")],
-- [Send (PVar (V "x")) [(MTApp (MTyCon "PONG") [PVar (V "x")], [Goto (V "X") ()])] ()])] ()] ()])]
-- }
-- ---
mPing, mPong :: Pid -> MType
mPing v = MTApp (MTyCon "Ping") [v]
mPong v = MTApp (MTyCon "Pong") [v]
tpid0, tpid1, tpid2 :: Pid
tpid0 = PConc 0
tpid1 = PConc 1
tpid2 = PAbs (V "p") (S "ps")
pvar :: String -> Pid
pvar x = PVar (V x)
test0 :: Config ()
test0 = Config {
cTypes = [mPing (pvar "x"),
mPong (pvar "x"),
MTApp (MTyCon "Unit") []]
, cSets = []
, cUnfold = []
, cProcs = [
(tpid0, Send tpid1 [(mPing tpid0, Recv [(mPong (pvar "x"), Skip ())] ())] ())
,(tpid1, Recv [(mPing (pvar "x"), Send (pvar "x") [(mPong tpid1, Skip ())] ())] ())
]
} :: Config ()
-- test0a = Config {
-- cTypes = [mPing (pvar "x"), mPong (pvar "x"), MTApp (MTyCon "Unit") []]
-- , cSets = []
-- , cProcs = [(tpid0,
-- [Loop (V "X")
-- [Send tpid1 [(mPing tpid0, [Recv [(mPong (pvar "x"), [Goto (V "X") ()])] ()])] ()] ()])
-- ,(tpid1,
-- [Loop (V "Y")
-- [Recv [(mPing (pvar "x"), [Send (pvar "x") [(mPong tpid1, [Goto (V "Y") ()])] ()])] ()] ()])]
-- }
test1 :: Config ()
test1= Config {
cTypes = [mPing (pvar "x"), mPong (pvar "x"), MTApp (MTyCon "Unit") []]
, cSets = []
, cUnfold = []
, cProcs = [(tpid0,
Iter (V "pi") (S "ps")
(Block [Send (PVar (V "pi")) [(mPing tpid0, Recv [(mPong (pvar "x"), Skip ())] ())] ()] ()) ())
,(tpid2, Recv [(mPing (pvar "x"), Send (pvar "x") [(mPong tpid2, Skip ())] ())] ())]
}
test1unfold :: Config ()
test1unfold = Config {
cTypes = [mPing (pvar "x"), mPong (pvar "x")]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [(tpid0,
Iter (V "pi") (S "ps")
(Send (PVar (V "pi")) [(mPing tpid0, Recv [(mPong (pvar "x"), Skip ())] ())] ()) ())
,(tpid2, Recv [(mPing (pvar "x"), Send (pvar "x") [(mPong tpid2, Skip ())] ())] ())]
}
-- ---
pingMany = Config {
cTypes = [mPing (pvar "x"), mPong (pvar "x")]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [( tpid0
, Block [ Iter (V "pi") (S "ps")
(Send (pvar "pi") [(mPing tpid0, Skip ())] ()) ()
, Iter (V "pii") (S "ps")
(Recv [(mPong (pvar "x"), Skip ())] ()) ()] ())
,( tpid2
, Recv [(mPing (pvar "x"), Send (pvar "x") [(mPong (pvar "p"), Skip ())] ())] ())
]
}
-- ---
pingManyBad = Config {
cTypes = [mPing (pvar "x"), mPong (pvar "x")]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [( tpid0
, Block [ Iter (V "pi") (S "ps")
(Send (pvar "pi") [(mPing tpid0, Skip ())] ()) ()
, Iter (V "pii") (S "ps")
(Recv [(mPong (pvar "x"), Skip ())] ()) ()] ())
,( tpid2
, Recv [(mPing (pvar "x"), Skip ())] ())
]
}
-- ---
mInt :: MType
mInt = MTApp (MTyCon "Int") []
masterSlave = Config {
cTypes = [mInt]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [(tpid0, Iter (V "pi") (S "ps")
(Recv [(mInt, Skip ())] ()) ())
,(tpid2, Send tpid0 [(mInt, Skip ())] ())]
}
-- ---
mPid :: Pid -> MType
mPid x = MTApp (MTyCon "Pid") [x]
mTT :: MType
mTT = MTApp (MTyCon "tt") []
mOK :: MType
mOK = MTApp (MTyCon "OK") []
proc_0, proc_1, proc_2 :: Stmt ()
proc_0 = Block [ Iter (V "pi") (S "ps")
(Recv [(mPid (pvar "x"), Send (pvar "x") [(mInt, Skip ())] ())] ()) ()
, Loop (LV "end_X")
(Recv [(mPid (pvar "y"), Send (pvar "y") [(mTT, Goto (LV "end_X") ())] ())] ()) ()
]
()
proc_2 = Loop (LV "Y")
(Send tpid0 [(mPid tpid2, Recv [(mInt, Send tpid1 [(mInt, Goto (LV "Y") ())] ())
,(mTT, Skip ())] ())] ()) ()
proc_1 = Iter (V "pi") (S "ps")
(Recv [(mInt, Skip ())] ()) ()
workStealing :: Config ()
workStealing = Config {
cTypes = [mInt, mPid (pvar "x"), mTT]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [(tpid0, proc_0)
,(tpid1, proc_1)
,(tpid2, proc_2)]
}
-- ----
ssend p m r = Send p [(m, r)] ()
srecv m r = Recv [(m, r)] ()
sloop x y = Loop x y ()
siter x y z = Iter x y z ()
schoice x y z = Choose x y z ()
workPushing = Config {
cTypes = [mInt]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [ (tpid0, Block [ siter (V "p") (S "ps")
(schoice (V "x") (S "ps") (ssend (pvar "x") mInt (Skip ())))
, siter (V "pp") (S "ps")
(srecv mInt (Skip ()))
] ())
, (tpid2, sloop (LV "end_X") (srecv mInt (ssend tpid0 mInt (Goto (LV "end_X") ()))))
]
}
--
mutex :: Config ()
mutex = Config
{
cTypes = [mPid (pvar "x"), mTT, mOK]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [mutexMaster, mutexPs]
}
where
mutexMaster = (tpid0, mutexMasterS)
mutexPs = (tpid2, mutexPS)
-- Master Process
mutexMasterS :: Stmt ()
mutexMasterS = Loop (LV "end_loop")
(Recv [(mPid (pvar "x"),
Send (pvar "x") [(mTT,
Recv [(mOK, Goto (LV "end_loop") ())] ())] ())] ()) ()
-- Slave Processes
mutexPS :: Stmt ()
mutexPS = Send tpid0 [(mPid tpid2, Recv [(mTT, Send tpid0 [(mOK, Skip ())] ())] ())] ()
mRelease :: Pid -> MType
mRelease v = MTApp (MTyCon "Rel") [v]
mutexBad :: Config ()
mutexBad = Config
{
cTypes = [mPid (pvar "x"), mTT, mRelease (pvar "x")]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [mutexMaster, mutexPs]
}
where
mutexMaster = (tpid0, mutexMasterS)
mutexPs = (tpid2, mutexPS)
-- Master Process
mutexMasterS :: Stmt ()
mutexMasterS = Loop (LV "X")
(Recv [(mPid (pvar "x"),
Send (pvar "x") [(mTT, Skip ())] ())] ()) ()
-- Slave Processes
mutexPS :: Stmt ()
mutexPS = Send tpid0 [(mPid tpid2, Recv [(mTT, Skip ())] ())] ()
---
choiceBad :: Config ()
choiceBad
= Config { cTypes = [mTT, mOK]
, cUnfold = []
, cSets = []
, cProcs = [ (tpid0, choiceMaster)
, (tpid1, choiceSlave)
]
}
where
choiceMaster = Send tpid1 [ (mTT, Send tpid1 [ (mTT, Skip ())
, (mOK, Skip ())
] ())
, (mOK, Send tpid1 [ (mTT, Skip ())
, (mOK, Skip ())
] ())
] ()
choiceSlave = Recv [ (mTT, Recv [ (mTT, Skip ()) ] ())
, (mOK, Recv [ (mOK, Skip ()) ] ())
] ()
---
database :: Config ()
database = Config {
cTypes = [ makeType0 "Set"
, makeType0 "Value"
, makeType1 "Get"
]
, cSets = []
, cUnfold = [Conc (S "ps") 1]
, cProcs = [ (db, dbProc)
, (ps, psProc)
, (me, meProc)
]
}
where
ps = PAbs (V "p") (S "ps")
db = PConc 0
me = PConc 1
makeType0 t = MTApp (MTyCon t) []
makeType1 t = MTApp (MTyCon t) [PVar (V "x")]
val1 t v = MTApp (MTyCon t) [v]
psProc = Loop (LV "endX")
(Recv [ (makeType0 "Set", Goto (LV "endX") ())
, (makeType1 "Get", Send (PVar (V "x")) [(makeType0 "Value", Goto (LV "endX") ())] ())
] ())
()
dbProc = Loop (LV "endX")
(Recv [ (makeType0 "Set", Choose (V "y") (S "ps") (Send (PVar (V "y")) [(makeType0 "Set", Goto (LV "endX") ())] ()) ())
, (makeType1 "Get", Choose (V "y") (S "ps") (Send (PVar (V "y")) [(makeType1 "Get", Goto (LV "endX") ())] ()) ())
] ())
()
meProc = Loop (LV "endX")
(Send db [ (makeType0 "Set", Goto (LV "endX") ())
, (val1 "Get" me, Recv [ (makeType0 "Value", Goto (LV "endX") ()) ] ())
] ())
()
| abakst/symmetry | checker/src/tests.hs | mit | 10,128 | 0 | 21 | 4,563 | 3,919 | 2,122 | 1,797 | 191 | 1 |
{-# LANGUAGE CPP #-}
{- |
Module : ./GUI/GraphMenu.hs
Description : Menu creation functions for the Graphdisplay
Copyright : (c) Thiemo Wiedemeyer, Uni Bremen 2002-2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (imports Logic)
Menu creation
-}
module GUI.GraphMenu (createGraph) where
import qualified GUI.GraphAbstraction as GA
import GUI.GraphTypes
import GUI.GraphLogic
import GUI.Utils
import GUI.UDGUtils
#ifdef GTKGLADE
import GUI.GtkDisprove
import GUI.GtkConsistencyChecker
import GUI.GtkAutomaticProofs
import GUI.GtkAddSentence
#endif
import Data.IORef
import qualified Data.Map as Map
import System.Directory (getCurrentDirectory)
import System.FilePath
import Static.DevGraph
import Static.DgUtils
import Static.PrintDevGraph ()
import Static.ComputeTheory (computeTheory)
import Static.ConsInclusions
import qualified Proofs.VSE as VSE
import Common.DocUtils
import Driver.Options (HetcatsOpts, rmSuffix, prfSuffix)
import Driver.ReadFn (libNameToFile)
import Interfaces.DataTypes
import Interfaces.Command
import Interfaces.CmdAction
import GUI.ShowRefTree
-- | Adds to the DGNodeType list style options for each type
nodeTypes :: HetcatsOpts
-> [( DGNodeType -- Nodetype
, Shape GA.NodeValue -- Shape
, String -- Color
)]
nodeTypes opts = map
((\ (n, s) -> if isProvenNode n then -- Add color
if isProvenCons n
then (n, s, getColor opts Green True False)
else (n, s, getColor opts Yellow False True)
else (n, s, getColor opts Coral False $ isProvenCons n))
. \ n -> (n, if isRefType n then Box else Ellipse) -- Add shape
) listDGNodeTypes
-- | A Map of all nodetypes and their properties.
mapNodeTypes :: HetcatsOpts -> Map.Map DGNodeType (Shape GA.NodeValue, String)
mapNodeTypes = Map.fromList . map (\ (n, s, c) -> (n, (s, c))) . nodeTypes
-- | Adds to the DGEdgeType list style options for each type
edgeTypes :: HetcatsOpts
-> [( DGEdgeType -- Edgetype
, EdgePattern GA.EdgeValue -- Lineformat
, String -- Color
, Bool -- has conservativity
)]
edgeTypes opts = map
( (\ (e, l, c) -> case edgeTypeModInc e of -- Add menu options
ThmType { thmEdgeType = GlobalOrLocalThm _ _ } -> (e, l, c, True)
GlobalDef -> (e, l, c, True)
HetDef -> (e, l, c, True)
LocalDef -> (e, l, c, True)
_ -> (e, l, c, False)
)
. (\ (e, l) -> case edgeTypeModInc e of -- Add colors
HidingDef -> (e, l, getColor opts Blue True $ not $ isInc e)
FreeOrCofreeDef _ -> (e, l, getColor opts Blue False $ not $ isInc e)
ThmType { thmEdgeType = thmType
, isProvenEdge = False } -> case thmType of
GlobalOrLocalThm { thmScope = Local, isHomThm = False }
-> (e, l, getColor opts Coral True $ not $ isInc e)
HidingThm -> (e, l, getColor opts Yellow False $ not $ isInc e)
_ -> (e, l, getColor opts Coral False $ not $ isInc e)
ThmType { thmEdgeType = thmType
, isConservativ = False } -> case thmType of
GlobalOrLocalThm { thmScope = Local, isHomThm = False }
-> (e, l, getColor opts Yellow True $ not $ isInc e)
_ -> (e, l, getColor opts Yellow False $ not $ isInc e)
ThmType { thmEdgeType = thmType
, isPending = True } -> case thmType of
GlobalOrLocalThm { thmScope = Local, isHomThm = False }
-> (e, l, getColor opts Yellow True $ not $ isInc e)
_ -> (e, l, getColor opts Yellow False $ not $ isInc e)
ThmType { thmEdgeType = thmType } -> case thmType of
GlobalOrLocalThm { thmScope = Local, isHomThm = False }
-> (e, l, getColor opts Green True $ not $ isInc e)
HidingThm -> (e, l, getColor opts Green True $ not $ isInc e)
_ -> (e, l, getColor opts Green False $ not $ isInc e)
_ -> (e, l, getColor opts Black False $ not $ isInc e)
)
. (\ e -> case edgeTypeModInc e of -- Add lineformat
ThmType { thmEdgeType = GlobalOrLocalThm { thmScope = Local
, isHomThm = True } }
-> (e, Dashed)
ThmType { thmEdgeType = GlobalOrLocalThm { isHomThm = False } }
-> (e, Double)
LocalDef -> (e, Dashed)
HetDef -> (e, Double)
_ -> (e, Solid)
)
) listDGEdgeTypes
-- | A Map of all edgetypes and their properties.
mapEdgeTypes
:: HetcatsOpts -> Map.Map DGEdgeType (EdgePattern GA.EdgeValue, String)
mapEdgeTypes =
Map.fromList . map (\ (e, l, c, _) -> (e, (l, c))) . edgeTypes
-- | Creates the graph. Runs makegraph
createGraph :: GInfo -> String -> ConvFunc -> LibFunc -> IO ()
createGraph gi title convGraph showLib = do
ost <- readIORef $ intState gi
case i_state ost of
Nothing -> return ()
Just _ -> do
let ln = libName gi
opts = options gi
file = libNameToFile ln ++ prfSuffix
deselectEdgeTypes <- newIORef []
globMenu <- createGlobalMenu gi showLib deselectEdgeTypes
GA.makeGraph (graphInfo gi)
title
(createOpen gi file convGraph showLib)
(createSave gi file)
(createSaveAs gi file)
(createClose gi)
(Just (exitGInfo gi))
globMenu
(createNodeTypes gi convGraph showLib)
(createEdgeTypes gi)
(getColor (hetcatsOpts gi) Purple False False)
$ runAndLock gi $ do
flags <- readIORef opts
writeIORef opts $ flags { flagHideNodes = False}
updateGraph gi []
-- | Returns the open-function
createOpen :: GInfo -> FilePath -> ConvFunc -> LibFunc -> Maybe (IO ())
createOpen gi file convGraph showLib = Just (
do
maybeFilePath <- fileOpenDialog file [ ("Proof", ["*.prf"])
, ("All Files", ["*"])] Nothing
case maybeFilePath of
Just fPath -> do
openProofStatus gi fPath convGraph showLib
return ()
Nothing -> fail "Could not open file."
)
-- | Returns the save-function
createSave :: GInfo -> FilePath -> Maybe (IO ())
createSave gi = Just . saveProofStatus gi
-- | Returns the saveas-function
createSaveAs :: GInfo -> FilePath -> Maybe (IO ())
createSaveAs gi file = Just (
do
maybeFilePath <- fileSaveDialog file [ ("Proof", ["*.prf"])
, ("All Files", ["*"])] Nothing
case maybeFilePath of
Just fPath -> saveProofStatus gi fPath
Nothing -> fail "Could not save file."
)
-- | Returns the close-function
createClose :: GInfo -> IO Bool
createClose gi = do
let oGrRef = openGraphs gi
updateWindowCount gi pred
oGraphs <- readIORef oGrRef
writeIORef oGrRef $ Map.delete (libName gi) oGraphs
return True
-- | Creates the global menu
createGlobalMenu :: GInfo -> LibFunc -> IORef [String]
-> IO [GlobalMenu]
createGlobalMenu gi showLib _ = do
ost <- readIORef $ intState gi
case i_state ost of
Nothing -> return []
Just _ -> do
let ln = libName gi
opts = hetcatsOpts gi
ral = runAndLock gi
performProofMenuAction cmd =
ral . proofMenu gi cmd
mkGlobProofButton cmd =
Button (menuTextGlobCmd cmd) . performProofMenuAction (GlobCmd cmd)
return
[GlobalMenu (Menu Nothing
[ Button "Undo" $ ral $ undo gi True
, Button "Redo" $ ral $ undo gi False
, Menu (Just "Hide/Show names/nodes/edges")
[ Button "Hide/Show internal node names"
$ ral $ toggleHideNames gi
, Button "Hide/Show unnamed nodes without open proofs"
$ ral $ toggleHideNodes gi
, Button "Hide/Show newly added proven edges"
$ ral $ toggleHideEdges gi
]
, Button "Focus node" $ ral $ focusNode gi
#ifdef GTKGLADE
, Button "Consistency checker"
(performProofMenuAction (GlobCmd CheckConsistencyCurrent)
$ showConsistencyChecker Nothing gi)
, Button "Automatic proofs"
(performProofMenuAction (CommentCmd "generated by \"automatic proofs\"")
$ showAutomaticProofs gi)
#endif
, Menu (Just "Proofs") $ map (\ (cmd, act) ->
mkGlobProofButton cmd $ return . return . act ln) globLibAct
++ map (\ (cmd, act) -> mkGlobProofButton cmd $ return . act ln)
globLibResultAct
++ [ Menu (Just "Flattening") $ map ( \ (cmd, act) ->
mkGlobProofButton cmd $ return . act) globResultAct ]
, Button "Dump Development Graph" $ do
ost2 <- readIORef $ intState gi
case i_state ost2 of
Nothing -> putStrLn "no lib"
Just ist2 -> print . pretty . lookupDGraph (i_ln ist2)
$ i_libEnv ist2
, Button "Dump Cons Inclusions" $ do
ost2 <- readIORef $ intState gi
case i_state ost2 of
Nothing -> putStrLn "no lib"
Just ist2 -> dumpConsInclusions (hetcatsOpts gi)
$ lookupDGraph (i_ln ist2) $ i_libEnv ist2
, Button "Show Library Graph" $ ral $ showLibGraph gi showLib
, Button "Show RefinementTree" $ ral $ showLibGraph gi showRefTree
, Button "Save Graph for uDrawGraph" $ ral
$ saveUDGraph gi (mapNodeTypes opts) $ mapEdgeTypes opts
, Button "Save proof-script" $ ral
$ askSaveProofScript (graphInfo gi) $ intState gi
])
]
-- | A list of all Node Types
createNodeTypes :: GInfo -> ConvFunc -> LibFunc
-> [(DGNodeType, DaVinciNodeTypeParms GA.NodeValue)]
createNodeTypes gi cGraph showLib = map
(\ (n, s, c) -> (n, if isRefType n
then createMenuNodeRef s c gi cGraph showLib $ isInternalSpec n
else createMenuNode s c gi $ isInternalSpec n)) $ nodeTypes $ hetcatsOpts gi
-- | the edge types (share strings to avoid typos)
createEdgeTypes :: GInfo -> [(DGEdgeType, DaVinciArcTypeParms GA.EdgeValue)]
createEdgeTypes gi =
map (\ (title, look, color, hasCons) ->
(title, look
$$$ Color color
$$$ (if hasCons then createEdgeMenuConsEdge gi
else createEdgeMenu gi)
$$$ (if hasCons then createMenuValueTitleShowConservativity
$$$ emptyArcTypeParms :: DaVinciArcTypeParms GA.EdgeValue
else emptyArcTypeParms :: DaVinciArcTypeParms GA.EdgeValue))
) $ edgeTypes $ hetcatsOpts gi
-- * methods to create the local menus of the different nodetypes
titleNormal :: ValueTitle (String, t)
titleNormal = ValueTitle $ return . fst
titleInternal :: GInfo -> ValueTitleSource (String, t)
titleInternal gi =
ValueTitleSource (\ (s, _) -> do
b <- newSimpleBroadcaster ""
let updaterIORef = internalNames gi
updater <- readIORef updaterIORef
let upd = (s, applySimpleUpdate b)
writeIORef updaterIORef $ upd : updater
return $ toSimpleSource b)
-- | local menu for the nodetypes spec and locallyEmpty_spec
createMenuNode :: Shape GA.NodeValue -> String -> GInfo -> Bool
-> DaVinciNodeTypeParms GA.NodeValue
createMenuNode shape color gi internal = shape
$$$ Color color
$$$ (if internal then Just $ titleInternal gi else Nothing)
$$$? (if internal then Nothing else Just titleNormal)
$$$? LocalMenu (Menu Nothing (map ($ gi)
[ createMenuButtonShowNodeInfo
, createMenuButtonShowTheory
, createMenuButtonTranslateTheory
, createMenuTaxonomy
, createMenuButtonShowProofStatusOfNode
, createMenuButtonProveAtNode
, createMenuButtonProveStructured
#ifdef GTKGLADE
, createMenuButtonDisproveAtNode
, createMenuButtonAddSentence
, createMenuButtonCCCAtNode
#endif
, createMenuButtonCheckCons
]))
$$$ emptyNodeTypeParms
-- | local menu for the nodetypes dg_ref and locallyEmpty_dg_ref
createMenuNodeRef :: Shape GA.NodeValue -> String -> GInfo -> ConvFunc
-> LibFunc -> Bool -> DaVinciNodeTypeParms GA.NodeValue
createMenuNodeRef shape color gi convGraph showLib internal = shape
$$$ Color color
$$$ (if internal then Just $ titleInternal gi else Nothing)
$$$? (if internal then Nothing else Just titleNormal)
$$$? LocalMenu (Menu Nothing
[ createMenuButtonShowNodeInfo gi
, createMenuButtonShowTheory gi
, createMenuButtonShowProofStatusOfNode gi
, createMenuButtonProveAtNode gi
#ifdef GTKGLADE
, createMenuButtonDisproveAtNode gi
#endif
, Button "Show referenced library"
(\ (_, n) -> showReferencedLibrary n gi convGraph showLib)
])
$$$ emptyNodeTypeParms
type ButtonMenu a = MenuPrim (Maybe String) (a -> IO ())
-- | menu button for local menus
createMenuButton :: String -> (Int -> DGraph -> IO ())
-> GInfo -> ButtonMenu GA.NodeValue
createMenuButton title menuFun gi = Button title
$ \ (_, descr) -> do
ost <- readIORef $ intState gi
case i_state ost of
Nothing -> return ()
Just ist -> do
let le = i_libEnv ist
dGraph = lookupDGraph (libName gi) le
menuFun descr dGraph
return ()
createMenuButtonShowTheory :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonShowTheory gi =
createMenuButton "Show theory" (getTheoryOfNode gi) gi
createMenuButtonTranslateTheory :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonTranslateTheory gi =
createMenuButton "Translate theory" (translateTheoryOfNode gi) gi
-- | create a sub menu for taxonomy visualisation
createMenuTaxonomy :: GInfo -> ButtonMenu GA.NodeValue
createMenuTaxonomy gi = let
passTh displayFun descr _ = do
ost <- readIORef $ intState gi
case i_state ost of
Nothing -> return ()
Just ist -> case computeTheory (i_libEnv ist) (libName gi) descr of
Just th -> displayFun (show descr) th
Nothing -> errorDialog "Error"
$ "no global theory for node " ++ show descr
in Menu (Just "Taxonomy graphs")
[ createMenuButton "Subsort graph" (passTh displaySubsortGraph) gi
, createMenuButton "Concept graph" (passTh displayConceptGraph) gi ]
createMenuButtonShowProofStatusOfNode :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonShowProofStatusOfNode gi =
createMenuButton "Show proof status" (showProofStatusOfNode gi) gi
createMenuButtonProveAtNode :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonProveAtNode gi =
createMenuButton "Prove" (proveAtNode gi) gi
createMenuButtonProveStructured :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonProveStructured gi =
createMenuButton "Prove VSE Structured" (\ descr _ ->
proofMenu gi (SelectCmd Prover $ "VSE structured: " ++ show descr)
$ VSE.prove (libName gi, descr)) gi
#ifdef GTKGLADE
createMenuButtonDisproveAtNode :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonDisproveAtNode gi =
createMenuButton "Disprove" (disproveAtNode gi) gi
createMenuButtonCCCAtNode :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonCCCAtNode gi =
createMenuButton "Check consistency" (consCheckNode gi) gi
consCheckNode :: GInfo -> Int -> DGraph -> IO ()
consCheckNode gi descr _ = proofMenu gi (GlobCmd CheckConsistencyCurrent)
$ showConsistencyChecker (Just descr) gi
createMenuButtonAddSentence :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonAddSentence gi =
createMenuButton "Add sentence" (gtkAddSentence gi) gi
#endif
createMenuButtonCheckCons :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonCheckCons gi =
createMenuButton "Check conservativity"
(checkconservativityOfNode gi) gi
createMenuButtonShowNodeInfo :: GInfo -> ButtonMenu GA.NodeValue
createMenuButtonShowNodeInfo =
createMenuButton "Show node info" showNodeInfo
-- * methods to create the local menus for the edges
createEdgeMenu :: GInfo -> LocalMenu GA.EdgeValue
createEdgeMenu = LocalMenu . createMenuButtonShowEdgeInfo
createEdgeMenuConsEdge :: GInfo -> LocalMenu GA.EdgeValue
createEdgeMenuConsEdge gi = LocalMenu $ Menu Nothing
[ createMenuButtonShowEdgeInfo gi
, createMenuButtonCheckconservativityOfEdge gi]
createMenuButtonShowEdgeInfo :: GInfo -> ButtonMenu GA.EdgeValue
createMenuButtonShowEdgeInfo _ = Button "Show info"
(\ (_, EdgeId descr, maybeLEdge) -> showEdgeInfo descr maybeLEdge)
createMenuButtonCheckconservativityOfEdge :: GInfo -> ButtonMenu GA.EdgeValue
createMenuButtonCheckconservativityOfEdge gi =
Button "Check conservativity"
(\ (_, EdgeId descr, maybeLEdge) ->
checkconservativityOfEdge descr gi maybeLEdge)
createMenuValueTitleShowConservativity :: ValueTitle GA.EdgeValue
createMenuValueTitleShowConservativity = ValueTitle
(\ (_, _, maybeLEdge) -> return $ case maybeLEdge of
Just (_, _, edgelab) -> showConsStatus $ getEdgeConsStatus edgelab
Nothing -> "")
-- Suggests a proof-script filename.
getProofScriptFileName :: String -> IO FilePath
getProofScriptFileName f = let fn = f <.> "hpf" in
if isAbsolute fn then return fn else
fmap (</> fn) getCurrentDirectory
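-- (added note) For example, if the current directory is @/home/user@ then
-- @getProofScriptFileName "Basic/Numbers"@ yields
-- @/home/user/Basic/Numbers.hpf@, while an absolute argument only has the
-- @.hpf@ extension appended.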
-- | Displays a Save-As dialog and writes the proof-script.
askSaveProofScript :: GA.GraphInfo -> IORef IntState -> IO ()
askSaveProofScript gi ch = do
h <- readIORef ch
case undoList $ i_hist h of
[] -> infoDialog "Information" "The history is empty. No file written."
_ -> do
ff <- getProofScriptFileName $ rmSuffix $ filename h
maybeFilePath <- fileSaveDialog ff [ ("Proof Script", ["*.hpf"])
, ("All Files", ["*"])] Nothing
case maybeFilePath of
Just fPath -> do
GA.showTemporaryMessage gi "Saving proof script ..."
saveCommandHistory ch fPath
GA.showTemporaryMessage gi $ "Proof script saved to " ++ fPath ++ "!"
Nothing -> GA.showTemporaryMessage gi "Aborted!"
-- Saves the history of commands in a file.
saveCommandHistory :: IORef IntState -> String -> IO ()
saveCommandHistory c f = do
h <- readIORef c
let history = [ "# automatically generated hets proof-script", ""
, "use " ++ filename h, ""]
++ reverse (map (showCmd . command) $ undoList $ i_hist h)
writeFile f $ unlines history
| spechub/Hets | GUI/GraphMenu.hs | gpl-2.0 | 18,461 | 0 | 28 | 4,812 | 5,038 | 2,588 | 2,450 | 345 | 21 |
module HyLoRes.Core.SMP.Dispatcher(
DispatcherChans(..), runDispatcher,
DispatchAlgorithm (..), fromDispatchAlgString
)
where
import Prelude hiding ( log )
import Control.Monad.State
import Control.Monad.Reader
import Control.Concurrent.Chan
import Control.Applicative ( (<$>) )
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Set ( Set )
import qualified Data.Set as Set
import Data.Array ( Array, (!), bounds, rangeSize )
import Data.List (foldl', partition, sortBy)
import Data.Foldable ( foldMap )
import Data.Maybe ( Maybe (..), catMaybes )
import HyLoRes.Util ( compareUsing )
import HyLo.Signature ( getSignature )
import HyLoRes.Clause ( size )
import HyLoRes.Clause.BasicClause ( BasicClause, mergeAllDiamonds)
import HyLoRes.Clause.FullClause ( FullClause, ClauseId,
distFormula, makeFullClause,
specialize,
opaqueClause )
import HyLoRes.Formula.TypeLevel ( Spec(..) )
import HyLoRes.Clause.SelFunction ( SelFunc )
import HyLoRes.ClauseSet.InUse ( InUseClauseSet, add, newSet )
import qualified HyLoRes.ClauseSet.InUse as IU
import HyLoRes.Util.Timeout ( TimeoutSignal, isTimeout )
import HyLoRes.Formula ( NomSym, At, Nom, Opaque, label,
flatten, Diam, level, nomId)
import HyLoRes.Util.Classify ( classifyListByM )
import HyLoRes.Core.Worker.Base ( Result (..) )
import HyLoRes.Core.SMP.Base ( WorkerId,
ChDispatcherToMain,
ChWorkerToDispatcher, ChDispatcherToWorker,
MsgDispatcherToWorker(..),
MsgWorkerToDispatcher(..) )
import HyLoRes.Logger ( MonadLogger, LoggerT, runLoggerT, LoggerCfg,
log, LoggerEvent(..) )
import HyLoRes.ModelGen ( buildHerbrandModel )
type DispatchFunc = NomSym -> Dispatcher (WorkerId)
type NomWorkerMap = Map NomSym WorkerId
type WorkerLoadMap = Map WorkerId Int
data DispatchAlgorithm = HASH | RROBIN | LOAD
data DispatcherChans = DC{dtoMain :: ChDispatcherToMain,
toWorkers :: Array WorkerId ChDispatcherToWorker,
fromWorkers :: ChWorkerToDispatcher}
data DispatcherEnv = E{dispatcherChans :: DispatcherChans,
workerCount :: Int,
timeout_signal :: TimeoutSignal,
dispatchFunc :: DispatchFunc}
newtype Dispatcher a = D{unD :: ReaderT DispatcherEnv (
LoggerT (
StateT DispatcherState (
IO))) a}
deriving (Functor, Monad, MonadLogger, MonadIO)
instance MonadReader DispatcherEnv Dispatcher where
ask = D ask
local r (D m) = D (local r m)
data DispatcherState =
S{newClauses :: [BasicClause],
nextClId :: ClauseId,
workerMap :: NomWorkerMap,
workerLoadMap :: WorkerLoadMap,
nextWorker :: WorkerId,
msgs :: Map WorkerId (Set (FullClause (At Opaque)))}
instance MonadState DispatcherState Dispatcher where
get = D (lift . lift $ get)
put s = D (lift . lift $ put s)
runDispatcher :: DispatcherChans
-> LoggerCfg
-> SelFunc
-> [BasicClause]
-> TimeoutSignal
-> DispatchAlgorithm
-> IO ()
runDispatcher chans lc sf init_cls ts da
= (`evalStateT` initialState)
. (`runLoggerT` ("[D]: ", lc))
. (`runReaderT` env)
. unD
$ dispatcherLoop sf init_cls
--
where env = E{dispatcherChans = chans,
workerCount = rangeSize . bounds $ toWorkers chans,
timeout_signal = ts,
dispatchFunc = selectAlg (workerCount env) da}
--
initialState = S{newClauses = [],
nextClId = 0,
workerMap = Map.empty,
workerLoadMap = Map.empty,
msgs = Map.empty,
nextWorker = 0}
fromDispatchAlgString :: String -> DispatchAlgorithm
fromDispatchAlgString "HASH" = HASH
fromDispatchAlgString "RROBIN" = RROBIN
fromDispatchAlgString "LOAD" = LOAD
fromDispatchAlgString s        = error $ "fromDispatchAlgString: unknown algorithm " ++ s
selectAlg :: Int -> DispatchAlgorithm -> DispatchFunc
selectAlg n HASH = hash n
selectAlg n RROBIN = rrobin n
selectAlg n LOAD = load n
hash :: Int -> NomSym -> Dispatcher (WorkerId)
hash n nom = do let wId = maybe (fromIntegral $ level nom) fromIntegral (nomId nom)
return $ wId `mod` (fromIntegral n)
rrobin :: Int -> NomSym -> Dispatcher (WorkerId)
rrobin n nom = do s <- get
case Map.lookup nom (workerMap s) of
Just i -> return i
Nothing -> do let nw = nextWorker s
put s{nextWorker = (nw + 1) `mod` (fromIntegral n),
workerMap = Map.insert nom nw (workerMap s) }
return nw
load :: Int -> NomSym -> Dispatcher (WorkerId)
load _ nom = do s <- get
case Map.lookup nom (workerMap s) of
Just i -> return i
Nothing -> do let l = Map.toList (workerLoadMap s)
log L_Comm $ show l
let nw = fst (foldl' f2 (0,10000000) l)
put s{workerMap = Map.insert nom nw (workerMap s)}
return nw
f2 :: (WorkerId, Int) -> (WorkerId, Int) -> (WorkerId, Int)
f2 a b = if (snd a) < (snd b) then a else b
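-- (added note) The three strategies differ only in how a nominal is mapped
-- to a worker.  With 4 workers, for instance: HASH sends a nominal to worker
-- (nomId `mod` 4), falling back on its level, and keeps no state; RROBIN
-- assigns the first unseen nominal to worker 0, the next to worker 1, and so
-- on, remembering the choice in workerMap; LOAD assigns an unseen nominal to
-- the currently least loaded worker (per workerLoadMap) and memoises the
-- choice in workerMap as well.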
dispatcherLoop :: SelFunc -> [BasicClause] -> Dispatcher ()
dispatcherLoop sf init_cls =
do addAllToNew init_cls
n <- fromIntegral <$> (asks workerCount)
forM_ [0..n-1] $ \workerId -> do
modify $ \s -> s{workerLoadMap = Map.insert workerId 0 (workerLoadMap s)}
wasSAT <- loopAux sf 0 0
--
sendStopToWorkers
response <- if wasSAT
then do inuse <- getInUseFromWorkers
let init_sig = foldMap getSignature init_cls
return (SAT (buildHerbrandModel init_sig inuse ))
--
else return INTERRUPTED
--
mch <- channel dtoMain
liftIO $ writeChan mch response
loopAux :: SelFunc -> Int -> Int -> Dispatcher Bool
loopAux sf counter waiting =
do timeout <- isTimeout =<< asks timeout_signal
if timeout
then return False
else do (dispatched, waiting') <- dispatchClauses waiting sf
log L_Comm $ concat [show dispatched, " more clauses dispatched"]
--
if (counter + dispatched > 0)
then do (processed, new) <- readWorkerMsgs
let num_received = length new
--
log L_Comm $ concat [show processed, " clauses processed by workers, ",
show num_received, " new clauses received"]
--
addAllToNew new
let currently_waiting = counter + dispatched - processed
log L_Comm $ concat ["Currently waiting for ", show currently_waiting,
" clauses, and ",
show $ length new, " to dispatch"]
--
if (currently_waiting > 0 || length new > 0)
then loopAux sf currently_waiting waiting'
else return True
else return True
dispatchClauses :: Int -> SelFunc -> Dispatcher (Int, Int)
dispatchClauses waiting sf =
do new <- gets newClauses
let sortedNew = sortBy (compareUsing size) new
fullCls <- mapM (mkFullClause sf) sortedNew
if ( length fullCls == 0)
then return (0, waiting)
else do n <- asks workerCount
if (n == 1)
then do workers <- channel toWorkers
liftIO $ writeChan (workers ! 0 ) (D2W_CLAUSES fullCls )
return $ ( length fullCls, waiting + (length fullCls))
else do df <- asks dispatchFunc
let (eq, notEq) = partition isEq fullCls
cl <- classifyListByM (df . label . distFormula) notEq
when (not . null $ eq) $
addToAllWorkers eq
let diam = catMaybes $ map asRelDiam notEq
cl2 <- classifyListByM (df . thrd . flatten . distFormula) diam
mapM_ addToWorkerMsg (map (\(a,b) -> (a, b)) $ (Map.toList cl))
mapM_ addToWorkerMsg (map (\(a,b) -> (a, (map opaqueClause b))) $ (Map.toList cl2))
dispatched <- sendMsgsToWorkers
return $ (dispatched,waiting + dispatched)
where thrd (_, _, x) = x
addToWorkerMsg :: (WorkerId , [FullClause (At Opaque)]) -> Dispatcher ()
addToWorkerMsg (n,clauses) =
do s <- get
let set = Map.findWithDefault Set.empty n (msgs s)
let set' = Set.union set (Set.fromList clauses)
put s {msgs = Map.insert n set' (msgs s)}
sendMsgsToWorkers :: Dispatcher (Int)
sendMsgsToWorkers = do n <- fromIntegral <$> (asks workerCount)
res <- forM [0..n-1] $ \workerId -> do
s <- get
workers <- channel toWorkers
let set = Map.findWithDefault Set.empty workerId (msgs s)
if (Set.size set) > 0
then do let fcls = Set.toList set
liftIO $ writeChan (workers ! workerId) (D2W_CLAUSES fcls)
put s{workerLoadMap = Map.insertWith (+) workerId (length fcls) (workerLoadMap s)}
log L_Comm $ concat ["Sent ", show (length $ (fcls)), " to Worker ", show workerId]
return $ length fcls
else return 0
modify $ \s -> s{msgs = Map.empty}
return $ sum res
isEq :: FullClause (At Opaque) -> Bool
isEq cl = case specialize cl of { AtNom c -> size c == 1; _ -> False }
asRelDiam :: FullClause (At Opaque) -> Maybe (FullClause (At (Diam Nom)))
asRelDiam cl = case specialize cl of { AtDiamNom c -> Just c; _ -> Nothing }
mkFullClause :: SelFunc -> BasicClause -> Dispatcher (FullClause (At Opaque))
mkFullClause sf cl = do s <- get
put s{nextClId = 1 + nextClId s}
return $ makeFullClause sf (nextClId s) cl
readWorkerMsgs :: Dispatcher (Int, [BasicClause])
readWorkerMsgs = do m <- asks workerCount
workerChan <- channel fromWorkers
res <- forM [0..m-1] $ \_ -> do
emptyChan <- liftIO $ isEmptyChan workerChan
if ( emptyChan )
then return (0,[])
else do NEW p n <- liftIO $ readChan workerChan
return (p,n)
let r = foldl' f1 (0,[]) res
if (fst r == 0)
then do NEW p n <- liftIO $ readChan workerChan
return (p,n)
else return r
f1 :: (Int, [BasicClause]) -> (Int, [BasicClause]) -> (Int, [BasicClause])
f1 a b = ((fst a) + (fst b), (snd a) ++ (snd b))
addToNew :: BasicClause -> Dispatcher ()
addToNew cl = do s <- get
let cl' = mergeAllDiamonds cl
put s{newClauses = (newClauses s) ++ [cl']}
{- addAllToNew: Given
     - a list of BasicClauses
   adds all the given clauses to the newClauses list of the dispatcher state
   (using addToNew)
-}
addAllToNew :: [BasicClause] -> Dispatcher ()
addAllToNew cls = do modify $ \s -> s{newClauses = []}
mapM_ addToNew cls
sendStopToWorkers :: Dispatcher ()
sendStopToWorkers =
do n <- fromIntegral <$> (asks workerCount)
forM_ [0..n-1] $ \workerId -> do
workers <- channel toWorkers
liftIO $ writeChan (workers ! workerId) GET_INUSE
addToAllWorkers :: [FullClause (At Opaque)] -> Dispatcher ()
addToAllWorkers fcls =
do n <- fromIntegral <$> (asks workerCount)
forM_ [0..n-1] $ \workerId -> do
addToWorkerMsg (workerId, fcls)
getInUseFromWorkers :: Dispatcher InUseClauseSet
getInUseFromWorkers =
do n <- (asks workerCount)
workerChan <- channel fromWorkers
clauses <- forM [1..n] $ \_ -> do
msg <- liftIO $ readChan workerChan
case msg of
INUSE cls -> return cls
NEW _ _ -> do INUSE cls' <- liftIO $ readChan workerChan
return cls'
let cls = foldl (++) [] clauses
return $ execState(mapM_ addToInUse cls) newSet
addToInUse ::FullClause (At Opaque) -> State InUseClauseSet ()
addToInUse cl = do iu <- get
put $ IU.add cl iu
channel :: (DispatcherChans -> a) -> Dispatcher a
channel f = asks (f . dispatcherChans)
| nevrenato/HyLoRes_Source | src/HyLoRes/Core/SMP/Dispatcher.hs | gpl-2.0 | 13,723 | 10 | 22 | 5,309 | 4,023 | 2,088 | 1,935 | -1 | -1 |
--
--
-- (C) 2011-14 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software Foundation,
-- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
-- The full GNU General Public License is included in this distribution in
-- the file called "COPYING".
{-# LANGUAGE DeriveDataTypeable #-}
module Options where
import System.Console.CmdArgs
-- Command line options
--
data Options = Options
{
config_file :: String,
dont_rebuild :: Bool
} deriving (Data, Typeable, Show)
options :: Mode (CmdArgs Options)
options = cmdArgsMode $ Options
{
config_file = "" &= typ "FILE" &= help "Config file",
dont_rebuild = False &= help "Don't rebuild itself"
} &= summary "pfqd: pfq group manager." &= program "pfqd"
| pandaychen/PFQ | user/pfqd/src/Options.hs | gpl-2.0 | 1,409 | 0 | 12 | 308 | 141 | 87 | 54 | 12 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module SpecHelper ( module X
, valueOf
) where
import Test.Hspec as X
import Text.ParserCombinators.Parsec as X
import Text.ParserCombinators.Parsec.Error as X (ParseError,
errorMessages)
import Aar.Lexer.Token
import Aar.Parser.Numeric.Int
instance Eq ParseError where
a == b = errorMessages a == errorMessages b
valueOf (Right (TokInt _ v)) = v
| aar-lang/aar | test/SpecHelper.hs | gpl-3.0 | 571 | 0 | 9 | 237 | 110 | 66 | 44 | 12 | 1 |
module Carnap.Core.ModelChecking.Parser() where
import Carnap.Core.ModelChecking.ModelFinder
import Carnap.Languages.Util.LanguageClasses (BooleanLanguage)
import Carnap.Languages.Util.GenericParsers
import Text.Parsec
import Text.Parsec.Expr
import Text.Parsec.Char
| opentower/carnap | Carnap/src/Carnap/Core/ModelChecking/Parser.hs | gpl-3.0 | 268 | 0 | 5 | 17 | 54 | 37 | 17 | 7 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3.CreateBucket
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new bucket.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonS3/latest/API/CreateBucket.html AWS API Reference> for CreateBucket.
module Network.AWS.S3.CreateBucket
(
-- * Creating a Request
createBucket
, CreateBucket
-- * Request Lenses
, cbGrantReadACP
, cbGrantWriteACP
, cbGrantRead
, cbGrantFullControl
, cbCreateBucketConfiguration
, cbGrantWrite
, cbACL
, cbBucket
-- * Destructuring the Response
, createBucketResponse
, CreateBucketResponse
-- * Response Lenses
, cbrsLocation
, cbrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.S3.Types
import Network.AWS.S3.Types.Product
-- | /See:/ 'createBucket' smart constructor.
data CreateBucket = CreateBucket'
{ _cbGrantReadACP :: !(Maybe Text)
, _cbGrantWriteACP :: !(Maybe Text)
, _cbGrantRead :: !(Maybe Text)
, _cbGrantFullControl :: !(Maybe Text)
, _cbCreateBucketConfiguration :: !(Maybe CreateBucketConfiguration)
, _cbGrantWrite :: !(Maybe Text)
, _cbACL :: !(Maybe BucketCannedACL)
, _cbBucket :: !BucketName
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateBucket' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbGrantReadACP'
--
-- * 'cbGrantWriteACP'
--
-- * 'cbGrantRead'
--
-- * 'cbGrantFullControl'
--
-- * 'cbCreateBucketConfiguration'
--
-- * 'cbGrantWrite'
--
-- * 'cbACL'
--
-- * 'cbBucket'
createBucket
:: BucketName -- ^ 'cbBucket'
-> CreateBucket
createBucket pBucket_ =
CreateBucket'
{ _cbGrantReadACP = Nothing
, _cbGrantWriteACP = Nothing
, _cbGrantRead = Nothing
, _cbGrantFullControl = Nothing
, _cbCreateBucketConfiguration = Nothing
, _cbGrantWrite = Nothing
, _cbACL = Nothing
, _cbBucket = pBucket_
}
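-- An illustrative sketch (added, not part of the generated module): the
-- smart constructor fills in the required bucket name, and optional fields
-- are set through their lenses.  The operators '&' and '?~' are assumed to
-- come from a lens library, and the literals rely on @OverloadedStrings@;
-- the canonical user id below is a placeholder.
--
-- > createBucket "my-example-bucket"
-- >   & cbGrantFullControl ?~ "id=<canonical-user-id>"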
-- | Allows grantee to read the bucket ACL.
cbGrantReadACP :: Lens' CreateBucket (Maybe Text)
cbGrantReadACP = lens _cbGrantReadACP (\ s a -> s{_cbGrantReadACP = a});
-- | Allows grantee to write the ACL for the applicable bucket.
cbGrantWriteACP :: Lens' CreateBucket (Maybe Text)
cbGrantWriteACP = lens _cbGrantWriteACP (\ s a -> s{_cbGrantWriteACP = a});
-- | Allows grantee to list the objects in the bucket.
cbGrantRead :: Lens' CreateBucket (Maybe Text)
cbGrantRead = lens _cbGrantRead (\ s a -> s{_cbGrantRead = a});
-- | Allows grantee the read, write, read ACP, and write ACP permissions on
-- the bucket.
cbGrantFullControl :: Lens' CreateBucket (Maybe Text)
cbGrantFullControl = lens _cbGrantFullControl (\ s a -> s{_cbGrantFullControl = a});
-- | Undocumented member.
cbCreateBucketConfiguration :: Lens' CreateBucket (Maybe CreateBucketConfiguration)
cbCreateBucketConfiguration = lens _cbCreateBucketConfiguration (\ s a -> s{_cbCreateBucketConfiguration = a});
-- | Allows grantee to create, overwrite, and delete any object in the
-- bucket.
cbGrantWrite :: Lens' CreateBucket (Maybe Text)
cbGrantWrite = lens _cbGrantWrite (\ s a -> s{_cbGrantWrite = a});
-- | The canned ACL to apply to the bucket.
cbACL :: Lens' CreateBucket (Maybe BucketCannedACL)
cbACL = lens _cbACL (\ s a -> s{_cbACL = a});
-- | Undocumented member.
cbBucket :: Lens' CreateBucket BucketName
cbBucket = lens _cbBucket (\ s a -> s{_cbBucket = a});
instance AWSRequest CreateBucket where
type Rs CreateBucket = CreateBucketResponse
request = putXML s3
response
= receiveEmpty
(\ s h x ->
CreateBucketResponse' <$>
(h .#? "Location") <*> (pure (fromEnum s)))
instance ToElement CreateBucket where
toElement
= mkElement
"{http://s3.amazonaws.com/doc/2006-03-01/}CreateBucketConfiguration"
.
_cbCreateBucketConfiguration
instance ToHeaders CreateBucket where
toHeaders CreateBucket'{..}
= mconcat
["x-amz-grant-read-acp" =# _cbGrantReadACP,
"x-amz-grant-write-acp" =# _cbGrantWriteACP,
"x-amz-grant-read" =# _cbGrantRead,
"x-amz-grant-full-control" =# _cbGrantFullControl,
"x-amz-grant-write" =# _cbGrantWrite,
"x-amz-acl" =# _cbACL]
instance ToPath CreateBucket where
toPath CreateBucket'{..}
= mconcat ["/", toBS _cbBucket]
instance ToQuery CreateBucket where
toQuery = const mempty
-- | /See:/ 'createBucketResponse' smart constructor.
data CreateBucketResponse = CreateBucketResponse'
{ _cbrsLocation :: !(Maybe Text)
, _cbrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateBucketResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbrsLocation'
--
-- * 'cbrsResponseStatus'
createBucketResponse
:: Int -- ^ 'cbrsResponseStatus'
-> CreateBucketResponse
createBucketResponse pResponseStatus_ =
CreateBucketResponse'
{ _cbrsLocation = Nothing
, _cbrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
cbrsLocation :: Lens' CreateBucketResponse (Maybe Text)
cbrsLocation = lens _cbrsLocation (\ s a -> s{_cbrsLocation = a});
-- | The response status code.
cbrsResponseStatus :: Lens' CreateBucketResponse Int
cbrsResponseStatus = lens _cbrsResponseStatus (\ s a -> s{_cbrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-s3/gen/Network/AWS/S3/CreateBucket.hs | mpl-2.0 | 6,358 | 0 | 13 | 1,435 | 1,086 | 639 | 447 | 131 | 1 |
{-# Language CPP #-}
-- Copyright 2009-2010 Corey O'Connor
-- | Xterm output driver. This uses the Terminfo driver with some
-- extensions for Xterm.
module Graphics.Vty.Output.XTermColor
( reserveTerminal
)
where
import Graphics.Vty.Output.Interface
import Graphics.Vty.Input.Mouse
import Graphics.Vty.Input.Focus
import qualified Graphics.Vty.Output.TerminfoBased as TerminfoBased
import Blaze.ByteString.Builder (writeToByteString)
import Blaze.ByteString.Builder.Word (writeWord8)
import Control.Monad (void, when)
import Control.Monad.Trans
import Data.Char (toLower)
import Data.IORef
import System.Posix.IO (fdWrite)
import System.Posix.Types (Fd)
import System.Posix.Env (getEnv)
import Data.List (isInfixOf)
import Data.Maybe (catMaybes)
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid ((<>))
#endif
-- | Construct an Xterm output driver. Initialize the display to UTF-8.
reserveTerminal :: ( Applicative m, MonadIO m ) => String -> Fd -> m Output
reserveTerminal variant outFd = liftIO $ do
let flushedPut = void . fdWrite outFd
    -- If the terminal variant is xterm-color, use xterm instead since,
    -- more often than not, xterm-color is broken.
let variant' = if variant == "xterm-color" then "xterm" else variant
utf8a <- utf8Active
when (not utf8a) $ flushedPut setUtf8CharSet
t <- TerminfoBased.reserveTerminal variant' outFd
mouseModeStatus <- newIORef False
focusModeStatus <- newIORef False
pasteModeStatus <- newIORef False
let xtermSetMode t' m newStatus = do
curStatus <- getModeStatus t' m
when (newStatus /= curStatus) $
case m of
Focus -> liftIO $ do
case newStatus of
True -> flushedPut requestFocusEvents
False -> flushedPut disableFocusEvents
writeIORef focusModeStatus newStatus
Mouse -> liftIO $ do
case newStatus of
True -> flushedPut requestMouseEvents
False -> flushedPut disableMouseEvents
writeIORef mouseModeStatus newStatus
BracketedPaste -> liftIO $ do
case newStatus of
True -> flushedPut enableBracketedPastes
False -> flushedPut disableBracketedPastes
writeIORef pasteModeStatus newStatus
Hyperlink -> setMode t Hyperlink newStatus
xtermGetMode Mouse = liftIO $ readIORef mouseModeStatus
xtermGetMode Focus = liftIO $ readIORef focusModeStatus
xtermGetMode BracketedPaste = liftIO $ readIORef pasteModeStatus
xtermGetMode Hyperlink = getModeStatus t Hyperlink
let t' = t
{ terminalID = terminalID t ++ " (xterm-color)"
, releaseTerminal = do
when (not utf8a) $ liftIO $ flushedPut setDefaultCharSet
setMode t' BracketedPaste False
setMode t' Mouse False
setMode t' Focus False
releaseTerminal t
, mkDisplayContext = \tActual r -> do
dc <- mkDisplayContext t tActual r
return $ dc { inlineHack = xtermInlineHack t' }
, supportsMode = const True
, getModeStatus = xtermGetMode
, setMode = xtermSetMode t'
}
return t'
utf8Active :: IO Bool
utf8Active = do
let vars = ["LC_ALL", "LANG", "LC_CTYPE"]
results <- map (toLower <$>) . catMaybes <$> mapM getEnv vars
let matches = filter ("utf8" `isInfixOf`) results <>
filter ("utf-8" `isInfixOf`) results
return $ not $ null matches
-- | Enable bracketed paste mode:
-- http://cirw.in/blog/bracketed-paste
enableBracketedPastes :: String
enableBracketedPastes = "\ESC[?2004h"
-- | Disable bracketed paste mode:
disableBracketedPastes :: String
disableBracketedPastes = "\ESC[?2004l"
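-- (added sketch, not part of the original module) Application code normally
-- does not emit these sequences itself; it toggles the feature through the
-- 'Output' interface, e.g.
--
-- > out <- reserveTerminal "xterm-256color" fd   -- fd: the terminal's Fd
-- > setMode out BracketedPaste True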
-- | These sequences set xterm based terminals to UTF-8 output.
--
-- There is no known terminfo capability equivalent to this.
setUtf8CharSet, setDefaultCharSet :: String
setUtf8CharSet = "\ESC%G"
setDefaultCharSet = "\ESC%@"
xtermInlineHack :: Output -> IO ()
xtermInlineHack t = do
let writeReset = foldMap (writeWord8.toEnum.fromEnum) "\ESC[K"
outputByteBuffer t $ writeToByteString writeReset
| jtdaugherty/vty | src/Graphics/Vty/Output/XTermColor.hs | bsd-3-clause | 4,409 | 0 | 23 | 1,234 | 946 | 490 | 456 | 86 | 11 |
{-# LANGUAGE RankNTypes, FlexibleContexts #-}
{-
This module is meant primarily for instructive and pedagogical purposes.
As such, it is not fully featured, and sacrifices performance and generality
for clarity of code.
-}
module Data.Iteratee.Codecs.Wave {-# DEPRECATED "This will be moved to a separate package in the future" #-} (
WAVEDE (..),
WAVEDE_ENUM (..),
WAVE_CHUNK (..),
AudioFormat (..),
waveReader,
readRiff,
waveChunk,
chunkToString,
dictReadFormat,
dictReadFirstFormat,
dictReadLastFormat,
dictReadFirstData,
dictReadLastData,
dictReadData,
dictProcessData
)
where
import Prelude as P
import Control.Monad (join)
import Control.Monad.Trans (lift)
import Data.Iteratee
import qualified Data.Iteratee as Iter
import Data.Iteratee.Binary
import Data.Char (chr, ord)
import Data.Int
import Data.Word
import Data.Bits (shiftL)
import Data.Maybe
import qualified Data.IntMap as IM
-- =====================================================
-- WAVE library code
-- useful type synonyms
-- |A WAVE directory is a list associating WAVE chunks with
-- a record WAVEDE
type WAVEDict = IM.IntMap [WAVEDE]
data WAVEDE = WAVEDE{
wavede_count :: Int, -- ^length of chunk
wavede_type :: WAVE_CHUNK, -- ^type of chunk
wavede_enum :: WAVEDE_ENUM -- ^enumerator to get values of chunk
}
type EnumeratorM sFrom sTo m a = Iteratee sTo m a -> m (Iteratee sFrom m a)
joinL :: (Monad m, Nullable s) => m (Iteratee s m a) -> Iteratee s m a
joinL = join . lift
data WAVEDE_ENUM =
WEN_BYTE (forall a. EnumeratorM [Word8] [Word8] IO a)
| WEN_DUB (forall a. EnumeratorM [Word8] [Double] IO a)
-- |Standard WAVE Chunks
data WAVE_CHUNK = WAVE_FMT -- ^Format
| WAVE_DATA -- ^Data
| WAVE_OTHER String -- ^Other
deriving (Eq, Ord, Show)
instance Enum WAVE_CHUNK where
fromEnum WAVE_FMT = 1
fromEnum WAVE_DATA = 2
fromEnum (WAVE_OTHER _) = 3
toEnum 1 = WAVE_FMT
toEnum 2 = WAVE_DATA
toEnum 3 = WAVE_OTHER ""
toEnum _ = error "Invalid enumeration value"
-- -----------------
-- wave chunk reading/writing functions
-- |Convert a string to WAVE_CHUNK type
waveChunk :: String -> Maybe WAVE_CHUNK
waveChunk str
| str == "fmt " = Just WAVE_FMT
| str == "data" = Just WAVE_DATA
| P.length str == 4 = Just $ WAVE_OTHER str
| otherwise = Nothing
-- |Convert a WAVE_CHUNK to the representative string
chunkToString :: WAVE_CHUNK -> String
chunkToString WAVE_FMT = "fmt "
chunkToString WAVE_DATA = "data"
chunkToString (WAVE_OTHER str) = str
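-- Hedged illustration (not in the original source):
--
-- > waveChunk "fmt "                        -- == Just WAVE_FMT
-- > fmap chunkToString (waveChunk "LIST")   -- == Just "LIST"
-- > waveChunk "xy"                          -- == Nothing (not 4 characters)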
-- -----------------
data AudioFormat = AudioFormat {
numberOfChannels :: NumChannels, -- ^Number of channels in the audio data
sampleRate :: SampleRate, -- ^Sample rate of the audio
bitDepth :: BitDepth -- ^Bit depth of the audio data
} deriving (Show, Eq)
type NumChannels = Integer
type SampleRate = Integer
type BitDepth = Integer
-- convenience function to read a 4-byte ASCII string
stringRead4 :: Monad m => Iteratee [Word8] m String
stringRead4 = do
s1 <- Iter.head
s2 <- Iter.head
s3 <- Iter.head
s4 <- Iter.head
return $ map (chr . fromIntegral) [s1, s2, s3, s4]
-- -----------------
-- |The library function to read the WAVE dictionary
waveReader :: Iteratee [Word8] IO (Maybe WAVEDict)
waveReader = do
readRiff
tot_size <- endianRead4 LSB
readRiffWave
chunks_m <- findChunks $ fromIntegral tot_size
loadDict $ joinM chunks_m
-- |Read the RIFF header of a file.
readRiff :: Iteratee [Word8] IO ()
readRiff = do
cnt <- heads $ fmap (fromIntegral . ord) "RIFF"
if cnt == 4 then return () else throwErr $ iterStrExc "Bad RIFF header"
-- | Read the WAVE part of the RIFF header.
readRiffWave :: Iteratee [Word8] IO ()
readRiffWave = do
cnt <- heads $ fmap (fromIntegral . ord) "WAVE"
if cnt == 4 then return () else throwErr $ iterStrExc "Bad RIFF/WAVE header"
-- | An internal function to find all the chunks. It assumes that the
-- stream is positioned to read the first chunk.
findChunks :: Int -> Iteratee [Word8] IO (Maybe [(Int, WAVE_CHUNK, Int)])
findChunks n = findChunks' 12 []
where
findChunks' offset acc = do
typ <- stringRead4
count <- endianRead4 LSB
case waveChunk typ of
Nothing -> (throwErr . iterStrExc $ "Bad subchunk descriptor: " ++ show typ)
>> return Nothing
Just chk -> let newpos = offset + 8 + count in
case newpos >= fromIntegral n of
True -> return . Just $ reverse $
(fromIntegral offset, chk, fromIntegral count) : acc
False -> do
Iter.seek $ fromIntegral newpos
findChunks' newpos $
(fromIntegral offset, chk, fromIntegral count) : acc
loadDict :: [(Int, WAVE_CHUNK, Int)] ->
Iteratee [Word8] IO (Maybe WAVEDict)
loadDict = P.foldl read_entry (return (Just IM.empty))
where
read_entry dictM (offset, typ, count) = dictM >>=
maybe (return Nothing) (\dict -> do
enum_m <- readValue dict offset typ count
case (enum_m, IM.lookup (fromEnum typ) dict) of
(Just enum, Nothing) -> --insert new entry
return . Just $ IM.insert (fromEnum typ)
[WAVEDE (fromIntegral count) typ enum] dict
(Just enum, Just _vals) -> --existing entry
return . Just $ IM.update
(\ls -> Just $ ls ++ [WAVEDE (fromIntegral count) typ enum])
(fromEnum typ) dict
(Nothing, _) -> return (Just dict)
)
readValue :: WAVEDict ->
Int -> -- Offset
WAVE_CHUNK -> -- Chunk type
Int -> -- Count
Iteratee [Word8] IO (Maybe WAVEDE_ENUM)
readValue _dict offset _ 0 = do
throwErr . iterStrExc $ "Zero count in the entry of chunk at: " ++ show offset
return Nothing
readValue dict offset WAVE_DATA count = do
fmt_m <- dictReadLastFormat dict
case fmt_m of
Just fmt ->
return . Just . WEN_DUB $ \iter_dub -> return $ do
Iter.seek (8 + fromIntegral offset)
let iter = Iter.convStream (convFunc fmt) iter_dub
joinI . joinI . Iter.take count $ iter
Nothing -> do
throwErr . iterStrExc $ "No valid format for data chunk at: " ++ show offset
return Nothing
-- return the WaveFormat iteratee
readValue _dict offset WAVE_FMT count =
return . Just . WEN_BYTE $ \iter -> return $ do
Iter.seek (8 + fromIntegral offset)
Iter.joinI $ Iter.take count iter
-- for WAVE_OTHER, return Word8s and maybe the user can parse them
readValue _dict offset (WAVE_OTHER _str) count =
return . Just . WEN_BYTE $ \iter -> return $ do
Iter.seek (8 + fromIntegral offset)
Iter.joinI $ Iter.take count iter
-- |Convert Word8s to Doubles
convFunc :: AudioFormat -> Iteratee [Word8] IO [Double]
convFunc (AudioFormat _nc _sr 8) = fmap
((:[]) . normalize 8 . (fromIntegral :: Word8 -> Int8))
Iter.head
convFunc (AudioFormat _nc _sr 16) = fmap
((:[]) . normalize 16 . (fromIntegral :: Word16 -> Int16))
(endianRead2 LSB)
convFunc (AudioFormat _nc _sr 24) = fmap
((:[]) . normalize 24 . (fromIntegral :: Word32 -> Int32))
(endianRead3 LSB)
convFunc (AudioFormat _nc _sr 32) = fmap
((:[]) . normalize 32 . (fromIntegral :: Word32 -> Int32))
(endianRead4 LSB)
convFunc _ = error "unrecognized audio format in convFunc"
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe = either (const Nothing) Just
-- |An Iteratee to read a wave format chunk
sWaveFormat :: Iteratee [Word8] IO (Maybe AudioFormat)
sWaveFormat = do
f' <- endianRead2 LSB --data format, 1==PCM
nc <- endianRead2 LSB
sr <- endianRead4 LSB
Iter.drop 6
bd <- endianRead2 LSB
case f' == 1 of
True -> return . Just $ AudioFormat (fromIntegral nc)
(fromIntegral sr)
(fromIntegral bd)
False -> return Nothing
-- ---------------------
-- functions to assist with reading from the dictionary
-- |Read the first format chunk in the WAVE dictionary.
dictReadFirstFormat :: WAVEDict -> Iteratee [Word8] IO (Maybe AudioFormat)
dictReadFirstFormat dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just [] -> return Nothing
Just ((WAVEDE _ WAVE_FMT (WEN_BYTE enum)) : _xs) -> joinIM $ enum sWaveFormat
_ -> return Nothing
-- |Read the last format chunk from the WAVE dictionary. This is useful
-- when parsing all chunks in the dictionary.
dictReadLastFormat :: WAVEDict -> Iteratee [Word8] IO (Maybe AudioFormat)
dictReadLastFormat dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just [] -> return Nothing
Just xs -> let (WAVEDE _ WAVE_FMT (WEN_BYTE enum)) = last xs in
joinIM $ enum sWaveFormat
_ -> return Nothing
-- |Read the specified format chunk from the WAVE dictionary
dictReadFormat :: Int -> --Index in the format chunk list to read
WAVEDict -> --Dictionary
Iteratee [Word8] IO (Maybe AudioFormat)
dictReadFormat ix dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just xs -> let (WAVEDE _ WAVE_FMT (WEN_BYTE enum)) = (!!) xs ix in
joinIM $ enum sWaveFormat
_ -> return Nothing
-- |Read the first data chunk in the WAVE dictionary.
dictReadFirstData :: WAVEDict -> Iteratee [Word8] IO (Maybe [Double])
dictReadFirstData dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just [] -> return Nothing
Just ((WAVEDE _ WAVE_DATA (WEN_DUB enum)) : _xs) -> do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
-- |Read the last data chunk in the WAVE dictionary.
dictReadLastData :: WAVEDict -> Iteratee [Word8] IO (Maybe [Double])
dictReadLastData dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just [] -> return Nothing
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = last xs in do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
-- |Read the specified data chunk from the WAVE dictionary.
dictReadData :: Int -> --Index in the data chunk list to read
WAVEDict -> --Dictionary
Iteratee [Word8] IO (Maybe [Double])
dictReadData ix dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = (!!) xs ix in do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
-- |Read the specified data chunk from the dictionary, applying the
-- data to the specified Iteratee.
dictProcessData :: Int -> -- Index in the data chunk list to read
WAVEDict -> -- Dictionary
Iteratee [Double] IO a ->
Iteratee [Word8] IO (Maybe a)
dictProcessData ix dict iter = case IM.lookup (fromEnum WAVE_DATA) dict of
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = (!!) xs ix in do
e <- joinIM $ enum iter
return $ Just e
_ -> return Nothing
-- ---------------------
-- convenience functions
-- |Convert (Maybe []) to []. Nothing maps to an empty list.
joinM :: Maybe [a] -> [a]
joinM Nothing = []
joinM (Just a) = a
-- |Normalize a given value for the provided bit depth.
normalize :: Integral a => BitDepth -> a -> Double
normalize 8 a = (fromIntegral a - 128) / 128
normalize bd a = case (a > 0) of
True -> fromIntegral a / divPos
False -> fromIntegral a / divNeg
where
divPos = fromIntegral (1 `shiftL` fromIntegral (bd - 1) :: Int) - 1
divNeg = fromIntegral (1 `shiftL` fromIntegral (bd - 1) :: Int)
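-- | A hedged, illustrative value (not part of the original module): a
-- full-scale positive 16-bit sample normalizes to 1.0, and the most negative
-- 16-bit sample to -1.0.
normalizeExample :: (Double, Double)
normalizeExample = (normalize 16 (32767 :: Int16), normalize 16 (-32768 :: Int16))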
| iteloo/tsuru-sample | iteratee-0.8.9.6/Examples/Wave.hs | bsd-3-clause | 11,330 | 0 | 24 | 2,634 | 3,452 | 1,768 | 1,684 | 237 | 3 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snap.Util.GZip.Tests
( tests ) where
------------------------------------------------------------------------------
import qualified Codec.Compression.GZip as GZip
import qualified Codec.Compression.Zlib as Zlib
import Control.Monad (replicateM)
import Data.Bits ((.&.))
import qualified Data.ByteString as B
import Data.ByteString.Builder (byteString)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import Data.CaseInsensitive (CI)
import Data.Word (Word8)
import Snap.Core (Request, Response, Snap, getHeader, modifyResponse, runSnap, setContentType, setHeader, setResponseBody, writeBS)
import qualified Snap.Test as Test
import Snap.Test.Common (coverTypeableInstance, expectException, expectExceptionH, liftQ)
import Snap.Util.GZip (BadAcceptEncodingException, noCompression, withCompression)
import qualified System.IO.Streams as Streams
import System.Random (Random (randomIO))
import Test.Framework (Test)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.HUnit (assertEqual)
import qualified Test.HUnit as H
import Test.QuickCheck (Arbitrary (arbitrary))
import Test.QuickCheck.Monadic (PropertyM, assert, forAllM, monadicIO)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
------------------------------------------------------------------------------
------------------------------------------------------------------------------
tests :: [Test]
tests = [ testIdentity1
, testIdentity1_charset
, testIdentity2
, testIdentity3
, testIdentity4
, testIdentity5
, testNoHeaders
, testNoAcceptEncoding
, testAcceptEncodingBad
, testNopWhenContentEncodingSet
, testCompositionDoesn'tExplode
, testGzipLotsaChunks
, testNoCompression
, testBadHeaders
, testTrivials
]
------------------------------------------------------------------------------
gzipHdrs, xGzipHdrs, xGzipHdrs2, badHdrs, deflateHdrs, xDeflateHdrs
:: (CI ByteString, ByteString)
gzipHdrs = ("Accept-Encoding", "deflate,froz,gzip,glorble, x-gzip" )
xGzipHdrs = ("Accept-Encoding", "x-gzip;q=1.0" )
xGzipHdrs2 = ("Accept-Encoding", "x-gzip;q=1" )
badHdrs = ("Accept-Encoding", "*&%^&^$%&%&*^\023" )
deflateHdrs = ("Accept-Encoding", "deflate" )
xDeflateHdrs = ("Accept-Encoding", "x-deflate" )
------------------------------------------------------------------------------
mkNoHeaders :: IO Request
mkNoHeaders = Test.buildRequest $ return ()
------------------------------------------------------------------------------
mkGzipRq :: IO Request
mkGzipRq = Test.buildRequest $ uncurry Test.setHeader gzipHdrs
------------------------------------------------------------------------------
mkXGzipRq :: IO Request
mkXGzipRq = Test.buildRequest $ uncurry Test.setHeader xGzipHdrs
------------------------------------------------------------------------------
mkXGzip2Rq :: IO Request
mkXGzip2Rq = Test.buildRequest $ uncurry Test.setHeader xGzipHdrs2
------------------------------------------------------------------------------
mkDeflateRq :: IO Request
mkDeflateRq = Test.buildRequest $ uncurry Test.setHeader deflateHdrs
------------------------------------------------------------------------------
mkXDeflateRq :: IO Request
mkXDeflateRq = Test.buildRequest $ uncurry Test.setHeader xDeflateHdrs
------------------------------------------------------------------------------
mkBadRq :: IO Request
mkBadRq = Test.buildRequest $ uncurry Test.setHeader badHdrs
------------------------------------------------------------------------------
seqSnap :: Snap a -> Snap a
seqSnap m = do
!x <- m
return $! x `seq` x
------------------------------------------------------------------------------
goGeneric :: IO Request -> Snap a -> IO (Request, Response)
goGeneric mkRq m = do
rq <- mkRq
runSnap (seqSnap m) d d rq
where
d = (const $ return ())
goGZip, goDeflate, goXGZip, goXGZip2 :: Snap a -> IO (Request,Response)
goNoHeaders, goXDeflate, goBad :: Snap a -> IO (Request,Response)
goGZip = goGeneric mkGzipRq
goDeflate = goGeneric mkDeflateRq
goXGZip = goGeneric mkXGzipRq
goXGZip2 = goGeneric mkXGzip2Rq
goXDeflate = goGeneric mkXDeflateRq
goBad = goGeneric mkBadRq
goNoHeaders = goGeneric mkNoHeaders
------------------------------------------------------------------------------
noContentType :: L.ByteString -> Snap ()
noContentType body = modifyResponse $ setResponseBody e
where
e s = do
Streams.writeList (map byteString $ L.toChunks body) s
return s
------------------------------------------------------------------------------
withContentType :: ByteString -> L.ByteString -> Snap ()
withContentType ct body = modifyResponse $
setResponseBody e . setContentType ct
where
e s = do
Streams.writeList (map byteString $ L.toChunks body) s
return s
------------------------------------------------------------------------------
textPlain :: L.ByteString -> Snap ()
textPlain = withContentType "text/plain"
------------------------------------------------------------------------------
binary :: L.ByteString -> Snap ()
binary = withContentType "application/octet-stream"
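-- A hedged usage sketch (not part of the original tests): running a
-- compressed handler against a request that accepts gzip, e.g.
--
-- > (_, rsp) <- goGZip (withCompression $ textPlain "hello, world")
-- > getHeader "Content-Encoding" rsp   -- Just "gzip"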
------------------------------------------------------------------------------
testNoHeaders :: Test
testNoHeaders = testProperty "gzip/noheaders" $
monadicIO $
forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
-- if there's no content-type, withCompression should be a no-op
(!_,!rsp) <- goNoHeaders (seqSnap $ withCompression
$ noContentType s)
assertEqual "" Nothing $ getHeader "Content-Encoding" rsp
assertEqual "" Nothing $ getHeader "Vary" rsp
body <- Test.getResponseBody rsp
assertEqual "" s $ L.fromChunks [body]
------------------------------------------------------------------------------
testNoAcceptEncoding :: Test
testNoAcceptEncoding = testProperty "gzip/noAcceptEncoding" $
monadicIO $
forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
-- if there's no accept-encoding, withCompression should be a no-op
(!_,!rsp) <- goNoHeaders (seqSnap $ withCompression
$ textPlain s)
assertEqual "" Nothing $ getHeader "Content-Encoding" rsp
assertEqual "" Nothing $ getHeader "Vary" rsp
body <- Test.getResponseBody rsp
assertEqual "" s $ L.fromChunks [body]
------------------------------------------------------------------------------
testAcceptEncodingBad :: Test
testAcceptEncodingBad = testCase "gzip/acceptEncodingBad" $ do
expectExceptionH $ Test.runHandler (Test.setHeader "Accept-Encoding" "$")
snap
expectExceptionH $ Test.runHandler
(Test.setHeader "Accept-Encoding" "gzip;q=^") snap
where
snap = withCompression $ do
modifyResponse $ setHeader "Content-Type" "text/plain"
writeBS "ok"
------------------------------------------------------------------------------
testIdentity1 :: Test
testIdentity1 = testProperty "gzip/identity1" $ monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goGZip (seqSnap $ withCompression $ textPlain s)
assertEqual "" (Just "gzip") $ getHeader "Content-Encoding" rsp
assertEqual "" (Just "Accept-Encoding") $ getHeader "Vary" rsp
body <- Test.getResponseBody rsp
let s1 = GZip.decompress $ L.fromChunks [body]
assertEqual "" s s1
------------------------------------------------------------------------------
testIdentity1_charset :: Test
testIdentity1_charset = testProperty "gzip/identity1_charset" $
monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goGZip (seqSnap $ withCompression $
withContentType "text/plain; charset=utf-8" s)
assertEqual "" (Just "gzip") $ getHeader "Content-Encoding" rsp
assertEqual "" (Just "Accept-Encoding") $ getHeader "Vary" rsp
body <- Test.getResponseBody rsp
let s1 = GZip.decompress $ L.fromChunks [body]
assertEqual "" s s1
------------------------------------------------------------------------------
testIdentity2 :: Test
testIdentity2 = testProperty "gzip/identity2" $ monadicIO $
forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goDeflate (seqSnap $ withCompression $ textPlain s)
assertEqual "" (Just "deflate") $ getHeader "Content-Encoding" rsp
assertEqual "" (Just "Accept-Encoding") $ getHeader "Vary" rsp
body <- Test.getResponseBody rsp
let s' = Zlib.decompress $ L.fromChunks [body]
assertEqual "" s s'
------------------------------------------------------------------------------
testIdentity3 :: Test
testIdentity3 = testProperty "gzip/identity3" $ monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goGZip (seqSnap $ withCompression $ binary s)
body <- Test.getResponseBody rsp
assertEqual "identify" s $ L.fromChunks [body]
------------------------------------------------------------------------------
testIdentity4 :: Test
testIdentity4 = testProperty "gzip/identity4" $ monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goXGZip (seqSnap $ withCompression $ textPlain s)
assertEqual "encoding" (Just "x-gzip") (getHeader "Content-Encoding" rsp)
body <- Test.getResponseBody rsp
let s1 = GZip.decompress $ L.fromChunks [body]
assertEqual "identity" s s1
(!_,!rsp2) <- goXGZip2 (seqSnap $ withCompression $ textPlain s)
assertEqual "encoding" (Just "x-gzip") (getHeader "Content-Encoding" rsp2)
body2 <- Test.getResponseBody rsp2
let s2 = GZip.decompress $ L.fromChunks [body2]
assertEqual "identity2" s s2
------------------------------------------------------------------------------
testIdentity5 :: Test
testIdentity5 = testProperty "gzip/identity5" $ monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goXDeflate (seqSnap $ withCompression $ textPlain s)
assertEqual "" (Just "x-deflate") $ getHeader "Content-Encoding" rsp
body <- Test.getResponseBody rsp
let s2 = Zlib.decompress $ L.fromChunks [body]
assertEqual "gzip" s s2
------------------------------------------------------------------------------
testBadHeaders :: Test
testBadHeaders = testProperty "gzip/bad headers" $ monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = expectException $ do
(!_,!rsp) <- goBad (seqSnap $ withCompression $ textPlain s)
Test.getResponseBody rsp
------------------------------------------------------------------------------
testNopWhenContentEncodingSet :: Test
testNopWhenContentEncodingSet =
testProperty "gzip/testNopWhenContentEncodingSet" $
monadicIO $
forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = do
(!_,!rsp) <- liftQ $ goGZip $ f s
assert $ getHeader "Content-Encoding" rsp == Just "identity"
f !s = seqSnap $ withCompression $ do
modifyResponse $ setHeader "Content-Encoding" "identity"
textPlain s
------------------------------------------------------------------------------
testCompositionDoesn'tExplode :: Test
testCompositionDoesn'tExplode =
testProperty "gzip/testCompositionDoesn'tExplode" $
monadicIO $
forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goGZip (seqSnap $
withCompression $
withCompression $
withCompression $ textPlain s)
assertEqual "" (Just "gzip") $ getHeader "Content-Encoding" rsp
c <- Test.getResponseBody rsp
let s1 = GZip.decompress $ L.fromChunks [c]
assertEqual "composition" s s1
------------------------------------------------------------------------------
testGzipLotsaChunks :: Test
testGzipLotsaChunks = testCase "gzip/lotsOfChunks" prop
where
prop = do
a <- genRandom 12000
let s = L.take 120000 $ L.cycle $ L.fromChunks [a, B.reverse a]
(!_,!rsp) <- goGZip (seqSnap $ withCompression $ textPlain s)
body <- Test.getResponseBody rsp
let s1 = GZip.decompress $ L.fromChunks [body]
H.assertEqual "streams equal" s s1
genRandom n = B.pack <$> replicateM n randomWord8
t8 c = toEnum $ c .&. 0xff
randomWord8 :: IO Word8
randomWord8 = t8 <$> randomIO
------------------------------------------------------------------------------
testNoCompression :: Test
testNoCompression = testProperty "gzip/noCompression" $
monadicIO $ forAllM arbitrary prop
where
prop :: L.ByteString -> PropertyM IO ()
prop s = liftQ $ do
(!_,!rsp) <- goGZip (seqSnap $ withCompression $
(noCompression >> textPlain s))
assertEqual "" (Just "identity") $ getHeader "Content-Encoding" rsp
body <- Test.getResponseBody rsp
assertEqual "body matches" (S.concat $ L.toChunks s) body
------------------------------------------------------------------------------
testTrivials :: Test
testTrivials = testCase "gzip/trivials" $ do
coverTypeableInstance (undefined :: BadAcceptEncodingException)
| sopvop/snap-core | test/Snap/Util/GZip/Tests.hs | bsd-3-clause | 15,159 | 0 | 16 | 3,674 | 3,410 | 1,731 | 1,679 | 258 | 1 |
-- | Internal pretty-printing helpers for Nix expressions.
module Internal.PrettyPrinting
( onlyIf
, setattr, toAscList
, listattr
, boolattr
, attr
, string
, funargs
-- * Re-exports from other modules
, module Text.PrettyPrint.HughesPJClass
, Text, disp
)
where
import Data.Char
import Data.Function
import Data.List
import Data.Set ( Set )
import qualified Data.Set as Set
import Distribution.Text ( Text, disp )
import Text.PrettyPrint.HughesPJClass
attr :: String -> Doc -> Doc
attr n v = text n <+> equals <+> v <> semi
onlyIf :: Bool -> Doc -> Doc
onlyIf b d = if b then d else empty
boolattr :: String -> Bool -> Bool -> Doc
boolattr n p v = if p then attr n (bool v) else empty
listattr :: String -> Doc -> [String] -> Doc
listattr n prefix vs = onlyIf (not (null vs)) $
sep [ text n <+> equals <+> prefix <+> lbrack,
nest 2 $ fsep $ map text vs,
rbrack <> semi
]
setattr :: String -> Doc -> Set String -> Doc
setattr name prefix set = listattr name prefix (toAscList set)
toAscList :: Set String -> [String]
toAscList = sortBy (compare `on` map toLower) . Set.toList
bool :: Bool -> Doc
bool True = text "true"
bool False = text "false"
string :: String -> Doc
string = doubleQuotes . text
prepunctuate :: Doc -> [Doc] -> [Doc]
prepunctuate _ [] = []
prepunctuate p (d:ds) = d : map (p <>) ds
funargs :: [Doc] -> Doc
funargs xs = sep [
lbrace <+> fcat (prepunctuate (comma <> text " ") $ map (nest 2) xs),
rbrace <> colon
]
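-- | A hedged, illustrative value (not part of the original module): a single
-- attribute binding; @render exampleBinding@ should yield @pname = "foo";@.
exampleBinding :: Doc
exampleBinding = attr "pname" (string "foo")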
| psibi/cabal2nix | src/Internal/PrettyPrinting.hs | bsd-3-clause | 1,598 | 0 | 14 | 437 | 583 | 314 | 269 | 44 | 2 |
{-# LANGUAGE CPP
, GADTs
, DataKinds
, KindSignatures
, MultiParamTypeClasses
, FunctionalDependencies
, ScopedTypeVariables
, FlexibleContexts
, Rank2Types
, TypeSynonymInstances
, FlexibleInstances
#-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- 2016.04.28
-- |
-- Module : Language.Hakaru.Evaluation.Lazy
-- Copyright : Copyright (c) 2016 the Hakaru team
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC-only
--
-- Lazy partial evaluation.
--
-- BUG: completely gave up on structure sharing. Need to add that
-- back in. cf., @gvidal-lopstr07lncs.pdf@ for an approach much
-- like my old one.
----------------------------------------------------------------
module Language.Hakaru.Evaluation.Lazy
( evaluate
-- ** Helper functions
, evaluateNaryOp
, evaluatePrimOp
, evaluateArrayOp
-- ** Helpers that should really go away
, Interp(..), reifyPair
) where
import Prelude hiding (id, (.))
import Control.Category (Category(..))
#if __GLASGOW_HASKELL__ < 710
import Data.Functor ((<$>))
#endif
import Control.Monad ((<=<))
import Control.Monad.Identity (Identity, runIdentity)
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Text as Text
import Language.Hakaru.Syntax.IClasses
import Data.Number.Nat
import Data.Number.Natural
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.Sing
import Language.Hakaru.Types.Coercion
import Language.Hakaru.Types.HClasses
import Language.Hakaru.Syntax.TypeOf
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Syntax.Datum
import Language.Hakaru.Syntax.DatumCase (DatumEvaluator, MatchResult(..), matchBranches, MatchState(..), matchTopPattern)
import Language.Hakaru.Syntax.ABT
import Language.Hakaru.Evaluation.Types
import qualified Language.Hakaru.Syntax.Prelude as P
{-
-- BUG: can't import this because of cyclic dependency
import qualified Language.Hakaru.Expect as E
-}
#ifdef __TRACE_DISINTEGRATE__
import Language.Hakaru.Pretty.Haskell (pretty)
import Debug.Trace (trace)
#endif
----------------------------------------------------------------
----------------------------------------------------------------
-- TODO: (eventually) accept an argument dictating the evaluation
-- strategy (HNF, WHNF, full-beta NF,...). The strategy value should
-- probably be a family of singletons, where the type-level strategy
-- @s@ is also an index on the 'Context' and (the renamed) 'Whnf'.
-- That way we don't need to define a bunch of variant 'Context',
-- 'Statement', and 'Whnf' data types; but rather can use indexing
-- to select out subtypes of the generic versions.
-- | Lazy partial evaluation with some given \"perform\" and
-- \"evaluateCase\" functions. N.B., if @p ~ 'Pure@ then the
-- \"perform\" function will never be called.
evaluate
:: forall abt m p
. (ABT Term abt, EvaluationMonad abt m p)
=> MeasureEvaluator abt m
-> TermEvaluator abt m
{-# INLINE evaluate #-}
evaluate perform = evaluate_
where
evaluateCase_ :: CaseEvaluator abt m
evaluateCase_ = evaluateCase evaluate_
evaluate_ :: TermEvaluator abt m
evaluate_ e0 =
#ifdef __TRACE_DISINTEGRATE__
trace ("-- evaluate_: " ++ show (pretty e0)) $
#endif
caseVarSyn e0 (evaluateVar perform evaluate_) $ \t ->
case t of
-- Things which are already WHNFs
Literal_ v -> return . Head_ $ WLiteral v
Datum_ d -> return . Head_ $ WDatum d
Empty_ typ -> return . Head_ $ WEmpty typ
Array_ e1 e2 -> return . Head_ $ WArray e1 e2
ArrayLiteral_ es -> return . Head_ $ WArrayLiteral es
Lam_ :$ e1 :* End -> return . Head_ $ WLam e1
Dirac :$ e1 :* End -> return . Head_ $ WDirac e1
MBind :$ e1 :* e2 :* End -> return . Head_ $ WMBind e1 e2
Plate :$ e1 :* e2 :* End -> return . Head_ $ WPlate e1 e2
MeasureOp_ o :$ es -> return . Head_ $ WMeasureOp o es
Superpose_ pes -> return . Head_ $ WSuperpose pes
Reject_ typ -> return . Head_ $ WReject typ
-- We don't bother evaluating these, even though we could...
Integrate :$ e1 :* e2 :* e3 :* End ->
return . Head_ $ WIntegrate e1 e2 e3
Summate h1 h2 :$ e1 :* e2 :* e3 :* End ->
return . Neutral $ syn t
--return . Head_ $ WSummate e1 e2 e3
-- Everything else needs some evaluation
App_ :$ e1 :* e2 :* End -> do
w1 <- evaluate_ e1
case w1 of
Neutral e1' -> return . Neutral $ P.app e1' e2
Head_ v1 -> evaluateApp v1
where
evaluateApp (WLam f) =
-- call-by-name:
caseBind f $ \x f' -> do
i <- getIndices
push (SLet x (Thunk e2) i) f' >>= evaluate_
evaluateApp _ = error "evaluate{App_}: the impossible happened"
Let_ :$ e1 :* e2 :* End -> do
i <- getIndices
caseBind e2 $ \x e2' ->
push (SLet x (Thunk e1) i) e2' >>= evaluate_
CoerceTo_ c :$ e1 :* End -> coerceTo c <$> evaluate_ e1
UnsafeFrom_ c :$ e1 :* End -> coerceFrom c <$> evaluate_ e1
-- TODO: will maybe clean up the code to map 'evaluate' over @es@ before calling the evaluateFooOp helpers?
NaryOp_ o es -> evaluateNaryOp evaluate_ o es
ArrayOp_ o :$ es -> evaluateArrayOp evaluate_ o es
PrimOp_ o :$ es -> evaluatePrimOp evaluate_ o es
-- BUG: avoid the chance of looping in case 'E.expect' residualizes!
-- TODO: use 'evaluate' in 'E.expect' for the evaluation of @e1@
Expect :$ e1 :* e2 :* End ->
error "TODO: evaluate{Expect}: unclear how to handle this without cyclic dependencies"
{-
-- BUG: can't call E.expect because of cyclic dependency
evaluate_ . E.expect e1 $ \e3 ->
syn (Let_ :$ e3 :* e2 :* End)
-}
Case_ e bs -> evaluateCase_ e bs
_ :$ _ -> error "evaluate: the impossible happened"
----------------------------------------------------------------
-- BUG: need to improve the types so they can capture polymorphic data types
-- BUG: this is a **really gross** hack. If we can avoid it, we should!!!
class Interp a a' | a -> a' where
reify :: (ABT Term abt) => Head abt a -> a'
reflect :: (ABT Term abt) => a' -> Head abt a
instance Interp 'HNat Natural where
reflect = WLiteral . LNat
reify (WLiteral (LNat n)) = n
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
instance Interp 'HInt Integer where
reflect = WLiteral . LInt
reify (WLiteral (LInt i)) = i
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
instance Interp 'HProb NonNegativeRational where
reflect = WLiteral . LProb
reify (WLiteral (LProb p)) = p
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
reify (WIntegrate _ _ _) = error "TODO: reify{WIntegrate}"
--reify (WSummate _ _ _) = error "TODO: reify{WSummate}"
instance Interp 'HReal Rational where
reflect = WLiteral . LReal
reify (WLiteral (LReal r)) = r
reify (WCoerceTo _ _) = error "TODO: reify{WCoerceTo}"
reify (WUnsafeFrom _ _) = error "TODO: reify{WUnsafeFrom}"
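-- Hedged illustration (not in the original source): on literal heads, 'reify'
-- and 'reflect' invert one another, e.g.
--
-- > reflect (3 :: Natural)     -- == WLiteral (LNat 3)
-- > reify (WLiteral (LNat 3))  -- == (3 :: Natural)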
identifyDatum :: (ABT Term abt) => DatumEvaluator (abt '[]) Identity
identifyDatum = return . (viewWhnfDatum <=< toWhnf)
-- HACK: this requires -XTypeSynonymInstances and -XFlexibleInstances
-- This instance does seem to work; albeit it's trivial...
instance Interp HUnit () where
reflect () = WDatum dUnit
reify v = runIdentity $ do
match <- matchTopPattern identifyDatum (fromHead v) pUnit Nil1
case match of
Just (Matched_ _ss Nil1) -> return ()
_ -> error "reify{HUnit}: the impossible happened"
-- HACK: this requires -XTypeSynonymInstances and -XFlexibleInstances
-- This instance also seems to work...
instance Interp HBool Bool where
reflect = WDatum . (\b -> if b then dTrue else dFalse)
reify v = runIdentity $ do
matchT <- matchTopPattern identifyDatum (fromHead v) pTrue Nil1
case matchT of
Just (Matched_ _ss Nil1) -> return True
Just GotStuck_ -> error "reify{HBool}: the impossible happened"
Nothing -> do
matchF <- matchTopPattern identifyDatum (fromHead v) pFalse Nil1
case matchF of
Just (Matched_ _ss Nil1) -> return False
_ -> error "reify{HBool}: the impossible happened"
-- TODO: can't we just use 'viewHeadDatum' and match on that?
reifyPair
:: (ABT Term abt) => Head abt (HPair a b) -> (abt '[] a, abt '[] b)
reifyPair v =
let impossible = error "reifyPair: the impossible happened"
e0 = fromHead v
n = nextFree e0
(a,b) = sUnPair $ typeOf e0
x = Variable Text.empty n a
y = Variable Text.empty (1 + n) b
in runIdentity $ do
match <- matchTopPattern identifyDatum e0 (pPair PVar PVar) (Cons1 x (Cons1 y Nil1))
case match of
Just (Matched_ ss Nil1) ->
case ss [] of
[Assoc x' e1, Assoc y' e2] ->
maybe impossible id $ do
Refl <- varEq x x'
Refl <- varEq y y'
Just $ return (e1, e2)
_ -> impossible
_ -> impossible
{-
instance Interp (HPair a b) (abt '[] a, abt '[] b) where
reflect (a,b) = P.pair a b
reify = reifyPair
instance Interp (HEither a b) (Either (abt '[] a) (abt '[] b)) where
reflect (Left a) = P.left a
reflect (Right b) = P.right b
reify =
instance Interp (HMaybe a) (Maybe (abt '[] a)) where
reflect Nothing = P.nothing
reflect (Just a) = P.just a
reify =
data ListHead (a :: Hakaru)
= NilHead
| ConsHead (abt '[] a) (abt '[] (HList a)) -- modulo scoping of @abt@
instance Interp (HList a) (ListHead a) where
reflect [] = P.nil
reflect (x:xs) = P.cons x xs
reify =
-}
impl, diff, nand, nor :: Bool -> Bool -> Bool
impl x y = not x || y
diff x y = x && not y
nand x y = not (x && y)
nor x y = not (x || y)
-- BUG: no Floating instance for LogFloat (nor NonNegativeRational), so can't actually use this...
natRoot :: (Floating a) => a -> Nat -> a
natRoot x y = x ** recip (fromIntegral (fromNat y))
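-- Hedged illustration (not in the original source): at type 'Double',
-- @natRoot 8 n@ with @n@ the natural number 3 computes @8 ** (1/3)@, i.e.
-- approximately 2.0.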
----------------------------------------------------------------
evaluateNaryOp
:: (ABT Term abt, EvaluationMonad abt m p)
=> TermEvaluator abt m
-> NaryOp a
-> Seq (abt '[] a)
-> m (Whnf abt a)
evaluateNaryOp evaluate_ = \o es -> mainLoop o (evalOp o) Seq.empty es
where
-- TODO: there's got to be a more efficient way to do this...
mainLoop o op ws es =
case Seq.viewl es of
Seq.EmptyL -> return $
case Seq.viewl ws of
Seq.EmptyL -> identityElement o -- Avoid empty naryOps
w Seq.:< ws'
| Seq.null ws' -> w -- Avoid singleton naryOps
| otherwise ->
Neutral . syn . NaryOp_ o $ fmap fromWhnf ws
e Seq.:< es' -> do
w <- evaluate_ e
case matchNaryOp o w of
Nothing -> mainLoop o op (snocLoop op ws w) es'
Just es2 -> mainLoop o op ws (es2 Seq.>< es')
snocLoop
:: (ABT syn abt)
=> (Head abt a -> Head abt a -> Head abt a)
-> Seq (Whnf abt a)
-> Whnf abt a
-> Seq (Whnf abt a)
snocLoop op ws w1 =
-- TODO: immediately return @ws@ if @w1 == identityElement o@ (whenever identityElement is defined)
case Seq.viewr ws of
Seq.EmptyR -> Seq.singleton w1
ws' Seq.:> w2 ->
case (w1,w2) of
(Head_ v1, Head_ v2) -> snocLoop op ws' (Head_ (op v1 v2))
_ -> ws Seq.|> w1
matchNaryOp
:: (ABT Term abt)
=> NaryOp a
-> Whnf abt a
-> Maybe (Seq (abt '[] a))
matchNaryOp o w =
case w of
Head_ _ -> Nothing
Neutral e ->
caseVarSyn e (const Nothing) $ \t ->
case t of
NaryOp_ o' es | o' == o -> Just es
_ -> Nothing
-- TODO: move this off to Prelude.hs or somewhere...
identityElement :: (ABT Term abt) => NaryOp a -> Whnf abt a
identityElement o =
case o of
And -> Head_ (WDatum dTrue)
Or -> Head_ (WDatum dFalse)
Xor -> Head_ (WDatum dFalse)
Iff -> Head_ (WDatum dTrue)
Min _ -> Neutral (syn (NaryOp_ o Seq.empty)) -- no identity in general (but we could do it by cases...)
Max _ -> Neutral (syn (NaryOp_ o Seq.empty)) -- no identity in general (but we could do it by cases...)
    -- TODO: figure out how to reuse 'P.zero_' and 'P.one_' here; requires converting the @(syn . Literal_)@ into @(Head_ . WLiteral)@. Maybe we should change 'P.zero_' and 'P.one_' so they just return the 'Literal' itself rather than the @abt@?
Sum HSemiring_Nat -> Head_ (WLiteral (LNat 0))
Sum HSemiring_Int -> Head_ (WLiteral (LInt 0))
Sum HSemiring_Prob -> Head_ (WLiteral (LProb 0))
Sum HSemiring_Real -> Head_ (WLiteral (LReal 0))
Prod HSemiring_Nat -> Head_ (WLiteral (LNat 1))
Prod HSemiring_Int -> Head_ (WLiteral (LInt 1))
Prod HSemiring_Prob -> Head_ (WLiteral (LProb 1))
Prod HSemiring_Real -> Head_ (WLiteral (LReal 1))
-- | The evaluation interpretation of each NaryOp
evalOp
:: (ABT Term abt)
=> NaryOp a
-> Head abt a
-> Head abt a
-> Head abt a
-- TODO: something more efficient\/direct if we can...
evalOp And = \v1 v2 -> reflect (reify v1 && reify v2)
evalOp Or = \v1 v2 -> reflect (reify v1 || reify v2)
evalOp Xor = \v1 v2 -> reflect (reify v1 /= reify v2)
evalOp Iff = \v1 v2 -> reflect (reify v1 == reify v2)
evalOp (Min _) = error "TODO: evalOp{Min}"
evalOp (Max _) = error "TODO: evalOp{Max}"
{-
evalOp (Min _) = \v1 v2 -> reflect (reify v1 `min` reify v2)
evalOp (Max _) = \v1 v2 -> reflect (reify v1 `max` reify v2)
evalOp (Sum _) = \v1 v2 -> reflect (reify v1 + reify v2)
evalOp (Prod _) = \v1 v2 -> reflect (reify v1 * reify v2)
-}
-- HACK: this is just to have something to test. We really should reduce\/remove all this boilerplate...
evalOp (Sum theSemi) =
\(WLiteral v1) (WLiteral v2) -> WLiteral $ evalSum theSemi v1 v2
evalOp (Prod theSemi) =
\(WLiteral v1) (WLiteral v2) -> WLiteral $ evalProd theSemi v1 v2
-- TODO: even if only one of the arguments is a literal, if that literal is zero\/one, then we can still partially evaluate it. (As is done in the old finally-tagless code)
evalSum, evalProd :: HSemiring a -> Literal a -> Literal a -> Literal a
evalSum HSemiring_Nat = \(LNat n1) (LNat n2) -> LNat (n1 + n2)
evalSum HSemiring_Int = \(LInt i1) (LInt i2) -> LInt (i1 + i2)
evalSum HSemiring_Prob = \(LProb p1) (LProb p2) -> LProb (p1 + p2)
evalSum HSemiring_Real = \(LReal r1) (LReal r2) -> LReal (r1 + r2)
evalProd HSemiring_Nat = \(LNat n1) (LNat n2) -> LNat (n1 * n2)
evalProd HSemiring_Int = \(LInt i1) (LInt i2) -> LInt (i1 * i2)
evalProd HSemiring_Prob = \(LProb p1) (LProb p2) -> LProb (p1 * p2)
evalProd HSemiring_Real = \(LReal r1) (LReal r2) -> LReal (r1 * r2)
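-- Hedged illustration (not in the original source): these simply fold
-- literals, e.g.
--
-- > evalSum  HSemiring_Nat (LNat 2) (LNat 3)  -- == LNat 5
-- > evalProd HSemiring_Int (LInt 4) (LInt 5)  -- == LInt 20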
----------------------------------------------------------------
evaluateArrayOp
:: ( ABT Term abt, EvaluationMonad abt m p
, typs ~ UnLCs args, args ~ LCs typs)
=> TermEvaluator abt m
-> ArrayOp typs a
-> SArgs abt args
-> m (Whnf abt a)
evaluateArrayOp evaluate_ = go
where
go o@(Index _) = \(e1 :* e2 :* End) -> do
let -- idxCode :: abt '[] ('HArray a) -> abt '[] 'HNat -> abt '[] a
-- idxCode a i = Neutral $ syn (ArrayOp_ o :$ a :* i :* End)
w1 <- evaluate_ e1
case w1 of
Neutral e1' ->
return . Neutral $ syn (ArrayOp_ o :$ e1' :* e2 :* End)
Head_ (WArray _ b) ->
caseBind b $ \x body -> extSubst x e2 body >>= evaluate_
Head_ (WEmpty _) ->
error "TODO: evaluateArrayOp{Index}{Head_ (WEmpty _)}"
Head_ (WArrayLiteral arr) ->
do w2 <- evaluate_ e2
case w2 of
Head_ (WLiteral (LNat n)) -> return . Neutral $
arr !! fromInteger (fromNatural n)
_ -> return . Neutral $
syn (ArrayOp_ o :$ fromWhnf w1 :* fromWhnf w2 :* End)
_ -> error "evaluateArrayOp{Index}: uknown whnf of array type"
go o@(Size _) = \(e1 :* End) -> do
w1 <- evaluate_ e1
case w1 of
Neutral e1' -> return . Neutral $ syn (ArrayOp_ o :$ e1' :* End)
Head_ (WEmpty _) -> return . Head_ $ WLiteral (LNat 0)
Head_ (WArray e2 _) -> evaluate_ e2
Head_ (WArrayLiteral es) -> return . Head_ . WLiteral .
primCoerceFrom (Signed HRing_Int) .
LInt . toInteger $ length es
go (Reduce _) = \(e1 :* e2 :* e3 :* End) ->
error "TODO: evaluateArrayOp{Reduce}"
----------------------------------------------------------------
-- TODO: maybe we should adjust 'Whnf' to have a third option for
-- closed terms of the atomic\/literal types, so that we can avoid
-- reducing them just yet. Of course, we'll have to reduce them
-- eventually, but we can leave that for the runtime evaluation or
-- Maple or whatever. These are called \"annotated\" terms in Fischer
-- et al 2008 (though they allow anything to be annotated, not just
-- closed terms of atomic type).
evaluatePrimOp
:: forall abt m p typs args a
. ( ABT Term abt, EvaluationMonad abt m p
, typs ~ UnLCs args, args ~ LCs typs)
=> TermEvaluator abt m
-> PrimOp typs a
-> SArgs abt args
-> m (Whnf abt a)
evaluatePrimOp evaluate_ = go
where
    -- HACK: we have no way of saying these results simply weren't reduced further, so we call them 'Neutral' even though they aren't actually neutral terms.
neu1 :: forall b c
. (abt '[] b -> abt '[] c)
-> abt '[] b
-> m (Whnf abt c)
neu1 f e = (Neutral . f . fromWhnf) <$> evaluate_ e
neu2 :: forall b c d
. (abt '[] b -> abt '[] c -> abt '[] d)
-> abt '[] b
-> abt '[] c
-> m (Whnf abt d)
neu2 f e1 e2 = do e1' <- fromWhnf <$> evaluate_ e1
e2' <- fromWhnf <$> evaluate_ e2
return . Neutral $ f e1' e2'
rr1 :: forall b b' c c'
. (Interp b b', Interp c c')
=> (b' -> c')
-> (abt '[] b -> abt '[] c)
-> abt '[] b
-> m (Whnf abt c)
rr1 f' f e = do
w <- evaluate_ e
return $
case w of
Neutral e' -> Neutral $ f e'
Head_ v -> Head_ . reflect $ f' (reify v)
rr2 :: forall b b' c c' d d'
. (Interp b b', Interp c c', Interp d d')
=> (b' -> c' -> d')
-> (abt '[] b -> abt '[] c -> abt '[] d)
-> abt '[] b
-> abt '[] c
-> m (Whnf abt d)
rr2 f' f e1 e2 = do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
return $
case w1 of
Neutral e1' -> Neutral $ f e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' -> Neutral $ f (fromWhnf w1) e2'
Head_ v2 -> Head_ . reflect $ f' (reify v1) (reify v2)
primOp2_
:: forall b c d
. PrimOp '[ b, c ] d -> abt '[] b -> abt '[] c -> abt '[] d
primOp2_ o e1 e2 = syn (PrimOp_ o :$ e1 :* e2 :* End)
-- TODO: something more efficient\/direct if we can...
go Not (e1 :* End) = rr1 not P.not e1
go Impl (e1 :* e2 :* End) = rr2 impl (primOp2_ Impl) e1 e2
go Diff (e1 :* e2 :* End) = rr2 diff (primOp2_ Diff) e1 e2
go Nand (e1 :* e2 :* End) = rr2 nand P.nand e1 e2
go Nor (e1 :* e2 :* End) = rr2 nor P.nor e1 e2
-- HACK: we don't have a way of saying that 'Pi' (or 'Infinity',...) is in fact a head; so we're forced to call it neutral which is a lie. We should add constructor(s) to 'Head' to cover these magic constants; probably grouped together under a single constructor called something like @Constant@. Maybe should group them like that in the AST as well?
go Pi End = return $ Neutral P.pi
-- We treat trig functions as strict, thus forcing their
-- arguments; however, to avoid fuzz issues we don't actually
-- evaluate the trig functions.
--
    -- HACK: we should probably have some other way to make these
-- 'Whnf' rather than calling them neutral terms; since they
-- aren't, in fact, neutral!
go Sin (e1 :* End) = neu1 P.sin e1
go Cos (e1 :* End) = neu1 P.cos e1
go Tan (e1 :* End) = neu1 P.tan e1
go Asin (e1 :* End) = neu1 P.asin e1
go Acos (e1 :* End) = neu1 P.acos e1
go Atan (e1 :* End) = neu1 P.atan e1
go Sinh (e1 :* End) = neu1 P.sinh e1
go Cosh (e1 :* End) = neu1 P.cosh e1
go Tanh (e1 :* End) = neu1 P.tanh e1
go Asinh (e1 :* End) = neu1 P.asinh e1
go Acosh (e1 :* End) = neu1 P.acosh e1
go Atanh (e1 :* End) = neu1 P.atanh e1
-- TODO: deal with how we have better types for these three ops than Haskell does...
-- go RealPow (e1 :* e2 :* End) = rr2 (**) (P.**) e1 e2
go RealPow (e1 :* e2 :* End) = neu2 (P.**) e1 e2
-- HACK: these aren't actually neutral!
-- BUG: we should try to cancel out @(exp . log)@ and @(log . exp)@
go Exp (e1 :* End) = neu1 P.exp e1
go Log (e1 :* End) = neu1 P.log e1
-- HACK: these aren't actually neutral!
go (Infinity h) End =
case h of
HIntegrable_Nat -> return . Neutral $ P.primOp0_ (Infinity h)
HIntegrable_Prob -> return $ Neutral P.infinity
go GammaFunc (e1 :* End) = neu1 P.gammaFunc e1
go BetaFunc (e1 :* e2 :* End) = neu2 P.betaFunc e1 e2
go (Equal theEq) (e1 :* e2 :* End) = rrEqual theEq e1 e2
go (Less theOrd) (e1 :* e2 :* End) = rrLess theOrd e1 e2
go (NatPow theSemi) (e1 :* e2 :* End) =
case theSemi of
HSemiring_Nat -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Int -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Prob -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
HSemiring_Real -> rr2 (\v1 v2 -> v1 ^ fromNatural v2) (P.^) e1 e2
go (Negate theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 negate P.negate e1
HRing_Real -> rr1 negate P.negate e1
go (Abs theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 (unsafeNatural . abs) P.abs_ e1
HRing_Real -> rr1 (unsafeNonNegativeRational . abs) P.abs_ e1
go (Signum theRing) (e1 :* End) =
case theRing of
HRing_Int -> rr1 signum P.signum e1
HRing_Real -> rr1 signum P.signum e1
go (Recip theFractional) (e1 :* End) =
case theFractional of
HFractional_Prob -> rr1 recip P.recip e1
HFractional_Real -> rr1 recip P.recip e1
go (NatRoot theRadical) (e1 :* e2 :* End) =
case theRadical of
HRadical_Prob -> neu2 (flip P.thRootOf) e1 e2
{-
go (NatRoot theRadical) (e1 :* e2 :* End) =
case theRadical of
HRadical_Prob -> rr2 natRoot (flip P.thRootOf) e1 e2
go (Erf theContinuous) (e1 :* End) =
case theContinuous of
HContinuous_Prob -> rr1 erf P.erf e1
HContinuous_Real -> rr1 erf P.erf e1
-}
go op _ = error $ "TODO: evaluatePrimOp{" ++ show op ++ "}"
rrEqual
:: forall b. HEq b -> abt '[] b -> abt '[] b -> m (Whnf abt HBool)
rrEqual theEq =
case theEq of
HEq_Nat -> rr2 (==) (P.==)
HEq_Int -> rr2 (==) (P.==)
HEq_Prob -> rr2 (==) (P.==)
HEq_Real -> rr2 (==) (P.==)
HEq_Array aEq -> error "TODO: rrEqual{HEq_Array}"
HEq_Bool -> rr2 (==) (P.==)
HEq_Unit -> rr2 (==) (P.==)
HEq_Pair aEq bEq ->
\e1 e2 -> do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
case w1 of
Neutral e1' ->
return . Neutral
$ P.primOp2_ (Equal theEq) e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' ->
return . Neutral
$ P.primOp2_ (Equal theEq) (fromHead v1) e2'
Head_ v2 -> do
let (v1a, v1b) = reifyPair v1
let (v2a, v2b) = reifyPair v2
wa <- rrEqual aEq v1a v2a
wb <- rrEqual bEq v1b v2b
return $
case wa of
Neutral ea ->
case wb of
Neutral eb -> Neutral (ea P.&& eb)
Head_ vb
| reify vb -> wa
| otherwise -> Head_ $ WDatum dFalse
Head_ va
| reify va -> wb
| otherwise -> Head_ $ WDatum dFalse
HEq_Either aEq bEq -> error "TODO: rrEqual{HEq_Either}"
rrLess
:: forall b. HOrd b -> abt '[] b -> abt '[] b -> m (Whnf abt HBool)
rrLess theOrd =
case theOrd of
HOrd_Nat -> rr2 (<) (P.<)
HOrd_Int -> rr2 (<) (P.<)
HOrd_Prob -> rr2 (<) (P.<)
HOrd_Real -> rr2 (<) (P.<)
HOrd_Array aOrd -> error "TODO: rrLess{HOrd_Array}"
HOrd_Bool -> rr2 (<) (P.<)
HOrd_Unit -> rr2 (<) (P.<)
HOrd_Pair aOrd bOrd ->
\e1 e2 -> do
w1 <- evaluate_ e1
w2 <- evaluate_ e2
case w1 of
Neutral e1' ->
return . Neutral
$ P.primOp2_ (Less theOrd) e1' (fromWhnf w2)
Head_ v1 ->
case w2 of
Neutral e2' ->
return . Neutral
$ P.primOp2_ (Less theOrd) (fromHead v1) e2'
Head_ v2 -> do
let (v1a, v1b) = reifyPair v1
let (v2a, v2b) = reifyPair v2
error "TODO: rrLess{HOrd_Pair}"
-- BUG: The obvious recursion won't work because we need to know when the first components are equal before recursing (to implement lexicographic ordering). We really need a ternary comparison operator like 'compare'.
HOrd_Either aOrd bOrd -> error "TODO: rrLess{HOrd_Either}"
----------------------------------------------------------------
----------------------------------------------------------- fin.
| zaxtax/hakaru | haskell/Language/Hakaru/Evaluation/Lazy.hs | bsd-3-clause | 28,398 | 14 | 31 | 9,833 | 7,609 | 3,821 | 3,788 | -1 | -1 |
module Program.List.Roll where
import qualified Program.List.Expression as X
import qualified Program.List.Semantics as S
import qualified Program.List.Operation as O
import qualified Program.List.Value as V
import qualified Program.General.Environment as E
import qualified Program.General.Program as P
import qualified Program.List.Config as C
import Autolib.TES.Identifier
import Autolib.Util.Zufall
import Control.Monad ( forM )
import Data.Maybe ( isJust )
import Autolib.Reporter ( result)
environment :: C.Config
-> IO ( E.Environment V.Value )
environment conf = do
binds <- forM ( C.variables conf ) $ \ ( C.Declaration ty na ) -> do
d <- datah ty $ C.data_size_bounds conf
return ( na, d )
return $ E.make binds
datah :: V.Type
-> (Int,Int) -- ^ data size range
-> IO V.Value
datah ( ty @ V.TCollect { V.name = n, V.arg = V.TScalar } ) (lo,hi) = do
s <- randomRIO (lo,hi)
xs <- forM [ 1 .. s ] $ \ _ -> do
randomRIO (0, fromIntegral lo )
return $ V.Collect
{ V.typeof = ty
, V.contents = map V.Scalar xs
}
program :: [ O.Operation ]
-> E.Environment V.Value
-> C.Config
-> IO ( P.Program S.Statement )
program ops env conf = do
let handle e k | k > 0 = do
(x, f) <- statement ops e conf
xs <- handle f (k-1)
return $ x : xs
handle e 0 = return []
l <- randomRIO $ C.program_length_bounds conf
xs <- handle env l
return $ P.Program xs
statement :: [ O.Operation ]
-> E.Environment V.Value
-> C.Config
-> IO (S.Statement, E.Environment V.Value)
statement ops env conf = do
(s, Just f) <- do
x <- top_expression ops env conf
return ( S.Statement x, result $ S.execute env $ P.Program [ S.Statement x ] )
`repeat_until` \ (x, mf) -> isJust mf
return (s, f )
top_expression :: [ O.Operation ]
-> E.Environment V.Value
-> C.Config
-> IO X.Expression
top_expression ops env conf = do
( name, val ) <- eins
$ filter ( \ (name, val ) -> case val of
V.Collect {} -> True
_ -> False
)
$ E.contents env
let l = length $ V.contents val
action <- eins $
[ do i <- some_expression ops env conf ( 0, l )
e <- some_expression ops env conf ( 0, l )
return $ X.Methodcall ( X.Reference name ) ( mkunary "add" ) [ i, e ]
] ++
[ do i <- some_expression ops env conf ( 0, l - 1 )
return $ X.Methodcall ( X.Reference name ) ( mkunary "remove" ) [ i ]
| l > 0
]
action
-- | construct some expression with some value from the given range
some_expression ops env conf bnd = do
v <- randomRIO bnd
d <- randomRIO $ C.expression_depth_bounds conf
expression ops env v d
expression ops env v d | d > 0 = do
( name, val ) <- eins
$ filter ( \ (name, val ) -> case val of
V.Collect {} -> True
_ -> False
)
$ E.contents env
action <- eins $
[ do return $ X.Methodcall ( X.Reference name ) ( mkunary "size" ) []
| v == length ( V.contents val )
] ++
[ do e <- expression ops env i ( d - 1 )
return $ X.Methodcall ( X.Reference name ) ( mkunary "get" ) [ e ]
| (i,c) <- zip [ 0 .. ] $ V.contents val
, V.Scalar ( fromIntegral v ) == c
] ++
[ return $ X.Scalar $ fromIntegral v
]
action
expression ops env v d =
return $ X.Scalar $ fromIntegral d
{-
case filter ( \ op -> O.object op == V.name ( V.typeof val ) )
-- $ filter ( \ op -> O.result op `conform` ty )
$ filter ( \ op -> O.toplevel op == top )
$ ops of
[] -> do
-- expression top ops env ty 0
return $ X.Scalar $ fromIntegral d
ops -> do
op <- eins ops
xs <- forM ( O.args op ) $ \ arg -> do
d <- randomRIO ( 0, d-1)
expression False ops env arg d
return $ X.Methodcall ( X.Reference name ) ( mkunary $ O.method op ) xs
expression top ops env ty d | d <= 0 = do
i <- randomRIO ( 0, 3 )
return $ X.Scalar i
t1 `conform` t2 = case (t1, t2) of
-- (O.Void, O.Void) -> True
(_, O.Void) -> True
(O.Void, _) -> False
(_, _) -> True -- what?
-}
| Erdwolf/autotool-bonn | src/Program/List/Roll.hs | gpl-2.0 | 4,577 | 1 | 19 | 1,656 | 1,412 | 721 | 691 | 96 | 2 |
-- | Contains comment-related actions, like editing comments
-- and performing moderator actions on posts.
module Reddit.Actions.Comment
( getNewComments
, getNewComments'
, getMoreChildren
, getCommentInfo
, getCommentsInfo
, editComment
, deleteComment
, removeComment ) where
import Reddit.Types.Comment
import Reddit.Types.Empty
import Reddit.Types.Error
import Reddit.Types.Listing
import Reddit.Types.Options
import Reddit.Types.Post
import Reddit.Types.Reddit
import Reddit.Types.Subreddit
import qualified Reddit.Routes as Route
import Data.Default.Class
import Data.Text (Text)
import Network.API.Builder (APIError(..))
-- | Get a 'CommentListing' for the most recent comments on the site overall.
-- This maps to <http://reddit.com/r/$SUBREDDIT/comments>, or <http://reddit.com/comments>
-- if the subreddit is not specified.
-- Note that none of the comments returned will have any child comments.
getNewComments :: Monad m => Maybe SubredditName -> RedditT m CommentListing
getNewComments = getNewComments' def
-- | Get a 'CommentListing' for the most recent comments with the specified 'Options' and
-- 'SubredditName'. Note that none of the comments returned will have any child comments.
-- If the 'Options' is 'def', then this function is identical to 'getNewComments'.
getNewComments' :: Monad m => Options CommentID -> Maybe SubredditName -> RedditT m CommentListing
getNewComments' opts r = runRoute $ Route.newComments opts r
-- | Expand children comments that weren't fetched on initial load.
-- Equivalent to the web UI's "load more comments" button.
getMoreChildren :: Monad m
=> PostID -- ^ @PostID@ for the top-level
-> [CommentID] -- ^ List of @CommentID@s to expand
-> RedditT m [CommentReference]
getMoreChildren _ [] = return []
getMoreChildren p cs = do
let (now, next) = splitAt 20 cs
POSTWrapped rs <- runRoute $ Route.moreChildren p now
more <- getMoreChildren p next
return $ rs ++ more
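-- A hedged usage sketch (not part of the original module): given a post @p@
-- and the unexpanded child 'CommentID's @cs@ from its comment tree,
--
-- > refs <- getMoreChildren p cs
--
-- expands them into 'CommentReference's, issuing one request per batch of 20 IDs.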
-- | Given a 'CommentID', 'getCommentInfo' will return the full details for that comment.
getCommentInfo :: Monad m => CommentID -> RedditT m Comment
getCommentInfo c = do
res <- getCommentsInfo [c]
case res of
Listing _ _ [comment] -> return comment
_ -> failWith $ APIError InvalidResponseError
-- | Given a list of 'CommentID's, 'getCommentsInfo' will return another list containing
-- the full details for all the comments. Note that Reddit's
-- API imposes a limitation of 100 comments per request, so this function will fail immediately if given a list of more than 100 IDs.
getCommentsInfo :: Monad m => [CommentID] -> RedditT m CommentListing
getCommentsInfo cs =
if null $ drop 100 cs
then do
res <- runRoute $ Route.commentsInfo cs
case res of
Listing _ _ comments | sameLength comments cs ->
return res
_ -> failWith $ APIError InvalidResponseError
else failWith $ APIError TooManyRequests
where
sameLength (_:xs) (_:ys) = sameLength xs ys
sameLength [] [] = True
sameLength _ _ = False
-- | Edit a comment.
editComment :: Monad m
=> CommentID -- ^ Comment to edit
-> Text -- ^ New comment text
-> RedditT m Comment
editComment thing text = do
POSTWrapped res <- runRoute $ Route.edit thing text
return res
-- | Deletes one of your own comments. Note that this is different from
-- removing a comment as a moderator action.
deleteComment :: Monad m => CommentID -> RedditT m ()
deleteComment = nothing . runRoute . Route.delete
-- | Removes a comment (as a moderator action). Note that this is different
-- from deleting a comment.
removeComment :: Monad m => CommentID -> RedditT m ()
removeComment = nothing . runRoute . Route.removePost False
| intolerable/reddit | src/Reddit/Actions/Comment.hs | bsd-2-clause | 3,796 | 0 | 14 | 770 | 747 | 389 | 358 | 65 | 5 |
module Main where
import Compiler.Resolve
import Compiler.Parser
import Compiler.OptimiseLp
import Compiler.CodeGen
import Compiler.OptimiseHs
main = do
src <- readFile "tagsoup2.txt"
pre <- readFile "Prefix.hs"
let code = show $ optimiseHs $ codeGen $ optimiseLp $ resolve $ parse src
writeFile "../Text/HTML/TagSoup/Generated/Parser.hs" (pre ++ code)
| ndmitchell/tagsoup | dead/parser/Main.hs | bsd-3-clause | 372 | 0 | 14 | 63 | 101 | 51 | 50 | 11 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Stack.PackageDump
( Line
, eachSection
, eachPair
, DumpPackage (..)
, conduitDumpPackage
, ghcPkgDump
, InstalledCache
, InstalledCacheEntry (..)
, newInstalledCache
, loadInstalledCache
, saveInstalledCache
, addProfiling
, addHaddock
, sinkMatching
, pruneDeps
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Exception.Enclosed (tryIO)
import Control.Monad (liftM)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger)
import Control.Monad.Trans.Control
import Data.Attoparsec.Args
import Data.Attoparsec.Text as P
import Data.Binary.VersionTagged
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Either (partitionEithers)
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import qualified Data.Set as Set
import qualified Data.Text.Encoding as T
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Path
import Path.IO (createTree)
import Prelude -- Fix AMP warning
import Stack.GhcPkg
import Stack.Types
import System.Directory (getDirectoryContents, doesFileExist)
import System.Process.Read
-- | Cached information on whether packages have profiling libraries and haddocks.
newtype InstalledCache = InstalledCache (IORef InstalledCacheInner)
newtype InstalledCacheInner = InstalledCacheInner (Map GhcPkgId InstalledCacheEntry)
deriving (Binary, NFData, Generic)
instance HasStructuralInfo InstalledCacheInner
instance HasSemanticVersion InstalledCacheInner
-- | Cached information on whether a package has profiling libraries and haddocks.
data InstalledCacheEntry = InstalledCacheEntry
{ installedCacheProfiling :: !Bool
, installedCacheHaddock :: !Bool
, installedCacheIdent :: !PackageIdentifier }
deriving (Eq, Generic)
instance Binary InstalledCacheEntry
instance HasStructuralInfo InstalledCacheEntry
instance NFData InstalledCacheEntry
-- | Call ghc-pkg dump with appropriate flags and stream to the given @Sink@, for the given package databases (the global database if none are specified)
ghcPkgDump
:: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m)
=> EnvOverride
-> WhichCompiler
-> [Path Abs Dir] -- ^ if empty, use global
-> Sink ByteString IO a
-> m a
ghcPkgDump menv wc mpkgDbs sink = do
case reverse mpkgDbs of
(pkgDb:_) -> (createDatabase menv wc) pkgDb -- TODO maybe use some retry logic instead?
_ -> return ()
a <- sinkProcessStdout Nothing menv (ghcPkgExeName wc) args sink
return a
where
args = concat
[ case mpkgDbs of
[] -> ["--global", "--no-user-package-db"]
_ -> ["--user", "--no-user-package-db"] ++ concatMap (\pkgDb -> ["--package-db", toFilePath pkgDb]) mpkgDbs
, ["dump", "--expand-pkgroot"]
]
-- | Create a new, empty @InstalledCache@
newInstalledCache :: MonadIO m => m InstalledCache
newInstalledCache = liftIO $ InstalledCache <$> newIORef (InstalledCacheInner Map.empty)
-- | Load an @InstalledCache@ from disk, swallowing any errors and falling
-- back to an empty cache.
loadInstalledCache :: (MonadLogger m, MonadIO m) => Path Abs File -> m InstalledCache
loadInstalledCache path = do
m <- taggedDecodeOrLoad (toFilePath path) (return $ InstalledCacheInner Map.empty)
liftIO $ fmap InstalledCache $ newIORef m
-- | Save an @InstalledCache@ to disk
saveInstalledCache :: MonadIO m => Path Abs File -> InstalledCache -> m ()
saveInstalledCache path (InstalledCache ref) = liftIO $ do
createTree (parent path)
readIORef ref >>= taggedEncodeFile (toFilePath path)
-- | Prune a list of possible packages down to those whose dependencies are met.
--
-- * id uniquely identifies an item
--
-- * There can be multiple items per name
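--
-- For an example of how this is used, see 'sinkMatching' below, which calls
--
-- > pruneDeps id dpGhcPkgId dpDepends const
--
-- to keep only those packages whose dependencies are all present.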
pruneDeps
:: (Ord name, Ord id)
=> (id -> name) -- ^ extract the name from an id
-> (item -> id) -- ^ the id of an item
-> (item -> [id]) -- ^ get the dependencies of an item
-> (item -> item -> item) -- ^ choose the desired of two possible items
-> [item] -- ^ input items
-> Map name item
pruneDeps getName getId getDepends chooseBest =
Map.fromList
. (map $ \item -> (getName $ getId item, item))
. loop Set.empty Set.empty []
where
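    -- Repeatedly sweep over the remaining items: keep any whose dependencies
    -- are already satisfied, then try again with the rest, stopping once a
    -- sweep finds nothing new.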
loop foundIds usedNames foundItems dps =
case partitionEithers $ map depsMet dps of
([], _) -> foundItems
(s', dps') ->
let foundIds' = Map.fromListWith chooseBest s'
foundIds'' = Set.fromList $ map getId $ Map.elems foundIds'
usedNames' = Map.keysSet foundIds'
foundItems' = Map.elems foundIds'
in loop
(Set.union foundIds foundIds'')
(Set.union usedNames usedNames')
(foundItems ++ foundItems')
(catMaybes dps')
where
depsMet dp
| name `Set.member` usedNames = Right Nothing
| all (`Set.member` foundIds) (getDepends dp) = Left (name, dp)
| otherwise = Right $ Just dp
where
id' = getId dp
name = getName id'
-- | Find the package IDs matching the given constraints with all dependencies installed.
-- Packages not mentioned in the provided @Map@ are allowed to be present too.
sinkMatching :: Monad m
=> Bool -- ^ require profiling?
-> Bool -- ^ require haddock?
-> Map PackageName Version -- ^ allowed versions
-> Consumer (DumpPackage Bool Bool)
m
(Map PackageName (DumpPackage Bool Bool))
sinkMatching reqProfiling reqHaddock allowed = do
dps <- CL.filter (\dp -> isAllowed (dpPackageIdent dp) &&
(not reqProfiling || dpProfiling dp) &&
(not reqHaddock || dpHaddock dp))
=$= CL.consume
return $ Map.fromList $ map (packageIdentifierName . dpPackageIdent &&& id) $ Map.elems $ pruneDeps
id
dpGhcPkgId
dpDepends
const -- Could consider a better comparison in the future
dps
where
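    -- A package is acceptable when it is not mentioned in the allowed map,
    -- or is pinned to exactly this version.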
isAllowed (PackageIdentifier name version) =
case Map.lookup name allowed of
Just version' | version /= version' -> False
_ -> True
-- | Add profiling information to the stream of @DumpPackage@s
addProfiling :: MonadIO m
=> InstalledCache
-> Conduit (DumpPackage a b) m (DumpPackage Bool b)
addProfiling (InstalledCache ref) =
CL.mapM go
where
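    -- Use the cached answer when we have one; otherwise scan the package's
    -- library directories for a profiling build of one of its libraries.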
go dp = liftIO $ do
InstalledCacheInner m <- readIORef ref
let gid = dpGhcPkgId dp
p <- case Map.lookup gid m of
Just installed -> return (installedCacheProfiling installed)
Nothing | null (dpLibraries dp) -> return True
Nothing -> do
let loop [] = return False
loop (dir:dirs) = do
econtents <- tryIO $ getDirectoryContents dir
let contents = either (const []) id econtents
if or [isProfiling content lib
| content <- contents
, lib <- dpLibraries dp
] && not (null contents)
then return True
else loop dirs
loop $ dpLibDirs dp
return dp { dpProfiling = p }
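-- | Does a directory entry look like the profiling build of the given
-- library? This relies on profiling libraries being installed with a
-- @lib<name>_p@ prefix.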
isProfiling :: FilePath -- ^ entry in directory
-> ByteString -- ^ name of library
-> Bool
isProfiling content lib =
prefix `S.isPrefixOf` S8.pack content
where
prefix = S.concat ["lib", lib, "_p"]
-- | Add haddock information to the stream of @DumpPackage@s
addHaddock :: MonadIO m
=> InstalledCache
-> Conduit (DumpPackage a b) m (DumpPackage a Bool)
addHaddock (InstalledCache ref) =
CL.mapM go
where
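    -- Use the cached answer when we have one; otherwise check whether any of
    -- the advertised haddock interface files actually exists on disk.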
go dp = liftIO $ do
InstalledCacheInner m <- readIORef ref
let gid = dpGhcPkgId dp
h <- case Map.lookup gid m of
Just installed -> return (installedCacheHaddock installed)
Nothing | not (dpHasExposedModules dp) -> return True
Nothing -> do
let loop [] = return False
loop (ifc:ifcs) = do
exists <- doesFileExist ifc
if exists
then return True
else loop ifcs
loop $ dpHaddockInterfaces dp
return dp { dpHaddock = h }
-- | Dump information for a single package
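--
-- The @profiling@ and @haddock@ type parameters start out as @()@ (see
-- 'conduitDumpPackage') and are filled in by 'addProfiling' and 'addHaddock'.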
data DumpPackage profiling haddock = DumpPackage
{ dpGhcPkgId :: !GhcPkgId
, dpPackageIdent :: !PackageIdentifier
, dpLibDirs :: ![FilePath]
, dpLibraries :: ![ByteString]
, dpHasExposedModules :: !Bool
, dpDepends :: ![GhcPkgId]
, dpHaddockInterfaces :: ![FilePath]
, dpProfiling :: !profiling
, dpHaddock :: !haddock
, dpIsExposed :: !Bool
}
deriving (Show, Eq, Ord)
data PackageDumpException
= MissingSingleField ByteString (Map ByteString [Line])
| Couldn'tParseField ByteString [Line]
deriving Typeable
instance Exception PackageDumpException
instance Show PackageDumpException where
show (MissingSingleField name values) = unlines $ concat
[ return $ concat
[ "Expected single value for field name "
, show name
, " when parsing ghc-pkg dump output:"
]
, map (\(k, v) -> " " ++ show (k, v)) (Map.toList values)
]
show (Couldn'tParseField name ls) =
"Couldn't parse the field " ++ show name ++ " from lines: " ++ show ls
-- | Convert a stream of bytes into a stream of @DumpPackage@s
conduitDumpPackage :: MonadThrow m
=> Conduit ByteString m (DumpPackage () ())
conduitDumpPackage = (=$= CL.catMaybes) $ eachSection $ do
pairs <- eachPair (\k -> (k, ) <$> CL.consume) =$= CL.consume
let m = Map.fromList pairs
let parseS k =
case Map.lookup k m of
Just [v] -> return v
_ -> throwM $ MissingSingleField k m
-- Can't fail: if not found, same as an empty list. See:
-- https://github.com/fpco/stack/issues/182
parseM k =
case Map.lookup k m of
Just vs -> vs
Nothing -> []
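            -- The wired-in builtin_rts "package" is not a real entry in the
            -- database, so it is dropped from the dependency list.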
parseDepend :: MonadThrow m => ByteString -> m (Maybe GhcPkgId)
parseDepend "builtin_rts" = return Nothing
parseDepend bs =
liftM Just $ parseGhcPkgId bs'
where
(bs', _builtinRts) =
case stripSuffixBS " builtin_rts" bs of
Nothing ->
case stripPrefixBS "builtin_rts " bs of
Nothing -> (bs, False)
Just x -> (x, True)
Just x -> (x, True)
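    -- ghc-pkg also emits a section for builtin_rts itself; it has no useful
    -- metadata, so skip the whole section.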
case Map.lookup "id" m of
Just ["builtin_rts"] -> return Nothing
_ -> do
name <- parseS "name" >>= parsePackageName
version <- parseS "version" >>= parseVersion
ghcPkgId <- parseS "id" >>= parseGhcPkgId
-- if a package has no modules, these won't exist
let libDirKey = "library-dirs"
libraries = parseM "hs-libraries"
exposedModules = parseM "exposed-modules"
exposed = parseM "exposed"
depends <- mapM parseDepend $ parseM "depends"
let parseQuoted key =
case mapM (P.parseOnly (argsParser NoEscaping) . T.decodeUtf8) val of
Left{} -> throwM (Couldn'tParseField key val)
Right dirs -> return (concat dirs)
where
val = parseM key
libDirPaths <- parseQuoted libDirKey
haddockInterfaces <- parseQuoted "haddock-interfaces"
return $ Just DumpPackage
{ dpGhcPkgId = ghcPkgId
, dpPackageIdent = PackageIdentifier name version
, dpLibDirs = libDirPaths
, dpLibraries = S8.words $ S8.unwords libraries
, dpHasExposedModules = not (null libraries || null exposedModules)
, dpDepends = catMaybes (depends :: [Maybe GhcPkgId])
, dpHaddockInterfaces = haddockInterfaces
, dpProfiling = ()
, dpHaddock = ()
, dpIsExposed = exposed == ["True"]
}
stripPrefixBS :: ByteString -> ByteString -> Maybe ByteString
stripPrefixBS x y
| x `S.isPrefixOf` y = Just $ S.drop (S.length x) y
| otherwise = Nothing
stripSuffixBS :: ByteString -> ByteString -> Maybe ByteString
stripSuffixBS x y
| x `S.isSuffixOf` y = Just $ S.take (S.length y - S.length x) y
| otherwise = Nothing
-- | A single line of input, not including line endings
type Line = ByteString
-- | Apply the given @Sink@ to each section of output; sections are separated by a single line containing @---@
eachSection :: Monad m
=> Sink Line m a
-> Conduit ByteString m a
eachSection inner =
CL.map (S.filter (/= _cr)) =$= CB.lines =$= start
where
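    -- 13 is the carriage-return byte: strip it so Windows-style line endings
    -- are handled too.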
_cr = 13
peekBS = await >>= maybe (return Nothing) (\bs ->
if S.null bs
then peekBS
else leftover bs >> return (Just bs))
start = peekBS >>= maybe (return ()) (const go)
go = do
x <- toConsumer $ takeWhileC (/= "---") =$= inner
yield x
CL.drop 1
start
-- | Grab each key/value pair
eachPair :: Monad m
=> (ByteString -> Sink Line m a)
-> Conduit Line m a
eachPair inner =
start
where
start = await >>= maybe (return ()) start'
_colon = 58
_space = 32
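    -- A field looks like "key: value"; any following lines indented so that
    -- they line up under the value are continuation lines of the same field.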
start' bs1 =
toConsumer (valSrc =$= inner key) >>= yield >> start
where
(key, bs2) = S.break (== _colon) bs1
(spaces, bs3) = S.span (== _space) $ S.drop 1 bs2
indent = S.length key + 1 + S.length spaces
valSrc
| S.null bs3 = noIndent
| otherwise = yield bs3 >> loopIndent indent
noIndent = do
mx <- await
case mx of
Nothing -> return ()
Just bs -> do
let (spaces, val) = S.span (== _space) bs
if S.length spaces == 0
then leftover val
else do
yield val
loopIndent (S.length spaces)
loopIndent i =
loop
where
loop = await >>= maybe (return ()) go
go bs
| S.length spaces == i && S.all (== _space) spaces =
yield val >> loop
| otherwise = leftover bs
where
(spaces, val) = S.splitAt i bs
-- | Stream values downstream while the predicate holds; the first value that
-- fails the predicate is put back as leftover.
takeWhileC :: Monad m => (a -> Bool) -> Conduit a m a
takeWhileC f =
loop
where
loop = await >>= maybe (return ()) go
go x
| f x = yield x >> loop
| otherwise = leftover x
| meiersi-11ce/stack | src/Stack/PackageDump.hs | bsd-3-clause | 15,806 | 0 | 27 | 5,207 | 4,065 | 2,079 | 1,986 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
module Data.OrdPSQ.Benchmark
( benchmark
) where
import Data.List (foldl')
import qualified Data.OrdPSQ as OrdPSQ
import Criterion.Main
import Prelude hiding (lookup)
import BenchmarkTypes
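-- | Build the benchmark set for 'OrdPSQ.OrdPSQ', exercising lookup, insert,
-- delete and minView over Int keys and priorities.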
benchmark :: String -> [BElem] -> BenchmarkSet
benchmark name elems = BenchmarkSet
{ bGroupName = name
, bMinView = whnf bench_minView initialPSQ
, bLookup = whnf (bench_lookup keys) initialPSQ
, bInsertEmpty = nf (bench_insert firstElems) OrdPSQ.empty
, bInsertNew = nf (bench_insert secondElems) initialPSQ
, bInsertDuplicates = nf (bench_insert firstElems) initialPSQ
, bDelete = nf (bench_delete firstKeys) initialPSQ
}
where
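    -- The first half of the elements forms the initial queue; the second half
    -- supplies the "insert new" benchmark.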
(firstElems, secondElems) = splitAt (numElems `div` 2) elems
numElems = length elems
keys = map (\(x, _, _) -> x) elems
firstKeys = map (\(x, _, _) -> x) firstElems
initialPSQ = OrdPSQ.fromList firstElems :: OrdPSQ.OrdPSQ Int Int ()
-- Get the sum of all priorities by getting all elements using 'lookup'
bench_lookup :: [Int] -> OrdPSQ.OrdPSQ Int Int () -> Int
bench_lookup xs m = foldl' (\n k -> maybe n fst (OrdPSQ.lookup k m)) 0 xs
-- Insert a list of elements one-by-one into a PSQ
bench_insert :: [BElem] -> OrdPSQ.OrdPSQ Int Int () -> OrdPSQ.OrdPSQ Int Int ()
bench_insert xs m0 = foldl' (\m (k, p, v) -> OrdPSQ.insert k p v m) m0 xs
-- Get the sum of all priorities by sequentially popping all elements using
-- 'minView'
bench_minView :: OrdPSQ.OrdPSQ Int Int () -> Int
bench_minView = go 0
where
go !n t = case OrdPSQ.minView t of
Nothing -> n
Just (k, x, _, t') -> go (n + k + x) t'
-- Empty a queue by sequentially removing all elements
bench_delete :: [Int] -> OrdPSQ.OrdPSQ Int Int () -> OrdPSQ.OrdPSQ Int Int ()
bench_delete keys t0 = foldl' (\t k -> OrdPSQ.delete k t) t0 keys
| meiersi/psqueues-old | benchmarks/Data/OrdPSQ/Benchmark.hs | bsd-3-clause | 1,966 | 0 | 13 | 514 | 620 | 335 | 285 | 33 | 2 |