code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE OverloadedStrings #-}
module DockerCompose.SpecialChars where
import Data.Text.Lazy as L
data SpecialChar
= Space
| Colon
| Hyphen
| NewLine
| Equals
| BlankChar
| SpecialChar :. SpecialChar
deriving (Eq, Show)
toText :: SpecialChar -> L.Text
toText Space = " "
toText Colon = ":"
toText Hyphen = "-"
toText NewLine = "\n"
toText Equals = "="
toText BlankChar = mempty
toText (x :. y) = toText x `mappend` toText y
instance Monoid SpecialChar where
mempty = BlankChar
x `mappend` y = x :. y
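-- A minimal usage sketch (illustrative, not part of the original module):
-- special characters compose with 'mappend' / '(:.)' and render via 'toText':
--
-- >>> toText (Hyphen :. Space :. Colon)
-- "- :"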
| amarpotghan/docker-compose-dsl | src/DockerCompose/SpecialChars.hs | bsd-3-clause | 533 | 0 | 7 | 119 | 171 | 96 | 75 | 23 | 1 |
{-# LANGUAGE ConstrainedClassMethods #-}
{-|
The machinery used by "Typechecker.Typechecker" and
"Typechecker.Capturechecker" for handling errors and backtracing.
-}
module Typechecker.TypeError (Backtrace
,emptyBT
,Pushable(push)
,TCError(TCError)
,Error(..)
,TCWarning(TCWarning)
,Warning(..)
,ExecutionContext(..)
,currentContextFromBacktrace
,validUseOfBreak
,validUseOfContinue
) where
import Text.PrettyPrint
import Data.Maybe
import Data.List
import Data.Char
import Text.Printf (printf)
import Identifiers
import Types
import AST.AST hiding (showWithKind)
import AST.PrettyPrinter
import AST.Meta(Position)
data BacktraceNode = BTFunction Name Type
| BTTrait Type
| BTClass Type
| BTParam ParamDecl
| BTField FieldDecl
| BTMethod MethodDecl
| BTExpr Expr
| BTTypedef Type
| BTModule Name
| BTImport Namespace
deriving(Eq)
isBTExpr :: BacktraceNode -> Bool
isBTExpr (BTExpr _) = True
isBTExpr _ = False
instance Show BacktraceNode where
show (BTFunction n ty) =
concat ["In function '", show n, "' of type '", show ty, "'"]
show (BTClass ty) = concat ["In class '", show ty, "'"]
show (BTTrait ty) = concat ["In trait '", show ty, "'"]
show (BTParam p) = concat ["In parameter '", show (ppParamDecl p), "'"]
show (BTField f) = concat ["In field '", show (ppFieldDecl f), "'"]
show (BTMethod m) =
let name = hname $ mheader m
ty = htype $ mheader m
method | isStreamMethod m = "stream method"
| otherwise = "method"
in
concat ["In ", method, " '", show name, "' of type '", show ty, "'"]
show (BTExpr expr)
| (isNothing . getSugared) expr = ""
| otherwise =
let str = show $ nest 2 $ ppSugared expr
in "In expression: \n" ++ str
show (BTTypedef tl) =
concat ["In typedef '", show tl, "'"]
show (BTModule m) =
concat ["In declaration of module '", show m, "'"]
show (BTImport ns) =
concat ["In import of module '", show ns, "'"]
type Backtrace = [(Position, BacktraceNode)]
emptyBT :: Backtrace
emptyBT = []
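-- Prune a backtrace before printing (a summary of the definition below):
-- exact duplicates are removed, a 'Seq' node is dropped when the following
-- node is an expression with a body of its own, desugared mini-lets are
-- skipped, and beyond the two innermost expressions only non-expression
-- nodes are kept.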
reduceBT :: Backtrace -> Backtrace
reduceBT = truncateExprs . dropMiniLets . mergeBlocks . nub
where
mergeBlocks ((pos1, BTExpr seq@Seq{}):(pos2, BTExpr e2):bt) =
if hasBody e2
then mergeBlocks $ (pos2, BTExpr e2):bt
else (pos1, BTExpr seq) : mergeBlocks ((pos2, BTExpr e2) : bt)
mergeBlocks (node:bt) = node:mergeBlocks bt
mergeBlocks [] = []
dropMiniLets :: Backtrace -> Backtrace
dropMiniLets = filter (not . isMiniLetNode . snd)
isMiniLetNode node
| BTExpr e <- node
, Just MiniLet{} <- getSugared e = True
| otherwise = False
truncateExprs ((pos1, BTExpr e1):(pos2, BTExpr e2):bt) =
(pos1, BTExpr e1):(pos2, BTExpr e2):
filter (not . isBTExpr . snd) bt
truncateExprs bt = bt
data ExecutionContext = MethodContext MethodDecl
| ClosureContext (Maybe Type)
| FunctionContext Name Type
currentContextFromBacktrace :: Backtrace -> ExecutionContext
currentContextFromBacktrace [] = error "TypeError.hs: No execution context"
currentContextFromBacktrace ((_, BTExpr Closure{mty}):_) = ClosureContext mty
currentContextFromBacktrace ((_, BTMethod m):_) = MethodContext m
currentContextFromBacktrace ((_, BTFunction f t):_) = FunctionContext f t
currentContextFromBacktrace (_:bt) = currentContextFromBacktrace bt
validUseOfBreak :: Backtrace -> Bool
validUseOfBreak [] = False
validUseOfBreak ((_, BTExpr l@For{}):_) = True
validUseOfBreak ((_, BTExpr l@While{}):_) = True
validUseOfBreak ((_, BTExpr l@Repeat{}):_) = True
validUseOfBreak ((_, BTExpr c@Closure{}):_) = False
validUseOfBreak (_:bt) = validUseOfBreak bt
validUseOfContinue :: Backtrace -> Bool
validUseOfContinue [] = False
validUseOfContinue ((_, BTExpr l@For{}):_) = False
validUseOfContinue ((_, BTExpr l@While{}):_) = True
validUseOfContinue ((_, BTExpr l@DoWhile{}):_) = True
validUseOfContinue ((_, BTExpr l@Repeat{}):_) = True
validUseOfContinue ((_, BTExpr c@Closure{}):_) = False
validUseOfContinue (_:bt) = validUseOfContinue bt
-- | A type class for unifying the syntactic elements that can be pushed to the
-- backtrace stack.
class Pushable a where
push :: a -> Backtrace -> Backtrace
pushMeta :: HasMeta a => a -> BacktraceNode -> Backtrace -> Backtrace
pushMeta m n bt = (getPos m, n) : bt
instance Pushable Function where
push fun =
pushMeta fun (BTFunction (functionName fun) (functionType fun))
instance Pushable TraitDecl where
push t = pushMeta t (BTTrait (tname t))
instance Pushable ClassDecl where
push c = pushMeta c (BTClass (cname c))
instance Pushable FieldDecl where
push f = pushMeta f (BTField f)
instance Pushable ParamDecl where
push p = pushMeta p (BTParam p)
instance Pushable MethodDecl where
push m = pushMeta m (BTMethod m)
instance Pushable Expr where
push expr = pushMeta expr (BTExpr expr)
instance Pushable Typedef where
push t@(Typedef {typedefdef}) = pushMeta t (BTTypedef typedefdef)
instance Pushable ModuleDecl where
push m@(Module{modname}) = pushMeta m (BTModule modname)
instance Pushable ImportDecl where
push i@(Import{itarget}) = pushMeta i (BTImport itarget)
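-- A small usage sketch (with hypothetical 'cdecl' and 'mdecl'): each 'push'
-- prepends a node, so the innermost syntactic element sits at the head of the
-- backtrace, which is what 'currentContextFromBacktrace' and the valid-use
-- checks above rely on.
--
-- > push mdecl (push cdecl emptyBT)
-- > -- = [(getPos mdecl, BTMethod mdecl), (getPos cdecl, BTClass (cname cdecl))]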
refTypeName :: Type -> String
refTypeName ty
| isClassType ty = if isADT ty
then "abstract data type case '" ++ getId ty ++ "'"
else "class '" ++ getId ty ++ "'"
| isTraitType ty = if isADT ty
then "abstract data type '" ++ getId ty ++ "'"
else "trait '" ++ getId ty ++ "'"
| isCapabilityType ty = "capability '" ++ show ty ++ "'"
| isUnionType ty = "union '" ++ show ty ++ "'"
| isTypeVar ty
, Just bound <- getBound ty
= refTypeName bound
| otherwise = error $ "TypeError.hs: No refTypeName for " ++
showWithKind ty
-- | The data type for a type checking error. Showing it will
-- produce an error message and print the backtrace.
data TCError = TCError Error Backtrace
instance Show TCError where
show (TCError err []) =
" *** Error during typechecking *** \n" ++
show err ++ "\n"
show (TCError err bt@((pos, _):_)) =
" *** Error during typechecking *** \n" ++
show pos ++ "\n" ++
show err ++ "\n" ++
concatMap showBT (reduceBT bt)
where
showBT (_, node) =
case show node of
"" -> ""
s -> s ++ "\n"
data Error =
DistinctTypeParametersError Type
| WrongNumberOfMethodArgumentsError Name Type Int Int
| WrongNumberOfFunctionArgumentsError QualifiedName Int Int
| WrongNumberOfFunctionTypeArgumentsError QualifiedName Int Int
| WrongNumberOfTypeParametersError Type Int Type Int
| MissingFieldRequirementError FieldDecl Type
| CovarianceViolationError FieldDecl Type Type
| RequiredFieldMismatchError FieldDecl Type Type Bool
| NonDisjointConjunctionError Type Type FieldDecl
| OverriddenMethodTypeError Name Type Type Type
| OverriddenMethodError Name Type Error
| IncludedMethodConflictError Name Type Type
| MissingMethodRequirementError FunctionHeader Type
| MissingMainClass
| SyncStreamCall
| UnknownTraitError Type
| UnknownADTError Type
| UnknownRefTypeError Type
| NonADTCaseError Type
| MalformedCapabilityError Type
| MalformedBoundError Type
| RecursiveTypesynonymError Type
| DuplicateThingError String String
| PassiveStreamingMethodError
| PolymorphicConstructorError
| StreamingConstructorError
| MainMethodArgumentsError
| MainConstructorError
| FieldNotFoundError Name Type
| MethodNotFoundError Name Type
| BreakOutsideOfLoopError
| BreakUsedAsExpressionError
| ContinueOutsideOfLoopError
| ContinueUsedAsExpressionError
| NonCallableTargetError Type
| NonSendableTargetError Type
| MainMethodCallError
| ConstructorCallError
| ExpectingOtherTypeError String Type
| NonStreamingContextError Expr
| UnboundFunctionError QualifiedName
| NonFunctionTypeError Type
| BottomTypeInferenceError
| IfInferenceError
| IfBranchMismatchError Type Type
| EmptyMatchClauseError
| ActiveMatchError
| MatchInferenceError
| ThisReassignmentError
| ImmutableVariableError QualifiedName
| PatternArityMismatchError Name Int Int
| PatternTypeMismatchError Expr Type
| NonMaybeExtractorPatternError Expr
| InvalidPatternError Expr
| DuplicatePatternVarError Name Expr
| InvalidTupleTargetError Expr Int Type
| InvalidTupleAccessError Expr Int
| CannotReadFieldError Expr
| NonAssignableLHSError
| ValFieldAssignmentError Name Type
| UnboundVariableError QualifiedName
| BuriedVariableError QualifiedName
| ObjectCreationError Type
| NonIterableError Type
| EmptyArrayLiteralError
| NonIndexableError Type
| NonSizeableError Type
| FormatStringLiteralError
| UnprintableExpressionError Type
| WrongNumberOfPrintArgumentsError Int Int
| UnaryOperandMismatchError UnaryOp Type
| BinaryOperandMismatchError BinaryOp String Type Type
| UndefinedBinaryOperatorError BinaryOp
| NullTypeInferenceError
| CannotBeNullError Type
| TypeMismatchError Type Type
| TypeWithCapabilityMismatchError Type Type Type
| TypeVariableAmbiguityError Type Type Type
| FreeTypeVariableError Type
| TypeVariableAndVariableCommonNameError [Name]
| UnionMethodAmbiguityError Type Name
| MalformedUnionTypeError Type Type
| RequiredFieldMutabilityError Type FieldDecl
| ProvidingTraitFootprintError Type Type Name [FieldDecl]
| TypeArgumentInferenceError Expr Type
| AmbiguousTypeError Type [Type]
| UnknownTypeUsageError String Type
| AmbiguousNameError QualifiedName [(QualifiedName, Type)]
| UnknownNamespaceError (Maybe Namespace)
| UnknownNameError Namespace Name
| ShadowedImportError ImportDecl
| WrongModuleNameError Name FilePath
| BadSyncCallError
| PrivateAccessModifierTargetError Name
| ClosureReturnError
| ClosureForwardError
| MatchMethodNonMaybeReturnError
| MatchMethodNonEmptyParameterListError
| ImpureMatchMethodError Expr
| IdComparisonNotSupportedError Type
| IdComparisonTypeMismatchError Type Type
| ForwardInPassiveContext Type
| ForwardInFunction
| ForwardTypeError Type Type
| ForwardTypeClosError Type Type
| CannotHaveModeError Type
| ModelessError Type
| ModeOverrideError Type
| CannotConsumeError Expr
| CannotConsumeTypeError Expr
| ImmutableConsumeError Expr
| CannotGiveReadModeError Type
| CannotGiveSharableModeError Type
| NonValInReadContextError Type
| NonSafeInReadContextError Type Type
| NonSafeInExtendedReadTraitError Type Name Type
| ProvidingToReadTraitError Type Type Name
| SubordinateReturnError Name Type
| SubordinateArgumentError Expr
| SubordinateFieldError Name
| ThreadLocalFieldError Type
| ThreadLocalFieldExtensionError Type FieldDecl
| ThreadLocalArgumentError Expr
| PolymorphicArgumentSendError Expr Type
| PolymorphicReturnError Name Type
| ThreadLocalReturnError Name Type
| MalformedConjunctionError Type Type Type
| CannotUnpackError Type
| CannotInferUnpackingError Type
| UnsplittableTypeError Type
| DuplicatingSplitError Type
| StackboundArrayTypeError Type
| ManifestConflictError Type Type
| ManifestClassConflictError Type Type
| UnmodedMethodExtensionError Type Name
| ActiveTraitError Type Type
| NewWithModeError
| UnsafeTypeArgumentError Type Type
| OverlapWithBuiltins
| SimpleError String
----------------------------
-- Capturechecking errors --
----------------------------
| ReverseBorrowingError
| BorrowedFieldError Type
| LinearClosureError QualifiedName Type
| BorrowedLeakError Expr
| NonBorrowableError Expr
| ActiveBorrowError Expr Type
| ActiveBorrowSendError Expr Type
| DuplicateBorrowError Expr
| StackboundednessMismatchError Type Type
| LinearCaptureError Expr Type
arguments 1 = "argument"
arguments _ = "arguments"
typeParameters 1 = "type parameter"
typeParameters _ = "type parameters"
enumerateSafeTypes =
"Safe types are primitives and types with read, active or local mode."
instance Show Error where
show (DistinctTypeParametersError ty) =
printf "Type parameters of '%s' must be distinct" (show ty)
show (WrongNumberOfMethodArgumentsError name targetType expected actual) =
let nameWithKind =
(if name == constructorName
then "Constructor"
else "Method '" ++ show name ++ "'") ++
" in " ++ refTypeName targetType
in printf "%s expects %d %s. Got %d"
nameWithKind expected (arguments expected) actual
show (WrongNumberOfFunctionArgumentsError name expected actual) =
printf "Function %s expects %d %s. Got %d"
(show name) expected (arguments expected) actual
show (WrongNumberOfFunctionTypeArgumentsError name expected actual) =
printf "Function %s expects %d %s. Got %d"
(show name) expected (typeParameters expected) actual
show (WrongNumberOfTypeParametersError ty1 n1 ty2 n2) =
printf "'%s' expects %d type %s, but '%s' has %d"
(showWithoutMode ty1) n1 (arguments n1) (showWithoutMode ty2) n2
show (MissingFieldRequirementError field trait) =
printf "Cannot find field '%s' required by included %s"
(show field) (refTypeName trait)
show (CovarianceViolationError field expected trait) =
printf ("Field '%s' must have a subtype of '%s' to meet " ++
"the requirements of included %s")
(show field) (show expected) (refTypeName trait)
show (RequiredFieldMismatchError field expected trait isSub) =
printf ("Field '%s' must exactly match type '%s' " ++
"to meet the requirements of included %s%s")
(show field) (show expected) (refTypeName trait)
(if isSub
then ". Consider turning '" ++ show (fname field) ++
"' into a val-field in " ++ refTypeName trait
else "")
show (NonDisjointConjunctionError left right field) =
printf
"Conjunctive traits '%s' and '%s' cannot share mutable field '%s'"
(show left) (show right) (show field)
show (OverriddenMethodTypeError name expected trait actual) =
printf ("Overridden method '%s' does not " ++
"have the expected type '%s' required by %s.\n" ++
"Actual type is '%s'")
(show name) (show expected) (refTypeName trait) (show actual)
show (OverriddenMethodError name trait err) =
case err of
FieldNotFoundError f _ ->
printf ("Overridden method '%s' requires access to field '%s' " ++
"which is not in requiring %s.\n" ++
"Consider extending the trait on inclusion: %s(%s)")
(show name) (show f) (refTypeName trait) (show trait) (show f)
MethodNotFoundError m _ ->
printf ("Overridden method '%s' calls method '%s' " ++
"which is not in requiring %s.\n" ++
"Consider extending the trait on inclusion: %s(%s())")
(show name) (show m) (refTypeName trait) (show trait) (show m)
TypeMismatchError actual expected ->
if actual == abstractTraitFromTraitType trait
then printf ("Overridden method '%s' uses 'this' as %s " ++
"and cannot be typechecked in requiring %s")
(show name) (show expected) (refTypeName trait)
else defaultMessage
ValFieldAssignmentError f targetType ->
if targetType == abstractTraitFromTraitType trait
then printf ("Overridden method '%s' writes field '%s' " ++
"which is marked as immutable in requiring %s.")
(show name) (show f) (refTypeName trait)
else defaultMessage
err -> defaultMessage
where
defaultMessage =
printf ("Overridden method '%s' cannot be typechecked in " ++
"requiring %s:\n%s")
(show name) (refTypeName trait) (show err)
show (IncludedMethodConflictError name left right) =
printf "Conflicting inclusion of method '%s' from %s and %s"
(show name) (refTypeName left) (refTypeName right)
show (MissingMethodRequirementError header trait) =
printf "Cannot find method '%s' required by included %s"
(show $ ppFunctionHeader header) (refTypeName trait)
show (UnknownTraitError ty) =
printf "Couldn't find trait '%s'" (getId ty)
show (UnknownADTError ty) =
printf "Couldn't find ADT constructor '%s'" (getId ty)
show MissingMainClass = "Couldn't find active class 'Main'"
show SyncStreamCall = "A stream method can not be called synchronously since it will invariably deadlock"
show (IdComparisonNotSupportedError ty) =
printf "Type '%s' does not support identity comparison%s" (show ty)
(if isRefType ty
then " (must include Id trait)"
else "")
show (IdComparisonTypeMismatchError lty rty)
| isTupleType lty && isTupleType rty &&
length (getArgTypes lty) /= length (getArgTypes rty) =
printf "Cannot compare tuples of different sizes: %s and %s"
(show lty) (show rty)
| otherwise =
printf "Cannot compare values across types %s and %s"
(show lty) (show rty)
show BadSyncCallError = "Synchronous method calls on actors are not allowed (except on the current this)"
show (PrivateAccessModifierTargetError name) =
printf "Cannot call private %s" kind
where
kind = if name == constructorName
then "constructor"
else "method '" ++ show name ++ "'"
show (UnknownRefTypeError ty) =
printf "Couldn't find class, trait or typedef '%s'" (show ty)
show (NonADTCaseError ty) =
printf "Type '%s' is not an abstract data type" (show ty)
show (MalformedCapabilityError ty) =
printf "Cannot form capability with %s" (showWithKind ty)
show (MalformedBoundError bound) =
printf "Cannot use %s as bound (must have trait)" (showWithKind bound)
show (RecursiveTypesynonymError ty) =
printf "Type synonyms cannot be recursive. One of the culprits is %s"
(getId ty)
show (DuplicateThingError kind thing) =
printf "Duplicate %s of %s" kind thing
show PassiveStreamingMethodError =
"Cannot have streaming methods in a passive class"
show StreamingConstructorError =
"Constructor cannot be streaming"
show MainMethodArgumentsError =
"Main method must have argument type () or ([String])"
show MainConstructorError =
"Main class cannot have a constructor"
show (FieldNotFoundError name ty) =
printf "No field '%s' in %s"
(show name) (refTypeName ty)
show (MethodNotFoundError name ty) =
let nameWithKind = if name == constructorName
then "constructor"
else "method '" ++ show name ++ "'"
targetType = if isRefType ty
then refTypeName ty
else showWithKind ty
in printf "No %s in %s"
nameWithKind targetType
show BreakUsedAsExpressionError =
"Break is a statement and cannot be used as a value or expression"
show BreakOutsideOfLoopError =
"Break can only be used inside loops"
show ContinueUsedAsExpressionError =
"Continue is a statement and cannot be used as a value or expression"
show ContinueOutsideOfLoopError =
"Continue can only be used inside while, do/while, and repeat loops"
show (NonCallableTargetError targetType) =
printf "Cannot call method on expression of type '%s'"
(show targetType)
show (NonSendableTargetError targetType) =
printf "Cannot send message to expression of type '%s'"
(show targetType)
show MainMethodCallError = "Cannot call the main method"
show ConstructorCallError =
"Constructor method 'init' can only be called during object creation"
show (ExpectingOtherTypeError something ty) =
printf "Expected %s but found expression of type '%s'"
something (show ty)
show (NonStreamingContextError e) =
printf "Cannot have '%s' outside of a streaming method"
(show $ ppSugared e)
show (UnboundFunctionError name) =
printf "Unbound function variable '%s'" (show name)
show (NonFunctionTypeError ty) =
printf "Cannot use value of type '%s' as a function" (show ty)
show BottomTypeInferenceError = "Not enough information to infer the type.\n" ++
"Try adding more type information."
show IfInferenceError = "Cannot infer result type of if-statement"
show (IfBranchMismatchError ty1 ty2) =
"Type mismatch in different branches of if-statement:\n" ++
" then: " ++ show ty1 ++ "\n" ++
" else: " ++ show ty2
show EmptyMatchClauseError = "Match statement must have at least one clause"
show ActiveMatchError = "Cannot match on an active object"
show MatchInferenceError = "Cannot infer result type of match expression"
show ThisReassignmentError = "Cannot rebind variable 'this'"
show (ImmutableVariableError qname) =
printf "Variable '%s' is immutable and cannot be re-assigned"
(show qname)
show (PatternArityMismatchError name expected actual) =
printf "Extractor '%s' returns %s. Pattern has %s"
(show name)
(if expected == 1
then "1 value"
else show expected ++ " values")
(show actual)
show (PatternTypeMismatchError pattern ty) =
printf "Pattern '%s' does not match expected type '%s'"
(show $ ppSugared pattern) (show ty)
show (NonMaybeExtractorPatternError pattern) =
printf "Extractor '%s' must return a Maybe type to be used as a pattern"
(show $ ppSugared pattern)
show (InvalidPatternError pattern) =
printf "'%s' is not a valid pattern"
(show $ ppSugared pattern)
show (DuplicatePatternVarError name pattern) =
printf "Variable '%s' is used multiple times in pattern '%s'"
(show name) (show $ ppSugared pattern)
show (InvalidTupleTargetError target compartment ty) =
printf "Compartment access %s.%d expects a tuple target, found %s"
(show $ ppSugared target)
compartment
(show ty)
show (InvalidTupleAccessError target compartment) =
printf "No .%d compartment in tuple %s"
compartment
(show $ ppSugared target)
show (CannotReadFieldError target) =
let targetType = getType target in
if isClassType targetType && isModeless targetType then
printf "Cannot access field of expression '%s' of unmoded class '%s'"
(show $ ppSugared target) (show targetType)
else
printf "Cannot read field of expression '%s' of %s"
(show $ ppSugared target) (showWithKind targetType)
show NonAssignableLHSError =
"Left-hand side cannot be assigned to"
show (ValFieldAssignmentError name targetType) =
printf "Cannot assign to val-field '%s' in %s"
(show name) (refTypeName targetType)
show (UnboundVariableError name) =
printf "Unbound variable '%s'" (show name)
show (BuriedVariableError name) =
printf "Variable '%s' cannot be accessed during borrowing" (show name)
show (ObjectCreationError ty)
| isMainType ty = "Cannot create additional Main objects"
| isCapabilityType ty =
printf "Cannot create instance of %s (type must be a class)"
(refTypeName ty)
| otherwise = printf "Cannot create object of type '%s'" (show ty)
show (NonIterableError ty) =
printf "Type '%s' is not iterable" (show ty)
show EmptyArrayLiteralError = "Array literal must have at least one element"
show (NonIndexableError ty) =
printf "Type '%s' is not indexable" (show ty)
show (NonSizeableError ty) =
printf "Type '%s' has no size" (show ty)
show FormatStringLiteralError =
"Formatted printing expects first argument to be a string literal"
show (UnprintableExpressionError ty) =
printf "Expression of type '%s' is not printable" (show ty)
show (WrongNumberOfPrintArgumentsError expected actual) =
printf ("Wrong number of arguments to print. Format string " ++
"expects %d %s. Found %d") expected (arguments expected) actual
show (UnaryOperandMismatchError op ty) =
printf "Operator '%s' is not defined for values of type '%s'"
(show op) (show ty)
show (BinaryOperandMismatchError op kind lType rType) =
printf ("Operator '%s' is only defined for %s types\n" ++
" Left type: %s\n" ++
" Right type: %s")
(show op) kind (show lType) (show rType)
show (UndefinedBinaryOperatorError op) =
printf "Undefined binary operator '%s'" (show op)
show NullTypeInferenceError =
"Cannot infer type of null valued expression. " ++
"Try adding type annotations"
show (CannotBeNullError ty) =
printf ("Null valued expression cannot have type '%s' " ++
"(must have reference type)") (show ty)
show (TypeMismatchError actual expected)
| isTypeVar actual && isJust (getBound actual) =
printf "Type '%s' with bound '%s' does not match expected type '%s'"
(show actual) (show . fromJust $ getBound actual) (show expected)
| isArrowType actual
, isArrowType expected
, actual `withModeOf` expected == expected =
printf ("Closure of type '%s' captures %s state and cannot " ++
"be used as type '%s'")
(show actual) (showModeOf actual) (show expected)
| otherwise = printf "Type '%s' does not match expected type '%s'"
(show actual) (show expected)
show (TypeWithCapabilityMismatchError actual cap expected) =
printf "Type '%s' with capability '%s' does not match expected type '%s'%s"
(show actual) (show cap) (show expected) pointer
where
pointer =
let actualTraits = typesFromCapability cap
expectedTraits = typesFromCapability expected
remainders = actualTraits \\ expectedTraits
nonDroppables = filter (not . isReadSingleType) remainders
nonDroppable = head nonDroppables
in if isCapabilityType expected &&
all (\te -> any (\ta -> ta == te &&
ta `modeSubtypeOf` te) actualTraits)
expectedTraits
then ". Cannot drop mode '" ++ showModeOf nonDroppable ++ "'"
else ""
show (TypeVariableAmbiguityError expected ty1 ty2) =
printf "Type variable '%s' cannot be bound to both '%s' and '%s'"
(getId expected) (show ty1) (show ty2)
show (FreeTypeVariableError ty) =
if getId ty == "void"
then printf "Type 'void' is deprecated. Use 'unit' instead"
else printf "Type variable '%s' is unbound" (show ty)
show (TypeVariableAndVariableCommonNameError [name]) =
printf "Type variable '%s' clashes with existing variable name."
(show name)
show (TypeVariableAndVariableCommonNameError names) =
printf "Type variables %s clash with existing variable names."
formattingName
where
formattingName =
let ns = map (\n -> "'" ++ show n ++ "', ") (init names)
lastName = "'" ++ show (last names) ++ "'"
in show ns ++ "and " ++ lastName
show (UnionMethodAmbiguityError ty name) =
printf "Cannot disambiguate method '%s' in %s"
(show name) (showWithKind ty)
show (MalformedUnionTypeError ty union) =
printf "Type '%s' is not compatible with %s"
(show ty) (showWithKind union)
show (TypeArgumentInferenceError call param) =
printf "Cannot infer the type of parameter '%s' of %s '%s'"
(show param) kind calledName
where
mname = name call
kind | isFunctionCall call = "function"
| isMethodCallOrMessageSend call =
if mname == constructorName
then "class"
else "method"
| otherwise = error msg
calledName | isFunctionCall call = show $ qname call
| isMethodCallOrMessageSend call =
if mname == constructorName
then show $ getType (target call)
else show mname
| otherwise = error msg
msg = "TypeError.hs: " ++ show call ++
" is not a function or method call"
show (RequiredFieldMutabilityError requirer field) =
printf "Trait '%s' requires field '%s' to be mutable"
(getId requirer) (show field)
show (ProvidingTraitFootprintError provider requirer mname fields) =
printf ("Trait '%s' cannot provide method '%s' to %s.\n" ++
"'%s' can mutate fields that are marked immutable in '%s':\n%s")
(getId provider) (show mname) (refTypeName requirer)
(getId provider) (getId requirer)
(unlines (map ((" " ++) . show) fields))
show (AmbiguousTypeError ty candidates) =
printf "Ambiguous reference to %s. Possible candidates are:\n%s"
(showWithKind ty) (unlines $ map ((" " ++) . show) candidates)
show (UnknownTypeUsageError usage ty) =
printf "Cannot %s unimported type %s"
usage (show ty)
show (AmbiguousNameError qname candidates) =
printf "Ambiguous reference to function %s. Possible candidates are:\n%s"
(show qname) candidateList
where
candidateList =
unlines $ map ((" " ++) . showCandidate) candidates
showCandidate (qn, ty) = show qn ++ " : " ++ show ty
show (UnknownNamespaceError maybeNs) =
printf "Unknown namespace %s"
(maybe "" show maybeNs)
show (UnknownNameError ns name) =
printf "Module %s has no function or type called '%s'"
(show ns) (show name)
show (ShadowedImportError i) =
printf "Introduction of module alias '%s' shadows existing import"
(show $ itarget i)
show (WrongModuleNameError modname expected) =
printf "Module name '%s' and file name '%s' must match"
(show modname) expected
show PolymorphicConstructorError =
printf "Constructors (a.k.a. 'init methods') cannot use parametric methods"
show ClosureReturnError =
"Closures must declare their type to use return"
show ClosureForwardError =
"Closures must declare their type to use forward"
show MatchMethodNonMaybeReturnError =
"Match methods must return a Maybe type"
show MatchMethodNonEmptyParameterListError =
"Match methods cannot have parameters"
show (ImpureMatchMethodError e) =
printf "Match methods must be pure%s"
pointer
where
pointer
| While{} <- e = ". Consider using a for loop"
| otherwise = ""
show (ForwardTypeError retType ty) =
printf ("Returned type %s of forward should match with " ++
"the result type of the containing method %s")
(show retType) (show ty)
show (ForwardTypeClosError retType ty) =
printf ("Result type %s of the closure should match with " ++
"the return type %s of the forward")
(show retType) (show ty)
show (ForwardInPassiveContext cname) =
printf "Forward can not be used in passive class '%s'"
(show cname)
show (ForwardInFunction) = "Forward cannot be used in functions"
show (CannotHaveModeError ty) =
if isClassType ty
then printf "Cannot give mode to unmoded %s" (refTypeName ty)
else printf "Cannot give mode to %s" (Types.showWithKind ty)
show (ModelessError ty) =
printf "No mode given to %s" (refTypeName ty)
show (ModeOverrideError ty) =
printf "Cannot override declared mode '%s' of %s"
(showModeOf ty) (refTypeName ty)
show (CannotConsumeError expr) =
printf "Cannot consume '%s'" (show (ppSugared expr))
show (CannotConsumeTypeError expr) =
printf ("Cannot consume '%s' of type '%s'. " ++
"Consider using a Maybe-type")
(show (ppSugared expr)) (show (getType expr))
show (ImmutableConsumeError expr)
| VarAccess{} <- expr =
printf "Cannot consume immutable variable '%s'"
(show (ppSugared expr))
| FieldAccess{} <- expr =
printf "Cannot consume immutable field '%s'"
(show (ppSugared expr))
| otherwise =
printf "Cannot consume immutable target '%s'"
(show (ppSugared expr))
show (CannotGiveReadModeError trait) =
printf ("Cannot give read mode to trait '%s'. " ++
"It must be declared as read at its declaration site")
(getId trait)
show (CannotGiveSharableModeError ty) =
printf ("Cannot give sharable mode to %s. " ++
"It can only be used for type parameters")
(refTypeName ty)
show (NonValInReadContextError ctx) =
printf "Read %s can only have val fields"
(if isTraitType ctx then "traits" else "classes")
show (NonSafeInReadContextError ctx ty) =
printf "Read %s can not have field of non-safe type '%s'. \n%s"
(if isTraitType ctx then "trait" else "class") (show ty)
enumerateSafeTypes
show (NonSafeInExtendedReadTraitError t f ty) =
printf "Read trait '%s' cannot be extended with field '%s' of non-safe type '%s'. \n%s"
(getId t) (show f) (show ty)
enumerateSafeTypes
show (ProvidingToReadTraitError provider requirer mname) =
printf "Non-read trait '%s' cannot provide method '%s' to read trait '%s'"
(getId provider) (show mname) (getId requirer)
show (SubordinateReturnError name ty) =
printf ("Method '%s' returns a %s and cannot " ++
"be called from outside of its aggregate")
(show name) (if isArrowType ty
then "closure that captures subordinate state"
else "subordinate capability")
show (SubordinateArgumentError arg) =
if isArrowType (getType arg)
then printf ("Closure '%s' captures subordinate state " ++
"and cannot be passed outside of its aggregate")
(show (ppSugared arg))
else printf ("Cannot pass subordinate argument '%s' " ++
"outside of its aggregate")
(show (ppSugared arg))
show (SubordinateFieldError name) =
printf ("Field '%s' is subordinate and cannot be accessed " ++
"from outside of its aggregate")
(show name)
show (ThreadLocalFieldError ty) =
printf "%s must have declared 'local' or 'active' mode to have actor local fields"
(if isTraitType ty then "Traits" else "Classes")
show (ThreadLocalFieldExtensionError trait field) =
printf ("Trait '%s' must have local mode to be extended " ++
"with field '%s' of actor local type '%s'")
(show trait) (show $ fname field)
(showWithoutMode $ ftype field)
show (ThreadLocalArgumentError arg) =
if isArrowType (getType arg)
then printf ("Closure '%s' captures actor local variables " ++
"and cannot be passed to another active object")
(show (ppSugared arg))
else printf ("Cannot pass actor local argument '%s' " ++
"to another active object")
(show (ppSugared arg))
show (ThreadLocalReturnError name ty) =
printf ("Method '%s' returns a %s and cannot " ++
"be called by a different active object")
(show name) (if isArrowType ty
then "closure that captures local state"
else "local capability")
show (PolymorphicArgumentSendError arg ty) =
printf ("Cannot pass value of '%s' between active objects. " ++
"Its type is polymorphic so it may not be safe to share.\n" ++
"Consider marking the type variable '%s' as 'sharable'")
(show (ppSugared arg)) (getId ty)
show (PolymorphicReturnError name ty) =
printf ("Method '%s' returns a value of polymorphic type, and sharing " ++
"it between active objects may not be safe. \n" ++
"Consider marking the type variable '%s' as 'sharable'.")
(show name) (getId ty)
show (MalformedConjunctionError ty nonDisjoint source) =
printf "Type '%s' does not form a conjunction with '%s' in %s"
(show ty) (show nonDisjoint) (Types.showWithKind source)
show (CannotUnpackError source) =
printf "Cannot unpack empty capability of class '%s'"
(show source)
show (CannotInferUnpackingError cap) =
printf ("Unpacking of %s cannot be inferred. " ++
"Try adding type annotations")
(Types.showWithKind cap)
show (UnsplittableTypeError ty) =
printf "Cannot unpack %s"
(Types.showWithKind ty)
show (DuplicatingSplitError ty) =
printf "Cannot duplicate linear trait '%s'"
(showWithoutMode ty)
show (StackboundArrayTypeError ty) =
printf "Arrays cannot store borrowed values of type '%s'"
(show ty)
show (ManifestConflictError formal conflicting) =
printf ("Trait '%s' with declared mode '%s' can only be " ++
"composed with traits of the same mode. Found '%s'")
(showWithoutMode formal) (showModeOf formal) (show conflicting)
show (ManifestClassConflictError cls conflicting) =
printf "Trait '%s' cannot be included by class '%s' of declared mode '%s'"
(show conflicting) (showWithoutMode cls) (showModeOf cls)
show (UnmodedMethodExtensionError cls name) =
printf ("Unmoded class '%s' cannot declare new method '%s'. " ++
"Possible fixes: \n" ++
" - Add a mode to the class (e.g. %s)\n" ++
" - Assign the method to an included trait: T(%s())")
(show cls) (show name)
"active, local, read, linear or subord" (show name)
show (ActiveTraitError active nonActive) =
printf ("Active trait '%s' can only be included together with " ++
"other active traits. Found '%s'")
(showWithoutMode active) (show nonActive)
show (UnsafeTypeArgumentError formal ty) =
if isModeless ty then
-- TODO: Could be more precise (e.g. distinguish between linear/subord)
printf ("Cannot use non-aliasable type '%s' as type argument. " ++
"Type parameter '%s' requires the type to have %s mode")
(show ty) (getId formal) (if isModeless formal
then "an aliasable"
else showModeOf formal)
else
printf ("Cannot use %s type '%s' as type argument. " ++
"Type parameter '%s' requires the type to have %s mode")
(showModeOf ty) (showWithoutMode ty)
(getId formal) (if isModeless formal
then "an aliasable"
else showModeOf formal)
show OverlapWithBuiltins =
printf ("Types Maybe, Fut, Stream, and Par are built-in and cannot be redefined.")
show (SimpleError msg) = msg
----------------------------
-- Capturechecking errors --
----------------------------
show ReverseBorrowingError =
"Reverse borrowing (returning borrowed values) " ++
"is currently not supported"
show (BorrowedFieldError ftype) =
printf "Cannot have field of borrowed type '%s'"
(show ftype)
show (LinearClosureError name ty) =
printf "Cannot capture variable '%s' of linear type '%s' in a closure"
(show name) (show ty)
show (BorrowedLeakError e) =
printf "Cannot pass borrowed expression '%s' as non-borrowed parameter"
(show (ppSugared e))
show (NonBorrowableError FieldAccess{target, name}) =
printf "Cannot borrow linear field '%s' from non-linear path '%s'"
(show name) (show (ppSugared target))
show (NonBorrowableError ArrayAccess{target}) =
printf "Cannot borrow linear array value from non-linear path '%s'"
(show (ppSugared target))
show (NonBorrowableError e) =
printf "Expression '%s' cannot be borrowed."
(show (ppSugared e))
show (ActiveBorrowError arg targetType) =
printf ("Expression '%s' cannot be borrowed " ++
"by active object of type '%s'")
(show (ppSugared arg)) (show targetType)
show (ActiveBorrowSendError arg targetType) =
printf ("Cannot send borrowed expression '%s' to active object " ++
"of type '%s'")
(show (ppSugared arg)) (show targetType)
show (DuplicateBorrowError root) =
printf ("Borrowed variable '%s' cannot be used more than once " ++
"in an argument list")
(show (ppSugared root))
show (StackboundednessMismatchError ty expected) =
printf "%s does not match %s" (kindOf ty) (kindOf' expected)
where
kindOf ty
| isStackboundType ty = "Borrowed type '" ++ show ty ++ "'"
| otherwise = "Non-borrowed type '" ++ show ty ++ "'"
kindOf' ty =
let c:s = kindOf ty
in toLower c:s
show (LinearCaptureError e ty) =
printf "Cannot capture expression '%s' of linear type '%s'"
(show (ppSugared e)) (show ty)
data TCWarning = TCWarning Backtrace Warning
instance Show TCWarning where
show (TCWarning [] w) =
"Warning:\n" ++
show w
show (TCWarning ((pos, _):_) w) =
"Warning at " ++ show pos ++ ":\n" ++
show w
data Warning = StringDeprecatedWarning
| StringIdentityWarning
| PolymorphicIdentityWarning
| ShadowedMethodWarning FieldDecl
| ExpressionResultIgnoredWarning Expr
| ArrayTypeArgumentWarning
| ArrayInReadContextWarning
| SharedArrayWarning
| CapabilitySplitWarning
| ShadowingADTCaseWarning Name
instance Show Warning where
show StringDeprecatedWarning =
"Type 'string' is deprecated. Use 'String' instead."
show StringIdentityWarning =
"Comparing String identity. Equality should be compared using 'equals'"
show PolymorphicIdentityWarning =
"Comparing polymorphic values is unstable. \n" ++
"Later versions of Encore will require type constraints for this to work"
show (ExpressionResultIgnoredWarning expr) =
"Result of '" ++ show (ppSugared expr) ++ "' is discarded"
show (ShadowedMethodWarning Field{fname, ftype}) =
printf ("Field '%s' holds %s and could be confused with " ++
"the method of the same name")
(show fname) (if isArrayType ftype
then "an array"
else "a function")
show ArrayTypeArgumentWarning =
"Using arrays as type arguments is pontentially unsafe. " ++
"This will be fixed in a later version of Encore."
show ArrayInReadContextWarning =
"Using arrays in fields of a read trait or class is potentially unsafe. " ++
"In later versions of Encore, this array must be made immutable."
show SharedArrayWarning =
"Passing arrays between actors is potentially unsafe. " ++
"This will be fixed in a later version of Encore."
show CapabilitySplitWarning =
"Unpacking linear capabilities is not fully supported and may be unsafe. " ++
"This will be fixed in a later version of Encore."
show (ShadowingADTCaseWarning name) =
"Variable '" ++ show name ++ "' shadows ADT case of same name. " ++
"You most likely want to write '" ++ show name ++ "()'."
| parapluu/encore | src/types/Typechecker/TypeError.hs | bsd-3-clause | 45,929 | 0 | 20 | 13,446 | 9,719 | 4,938 | 4,781 | -1 | -1 |
{- | Script to demonstrate Calculations and other Operations on Signals -}
-- module Demo_Signal where
import qualified EFA.Signal.Plot as Plot
import qualified EFA.Signal.Signal as S
import EFA.Signal.SignalFill ((.-), (./), (.*))
import EFA.Signal.Signal (PSignal, TSignal, Scal, FFSignal)
import EFA.Utility.Async (concurrentlyMany_)
import EFA.Signal.Typ (Typ, A, D, P, N, Tt)
import qualified Graphics.Gnuplot.Advanced as GnuPlot
import qualified Graphics.Gnuplot.Terminal.Default as DefaultTerm
import qualified Graphics.Gnuplot.Plot.TwoDimensional as Plot2D
import qualified Graphics.Gnuplot.Graph.TwoDimensional as Graph2D
import qualified Graphics.Gnuplot.LineSpecification as LineSpec
import qualified Graphics.Gnuplot.ColorSpecification as Colour
import qualified Graphics.Gnuplot.Frame as Frame
import qualified Graphics.Gnuplot.Frame.OptionSet as Opts
import Control.Functor.HT (void)
import Data.Monoid ((<>))
-- Generate objects to work with
offset :: Scal (Typ D P Tt) Double
offset = S.toScalar 0
-- Time Vector
time :: TSignal [] Double
time = S.fromList ([0,0.1..pi]++[pi])
-- constant efficiency
n1 :: Scal (Typ A N Tt) Double
n1 = S.toScalar 0.8
-- Generate two Power Signals
pSig1, pSig2 :: PSignal [] Double
pSig1 =
((S.changeType (S.map sin time)) .- offset)
.*
(S.toScalar 1000 :: Scal (Typ A N Tt) Double)
pSig2 = pSig1 .* n1
-- Make Time-Step-Integration to get 1D energy flow signals
fSig1, fSig2 :: FFSignal [] Double
fSig1 = S.partIntegrate time pSig1
fSig2 = S.partIntegrate time pSig2
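-- Dividing the summed energy flows recovers the constant efficiency n1 (0.8),
-- since fSig2 is fSig1 scaled by n1 and integration and summation are linear.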
nVal2 :: Scal (Typ A N Tt) Double
nVal2 = S.sum fSig2 ./ S.sum fSig1
myPlotStyle ::
Plot2D.T x y -> Plot2D.T x y
myPlotStyle =
fmap (Graph2D.lineSpec $
LineSpec.pointSize 0.1 $
LineSpec.pointType 0 $
LineSpec.lineColor Colour.lightSalmon $
LineSpec.lineWidth 10 LineSpec.deflt)
myPlotStyle2 ::
LineSpec.T -> LineSpec.T
myPlotStyle2 =
( LineSpec.pointSize 0.1 .
LineSpec.pointType 0 .
LineSpec.lineColor Colour.lightSalmon .
LineSpec.lineWidth 10)
histoStyle ::
LineSpec.T -> LineSpec.T
histoStyle =
( LineSpec.pointSize 10 .
LineSpec.pointType 2 .
LineSpec.lineColor Colour.lightSalmon .
LineSpec.lineWidth 10)
terminate ::
(LineSpec.T -> LineSpec.T) ->
Plot2D.T x y -> Plot2D.T x y
terminate func = fmap (Graph2D.lineSpec $ func $ LineSpec.deflt)
myFrameStyle ::
Plot2D.T Double Double ->
Frame.T (Graph2D.T Double Double)
myFrameStyle = Frame.cons $
Opts.title "Dies ist der Titel!!!" $
Opts.grid True $
Opts.deflt
histograms :: Plot2D.T Double Double
histograms =
Plot2D.list Graph2D.boxes (zip [1,1.2..] [102, 213, 378, 408, 840, 920])
lists :: Plot2D.T Double Double
lists =
Plot2D.list Graph2D.lines [(1, 200.0), (2.7, 160), (4, 700)]
plot :: Plot2D.T Double Double
plot = Plot.xy id time $ map (Plot.label "bla") [pSig1, pSig2]
main :: IO ()
main = do
concurrentlyMany_ [
void $ GnuPlot.plotSync DefaultTerm.cons $ plot,
void $ GnuPlot.plotSync DefaultTerm.cons $
myPlotStyle plot,
void $ GnuPlot.plotSync DefaultTerm.cons $
myFrameStyle $
terminate (myPlotStyle2) $ plot,
void $ GnuPlot.plotSync DefaultTerm.cons $
(terminate histoStyle histograms <> lists),
putStrLn (S.disp nVal2) ]
| energyflowanalysis/efa-2.1 | demo/signal/Main.hs | bsd-3-clause | 3,307 | 0 | 14 | 615 | 1,073 | 593 | 480 | 86 | 1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Trustworthy #-}
module Control.Monad.Skeleton.Internal (Cat(..), transCat, (|>), viewL, transKleisli) where
import Control.Arrow
import Unsafe.Coerce
-- | Type-aligned catenable queue
data Cat k a b where
Leaf :: k a b -> Cat k a b
Tree :: Cat k a b -> Cat k b c -> Cat k a c
transCat :: (forall x y. j x y -> k x y) -> Cat j a b -> Cat k a b
transCat f (Tree a b) = transCat f a `Tree` transCat f b
transCat f (Leaf k) = Leaf (f k)
{-# INLINE transCat #-}
(|>) :: Cat k a b -> k b c -> Cat k a c
s |> k = Tree s (Leaf k)
{-# INLINE (|>) #-}
-- | Match on the leftmost element. It gradually rotates the nodes so that subsequent calls to 'viewL' are faster.
viewL :: forall k a b r. Cat k a b
-> (k a b -> r)
-> (forall x. k a x -> Cat k x b -> r)
-> r
viewL (Leaf k) e _ = e k
viewL (Tree a b) _ r = go a b where
go :: Cat k a x -> Cat k x b -> r
go (Leaf k) t = r k t
go (Tree c d) t = go c (Tree d t)
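-- A small illustration (with hypothetical type-aligned arrows f, g, h):
-- queues are built by snocing with '|>' and consumed from the left with
-- 'viewL'; 'go' rotates left-nested 'Tree's into right-nested ones, so
-- repeated left views stay cheap.
--
-- > q = Leaf f |> g |> h
-- > viewL q (\k -> ...) (\k rest -> ...)  -- here k is f; rest holds g and h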
transKleisli :: (m b -> n b) -> Kleisli m a b -> Kleisli n a b
transKleisli f = unsafeCoerce (f Prelude..)
{-# INLINE transKleisli #-}
| fumieval/monad-skeleton | src/Control/Monad/Skeleton/Internal.hs | bsd-3-clause | 1,186 | 0 | 13 | 295 | 534 | 278 | 256 | 30 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnboxedTuples #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.Array.Data
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2008..2009] Sean Lee
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- This module fixes the concrete representation of Accelerate arrays. We
-- allocate all arrays using pinned memory to enable safe direct-access by
-- non-Haskell code in multi-threaded code. In particular, we can safely pass
-- pointers to an array's payload to foreign code.
--
module Data.Array.Accelerate.Array.Data (
-- * Array operations and representations
ArrayElt(..), ArrayData, MutableArrayData, runArrayData,
ArrayEltR(..), GArrayData(..),
-- * Array tuple operations
fstArrayData, sndArrayData, pairArrayData,
-- * Type macros
HTYPE_INT, HTYPE_WORD, HTYPE_LONG, HTYPE_UNSIGNED_LONG, HTYPE_CCHAR,
) where
-- standard libraries
import Foreign (Ptr)
import Foreign.C.Types
import Data.Bits
import Data.Functor ((<$>))
import Data.Typeable (Typeable)
import Control.Monad
#ifdef ACCELERATE_UNSAFE_CHECKS
import qualified Data.Array.Base as MArray (readArray, writeArray)
#else
import qualified Data.Array.Base as MArray (unsafeRead, unsafeWrite)
#endif
import Data.Array.Storable.Internals
import Foreign.ForeignPtr.Unsafe
import System.IO.Unsafe
import Data.Array.MArray (MArray)
import Data.Array.Base (unsafeNewArray_)
import Language.Haskell.TH
-- friends
import Data.Array.Accelerate.Type
-- Add needed Typeable instance for StorableArray
--
deriving instance Typeable StorableArray
-- Determine the underlying representation types used for Haskell's Int and
-- Word and for C's long, unsigned long, and char on this architecture.
--
$( runQ [d| type HTYPE_INT = $(
case finiteBitSize (undefined::Int) of
32 -> [t| Int32 |]
64 -> [t| Int64 |]
_ -> error "I don't know what architecture I am" ) |] )
$( runQ [d| type HTYPE_WORD = $(
case finiteBitSize (undefined::Word) of
32 -> [t| Word32 |]
64 -> [t| Word64 |]
_ -> error "I don't know what architecture I am" ) |] )
$( runQ [d| type HTYPE_LONG = $(
case finiteBitSize (undefined::CLong) of
32 -> [t| Int32 |]
64 -> [t| Int64 |]
_ -> error "I don't know what architecture I am" ) |] )
$( runQ [d| type HTYPE_UNSIGNED_LONG = $(
case finiteBitSize (undefined::CULong) of
32 -> [t| Word32 |]
64 -> [t| Word64 |]
_ -> error "I don't know what architecture I am" ) |] )
$( runQ [d| type HTYPE_CCHAR = $(
case isSigned (undefined::CChar) of
True -> [t| Int8 |]
False -> [t| Word8 |] ) |] )
-- Array representation
-- --------------------
-- |Immutable array representation
--
type ArrayData e = MutableArrayData e
-- |Mutable array representation
--
type MutableArrayData e = GArrayData (StorableArray Int) e
-- Array representation in dependence on the element type, but abstracting
-- over the basic array type (in particular, abstracting over mutability)
--
data family GArrayData :: (* -> *) -> * -> *
data instance GArrayData ba () = AD_Unit
data instance GArrayData ba Int = AD_Int (ba Int)
data instance GArrayData ba Int8 = AD_Int8 (ba Int8)
data instance GArrayData ba Int16 = AD_Int16 (ba Int16)
data instance GArrayData ba Int32 = AD_Int32 (ba Int32)
data instance GArrayData ba Int64 = AD_Int64 (ba Int64)
data instance GArrayData ba Word = AD_Word (ba Word)
data instance GArrayData ba Word8 = AD_Word8 (ba Word8)
data instance GArrayData ba Word16 = AD_Word16 (ba Word16)
data instance GArrayData ba Word32 = AD_Word32 (ba Word32)
data instance GArrayData ba Word64 = AD_Word64 (ba Word64)
data instance GArrayData ba CShort = AD_CShort (ba Int16)
data instance GArrayData ba CUShort = AD_CUShort (ba Word16)
data instance GArrayData ba CInt = AD_CInt (ba Int32)
data instance GArrayData ba CUInt = AD_CUInt (ba Word32)
data instance GArrayData ba CLong = AD_CLong (ba HTYPE_LONG)
data instance GArrayData ba CULong = AD_CULong (ba HTYPE_UNSIGNED_LONG)
data instance GArrayData ba CLLong = AD_CLLong (ba Int64)
data instance GArrayData ba CULLong = AD_CULLong (ba Word64)
data instance GArrayData ba Float = AD_Float (ba Float)
data instance GArrayData ba Double = AD_Double (ba Double)
data instance GArrayData ba CFloat = AD_CFloat (ba Float)
data instance GArrayData ba CDouble = AD_CDouble (ba Double)
data instance GArrayData ba Bool = AD_Bool (ba Word8)
data instance GArrayData ba Char = AD_Char (ba Char)
data instance GArrayData ba CChar = AD_CChar (ba HTYPE_CCHAR)
data instance GArrayData ba CSChar = AD_CSChar (ba Int8)
data instance GArrayData ba CUChar = AD_CUChar (ba Word8)
data instance GArrayData ba (a, b) = AD_Pair (GArrayData ba a)
(GArrayData ba b)
deriving instance Typeable GArrayData
-- | GADT to reify the 'ArrayElt' class.
--
data ArrayEltR a where
ArrayEltRunit :: ArrayEltR ()
ArrayEltRint :: ArrayEltR Int
ArrayEltRint8 :: ArrayEltR Int8
ArrayEltRint16 :: ArrayEltR Int16
ArrayEltRint32 :: ArrayEltR Int32
ArrayEltRint64 :: ArrayEltR Int64
ArrayEltRword :: ArrayEltR Word
ArrayEltRword8 :: ArrayEltR Word8
ArrayEltRword16 :: ArrayEltR Word16
ArrayEltRword32 :: ArrayEltR Word32
ArrayEltRword64 :: ArrayEltR Word64
ArrayEltRcshort :: ArrayEltR CShort
ArrayEltRcushort :: ArrayEltR CUShort
ArrayEltRcint :: ArrayEltR CInt
ArrayEltRcuint :: ArrayEltR CUInt
ArrayEltRclong :: ArrayEltR CLong
ArrayEltRculong :: ArrayEltR CULong
ArrayEltRcllong :: ArrayEltR CLLong
ArrayEltRcullong :: ArrayEltR CULLong
ArrayEltRfloat :: ArrayEltR Float
ArrayEltRdouble :: ArrayEltR Double
ArrayEltRcfloat :: ArrayEltR CFloat
ArrayEltRcdouble :: ArrayEltR CDouble
ArrayEltRbool :: ArrayEltR Bool
ArrayEltRchar :: ArrayEltR Char
ArrayEltRcchar :: ArrayEltR CChar
ArrayEltRcschar :: ArrayEltR CSChar
ArrayEltRcuchar :: ArrayEltR CUChar
ArrayEltRpair :: (ArrayElt a, ArrayElt b)
=> ArrayEltR a -> ArrayEltR b -> ArrayEltR (a,b)
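-- An illustrative sketch ('describe' is a hypothetical function): pattern
-- matching on an 'ArrayEltR' recovers the element representation at run time.
--
-- > describe :: ArrayEltR e -> String
-- > describe ArrayEltRfloat = "float data"
-- > describe ArrayEltRint   = "int data"
-- > describe _              = "some other representation"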
-- Array operations
-- ----------------
--
-- TLM: do we need to INLINE these functions to get good performance interfacing
-- to external libraries, especially Repa?
class ArrayElt e where
type ArrayPtrs e
--
unsafeIndexArrayData :: ArrayData e -> Int -> e
ptrsOfArrayData :: ArrayData e -> ArrayPtrs e
--
newArrayData :: Int -> IO (MutableArrayData e)
unsafeReadArrayData :: MutableArrayData e -> Int -> IO e
unsafeWriteArrayData :: MutableArrayData e -> Int -> e -> IO ()
unsafeFreezeArrayData :: MutableArrayData e -> IO (ArrayData e)
unsafeFreezeArrayData = return
ptrsOfMutableArrayData :: MutableArrayData e -> IO (ArrayPtrs e)
ptrsOfMutableArrayData = return . ptrsOfArrayData
--
arrayElt :: ArrayEltR e
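-- A rough usage sketch of the mutable side of this class, using the 'Int'
-- instance below ('example' is a hypothetical name):
--
-- > example :: IO Int
-- > example = do
-- >   mad <- newArrayData 10 :: IO (MutableArrayData Int)
-- >   unsafeWriteArrayData mad 0 42
-- >   ad <- unsafeFreezeArrayData mad
-- >   return (unsafeIndexArrayData ad 0)  -- ptrsOfArrayData ad :: Ptr Int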
instance ArrayElt () where
type ArrayPtrs () = ()
unsafeIndexArrayData AD_Unit i = i `seq` ()
ptrsOfArrayData AD_Unit = ()
newArrayData size = size `seq` return AD_Unit
unsafeReadArrayData AD_Unit i = i `seq` return ()
unsafeWriteArrayData AD_Unit i () = i `seq` return ()
arrayElt = ArrayEltRunit
instance ArrayElt Int where
type ArrayPtrs Int = Ptr Int
unsafeIndexArrayData (AD_Int ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Int ba) = storableArrayPtr ba
newArrayData size = liftM AD_Int $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Int ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Int ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRint
instance ArrayElt Int8 where
type ArrayPtrs Int8 = Ptr Int8
unsafeIndexArrayData (AD_Int8 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Int8 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Int8 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Int8 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Int8 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRint8
instance ArrayElt Int16 where
type ArrayPtrs Int16 = Ptr Int16
unsafeIndexArrayData (AD_Int16 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Int16 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Int16 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Int16 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Int16 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRint16
instance ArrayElt Int32 where
type ArrayPtrs Int32 = Ptr Int32
unsafeIndexArrayData (AD_Int32 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Int32 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Int32 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Int32 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Int32 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRint32
instance ArrayElt Int64 where
type ArrayPtrs Int64 = Ptr Int64
unsafeIndexArrayData (AD_Int64 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Int64 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Int64 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Int64 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Int64 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRint64
instance ArrayElt Word where
type ArrayPtrs Word = Ptr Word
unsafeIndexArrayData (AD_Word ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Word ba) = storableArrayPtr ba
newArrayData size = liftM AD_Word $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Word ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Word ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRword
instance ArrayElt Word8 where
type ArrayPtrs Word8 = Ptr Word8
unsafeIndexArrayData (AD_Word8 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Word8 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Word8 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Word8 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Word8 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRword8
instance ArrayElt Word16 where
type ArrayPtrs Word16 = Ptr Word16
unsafeIndexArrayData (AD_Word16 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Word16 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Word16 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Word16 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Word16 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRword16
instance ArrayElt Word32 where
type ArrayPtrs Word32 = Ptr Word32
unsafeIndexArrayData (AD_Word32 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Word32 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Word32 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Word32 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Word32 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRword32
instance ArrayElt Word64 where
type ArrayPtrs Word64 = Ptr Word64
unsafeIndexArrayData (AD_Word64 ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Word64 ba) = storableArrayPtr ba
newArrayData size = liftM AD_Word64 $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Word64 ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Word64 ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRword64
instance ArrayElt CShort where
type ArrayPtrs CShort = Ptr Int16
unsafeIndexArrayData (AD_CShort ba) i = CShort $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CShort ba) = storableArrayPtr ba
newArrayData size = liftM AD_CShort $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CShort ba) i = CShort <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CShort ba) i (CShort e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcshort
instance ArrayElt CUShort where
type ArrayPtrs CUShort = Ptr Word16
unsafeIndexArrayData (AD_CUShort ba) i = CUShort $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CUShort ba) = storableArrayPtr ba
newArrayData size = liftM AD_CUShort $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CUShort ba) i = CUShort <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CUShort ba) i (CUShort e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcushort
instance ArrayElt CInt where
type ArrayPtrs CInt = Ptr Int32
unsafeIndexArrayData (AD_CInt ba) i = CInt $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CInt ba) = storableArrayPtr ba
newArrayData size = liftM AD_CInt $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CInt ba) i = CInt <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CInt ba) i (CInt e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcint
instance ArrayElt CUInt where
type ArrayPtrs CUInt = Ptr Word32
unsafeIndexArrayData (AD_CUInt ba) i = CUInt $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CUInt ba) = storableArrayPtr ba
newArrayData size = liftM AD_CUInt $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CUInt ba) i = CUInt <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CUInt ba) i (CUInt e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcuint
instance ArrayElt CLong where
type ArrayPtrs CLong = Ptr HTYPE_LONG
unsafeIndexArrayData (AD_CLong ba) i = CLong $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CLong ba) = storableArrayPtr ba
newArrayData size = liftM AD_CLong $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CLong ba) i = CLong <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CLong ba) i (CLong e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRclong
instance ArrayElt CULong where
type ArrayPtrs CULong = Ptr HTYPE_UNSIGNED_LONG
unsafeIndexArrayData (AD_CULong ba) i = CULong $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CULong ba) = storableArrayPtr ba
newArrayData size = liftM AD_CULong $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CULong ba) i = CULong <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CULong ba) i (CULong e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRculong
instance ArrayElt CLLong where
type ArrayPtrs CLLong = Ptr Int64
unsafeIndexArrayData (AD_CLLong ba) i = CLLong $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CLLong ba) = storableArrayPtr ba
newArrayData size = liftM AD_CLLong $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CLLong ba) i = CLLong <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CLLong ba) i (CLLong e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcllong
instance ArrayElt CULLong where
type ArrayPtrs CULLong = Ptr Word64
unsafeIndexArrayData (AD_CULLong ba) i = CULLong $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CULLong ba) = storableArrayPtr ba
newArrayData size = liftM AD_CULLong $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CULLong ba) i = CULLong <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CULLong ba) i (CULLong e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcullong
instance ArrayElt Float where
type ArrayPtrs Float = Ptr Float
unsafeIndexArrayData (AD_Float ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Float ba) = storableArrayPtr ba
newArrayData size = liftM AD_Float $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Float ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Float ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRfloat
instance ArrayElt Double where
type ArrayPtrs Double = Ptr Double
unsafeIndexArrayData (AD_Double ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Double ba) = storableArrayPtr ba
newArrayData size = liftM AD_Double $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Double ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Double ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRdouble
instance ArrayElt CFloat where
type ArrayPtrs CFloat = Ptr Float
unsafeIndexArrayData (AD_CFloat ba) i = CFloat $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CFloat ba) = storableArrayPtr ba
newArrayData size = liftM AD_CFloat $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CFloat ba) i = CFloat <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CFloat ba) i (CFloat e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcfloat
instance ArrayElt CDouble where
type ArrayPtrs CDouble = Ptr Double
unsafeIndexArrayData (AD_CDouble ba) i = CDouble $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CDouble ba) = storableArrayPtr ba
newArrayData size = liftM AD_CDouble $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CDouble ba) i = CDouble <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CDouble ba) i (CDouble e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcdouble
-- Bool arrays are stored as arrays of bytes. While this is memory inefficient,
-- it is better suited to parallel backends than the native Unboxed Bool
-- array representation that uses packed bit vectors, as that would require
-- atomic operations when writing data, necessarily serialising threads.
--
instance ArrayElt Bool where
type ArrayPtrs Bool = Ptr Word8
unsafeIndexArrayData (AD_Bool ba) i = toBool (unsafeIndexArray ba i)
ptrsOfArrayData (AD_Bool ba) = storableArrayPtr ba
newArrayData size = liftM AD_Bool $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Bool ba) i = liftM toBool $ unsafeReadArray ba i
unsafeWriteArrayData (AD_Bool ba) i e = unsafeWriteArray ba i (fromBool e)
arrayElt = ArrayEltRbool
{-# INLINE toBool #-}
toBool :: Word8 -> Bool
toBool 0 = False
toBool _ = True
{-# INLINE fromBool #-}
fromBool :: Bool -> Word8
fromBool True = 1
fromBool False = 0
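-- An illustrative check of the helpers above (not part of the original module),
-- showing the byte-level encoding used for Bool elements:
--
-- >>> map fromBool [False, True]
-- [0,1]
-- >>> map toBool [0, 1, 255]
-- [False,True,True]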
-- Unboxed Char is stored as a wide character, which is 4 bytes
--
instance ArrayElt Char where
type ArrayPtrs Char = Ptr Char
unsafeIndexArrayData (AD_Char ba) i = unsafeIndexArray ba i
ptrsOfArrayData (AD_Char ba) = storableArrayPtr ba
newArrayData size = liftM AD_Char $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_Char ba) i = unsafeReadArray ba i
unsafeWriteArrayData (AD_Char ba) i e = unsafeWriteArray ba i e
arrayElt = ArrayEltRchar
instance ArrayElt CChar where
type ArrayPtrs CChar = Ptr HTYPE_CCHAR
unsafeIndexArrayData (AD_CChar ba) i = CChar $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CChar ba) = storableArrayPtr ba
newArrayData size = liftM AD_CChar $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CChar ba) i = CChar <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CChar ba) i (CChar e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcchar
instance ArrayElt CSChar where
type ArrayPtrs CSChar = Ptr Int8
unsafeIndexArrayData (AD_CSChar ba) i = CSChar $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CSChar ba) = storableArrayPtr ba
newArrayData size = liftM AD_CSChar $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CSChar ba) i = CSChar <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CSChar ba) i (CSChar e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcschar
instance ArrayElt CUChar where
type ArrayPtrs CUChar = Ptr Word8
unsafeIndexArrayData (AD_CUChar ba) i = CUChar $ unsafeIndexArray ba i
ptrsOfArrayData (AD_CUChar ba) = storableArrayPtr ba
newArrayData size = liftM AD_CUChar $ unsafeNewArray_ (0,size-1)
unsafeReadArrayData (AD_CUChar ba) i = CUChar <$> unsafeReadArray ba i
unsafeWriteArrayData (AD_CUChar ba) i (CUChar e)
= unsafeWriteArray ba i e
arrayElt = ArrayEltRcuchar
instance (ArrayElt a, ArrayElt b) => ArrayElt (a, b) where
type ArrayPtrs (a, b) = (ArrayPtrs a, ArrayPtrs b)
unsafeIndexArrayData (AD_Pair a b) i = (unsafeIndexArrayData a i, unsafeIndexArrayData b i)
ptrsOfArrayData (AD_Pair a b) = (ptrsOfArrayData a, ptrsOfArrayData b)
newArrayData size
= do
a <- newArrayData size
b <- newArrayData size
return $ AD_Pair a b
unsafeReadArrayData (AD_Pair a b) i
= do
x <- unsafeReadArrayData a i
y <- unsafeReadArrayData b i
return (x, y)
unsafeWriteArrayData (AD_Pair a b) i (x, y)
= do
unsafeWriteArrayData a i x
unsafeWriteArrayData b i y
unsafeFreezeArrayData (AD_Pair a b)
= do
a' <- unsafeFreezeArrayData a
b' <- unsafeFreezeArrayData b
return $ AD_Pair a' b'
ptrsOfMutableArrayData (AD_Pair a b)
= do
aptr <- ptrsOfMutableArrayData a
bptr <- ptrsOfMutableArrayData b
return (aptr, bptr)
arrayElt = ArrayEltRpair arrayElt arrayElt
-- |Safe combination of creating and fast freezing of array data.
--
{-# INLINE runArrayData #-}
runArrayData :: ArrayElt e
=> IO (MutableArrayData e, e) -> (ArrayData e, e)
runArrayData st = unsafePerformIO $ do
(mad, r) <- st
return (mad, r)
-- Array tuple operations
-- ----------------------
fstArrayData :: ArrayData (a, b) -> ArrayData a
fstArrayData (AD_Pair x _) = x
sndArrayData :: ArrayData (a, b) -> ArrayData b
sndArrayData (AD_Pair _ y) = y
pairArrayData :: ArrayData a -> ArrayData b -> ArrayData (a, b)
pairArrayData = AD_Pair
-- Auxiliary functions
-- -------------------
-- Returns the element of an immutable array at the specified index.
--
-- This does no bounds checking unless you configured with -funsafe-checks. This
-- is usually OK, since the functions that convert from multidimensional to
-- linear indexing do bounds checking by default.
--
{-# INLINE unsafeIndexArray #-}
unsafeIndexArray :: MArray a e IO => a Int e -> Int -> e
unsafeIndexArray a i = unsafePerformIO $ unsafeReadArray a i
-- Read an element from a mutable array.
--
-- This does no bounds checking unless you configured with -funsafe-checks. This
-- is usually OK, since the functions that convert from multidimensional to
-- linear indexing do bounds checking by default.
--
{-# INLINE unsafeReadArray #-}
unsafeReadArray :: MArray a e m => a Int e -> Int -> m e
#ifdef ACCELERATE_UNSAFE_CHECKS
unsafeReadArray = MArray.readArray
#else
unsafeReadArray = MArray.unsafeRead
#endif
-- Write an element into a mutable array.
--
-- This does no bounds checking unless you configured with -funsafe-checks. This
-- is usually OK, since the functions that convert from multidimensional to
-- linear indexing do bounds checking by default.
--
{-# INLINE unsafeWriteArray #-}
unsafeWriteArray :: MArray a e m => a Int e -> Int -> e -> m ()
#ifdef ACCELERATE_UNSAFE_CHECKS
unsafeWriteArray = MArray.writeArray
#else
unsafeWriteArray = MArray.unsafeWrite
#endif
-- Obtains a pointer to the payload of a storable array.
--
{-# INLINE storableArrayPtr #-}
storableArrayPtr :: StorableArray i a -> Ptr a
storableArrayPtr (StorableArray _ _ _ fp) = unsafeForeignPtrToPtr fp
| kumasento/accelerate | Data/Array/Accelerate/Array/Data.hs | bsd-3-clause | 25,248 | 454 | 12 | 6,516 | 6,215 | 3,278 | 2,937 | 434 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Universum
import qualified Data.List.NonEmpty as NonEmpty
import Test.QuickCheck (Property, Result (..), conjoin,
counterexample, ioProperty, label, property,
quickCheckResult, withMaxSuccess, (===))
import Cardano.X509.Configuration (CertDescription (..),
DirConfiguration (..), ErrInvalidExpiryDays,
ServerConfiguration (..), TLSConfiguration (..),
fromConfiguration, genCertificate)
import Data.X509.Extra (FailedReason, ServiceID, SignedCertificate,
genRSA256KeyPair, isServerCertificate,
validateCertificate)
import Test.Cardano.X509.Configuration (tests)
import Test.Cardano.X509.Configuration.Arbitrary (AltNames (..),
Invalid (..), Unknown (..))
import Test.Pos.Util.Tripping (runTests)
--
-- Main
--
main :: IO ()
main = do
runTests [ tests ]
runQuickCheck
[ quickCheckResult $ label "GenCertificate is Valid" propGenCertificateValid
, quickCheckResult $ label "validateCertificate fails for unknown ServiceID" propUnknownService
, quickCheckResult $ label "Invalid Expiry Days throws" propInvalidExpiryDays
]
where
    -- NOTE running 'quickCheck prop' doesn't make 'cabal test' fail
    -- even if the property fails. So this little helper copes with this
    -- by running all specs and failing if one of them returned a failure.
runQuickCheck :: [IO Result] -> IO ()
runQuickCheck =
sequence >=> (mapM_ $ \case
Success {} -> return ()
_ -> exitFailure)
--
-- Properties
--
-- | Verify that each certificate generated is valid. It uses the default
-- validation check of 'Data.X509.Validation'.
propGenCertificateValid
:: (TLSConfiguration, DirConfiguration)
-> Property
propGenCertificateValid =
ioProperty . generateAndValidate getValidServiceID propAllCertsValid
-- | Verify that each server certificate generated is invalid when provided an
-- unknown ServiceID.
propUnknownService
:: Unknown AltNames
-> (TLSConfiguration, DirConfiguration)
-> Property
propUnknownService altNames =
ioProperty . generateAndValidate (getUnknownServiceID altNames) propServerCertsInvalid
-- | Verify that we can't generate certificates when provided invalid
-- expiry days.
propInvalidExpiryDays
:: (Invalid TLSConfiguration, DirConfiguration)
-> Property
propInvalidExpiryDays (Invalid tlsConf, dirConf) =
withMaxSuccess 10 $ ioProperty $ generateAndValidate getValidServiceID propAllCertsValid (tlsConf, dirConf)
`catch` (\(_ :: ErrInvalidExpiryDays) -> return $ property True)
`catch` (\(e :: SomeException) -> throwM e)
-- | Check that there's no validation FailedReason
propAllCertsValid
:: SignedCertificate
-> [FailedReason]
-> Property
propAllCertsValid _ =
(=== [])
-- | Check that there are actually some validation FailedReason for non-client
-- certificate
propServerCertsInvalid
:: SignedCertificate
-> [FailedReason]
-> Property
propServerCertsInvalid cert | isServerCertificate cert = (=/=) []
propServerCertsInvalid _ = const (property True)
-- | Actually generate certificates and validate them with the given property
-- Throws on error.
generateAndValidate
:: (TLSConfiguration -> ServiceID)
-> (SignedCertificate -> [FailedReason] -> Property)
-> (TLSConfiguration, DirConfiguration)
-> IO (Property)
generateAndValidate getServiceID predicate (tlsConf, dirConf) = do
(caDesc, certDescs) <-
fromConfiguration tlsConf dirConf genRSA256KeyPair <$> genRSA256KeyPair
(_, caCert) <- genCertificate caDesc
fmap conjoin $ forM certDescs $ \desc -> do
(_, cert) <- genCertificate desc
predicate cert <$>
validateCertificate caCert (certChecks desc) (getServiceID tlsConf) cert
-- | Get a valid serviceID from the configuration
getValidServiceID
:: TLSConfiguration
-> ServiceID
getValidServiceID tlsConf =
(NonEmpty.head $ serverAltNames $ tlsServer tlsConf, "")
-- | Get an invalid serviceID from the configuration
getUnknownServiceID
:: Unknown AltNames
-> TLSConfiguration
-> ServiceID
getUnknownServiceID (Unknown (AltNames (name :| _))) _ =
(name, "")
-- | Like '/=', but prints a counterexample when it fails.
-- Source: [email protected] Test.QuickCheck.Property#(=/=)
infix 4 =/=
(=/=) :: (Eq a, Show a) => a -> a -> Property
x =/= y =
counterexample (show x ++ interpret res ++ show y) res
where
res = x /= y
interpret True = " /= "
interpret False = " == "
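-- A usage sketch (illustrative, not part of the original test suite): inside a
-- property, @x =/= y@ behaves like @x /= y@ but also reports both values when
-- the property fails. With Test.QuickCheck's 'quickCheck' in scope:
--
-- > quickCheck (\n -> (n :: Int) =/= n + 1)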
| input-output-hk/pos-haskell-prototype | x509/test/Main.hs | mit | 4,810 | 0 | 14 | 1,097 | 972 | 547 | 425 | 92 | 2 |
module System.Console.Questioner.Autocomplete
where
| yamadapc/stack-run | unix/System/Console/Questioner/Autocomplete.hs | mit | 54 | 0 | 3 | 5 | 8 | 6 | 2 | 1 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
module Graphics.UI.Bottle.Direction
( Direction(..), coordinates
) where
import Prelude.Compat
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Data.Vector.Vector2 (Vector2(..))
import Graphics.UI.Bottle.Rect (R, Rect(..))
import qualified Graphics.UI.Bottle.Rect as Rect
-- RelativePos pos is relative to the top-left of the widget
data Direction = Outside | PrevFocalArea Rect | Point (Vector2 R)
coordinates :: Lens.Traversal' Direction Rect
coordinates _ Outside = pure Outside
coordinates f (PrevFocalArea x) = PrevFocalArea <$> f x
coordinates f (Point x) =
Point . (^. Rect.topLeft) <$> f (Rect x 0)
| da-x/lamdu | bottlelib/Graphics/UI/Bottle/Direction.hs | gpl-3.0 | 724 | 0 | 8 | 144 | 197 | 117 | 80 | 15 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EMR.ListBootstrapActions
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Provides information about the bootstrap actions associated with a
-- cluster.
--
-- /See:/ <http://docs.aws.amazon.com/ElasticMapReduce/latest/API/API_ListBootstrapActions.html AWS API Reference> for ListBootstrapActions.
--
-- This operation returns paginated results.
module Network.AWS.EMR.ListBootstrapActions
(
-- * Creating a Request
listBootstrapActions
, ListBootstrapActions
-- * Request Lenses
, lbaMarker
, lbaClusterId
-- * Destructuring the Response
, listBootstrapActionsResponse
, ListBootstrapActionsResponse
-- * Response Lenses
, lbarsBootstrapActions
, lbarsMarker
, lbarsResponseStatus
) where
import Network.AWS.EMR.Types
import Network.AWS.EMR.Types.Product
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | This input determines which bootstrap actions to retrieve.
--
-- /See:/ 'listBootstrapActions' smart constructor.
data ListBootstrapActions = ListBootstrapActions'
{ _lbaMarker :: !(Maybe Text)
, _lbaClusterId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListBootstrapActions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lbaMarker'
--
-- * 'lbaClusterId'
listBootstrapActions
:: Text -- ^ 'lbaClusterId'
-> ListBootstrapActions
listBootstrapActions pClusterId_ =
ListBootstrapActions'
{ _lbaMarker = Nothing
, _lbaClusterId = pClusterId_
}
-- | The pagination token that indicates the next set of results to retrieve.
lbaMarker :: Lens' ListBootstrapActions (Maybe Text)
lbaMarker = lens _lbaMarker (\ s a -> s{_lbaMarker = a});
-- | The cluster identifier for the bootstrap actions to list.
lbaClusterId :: Lens' ListBootstrapActions Text
lbaClusterId = lens _lbaClusterId (\ s a -> s{_lbaClusterId = a});
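-- A usage sketch (illustrative only, not part of the generated module): build a
-- request for a hypothetical cluster id and set the pagination marker through
-- the lens above. The operators '(&)' and '(?~)' are assumed to be in scope,
-- e.g. from Control.Lens:
--
-- > listBootstrapActions "j-EXAMPLECLUSTER" & lbaMarker ?~ "next-page-token"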
instance AWSPager ListBootstrapActions where
page rq rs
| stop (rs ^. lbarsMarker) = Nothing
| stop (rs ^. lbarsBootstrapActions) = Nothing
| otherwise =
Just $ rq & lbaMarker .~ rs ^. lbarsMarker
instance AWSRequest ListBootstrapActions where
type Rs ListBootstrapActions =
ListBootstrapActionsResponse
request = postJSON eMR
response
= receiveJSON
(\ s h x ->
ListBootstrapActionsResponse' <$>
(x .?> "BootstrapActions" .!@ mempty) <*>
(x .?> "Marker")
<*> (pure (fromEnum s)))
instance ToHeaders ListBootstrapActions where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("ElasticMapReduce.ListBootstrapActions" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON ListBootstrapActions where
toJSON ListBootstrapActions'{..}
= object
(catMaybes
[("Marker" .=) <$> _lbaMarker,
Just ("ClusterId" .= _lbaClusterId)])
instance ToPath ListBootstrapActions where
toPath = const "/"
instance ToQuery ListBootstrapActions where
toQuery = const mempty
-- | This output contains the bootstrap actions detail.
--
-- /See:/ 'listBootstrapActionsResponse' smart constructor.
data ListBootstrapActionsResponse = ListBootstrapActionsResponse'
{ _lbarsBootstrapActions :: !(Maybe [Command])
, _lbarsMarker :: !(Maybe Text)
, _lbarsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListBootstrapActionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lbarsBootstrapActions'
--
-- * 'lbarsMarker'
--
-- * 'lbarsResponseStatus'
listBootstrapActionsResponse
:: Int -- ^ 'lbarsResponseStatus'
-> ListBootstrapActionsResponse
listBootstrapActionsResponse pResponseStatus_ =
ListBootstrapActionsResponse'
{ _lbarsBootstrapActions = Nothing
, _lbarsMarker = Nothing
, _lbarsResponseStatus = pResponseStatus_
}
-- | The bootstrap actions associated with the cluster.
lbarsBootstrapActions :: Lens' ListBootstrapActionsResponse [Command]
lbarsBootstrapActions = lens _lbarsBootstrapActions (\ s a -> s{_lbarsBootstrapActions = a}) . _Default . _Coerce;
-- | The pagination token that indicates the next set of results to retrieve.
lbarsMarker :: Lens' ListBootstrapActionsResponse (Maybe Text)
lbarsMarker = lens _lbarsMarker (\ s a -> s{_lbarsMarker = a});
-- | The response status code.
lbarsResponseStatus :: Lens' ListBootstrapActionsResponse Int
lbarsResponseStatus = lens _lbarsResponseStatus (\ s a -> s{_lbarsResponseStatus = a});
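-- A small illustrative note (not part of the generated module): the smart
-- constructor above needs only the response status; the other fields start
-- empty. Viewing with '(^.)' (e.g. from Control.Lens) is assumed:
--
-- > listBootstrapActionsResponse 200 ^. lbarsResponseStatus   -- 200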
| fmapfmapfmap/amazonka | amazonka-emr/gen/Network/AWS/EMR/ListBootstrapActions.hs | mpl-2.0 | 5,650 | 0 | 14 | 1,291 | 848 | 496 | 352 | 105 | 1 |
{-# LANGUAGE ScopedTypeVariables, UndecidableInstances, ConstraintKinds, GADTs, DataKinds, KindSignatures #-}
{-# OPTIONS -Wall #-}
module Language.Hakaru.Simplifiable (Simplifiable(mapleType)) where
import Prelude hiding (Real)
--import Data.Proxy (Proxy(..)) -- Is in Prelude for modern GHC?
import Data.Typeable (Typeable)
import Language.Hakaru.Syntax (Hakaru(..), HakaruFun(..))
import Language.Hakaru.Embed
import Data.List (intercalate)
-- TODO: We used to have @Typeable a@ for all Hakaru types @a@, but now that we've moved them into the @Hakaru*@ kind, now what?
-- N.B., 'Typeable' is polykinded...
class Typeable a => Simplifiable (a :: Hakaru *) where
mapleType :: proxy a -> String
instance Simplifiable 'HUnit where mapleType _ = "Unit"
instance Simplifiable 'HInt where mapleType _ = "Int"
instance Simplifiable 'HReal where mapleType _ = "Real"
instance Simplifiable 'HProb where mapleType _ = "Prob"
instance Simplifiable 'HBool where mapleType _ = "Bool"
instance (Simplifiable a, Simplifiable b) => Simplifiable ('HPair a b) where
mapleType _ = "Pair(" ++ mapleType (Proxy :: Proxy a) ++ "," ++
mapleType (Proxy :: Proxy b) ++ ")"
instance Simplifiable a => Simplifiable ('HList a) where
mapleType _ = "List(" ++ mapleType (Proxy :: Proxy a) ++ ")"
instance Simplifiable a => Simplifiable ('HMeasure a) where
mapleType _ = "Measure(" ++ mapleType (Proxy :: Proxy a) ++ ")"
instance Simplifiable a => Simplifiable ('HArray a) where
mapleType _ = "MVector(" ++ mapleType (Proxy :: Proxy a) ++ ")"
instance (Simplifiable a, Simplifiable b) => Simplifiable ('HFun a b) where
mapleType _ = "Arrow(" ++ mapleType (Proxy :: Proxy a) ++ "," ++
mapleType (Proxy :: Proxy b) ++ ")"
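-- An illustrative example of the strings these instances produce (not part of
-- the original module), assuming the module is loaded in GHCi:
--
-- >>> mapleType (Proxy :: Proxy ('HPair 'HInt 'HProb))
-- "Pair(Int,Prob)"
-- >>> mapleType (Proxy :: Proxy ('HFun 'HReal ('HMeasure 'HReal)))
-- "Arrow(Real,Measure(Real))"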
-- N.B., we replaced the old @Typeable (Tag t xss)@ requirement by its two
-- prerequisites, because otherwise we need to give a kind signature to @xss@
-- which for some strange reason causes a syntax error
class SimplifiableFun (x :: HakaruFun *) where
mapleTypeFn :: proxy x -> String
instance SimplifiableFun Id where mapleTypeFn _ = "Id"
instance Simplifiable x => SimplifiableFun (K x) where
mapleTypeFn _ = "Konst(" ++ mapleType (Proxy :: Proxy x) ++ ")"
instance (SingI xss, All2 SimplifiableFun xss, SimplEmbed t, Typeable t, Typeable xss) => Simplifiable (HTag t xss) where
mapleType _ = concat
[ "Tagged("
, mapleTypeEmbed (undefined :: t)
, ","
, typeList . map typeList . go2 $ (sing :: Sing xss)
, ")"
]
where
typeList xs = "[" ++ intercalate "," xs ++ "]"
go2 :: All2 SimplifiableFun xs => Sing xs -> [[String]]
go2 SNil = []
go2 (SCons x xs) = go1 x : go2 xs
go1 :: All SimplifiableFun xs => Sing xs -> [String]
go1 SNil = []
go1 (SCons x xs) = mapleTypeFn x : go1 xs
| bitemyapp/hakaru | Language/Hakaru/Simplifiable.hs | bsd-3-clause | 2,841 | 0 | 12 | 599 | 874 | 452 | 422 | -1 | -1 |
module Protocol.Persistence where
import General.Config (Config(..))
import General.Persistence
( PersistentBlockHeader(..)
, KeySet(..)
, EntityField(..)
, PersistentUTXO
, PersistentTransaction(..)
)
import General.Types (HasNetwork(..), HasPool(..))
import General.Hash (Hash(..))
import BitcoinCore.Keys (Address(..))
import BitcoinCore.BlockHeaders
( genesisBlock
, BlockHeader(..)
, BlockHash(..)
)
import BitcoinCore.Transaction.Transactions
( Transaction(..)
, TxHash
, hashTransaction
)
import Protocol.Util
( encodeBlockHeader
, decodeBlockHeader
, BlockIndex(..)
, toDbKey
, fromDbKey
)
import Database.Persist.Sql
( insertMany_
, insert_
, count
, runSqlPool
, Filter
, insert_
, selectList
, update
, (==.)
, (=.)
, (>=.)
, ConnectionPool
)
import qualified Database.Persist.Sql as DB
import Database.Persist.Types (SelectOpt(..))
import Control.Lens ((^.))
import Control.Monad (when)
import Data.List.Split (chunksOf)
-- Return the index for the most recent persisted block
getLastBlock :: ConnectionPool -> IO BlockIndex
getLastBlock pool = do
blockCount <- runSqlPool lastBlockQuery pool
return . BlockIndex $ blockCount - 1
where lastBlockQuery = count allBlocksFilter
allBlocksFilter = [] :: [Filter PersistentBlockHeader]
persistGenesisBlock :: Config -> IO ()
persistGenesisBlock config = do
lastBlock' <- getLastBlock (config^.pool)
when (lastBlock' < BlockIndex 0) $
runSqlPool (insert_ . encodeBlockHeader . genesisBlock $ (config^.network)) (config^.pool)
persistHeader :: ConnectionPool -> BlockHeader -> IO ()
persistHeader pool header =
runSqlPool (insert_ $ encodeBlockHeader header) pool
persistHeaders :: ConnectionPool -> [BlockHeader] -> IO ()
persistHeaders pool headers = do
let persistentHeaders = map encodeBlockHeader headers
chunkedPersistentHeaders = chunksOf 100 persistentHeaders
-- Headers are inserted in chunks
-- sqlite rejects if we insert all at once
runSqlPool (mapM_ insertMany_ chunkedPersistentHeaders) pool
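-- An illustrative note (not part of the original module): 'chunksOf' splits the
-- insert into bounded batches, e.g.
--
-- >>> map length (chunksOf 100 [1 .. 250 :: Int])
-- [100,100,50]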
-- | deletes all headers with index >= inx.
deleteHeaders :: ConnectionPool -> BlockIndex -> IO ()
deleteHeaders pool inx = do
lastBlock <- getLastBlock pool
let inxs = enumFromTo inx lastBlock
runSqlPool (mapM_ (DB.delete . toDbKey) inxs) pool
getBlockHeaderFromHash :: ConnectionPool -> BlockHash -> IO (Maybe (BlockIndex, BlockHeader))
getBlockHeaderFromHash pool (Hash hash') = do
matches <- runSqlPool (selectList [PersistentBlockHeaderHash ==. hash'] []) pool
case matches of
[] -> return Nothing
[header] -> do
let DB.Entity persistentKey persistentHeader = header
key = fromDbKey persistentKey
blockHeader = decodeBlockHeader persistentHeader
return . Just $ (key , blockHeader)
_ -> fail "Multiple blocks found with same hash."
getTransactionFromHash :: ConnectionPool -> TxHash -> IO (Maybe Integer)
getTransactionFromHash pool (Hash hash') = do
matches <- runSqlPool (selectList [PersistentTransactionHash ==. hash'] []) pool
case matches of
[] -> return Nothing
[tx] -> do
let DB.Entity persistentKey _ = tx
key = fromIntegral . DB.fromSqlKey $ persistentKey
return . Just $ key
_ -> fail "Multiple transactions found with same hash."
persistTransaction :: ConnectionPool -> Transaction -> IO ()
persistTransaction pool transaction =
runSqlPool (insert_ persistentTransaction) pool
where persistentTransaction = PersistentTransaction hash'
hash' = hash . hashTransaction $ transaction
getBlockWithIndex :: ConnectionPool -> BlockIndex -> IO (Maybe BlockHeader)
getBlockWithIndex pool i = (fmap . fmap) decodeBlockHeader $
runSqlPool (DB.get . toDbKey $ i) pool
nHeadersSinceKey :: ConnectionPool -> Int -> BlockIndex -> IO [BlockHeader]
nHeadersSinceKey pool n key = do
let key' = toDbKey key
persistentHeaders <- runSqlPool (selectList [ PersistentBlockHeaderId >=. key'] [LimitTo n]) pool
return $ map getHeaderFromEntity persistentHeaders
getHeaderFromEntity :: DB.Entity PersistentBlockHeader -> BlockHeader
getHeaderFromEntity (DB.Entity _ persistentHeader) = decodeBlockHeader persistentHeader
getAllAddresses :: ConnectionPool -> IO [Address]
getAllAddresses pool = do
let allAddressFilter = [] :: [Filter KeySet]
keySetEntities <- runSqlPool (selectList allAddressFilter []) pool
let getAddress (DB.Entity _ keySet) = Address . keySetAddress $ keySet
return $ map getAddress keySetEntities
persistUTXOs :: ConnectionPool -> [PersistentUTXO] -> IO ()
persistUTXOs pool utxos = runSqlPool (insertMany_ utxos) pool
getUnspentUTXOs :: ConnectionPool -> IO [DB.Entity PersistentUTXO]
getUnspentUTXOs pool = do
let unspentUTXOFilter = [ PersistentUTXOIsSpent ==. False]
runSqlPool (selectList unspentUTXOFilter []) pool
setUtxoSpent :: ConnectionPool -> DB.Key PersistentUTXO -> IO ()
setUtxoSpent pool key =
runSqlPool (update key [PersistentUTXOIsSpent =. True]) pool
setUTXOBlockHash :: ConnectionPool
-> DB.Key PersistentUTXO
-> BlockHash
-> IO ()
setUTXOBlockHash pool key (Hash hash') =
  runSqlPool (update key [PersistentUTXOBlockHash =. hash']) pool
| clample/lamdabtc | backend/src/Protocol/Persistence.hs | bsd-3-clause | 5,259 | 0 | 17 | 957 | 1,533 | 800 | 733 | 124 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}
module OSRSpec (spec) where
import Data.Either (isRight, isLeft)
import qualified Data.Vector.Storable as St
import TestUtils
import Arbitrary ((~==))
import OSR
import GDAL (Pair(..))
spec :: Spec
spec = do
describe "SpatialReference" $ do
itIO "can be created from EPSG number" $
srsFromEPSG 23030 `shouldSatisfy` isRight
itIO "can created from Proj4 string" $
srsFromProj4 "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
`shouldSatisfy` isRight
itIO "srsFromProj4 returns Left if invalid" $
srsFromProj4 "foo" `shouldSatisfy` isLeft
describe "CoordinateTransformation" $ do
itIO "can be created" $ do
let ct = do src <- srsFromEPSG 23030
dst <- srsFromEPSG 4326
coordinateTransformation src dst
isRight ct `shouldBe` True
itIO "can transform points" $ do
Right dst <- srsFromEPSGIO 4326
setAxisMappingStrategy dst OAMS_TRADITIONAL_GIS_ORDER
let Right ct = do src <- srsFromEPSG 23030
coordinateTransformation src dst
points :: St.Vector (Pair Double)
points = [ 10000 :+: 10000 , 20000 :+: 20000]
expected = [ (-7.399954586233987) :+: 8.910802667504762e-2
, (-7.31036658723297) :+: 0.17933194077993758]
almostEq (a:+:b) (a':+:b') = a ~== a' && b ~== b'
case points `transformWith` ct of
Just result ->
St.all id (St.zipWith almostEq result expected) `shouldBe` True
Nothing -> expectationFailure "transform returns Nothing"
| meteogrid/bindings-gdal | tests/OSRSpec.hs | bsd-3-clause | 1,649 | 0 | 20 | 442 | 425 | 215 | 210 | 39 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Distribution.Types.UnitId
( UnitId, unUnitId, mkUnitId
, DefUnitId
, unsafeMkDefUnitId
, unDefUnitId
, newSimpleUnitId
, mkLegacyUnitId
, getHSLibraryName
, InstalledPackageId -- backwards compat
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Utils.ShortText
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Text
import Distribution.Types.ComponentId
import Distribution.Types.PackageId
import Text.PrettyPrint (text)
-- | A unit identifier identifies a (possibly instantiated)
-- package/component that can be installed the installed package
-- database. There are several types of components that can be
-- installed:
--
-- * A traditional library with no holes, so that 'unitIdHash'
-- is @Nothing@. In the absence of Backpack, 'UnitId'
-- is the same as a 'ComponentId'.
--
-- * An indefinite, Backpack library with holes. In this case,
-- 'unitIdHash' is still @Nothing@, but in the install,
-- there are only interfaces, no compiled objects.
--
-- * An instantiated Backpack library with all the holes
-- filled in. 'unitIdHash' is a @Just@ a hash of the
-- instantiating mapping.
--
-- A unit is a component plus the additional information on how the
-- holes are filled in. Thus there is a one to many relationship: for a
-- particular component there are many different ways of filling in the
-- holes, and each different combination is a unit (and has a separate
-- 'UnitId').
--
-- 'UnitId' is distinct from 'OpenUnitId', in that it is always
-- installed, whereas 'OpenUnitId' are intermediate unit identities
-- that arise during mixin linking, and don't necessarily correspond
-- to any actually installed unit. Since the mapping is not actually
-- recorded in a 'UnitId', you can't actually substitute over them
-- (but you can substitute over 'OpenUnitId'). See also
-- "Distribution.Backpack.FullUnitId" for a mechanism for expanding an
-- instantiated 'UnitId' to retrieve its mapping.
--
-- Backwards compatibility note: if you need to get the string
-- representation of a UnitId to pass, e.g., as a @-package-id@
-- flag, use the 'display' function, which will work on all
-- versions of Cabal.
--
newtype UnitId = UnitId ShortText
deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, NFData)
{-# DEPRECATED InstalledPackageId "Use UnitId instead" #-}
type InstalledPackageId = UnitId
instance Binary UnitId
-- | The textual format for 'UnitId' coincides with the format
-- GHC accepts for @-package-id@.
--
instance Text UnitId where
disp = text . unUnitId
parse = mkUnitId <$> Parse.munch1 (\c -> isAlphaNum c || c `elem` "-_.+")
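-- An illustrative round trip (not part of the original module): the rendered
-- form is just the underlying string, e.g.
--
-- >>> display (mkUnitId "base-4.14.0.0-abcd")
-- "base-4.14.0.0-abcd"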
-- | If you need backwards compatibility, consider using 'display'
-- instead, which is supported by all versions of Cabal.
--
unUnitId :: UnitId -> String
unUnitId (UnitId s) = fromShortText s
mkUnitId :: String -> UnitId
mkUnitId = UnitId . toShortText
-- | 'mkUnitId'
--
-- @since 2.0
instance IsString UnitId where
fromString = mkUnitId
-- | Create a unit identity with no associated hash directly
-- from a 'ComponentId'.
newSimpleUnitId :: ComponentId -> UnitId
newSimpleUnitId = mkUnitId . unComponentId
-- | Make an old-style UnitId from a package identifier.
-- Assumed to be for the public library
mkLegacyUnitId :: PackageId -> UnitId
mkLegacyUnitId = newSimpleUnitId . mkComponentId . display
-- | Returns library name prefixed with HS, suitable for filenames
getHSLibraryName :: UnitId -> String
getHSLibraryName uid = "HS" ++ display uid
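-- For example (illustrative, not part of the original module):
--
-- >>> getHSLibraryName (mkUnitId "foo-1.0-inplace")
-- "HSfoo-1.0-inplace"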
-- | A 'UnitId' for a definite package. The 'DefUnitId' invariant says
-- that a 'UnitId' identified this way is definite; i.e., it has no
-- unfilled holes.
newtype DefUnitId = DefUnitId { unDefUnitId :: UnitId }
deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, Binary, NFData, Text)
-- | Unsafely create a 'DefUnitId' from a 'UnitId'. Your responsibility
-- is to ensure that the 'DefUnitId' invariant holds.
unsafeMkDefUnitId :: UnitId -> DefUnitId
unsafeMkDefUnitId = DefUnitId
| mydaum/cabal | Cabal/Distribution/Types/UnitId.hs | bsd-3-clause | 4,151 | 0 | 12 | 699 | 453 | 284 | 169 | 44 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  GHC.StableName
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Stable names are a way of performing fast ( \(\mathcal{O}(1)\) ),
-- not-quite-exact comparison between objects.
--
-- Stable names solve the following problem: suppose you want to build
-- a hash table with Haskell objects as keys, but you want to use
-- pointer equality for comparison; maybe because the keys are large
-- and hashing would be slow, or perhaps because the keys are infinite
-- in size. We can\'t build a hash table using the address of the
-- object as the key, because objects get moved around by the garbage
-- collector, meaning a re-hash would be necessary after every garbage
-- collection.
--
-------------------------------------------------------------------------------
module GHC.StableName (
-- * Stable Names
StableName (..),
makeStableName,
hashStableName,
eqStableName
) where
import GHC.IO ( IO(..) )
import GHC.Base ( Int(..), StableName#, makeStableName#
, eqStableName#, stableNameToInt# )
-----------------------------------------------------------------------------
-- Stable Names
{-|
An abstract name for an object, that supports equality and hashing.
Stable names have the following property:
* If @sn1 :: StableName@ and @sn2 :: StableName@ and @sn1 == sn2@
then @sn1@ and @sn2@ were created by calls to @makeStableName@ on
the same object.
The reverse is not necessarily true: if two stable names are not
equal, then the objects they name may still be equal. Note in particular
that `makeStableName` may return a different `StableName` after an
object is evaluated.
Stable Names are similar to Stable Pointers ("Foreign.StablePtr"),
but differ in the following ways:
* There is no @freeStableName@ operation, unlike "Foreign.StablePtr"s.
Stable names are reclaimed by the runtime system when they are no
longer needed.
* There is no @deRefStableName@ operation. You can\'t get back from
a stable name to the original Haskell object. The reason for
this is that the existence of a stable name for an object does not
guarantee the existence of the object itself; it can still be garbage
collected.
-}
data StableName a = StableName (StableName# a)
-- | Makes a 'StableName' for an arbitrary object. The object passed as
-- the first argument is not evaluated by 'makeStableName'.
makeStableName :: a -> IO (StableName a)
makeStableName a = IO $ \ s ->
case makeStableName# a s of (# s', sn #) -> (# s', StableName sn #)
-- | Convert a 'StableName' to an 'Int'. The 'Int' returned is not
-- necessarily unique; several 'StableName's may map to the same 'Int'
-- (in practice however, the chances of this are small, so the result
-- of 'hashStableName' makes a good hash key).
hashStableName :: StableName a -> Int
hashStableName (StableName sn) = I# (stableNameToInt# sn)
-- | @since 2.01
instance Eq (StableName a) where
(StableName sn1) == (StableName sn2) =
case eqStableName# sn1 sn2 of
0# -> False
_ -> True
-- | Equality on 'StableName' that does not require that the types of
-- the arguments match.
--
-- @since 4.7.0.0
eqStableName :: StableName a -> StableName b -> Bool
eqStableName (StableName sn1) (StableName sn2) =
case eqStableName# sn1 sn2 of
0# -> False
_ -> True
-- Requested by Emil Axelsson on glasgow-haskell-users, who wants to
-- use it for implementing observable sharing.
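-- A small usage sketch (illustrative, not part of the original module): two
-- stable names taken from the same, already-evaluated object are expected to
-- compare equal, and the hash can serve as a hash-table key. 'evaluate' is
-- Control.Exception.evaluate:
--
-- > example :: IO (Bool, Int)
-- > example = do
-- >   let x = sum [1 .. 10 :: Int]
-- >   _   <- evaluate x
-- >   sn1 <- makeStableName x
-- >   sn2 <- makeStableName x
-- >   return (sn1 == sn2, hashStableName sn1)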
| sdiehl/ghc | libraries/base/GHC/StableName.hs | bsd-3-clause | 3,909 | 0 | 11 | 786 | 342 | 202 | 140 | 28 | 2 |
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
-- | Defines datatype for information about source element positions.
module Data.Position.BasicPosition(
BasicPosition(..),
span,
) where
import Control.Monad.Positions.Class
import Data.Hashable
import Data.Semigroup
import Prelude hiding (span)
import Text.Format hiding (line)
import Text.XML.Expat.Pickle
import Text.XML.Expat.Tree
import qualified Data.ByteString as Strict
import qualified Data.ByteString.UTF8 as Strict
import qualified Data.Position as Position
data BasicPosition =
-- | A span in a source file.
Span {
-- | The starting point.
spanStart :: !Position.Point,
      -- | The ending point.
spanEnd :: !Position.Point
}
-- | A specific line and column in a source file.
| Point {
-- | The position.
pointPos :: !Position.Point
}
-- | A position representing a whole file.
| File {
-- | The name of the source file.
fileName :: !Position.Filename
}
-- | A synthetic position, generated internally by a compiler.
| Synthetic {
-- | A description of the origin of this position.
synthDesc :: !Strict.ByteString
}
-- | A command-line option.
| CmdLine
deriving (Ord, Eq)
span :: Position.Point -> Position.Point -> BasicPosition
span start end
| start == end = Point { pointPos = start }
| otherwise = Span { spanStart = start, spanEnd = end }
instance Position.PositionInfo BasicPosition where
location Span { spanStart = startpos, spanEnd = endpos } =
do
Position.PointInfo { Position.pointFile = fname } <- pointInfo startpos
return (Just (fname, Just (startpos, endpos)))
location Point { pointPos = pos } =
do
Position.PointInfo { Position.pointFile = fname } <- pointInfo pos
return (Just (fname, Just (pos, pos)))
location File { fileName = fname } = return (Just (fname, Nothing))
location _ = return Nothing
description Synthetic { synthDesc = desc } = desc
description CmdLine = Strict.fromString "from command line"
description _ = Strict.empty
children _ = Nothing
showContext _ = True
instance Position.Position BasicPosition BasicPosition where
positionInfo pos = [pos]
instance Semigroup BasicPosition where
Point { pointPos = pos1 } <> Point { pointPos = pos2 } =
Span { spanStart = pos1, spanEnd = pos2 }
Span { spanStart = pos1 } <> Point { pointPos = pos2 } =
Span { spanStart = pos1, spanEnd = pos2 }
Point { pointPos = pos1 } <> Span { spanEnd = pos2 } =
Span { spanStart = pos1, spanEnd = pos2 }
Span { spanStart = pos1 } <> Span { spanEnd = pos2 } =
Span { spanStart = pos1, spanEnd = pos2 }
CmdLine <> CmdLine = CmdLine
_ <> _ = error $! "Cannot combine positions"
instance Hashable BasicPosition where
hashWithSalt s Span { spanStart = start, spanEnd = end } =
s `hashWithSalt` (0 :: Word) `hashWithSalt` start `hashWithSalt` end
hashWithSalt s Point { pointPos = pos } =
s `hashWithSalt` (1 :: Word) `hashWithSalt` pos
hashWithSalt s File { fileName = fname } =
s `hashWithSalt` (2 :: Word) `hashWithSalt` fname
hashWithSalt s Synthetic { synthDesc = desc } =
s `hashWithSalt` (3 :: Word) `hashWithSalt` desc
hashWithSalt s CmdLine = s `hashWithSalt` (4 :: Int)
instance (MonadPositions m) => FormatM m BasicPosition where
formatM Span { spanStart = startpos, spanEnd = endpos } =
do
Position.PointInfo { Position.pointLine = startline,
Position.pointColumn = startcol,
Position.pointFile = fname } <- pointInfo startpos
Position.PointInfo { Position.pointLine = endline,
Position.pointColumn = endcol } <- pointInfo endpos
Position.FileInfo { Position.fileInfoName = fstr } <- fileInfo fname
if startline == endline
then return (hcat [bytestring fstr, colon, format startline, dot,
format startcol, char '-', format endcol])
else return (hcat [bytestring fstr, colon,
format startline, dot, format startcol, char '-',
format endline, dot, format endcol])
formatM Point { pointPos = pos } =
do
Position.PointInfo { Position.pointLine = line,
Position.pointColumn = col,
Position.pointFile = fname } <- pointInfo pos
Position.FileInfo { Position.fileInfoName = fstr } <- fileInfo fname
return (hcat [bytestring fstr, colon, format line, dot, format col])
formatM File { fileName = fname } =
do
Position.FileInfo { Position.fileInfoName = fstr } <- fileInfo fname
return (bytestring fstr)
formatM CmdLine = return (string "command line")
formatM Synthetic { synthDesc = desc } = return (bytestring desc)
spanPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] BasicPosition
spanPickler =
let
fwdfunc (start, end) =
Span { spanStart = start, spanEnd = end }
revfunc Span { spanStart = start, spanEnd = end } =
(start, end)
revfunc _ = error $! "Can't convert to Span"
in
xpWrap (fwdfunc, revfunc)
(xpElemNodes (gxFromString "Span")
(xpPair (xpElemAttrs (gxFromString "start") xpickle)
(xpElemAttrs (gxFromString "end") xpickle)))
pointPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] BasicPosition
pointPickler =
let
revfunc Point { pointPos = pos } = pos
revfunc _ = error $! "Can't convert to Point"
in
xpWrap (Point, revfunc) (xpElemAttrs (gxFromString "Point") xpickle)
filePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] BasicPosition
filePickler =
let
revfunc File { fileName = fname } = fname
revfunc _ = error $! "Can't convert to File"
in
xpWrap (File, revfunc) (xpElemAttrs (gxFromString "File") xpickle)
syntheticPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] BasicPosition
syntheticPickler =
let
revfunc Synthetic { synthDesc = desc } = gxFromByteString desc
revfunc _ = error $! "Can't convert to Synthetic"
in
xpWrap (Synthetic . gxToByteString, revfunc)
(xpElemNodes (gxFromString "Synthetic") (xpContent xpText))
cmdLinePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] BasicPosition
cmdLinePickler =
let
revfunc CmdLine = ()
    revfunc _ = error $! "Can't convert to CmdLine"
in
xpWrap (const CmdLine, revfunc)
(xpElemNodes (gxFromString "CmdLine") xpUnit)
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
XmlPickler [NodeG [] tag text] BasicPosition where
xpickle =
let
picker Span {} = 0
picker Point {} = 1
picker File {} = 2
picker Synthetic {} = 3
picker CmdLine {} = 4
in
xpAlt picker [spanPickler, pointPickler, filePickler,
syntheticPickler, cmdLinePickler]
| emc2/compiler-misc | src/Data/Position/BasicPosition.hs | bsd-3-clause | 8,991 | 0 | 15 | 2,257 | 2,211 | 1,199 | 1,012 | 167 | 2 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes, OverloadedStrings #-}
module Handler.Faq
( getFaqR
) where
import Import
data Faq = Faq
{ hash :: String
, question :: String
, answer :: Html
}
faqs :: [Faq]
faqs =
[ Faq "purpose" "What's the purpose of this site?" [shamlet|\
<p>Haskell has a vibrant, talented community of very capable programmers. This site aims to be the meeting point for these developers. By centralizing, we hope to make it easier for employers to find people to fill positions, and thus give Haskell a lower entrance cost into industry.
|]
, Faq "just-professionals" "I'm just a Haskell hobbyist. Does that mean this site isn't for me?" [shamlet|\
<p>While the main purpose of the site is for professionals and industry, there's no reason hobbyists shouldn't join in as well. The secondary mission of this site is to provide social networking. As the site is still young, it's not clear what features will be implemented, but this site will be a great resource for any Haskell programmer.
|]
, Faq "openid" "How do I create an account? What's OpenID?" [shamlet|\
<p>Instead of creating a brand new username/password on Haskellers, you can simply log in with OpenID. Most people out there already have an OpenID: Google, Yahoo!, AOL, Wordpress and many others provide them. If you have a Google or Yahoo! account, just click on the appropriate logo and you will be asked to log in automatically. We also support Facebook logins.
<p>Don't worry, we only use this information to authenticate you. We do not request any personal information from your OpenID provider, nor do we ever see your password. The only information Haskellers gets is what you provide us explicitly.
|]
, Faq "report" "What does reporting a user do, and when should I use it?" [shamlet|\
<p>Reporting a user sends a message to the site administrators that a user has been reported. It will also tell us who did the reporting if you are logged in. This is a simple way for you to let us know that there is something inappropriate on a user page. Examples of inappropriate content are:
<ul>
<li>Inappropriate profile picture. Comic images and other drawings are allowed, though you are encouraged to use an actual photo.
<li>Inappropriate language. No profanity of any kind is welcome on this site.
<li>Spam links. Each user is allowed to post one link to a website: this link should be describing them. The link need not directly relate to Haskell, but linking to information on pharmaceuticals is clearly not allowed.
<li>A profile clearly not belonging to a Haskell user. We have a very liberal definition of a Haskell user here: even someone who's never written a line of Haskell code but has read about the language is welcome. But beyond that, you don't really have a place on this site, and are simply adding noise to the signal.
|]
, Faq "add-feature" "I see that you have not implemented feature XYZ. Are you going to?" [shamlet|\
<p>
\This site is still in its infancy, so just because we haven't implemented a feature does not mean we won't. If you have a recommendation, feel free to either email the haskell-cafe mailing list or contact
<a href="http://www.haskellers.com/user/16/">Michael Snoyman
\ directly.
|]
]
getFaqR :: Handler Html
getFaqR = defaultLayout $ do
setTitle "Haskellers Frequently Asked Questions"
$(widgetFile "faq")
| danse/haskellers | Handler/Faq.hs | bsd-2-clause | 3,426 | 0 | 10 | 648 | 158 | 95 | 63 | 19 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Повторное тестирование надстройки </title>
<maps>
<homeID>retest</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| thc202/zap-extensions | addOns/retest/src/main/javahelp/org/zaproxy/addon/retest/resources/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 1,045 | 77 | 66 | 158 | 521 | 261 | 260 | -1 | -1 |
module A2 where
import Control.Parallel.Strategies (rpar, runEval)
fib 1 = 1
fib n
= (n1_2 + n2) + 1
where
n1 = fib (n - 1)
n2 = fib (n - 2)
n1_2
=
runEval
(do n1_2 <- rpar n1
return n1_2)
| RefactoringTools/HaRe | old/testing/evalMonad/A2AST.hs | bsd-3-clause | 278 | 0 | 12 | 131 | 104 | 55 | 49 | 12 | 1 |
{-# LANGUAGE FlexibleInstances, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-full-laziness #-}
-- cabal install judy
import Control.Monad (forM_)
import Criterion.Config
import Criterion.Main
import Criterion.Types
import qualified Data.IntMap as I
import qualified Data.Judy as J
import qualified Data.Map as M
import Data.List (foldl')
-- An example of how to specify a configuration value.
myConfig = defaultConfig { cfgPerformGC = ljust True }
main = defaultMainWith myConfig (return ()) [
bgroup "judy" [
bench "insert 1M" $ whnf testit 1000000
, bench "insert 10M" $ whnf testit 10000000
, bench "insert 100M" $ whnf testit 100000000
],
bgroup "map" [
bench "insert 100k" $ whnf testmap 100000
, bench "insert 1M" $ whnf testmap 1000000
],
bgroup "intmap" [
bench "insert 100k" $ whnf testintmap 100000
, bench "insert 1M" $ whnf testintmap 1000000
]
]
testit n = do
j <- J.new :: IO (J.JudyL Int)
forM_ [1..n] $ \n -> J.insert n (fromIntegral n :: Int) j
v <- J.lookup 100 j
v `seq` return ()
testmap :: Int -> M.Map Int Int
testmap n =
foldl' (\m k -> M.insert k 1 m) M.empty [0..n]
testintmap :: Int -> I.IntMap Int
testintmap n =
foldl' (\m k -> I.insert k 1 m) I.empty [0..n]
| bgamari/criterion | examples/Judy.hs | bsd-2-clause | 1,479 | 0 | 11 | 466 | 446 | 233 | 213 | 34 | 1 |
import Foreign.C
import Foreign.Marshal.Array
import Foreign.Storable
import Control.Concurrent
-- The test works only on UNIX like.
-- unportable bits:
import qualified System.Posix.Internals as SPI
import qualified System.Posix.Types as SPT
pipe :: IO (CInt, CInt)
pipe = allocaArray 2 $ \fds -> do
throwErrnoIfMinus1_ "pipe" $ SPI.c_pipe fds
rd <- peekElemOff fds 0
wr <- peekElemOff fds 1
return (rd, wr)
main :: IO ()
main = do
(r1, w1) <- pipe
(r2, _w2) <- pipe
_ <- forkIO $ do -- thread A
threadWaitRead (SPT.Fd r1)
_ <- forkIO $ do -- thread B
threadWaitRead (SPT.Fd r2)
yield -- switch to A, then B
-- now both are blocked
_ <- SPI.c_close w1 -- unblocking thread A fd
_ <- SPI.c_close r2 -- breaking thread B fd
yield -- kick RTS IO manager
{-
Trac #10590 exposed a bug as:
T10590: internal error: removeThreadFromDeQueue: not found
(GHC version 7.11.20150702 for x86_64_unknown_linux)
Please report this as a GHC bug: http://www.haskell.org/ghc/reportabug
-}
| ezyang/ghc | testsuite/tests/rts/T10590.hs | bsd-3-clause | 1,092 | 0 | 14 | 281 | 253 | 132 | 121 | 24 | 1 |
module T2412A where
import {-# SOURCE #-} T2412 ( Baz )
type Bar = Baz
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/T2412A.hs | bsd-3-clause | 74 | 0 | 5 | 18 | 19 | 13 | 6 | 3 | 0 |
module Y2021.M04.D22.Solution where
{--
What's the opposite of Kung Fu Fighting?
I think, actually, we're not looking for the opposite of super-cool fighting-
styles, but their dual.
So, what's the dual of Kung Fu Fighting?
That's a much easier question to ask, and to answer.
--}
import Data.Maybe (mapMaybe)
import Y2021.M04.D08.Solution (readMaybe)
data WuShu = ShaolinSoccer
| KungFuHustle
| FruitsBasket
deriving (Eq, Ord, Show, Read)
{--
Here's the problem:
>>> (read "ShaolinSoccer") :: WuShu
ShaolinSoccer
... works fine. However, if you recall from a previous exercise
... (Y2021.M04.D14.Exercise) ...
We write out these types in human-readable forms, AND they're in our data-store
in that format, SO, GIVEN THAT, how do we read a value that's been human-
readable-i-tized?
(That's a word now).
--}
dataStore :: [String]
dataStore = ["Shaolin Soccer", "Fruits Basket", "Kung Fu Hustle"]
fetchFightingStyles :: [String] -> [WuShu]
fetchFightingStyles = mapMaybe (readMaybe . concat . words)
{--
>>> fetchFightingStyles dataStore
[ShaolinSoccer,FruitsBasket,KungFuHustle]
--}
{-- BONUS -------------------------------------------------------
How do you handle erroneous values? How would you convert the following and
get as much information out as possible without failing?
--}
dataStoreBonus :: [String]
dataStoreBonus =
dataStore
++ ["Fruits Basket", "Basket o' Fruits", "Shaolin Soccer", "Orange"]
{--
>>> fetchFightingStyles dataStoreBonus
[ShaolinSoccer,FruitsBasket,KungFuHustle,FruitsBasket,ShaolinSoccer]
--}
| geophf/1HaskellADay | exercises/HAD/Y2021/M04/D22/Solution.hs | mit | 1,579 | 0 | 8 | 255 | 152 | 94 | 58 | 15 | 1 |
module Paths where
import System.FilePath
type Hash = String
type BranchName = String
out :: FilePath
out = "site" </> "out"
resultsOf, reportOf, summaryOf, logsOf :: Hash -> FilePath
graphFile :: String -> FilePath
branchSummaryOf, branchMergebaseOf :: BranchName -> FilePath
logsOf hash = "logs" </> hash <.> "log"
resultsOf hash = out </> "results" </> hash <.> "csv"
reportOf hash = out </> "reports" </> hash <.> "json"
summaryOf hash = out </> "summaries" </> hash <.> "json"
graphFile bench = out </> "graphs" </> bench <.> "json"
branchSummaryOf branch = out </> "branches" </> branch <.> "json"
branchMergebaseOf branch = out </> "branches" </> branch <.> "mergebase"
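-- An illustrative expansion of the helpers above (not part of the original
-- module), assuming a POSIX-style 'FilePath':
--
-- >>> resultsOf "0a1b2c"
-- "site/out/results/0a1b2c.csv"
-- >>> logsOf "0a1b2c"
-- "logs/0a1b2c.log"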
| nomeata/gipeda | src/Paths.hs | mit | 682 | 0 | 7 | 113 | 206 | 113 | 93 | 16 | 1 |
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
import Data.List
main = interact nub
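-- An illustrative note (not part of the original solution): 'nub' keeps only
-- the first occurrence of each character, which is exactly the reduction the
-- problem asks for, e.g.
--
-- >>> nub "aaabccba"
-- "abc"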
| cbrghostrider/Hacking | HackerRank/FunctionalProgramming/Recursion/stringReductions.hs | mit | 450 | 0 | 5 | 67 | 19 | 12 | 7 | 2 | 1 |
-----------------------------------------------------------------------------
--
-- Module : TypeNum.Test.PosInt
-- Copyright :
-- License : MIT
--
-- Maintainer : -
-- Stability :
-- Portability :
--
-- |
--
module TypeNum.Test.PosInt where
import TypeNum.Test.Common
import TypeNum.Integer.Positive
-----------------------------------------------------------------------------
posIntSpec = describe "TypeNum.Integer.Positive.PosInt" $ do
describe "is comparable at type-level (TypesEq and TypesOrd)" $ do
specify "==" $ correct(B::B( Nat2Positive 2 == PosSucc One ))
&& mistake(B::B( Nat2Positive 2 == One ))
specify ">" $ correct (B::B( Nat2Positive 2 > One ))
specify "<" $ correct (B::B( Nat2Positive 2 < Nat2Positive 3 ))
specify ">=" $ correct (B::B( Nat2Positive 3 >= Nat2Positive 3 ))
specify "<=" $ correct (B::B( Nat2Positive 2 <= Nat2Positive 3 ))
describe "has natural number operations at type-level (TypesNat)" $ do
it "provides type-level sum '(+)'"
$ correct (B::B( Nat2Positive 2 + Nat2Positive 1 == Nat2Positive 3 ))
it "provides type-level multiplication '(*)'"
$ correct (B::B( Nat2Positive 2 * Nat2Positive 3 == Nat2Positive 6 ))
it "provides type-level power '(^)'" $ example pending
describe "has integral number operations at type-level (TypesIntegral)" $ do
it "provides type-level integer division truncated toward zero 'Quot'" $
example pending
it "provides type-level integer division truncated toward negative infinity 'Div'" $
example pending
| fehu/TypeNumerics | test/TypeNum/Test/PosInt.hs | mit | 1,677 | 0 | 18 | 402 | 394 | 189 | 205 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Database.EventSafe.Types
( ResourceRef(..)
, Resource(..)
, EventPool(..)
, EventPoolM(..)
, StorableEvent(..)
) where
import Control.Monad
import Data.List
import qualified Data.ByteString.Lazy as BSL
-- | A type that can refer to a type of event.
--
-- This typeclass is used to identify a resource and filter which events concern this specific resource.
class ResourceRef e ref where
  -- | A predicate used to know if an event concerns a resource.
concerns :: e -- ^ The event.
-> ref -- ^ The reference to the resource.
-> Bool
-- | A type that can be built from a specific type of event.
class Resource e res where
-- | Build a resource from the first chronological event concerning this resource.
firstEvent :: e -- ^ The event.
-> Maybe res -- ^ The resulting resource if available.
-- | Build a resource from a previous version of this version and an event.
-- This event will be /applied/ to the resource.
applyEvent :: e -- ^ The event.
-> res -- ^ The previous version of the resource (an accumulator).
-> Maybe res -- ^ The resulting resource if available.
-- | Build a resource from a list of events
--
  -- The default implementation builds a resource from the first event using 'firstEvent'
  -- and applies the others with 'applyEvent'.
-- Should any of these functions return 'Nothing', the whole process returns 'Nothing'.
buildResource :: [e] -- ^ The events to build the resource from.
-> Maybe res -- ^ The resulting resource if available.
buildResource [] = Nothing
buildResource (fe:es) = foldl' (\mres e -> mres >>= applyEvent e) (firstEvent fe) es
-- | A structure capable of storing events.
class EventPool p e where
-- | A pool containing no events.
emptyPool :: p e
-- | Filter the events concerning a resource (see 'Resource') specified by a reference (see 'ResourceRef').
--
  -- 'filterEvents' is used to build a resource from its reference. The list of events is then passed to 'buildResource'. See 'getResource'.
filterEvents :: ResourceRef e ref
=> p e -- ^ The pool of events.
-> ref -- ^ The reference to the resource.
-> [e] -- ^ The events concerning this resource.
-- | Add an event to the pool.
addEvent :: p e -- ^ The pool of events.
-> e -- ^ The event to be added.
-> p e -- ^ A new version of the pool with the additional event.
-- | Get a resource from an 'EventPool'.
--
-- The default implementation uses 'filterEvents' and 'buildResource' in order to
-- get the events concerning a resource and build it.
getResource :: (ResourceRef e ref, Resource e res)
=> p e -- ^ The pool of events.
-> ref -- ^ The reference to the resource to build.
-> Maybe res -- ^ The resulting resource if available.
getResource pool ref = buildResource $ filterEvents pool ref
-- | Same as 'EventPool' but in a monad.
class Monad m => EventPoolM m p e where
-- | See 'emptyPool'.
emptyPoolM :: m (p e)
-- | See 'filterEvents'.
filterEventsM :: ResourceRef e ref => p e -> ref -> m [e]
-- | See 'addEvent'.
addEventM :: p e -> e -> m (p e)
-- | See 'getResource'.
getResourceM :: (ResourceRef e ref, Resource e res) => p e -> ref -> m (Maybe res)
getResourceM pool ref = buildResource `liftM` filterEventsM pool ref
-- | A type of event that can be stored on disc.
class Ord e => StorableEvent e where
-- | Encode an event to a lazy 'ByteString'.
encode :: e -> BSL.ByteString
-- | Try to decode an event from a lazy 'ByteString'.
decode :: BSL.ByteString -> Maybe e
instance Ord e => EventPool [] e where
emptyPool = []
filterEvents pool ref = filter (flip concerns ref) pool
addEvent = flip insert
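-- The definitions below are a small illustrative sketch, not part of the
-- library API: the names are invented and only show how 'ResourceRef',
-- 'Resource' and the default 'buildResource' fit together.
data AccountEvent
  = Opened Int        -- account id
  | Deposited Int Int -- account id, amount
  deriving (Eq, Show)
newtype AccountRef = AccountRef Int deriving (Eq, Show)
newtype Balance = Balance Int deriving (Eq, Show)
instance ResourceRef AccountEvent AccountRef where
  concerns (Opened i)      (AccountRef r) = i == r
  concerns (Deposited i _) (AccountRef r) = i == r
instance Resource AccountEvent Balance where
  firstEvent (Opened _) = Just (Balance 0)
  firstEvent _          = Nothing
  applyEvent (Deposited _ n) (Balance b) = Just (Balance (b + n))
  applyEvent _               b           = Just b
-- For instance, @buildResource [Opened 1, Deposited 1 5] :: Maybe Balance@
-- evaluates to @Just (Balance 5)@.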
| thoferon/eventsafe | src/Database/EventSafe/Types.hs | mit | 3,991 | 0 | 12 | 1,054 | 623 | 346 | 277 | 52 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.Octohat.Types ( Member(..)
, MemberWithKey(..)
, Team(..)
, TeamPermission(..)
, Repo(..)
, Organization(..)
, BearerToken(..)
, OrganizationName(..)
, TeamName(..)
, StatusInTeam(..)
, EmptyBody(..)
, DidDelete(..)
, PublicKey(..)
, PublicKeyFingerprint(..)
, TeamCreateRequest(..)
, GitHubReturnStatus(..)
, DidAddKey(..)
, AddPublicKeyRequest(..)
, Links(..)
, Pagination(..)
, runGitHub
, runGitHub'
, GitHub) where
import Control.Applicative
import Control.Monad.Reader (ReaderT(..))
import Control.Monad.State (StateT(..), evalStateT)
import Control.Monad.Trans.Either
import Data.Aeson
import Data.Aeson.TH
import Data.Char (toLower)
import Network.HTTP.Client
import Network.Wreq.Types
import System.Environment.Compat (lookupEnv)
import qualified Data.HashMap.Strict as HS
import qualified Data.Text as T
-- | Represents a user in GitHub. Contains no more than login and user ID
data Member =
Member { memberLogin :: T.Text
, memberId :: Integer
} deriving (Show, Eq)
-- | Represents the different permissions that a team can have in an organisation.
data TeamPermission = OwnerAccess -- ^ Default team of owners.
| PullAccess -- ^ This team will be able to view and clone its
-- repositories.
| PushAccess -- ^ This team will be able to read its
-- repositories, as well as push to them.
| AdminAccess -- ^ This team will be able to push/pull to its
-- repositories, as well as add other
-- collaborators to them.
deriving (Show,Eq)
-- | Represents a team in GitHub. Contains the team's ID, the team's name and an optional description
data Team =
Team { teamId :: Integer
, teamName :: T.Text
, teamDescription :: Maybe T.Text
, teamPermission :: TeamPermission
} deriving (Show, Eq)
-- | Represents a request to create a new team within an organization. The rest of the parameters
-- are passed in the URL. Refer to <https://developer.github.com/v3/orgs/teams/#create-team>
data TeamCreateRequest =
TeamCreateRequest { newTeamName :: T.Text
, newTeamDescription :: T.Text
, newTeamPermission :: TeamPermission
} deriving (Show, Eq)
-- | Represents an organisation in GitHub. Only has name and description
data Organization =
Organization
{ orgLogin :: T.Text
, orgDescription :: Maybe T.Text
} deriving (Show, Eq)
-- | Represents a repo in GitHub. Contains the Name, Description, and Private status
data Repo =
Repo { repoName :: T.Text
, repoDescription :: Maybe T.Text
, repoPrivate :: Bool
} deriving (Show, Eq)
-- | Represents a GitHub user with its public keys and fingerprints. A GitHub user might or might not
-- have any public keys
data MemberWithKey =
MemberWithKey { member :: Member
, memberKey :: [PublicKey]
, memberKeyFingerprint :: [PublicKeyFingerprint]
} deriving (Show, Eq)
-- | Represents a PublicKey within GitHub. It includes its ID and the public key encoded as base 64
data PublicKey =
PublicKey { publicKeyId :: Integer
, publicKey :: T.Text
} deriving (Show, Eq)
-- | Represents a Fingerprint. The `fingerprintId` field should match the fingerprint's public key ID
-- within GitHub
data PublicKeyFingerprint =
PublicKeyFingerprint { fingerprintId :: Integer
, publicKeyFingerprint :: T.Text
} deriving (Show, Eq)
-- | Some Wreq functions expect a body, but often GitHub's API requires no body. The PUT verb
-- and its implementation in Wreq is an example of this.
data EmptyBody = EmptyBody deriving (Show, Eq)
-- | When adding a user to a team GitHub will add it immediately if the user already belongs
-- to the organization the team is in. Otherwise it will send an email for the user to accept the
-- request to join the team. Functions related to adding or removing teams will return either Active
-- or Pending correspondingly.
data StatusInTeam = Active | Pending deriving (Show, Eq)
-- | Sum type to represent the success or failure of deletion of a resource within GitHub's API
data DidDelete = Deleted | NotDeleted deriving (Show, Eq)
instance FromJSON PublicKey where
parseJSON (Object o) = PublicKey <$> o .: "id" <*> o .: "key"
parseJSON _ = fail "Could not find public keys in document"
data DidAddKey = KeyAdded | KeyNotAdded
data AddPublicKeyRequest =
AddPublicKeyRequest {
addPublicKeyRequestKey :: T.Text,
addPublicKeyRequestTitle :: T.Text
}
instance FromJSON StatusInTeam where
parseJSON (Object o) =
case HS.lookup "state" o of
Just "active" -> pure Active
Just "pending" -> pure Pending
Just _ -> fail "\"state\" key not \"active\" or \"pending\""
Nothing -> (fail . maybe "No error message from GitHub" show) (HS.lookup "message" o)
parseJSON _ = fail "Expected a membership document, got something else"
instance FromJSON TeamPermission where
  parseJSON (String p) =
    case p of
      "pull" -> pure PullAccess
      "push" -> pure PushAccess
      "admin" -> pure AdminAccess
      "owner" -> pure OwnerAccess
      _ -> fail "Expected a valid team permission"
  parseJSON _ = fail "Expected a team permission, got something else"
instance ToJSON TeamPermission where
toJSON p =
case p of
PullAccess -> String "pull"
PushAccess -> String "push"
AdminAccess -> String "admin"
OwnerAccess -> String "owner"
$(deriveJSON defaultOptions { fieldLabelModifier = drop 6 . map toLower } ''Member)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 4 . map toLower } ''Team)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 4 . map toLower } ''Repo)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 3 . map toLower } ''Organization)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 7 . map toLower } ''TeamCreateRequest)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 19 . map toLower } ''AddPublicKeyRequest)
-- | Error codes GitHub might return when attempting to use an API endpoint
data GitHubReturnStatus = InvalidJSON -- ^ GitHub could not parse the JSON document sent
| ValidationFailed -- ^ Validation failed, an example of this error
-- is trying to create teams with the same name
-- within one organization
| InternalError -- ^ In case GitHub returns 500 Internal Server Error
-- to some request
| NotFound -- ^ When a resource has not been found. It does not
-- imply the resource does not exist
| NotAllowed -- ^ Usually returned after GitHub replies with 403 Forbidden.
-- The user might not have permission to access/modify
-- that resource
| AllOk -- ^ This should never be returned
| RequiresAuthentication -- ^ Accesing this resource requires authentication
| UnexpectedJSON String -- ^ This library has failed to fulfill its purpose and could not
-- handle GitHub's response
deriving (Show, Eq)
-- | Instance that does not add anything to the body or headers of a PUT request
instance Putable EmptyBody where
putPayload EmptyBody req = return $ req {requestBody = RequestBodyLBS ""}
instance Postable TeamCreateRequest where
postPayload createRequest req = return $ req { requestBody = RequestBodyLBS (encode createRequest)}
instance Postable AddPublicKeyRequest where
postPayload createRequest req = return $ req { requestBody = RequestBodyLBS (encode createRequest)}
-- | GitHub's OAuth 2.0 bearer token. This is simply added in an
-- Authorization header
newtype BearerToken = BearerToken { unBearerToken :: T.Text } deriving Show
-- | OrganizationName is added in order to have type safety in functions where the
-- Organization name and the Team name are both strings and may be confused
newtype OrganizationName = OrganizationName { unOrganizationName :: T.Text } deriving Show
-- | TeamName is added in order to have type safety in functions where the
-- Team name and the Organization name are both strings and may be confused
newtype TeamName = TeamName { unTeamName :: T.Text } deriving Show
-- | Links are used in the Pagination object
data Links = Links { linkNext :: Maybe Link, linkLast :: Maybe Link
, linkFirst :: Maybe Link, linkPrev :: Maybe Link } deriving Show
-- | Pagination options that can be set, including the page number, and the per_page
data Pagination = Pagination { perPage :: Int, page :: Int, links :: Links, recurse :: Bool } deriving Show
defPagination :: Pagination
defPagination = Pagination 30 1 (Links Nothing Nothing Nothing Nothing) True
-- | The monad transformer where all operations run. Supports initial configuration
-- through a Reader monad and the possibility of failure through Either
type GitHub = EitherT GitHubReturnStatus (ReaderT BearerToken (StateT Pagination IO))
-- | Executes a computation built within the GitHub monad returning an Either within
-- the IO data type using the provided token
runGitHub' :: GitHub a -> BearerToken -> IO (Either GitHubReturnStatus a)
runGitHub' comp token = evalStateT (runReaderT (runEitherT comp) token) defPagination
-- | Executes a computation built within the GitHub monad returning an Either within
-- the IO data type. Reads an API token from an environment variable named GITHUB_TOKEN
runGitHub :: GitHub a -> IO (Either GitHubReturnStatus a)
runGitHub comp = do
maybeToken <- lookupEnv "GITHUB_TOKEN"
case maybeToken of
Just acquiredToken -> runGitHub' comp (BearerToken $ T.pack acquiredToken)
Nothing -> fail "Couldn't find GITHUB_TOKEN in environment"
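-- Illustrative usage: for any computation @act :: GitHub a@ defined elsewhere
-- in the library, either supply a token explicitly (the token string below is
-- only a placeholder),
-- > runGitHub' act (BearerToken "my-oauth-token")
-- or let 'runGitHub' read it from the GITHUB_TOKEN environment variable:
-- > runGitHub act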
| finlay/octohat | src/Network/Octohat/Types.hs | mit | 11,378 | 0 | 14 | 3,673 | 1,746 | 996 | 750 | 153 | 2 |
{-
Author: Pranav Vishnu Ramabhadran
-}
{-# LANGUAGE OverloadedStrings, DeriveGeneric #-}
module Responses where
import qualified Data.ByteString as BS
import Data.Text (Text, unpack, split, pack, dropAround, toLower, words)
import Data.Aeson
import GHC.Generics
import Data.List
import qualified Data.Trie as T
import qualified Codec.Binary.UTF8.String as C
import Test.HUnit
-- | Type for tweets. Use only the fields you are interested in.
-- The parser will filter them. To see a list of available fields
-- see <https://dev.twitter.com/docs/platform-objects/tweets>.
data Tweet =
Tweet { text :: !Text,
id :: Int
} deriving (Show, Generic)
instance FromJSON Tweet
instance ToJSON Tweet
-- ||| Functions related to Tries and building responses.
-- | Removes spaces and punctuation from around the words and converts to lowercase
cleanString :: Text -> Text
cleanString = toLower . dropAround (==' ') . dropAround (== '?') . dropAround (== ',') . dropAround (== '.') . dropAround (== '!')
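-- For example: cleanString "Haskell!" == "haskell" and cleanString "wow," == "wow".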
-- | Keeps adding elements to Trie until there are no more.
addToTrie :: [String] -> T.Trie (String)
addToTrie inp =
(go inp T.empty)
where
go :: [String] -> T.Trie (String) -> T.Trie (String)
go [] trie = trie
go (x:xs) t =
let
new1 :: [String]
new1 = map (unpack . cleanString) $ split (==',') (pack x) in
let
temp :: BS.ByteString
temp = BS.pack $ C.encode $ new1 !! 0 in
let
temp2 :: String
temp2 = new1 !! 1 in
let
new2 :: (BS.ByteString, String)
new2 = (temp, temp2) in
go xs (T.insert temp temp2 t)
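-- Each input line is expected to be a comma-separated "key,value" pair; for
-- example (illustrative, not the real file contents):
--   addToTrie ["kung fu,kung fu", "shaolin,kung fu"]
-- builds a trie that maps both keys to the value "kung fu".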
-- | Iterates through a list of words until it sees a known topic.
searchForTopic :: T.Trie(String) -> [String] -> Maybe String
searchForTopic topics tweet
| T.null topics = Nothing
| otherwise =
go topics tweet
where
go :: T.Trie(String) -> [String] -> Maybe String
go _ [] = Nothing
go tpcs (x:xs) =
case (T.lookup (BS.pack $ C.encode x) tpcs) of
Just a -> Just a
Nothing -> go tpcs xs
-- | Creates an emotion score for a given tweet
scoreEmotions :: T.Trie(String) -> [String] -> Int
scoreEmotions emotions tweet
| T.null emotions = 0
| otherwise =
go emotions tweet 0
where
go _ [] score = score
go emts (x:xs) scr =
case (T.lookup (BS.pack $ C.encode x) emts) of
Just a -> go emts xs (scr + (read a))
Nothing -> go emts xs scr
-- | Builds a Trie from file for topics
buildTopics :: IO (T.Trie(String))
buildTopics = do
file <- readFile "topics.txt"
return $ addToTrie (lines file)
-- | Builds a Trie from file for responses
buildResponses :: IO (T.Trie(String))
buildResponses = do
file <- readFile "responses.txt"
return $ addToTrie (lines file)
-- | Builds a Trie from file for emotion score
buildEmotions :: IO (T.Trie(String))
buildEmotions = do
file <- readFile "emotions.txt"
return $ addToTrie (lines file)
-- ||| Functions related to actually taking the tweet and creating a response.
-- | Gets just the words as Strings from a Tweet. Gets rid of empty strings
-- and extraneous symbols as well as converts everything to lowercase.
getWords :: Tweet -> [String]
getWords twt = filter (/="") $
map (unpack . cleanString) (Data.Text.words (text twt))
-- | Create combinations of words that occur together
getText :: [String] -> [String]
getText [] = []
getText [x] = [x]
getText (x:y:xs) = x:(x ++ " " ++ y):(getText (y:xs))
-- | Functions that take in topic info and respond with appropriate levels
-- of enthusiasm.
responseHappy1 :: String -> String
responseHappy1 text = "Ehhh, " ++ text ++ " is okay I guess #whatever"
responseHappy2 :: String -> String
responseHappy2 text = "Whoooo, " ++ text ++ " is so great! #killinit"
responseHappy3 :: String -> String
responseHappy3 text = "OMG, " ++ text ++ " is literally the best ever #besthingever"
responseSad1 :: String -> String
responseSad1 text = "Uhhh, " ++ text ++ " is whatever #whocares"
responseSad2 :: String -> String
responseSad2 text = "Ugh, " ++ text ++ " is kinda bad #neveragain"
responseSad3 :: String -> String
responseSad3 text = "Just no. " ++ text ++ " was absolutely the worst thing to happen to humanity"
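-- For example: responseHappy2 "kung fu" == "Whoooo, kung fu is so great! #killinit"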
-- | The function that ties everything together. We take in a tweet and spit out
-- an appropriate response.
respond :: Tweet -> IO(String)
respond tweet = do
topics <- buildTopics
responses <- buildResponses
emotions <- buildEmotions
text <- return $ getText $ getWords tweet
emo <- return $ scoreEmotions emotions text
case (searchForTopic topics text) of
Nothing -> return "Sadly, I have no idea what you're talking about"
Just a -> do
case (searchForTopic responses [a]) of
Nothing -> return "Sadly, I have no idea what you're talking about"
Just r ->
if (emo >= 4) then return $ responseHappy3 r
else if (emo >= 2) then return $ responseHappy2 r
else if (emo >= 0) then return $ responseHappy1 r
else if (emo >= -2) then return $ responseSad1 r
else if (emo >= -4) then return $ responseSad2 r
else return $ responseSad3 r
| pvrnav/haskell-twitter-bot | Responses.hs | mit | 5,123 | 36 | 22 | 1,137 | 1,556 | 813 | 743 | 110 | 8 |
doubleMe x = x + x
doubleUs x y = doubleMe x + doubleMe y
doubleSmallNumber x = if x > 100 then x else x*2
noneArg = "None Arg" | williamHuang5468/LearningHaskell | CH2 Ready Go/FirstFunction.hs | mit | 129 | 0 | 6 | 31 | 62 | 31 | 31 | 4 | 2 |
module Handler.Sitemap (getSitemapR) where
import Import
import Yesod.Sitemap
--import Stackage.Database
getSitemapR :: Handler TypedContent
getSitemapR = track "Handler.Sitemap.getSitemapR" $ sitemap $ do
cacheSeconds $ 60 * 60 * 6
priority 1.0 $ HomeR
priority 0.9 $ OldSnapshotBranchR LtsBranch []
priority 0.8 $ OldSnapshotBranchR NightlyBranch []
priority 0.7 $ AllSnapshotsR
priority 0.7 $ PackageListR
priority 0.6 $ AuthorsR
priority 0.6 $ InstallR
priority 0.6 $ OlderReleasesR
{- FIXME
runDBSource $ do
--selectAll $= ltsSitemaps
return () $= snapshotSitemaps -- FIXME
return () $= packageMetadataSitemaps -- FIXME
selectAll :: (PersistEntity val, PersistEntityBackend val ~ YesodPersistBackend App)
=> Source (YesodDB App) val
selectAll = selectSource [] [] $= CL.map entityVal
clNub :: (Monad m, Eq a) => Conduit a m a
clNub = evalStateC [] $ awaitForever $ \a -> do
seen <- State.get
unless (a `elem` seen) $ do
State.put (a:seen)
yield a
ltsSitemaps :: SitemapFor Lts
ltsSitemaps = sequenceConduits [ltsMajorSitemap, ltsSitemap] >> return ()
ltsMajorSitemap :: SitemapFor Lts
ltsMajorSitemap = CL.map ltsMajor =$= clNub =$= awaitForever go
where
go ver = priority 0.55 $ LtsR [pack (show ver)]
ltsSitemap :: SitemapFor Lts
ltsSitemap = awaitForever go
where
show' = pack . show
go lts = url $ LtsR [slug]
where
slug = show' (ltsMajor lts) <> "." <> show' (ltsMinor lts)
-}
{-
snapshotSitemaps :: SitemapFor Snapshot
snapshotSitemaps = awaitForever go
where
go s = do
url' StackageHomeR
url' StackageCabalConfigR
url' StackageIndexR
url' SnapshotPackagesR
url' DocsR
url' HoogleR
where
url' = url . SnapshotR (snapshotName s)
packageMetadataSitemaps :: SitemapFor Package
packageMetadataSitemaps = awaitForever go
where
go m = do
url' PackageR
url' PackageSnapshotsR
where
url' floc = url $ floc $ PackageNameP $ packageName m
url :: Route App -> Sitemap
url loc = yield SitemapUrl
{ sitemapLoc = loc
, sitemapLastMod = Nothing
, sitemapChangeFreq = Nothing
, sitemapPriority = Nothing
}
-}
priority :: Monad m => Double -> Route App -> ConduitT i (SitemapUrl (Route App)) m ()
priority p loc = yield SitemapUrl
{ sitemapLoc = loc
, sitemapLastMod = Nothing
, sitemapChangeFreq = Nothing
, sitemapPriority = Just p
}
| fpco/stackage-server | src/Handler/Sitemap.hs | mit | 2,511 | 0 | 12 | 625 | 243 | 118 | 125 | 20 | 1 |
isPalindrome :: (Eq a) => [a] -> Bool
isPalindrome xs = xs == (reverse xs)
main :: IO ()
main = do
let last = isPalindrome [1, 2, 2, 1]
print last
| zeyuanxy/haskell-playground | ninety-nine-haskell-problems/vol1/06.hs | mit | 156 | 0 | 11 | 41 | 85 | 44 | 41 | 6 | 1 |
{-# LANGUAGE Safe #-}
{- |
This module re-exports the routing and controller modules.
See each module for their corresponding documentation.
Though you can implement a controller using the methods supplied by
this module (actually, "Hails.Web.Router"), we recommend using the
DSLs provided by "Hails.Web.Frank" or "Hails.Web.REST".
-}
module Hails.Web (
module Hails.Web.Router
, module Hails.Web.Responses
, module Hails.Web.Controller
, module Hails.Web.User
) where
import Hails.Web.Router
import Hails.Web.Responses
import Hails.Web.Controller
import Hails.Web.User
| scslab/hails | Hails/Web.hs | mit | 588 | 0 | 5 | 89 | 62 | 43 | 19 | 10 | 0 |
module Timestamp
(Timestamp)
where
import Data.Ratio
import Data.Word
data Timestamp = Timestamp Word64
instance Show Timestamp where
show (Timestamp a) = show a
instance Eq Timestamp where
(==) (Timestamp a) (Timestamp b) = (==) a b
instance Ord Timestamp where
compare (Timestamp a) (Timestamp b) = compare a b
instance Num Timestamp where
(+) (Timestamp a) (Timestamp b) = Timestamp (a + b)
(*) (Timestamp a) (Timestamp b) = Timestamp (a * b)
(-) (Timestamp a) (Timestamp b) = Timestamp (a - b)
negate (Timestamp a) = Timestamp (negate a)
abs (Timestamp a) = Timestamp (abs a)
signum (Timestamp a) = Timestamp (signum a)
fromInteger a = Timestamp $ fromInteger a
instance Real Timestamp where
  toRational (Timestamp a) = fromIntegral a % 1
instance Enum Timestamp where
succ (Timestamp a) = Timestamp (succ a)
pred (Timestamp a) = Timestamp (pred a)
toEnum a = Timestamp $ fromIntegral a
fromEnum (Timestamp a) = fromIntegral a
enumFrom (Timestamp a) = map Timestamp (enumFrom a)
enumFromThen (Timestamp a) (Timestamp b) = map Timestamp (enumFromThen a b)
enumFromTo (Timestamp a) (Timestamp b) = map Timestamp (enumFromTo a b)
enumFromThenTo (Timestamp a) (Timestamp b) (Timestamp c) =
map Timestamp (enumFromThenTo a b c)
instance Integral Timestamp where
toInteger (Timestamp a) = fromIntegral a
quotRem (Timestamp a) (Timestamp b) =
let (c, d) = quotRem a b
in (Timestamp c, Timestamp d)
| IreneKnapp/ozweb | Haskell/Timestamp.hs | mit | 1,455 | 0 | 10 | 287 | 658 | 328 | 330 | 36 | 0 |
module Euler006 (euler6) where
euler6 :: Int
euler6 = (sumn 100) ^ 2 - sum (map (^2) [1..100])
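-- The answer is the square of the sum minus the sum of the squares; with 10
-- instead of 100 the same expression gives 55^2 - 385 = 2640. The helper
-- below uses the identity 1 + 2 + ... + n = n*(n+1)/2, e.g. sumn 100 == 5050.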
sumn :: Int -> Int
sumn x = (x * (x + 1)) `div` 2 | TrustNoOne/Euler | haskell/src/Euler006.hs | mit | 147 | 0 | 9 | 35 | 90 | 51 | 39 | 5 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Language.Vigil.Compile where
import Language.Common.Annotation
import Language.Common.Misc ( unFix )
import Language.GoLite.Types ( stringFromSymbol )
import Language.Vigil.Syntax as T
import Language.Vigil.Syntax.TyAnn
import Language.Vigil.Syntax.Basic
import Language.Vigil.Types
import Language.X86.Core
import Language.X86.Mangling ( mangleFuncName )
import Language.X86.Virtual
import Language.X86.Virtual.Registers
import Control.Monad.Reader
import Control.Monad.State
import Data.Functor.Foldable ( cata )
import qualified Data.Map as M
import Data.List ( foldl' )
import Debug.Trace ( trace )
newtype Compiler label a
= Compiler
{ unCompiler
:: ReaderT (CompilerEnv label) (
VirtualRegisterAllocatorT (VirtualAsm label)
) a
}
deriving
( Functor
, Applicative
, Monad
, MonadReader (CompilerEnv label)
)
instance MonadVirtualRegisterAllocator (Compiler label) where
freshVirtualRegister ram rs
= Compiler $ lift $ freshVirtualRegister ram rs
-- | The read-only environment of the function compiler.
data CompilerEnv label
= CompilerEnv
{ _identMap :: M.Map GlobalId (RegisterAccessMode, VirtualOperand label)
-- ^ Precomputed map that assigns each GlobalId used by the function to
-- a virtual operand that represents that data.
}
deriving (Eq, Ord, Read, Show)
-- | Looks up how to access the given data.
lookupIdent
:: GlobalId
-> (String -> Directness String)
-> Compiler label (RegisterAccessMode, VirtualOperand label)
lookupIdent gid dir = do
imap <- asks _identMap
pure $ case M.lookup gid imap of
-- if it's not in the ident map for the function, then we need to
-- generate a reference outside the function.
Nothing -> case unFix $ gidTy gid of
-- if it's a builtin, then we have to create an external reference,
-- which will eventually be linked to the runtime.
BuiltinType _ ->
( IntegerMode
, External . Direct . stringFromSymbol $ gidOrigName gid
)
-- anything else is something on the top level, so we generate an
-- internal reference
FuncType {} ->
( IntegerMode
, Internal $ Direct $ stringFromSymbol $ gidOrigName gid
)
_ ->
( IntegerMode
, Internal $ dir $ stringFromSymbol $ gidOrigName gid
)
Just o -> o
-- | Emit raw assembly.
asm :: VirtualAsm label a -> Compiler label a
asm = Compiler . lift . lift
-- | Compiles a function.
runCompiler :: TyAnnFunDecl -> VirtualAsm label ()
runCompiler decl
= runVirtualRegisterAllocatorT $ do
env <- makeCompilerEnv decl
runReaderT (unCompiler (compileFunction decl)) env
data CompilerEnvAllocState label
= CompilerEnvAllocState
{ stkOffset :: Displacement
, intregs :: [IntegerRegister]
, floatregs :: [Int]
, identMap :: M.Map GlobalId (RegisterAccessMode, VirtualOperand label)
}
deriving (Eq, Ord, Read, Show)
makeCompilerEnv
:: forall label. TyAnnFunDecl
-> VirtualRegisterAllocatorT (VirtualAsm label) (CompilerEnv label)
makeCompilerEnv (FunDecl { _funDeclArgs = args, _funDeclVars = vars })
= CompilerEnv <$> is where
is :: VirtualRegisterAllocatorT
(VirtualAsm label)
(M.Map GlobalId (RegisterAccessMode, VirtualOperand label))
is = identMap <$> execStateT go initial
go :: StateT
(CompilerEnvAllocState label)
(VirtualRegisterAllocatorT (VirtualAsm label))
()
go = do
forM_ vars $ \(VarDecl gid) -> do
v <- case unFix $ gidTy gid of
FloatType _ -> (FloatingMode,) . Register . Direct
<$> lift (freshVirtualRegister FloatingMode Extended64)
_ -> (IntegerMode,) . Register . Direct
<$> lift (freshVirtualRegister IntegerMode Extended64)
record gid v
forM_ args $ \(VarDecl gid) -> do
case unFix $ gidTy gid of
FloatType _ -> paramRegAssign nextfreg FloatingMode xmm' gid
_ -> paramRegAssign nextireg IntegerMode rXx gid
paramRegAssign
:: StateT
(CompilerEnvAllocState label)
(VirtualRegisterAllocatorT (VirtualAsm label))
(Maybe a)
-> RegisterAccessMode
-> (a -> SizedVirtualRegister)
-> GlobalId
-> StateT
(CompilerEnvAllocState label)
(VirtualRegisterAllocatorT (VirtualAsm label))
()
paramRegAssign regAllocator ram boxer gid = do
m <- regAllocator
case m of
Just reg -> record gid (ram, Register . Direct $ boxer reg)
Nothing -> do
offset <- nextparam (8 :: Displacement)
record gid (ram, Register $ Indirect (Offset offset $ rXx Rbp))
fixhw64 = SizedRegister Extended64 . FixedHardwareRegister
xmm' = fixhw64 . hwxmm
rXx = fixhw64 . IntegerHwRegister
-- the initial state for the parameter and local allocator
initial :: CompilerEnvAllocState label
initial = CompilerEnvAllocState
{ stkOffset = 16
, intregs = [Rdi, Rsi, Rdx, Rcx, R8, R9]
, floatregs = [0..7]
, identMap = M.empty
}
-- safe head from a component of the state tuple
nextreg
:: (CompilerEnvAllocState label -> [a])
-> ( [a]
-> CompilerEnvAllocState label
-> CompilerEnvAllocState label
)
-> StateT
(CompilerEnvAllocState label)
(VirtualRegisterAllocatorT (VirtualAsm label))
(Maybe a)
nextreg getter setter = do
regs <- gets getter
case regs of
[] -> pure Nothing
(x:xs) -> do
modify (setter xs)
pure (Just x)
record k v = modify $ \s -> s { identMap = M.insert k v (identMap s) }
-- get the next integer register, or 'Nothing' if there are none
-- available
nextireg = nextreg intregs (\x s -> s { intregs = x })
-- get the next floating register, or 'Nothing' if there are none
-- available
nextfreg = nextreg floatregs (\x s -> s { floatregs = x })
nextmemory getter setter increment = do
n <- gets getter
modify (setter $ n + increment)
pure n
nextparam = nextmemory stkOffset (\x s -> s { stkOffset = x } )
-- | Compile a function
compileFunction
:: TyAnnFunDecl
-> Compiler label ()
compileFunction decl = wrapFunction $ compileBody none $ _funDeclBody decl where
none :: (Maybe label, Maybe label)
none = (Nothing, Nothing)
compileBody
:: (Maybe label, Maybe label)
-> [TyAnnStatement]
-> Compiler label ()
compileBody t = mapM_ (compileStmt t)
compileStmt
:: (Maybe label, Maybe label)
-> TyAnnStatement
-> Compiler label ()
compileStmt t = ($ t) . cata f where
f :: TyAnnStatementF ((Maybe label, Maybe label) -> Compiler label ())
-> (Maybe label, Maybe label) -> Compiler label ()
f stmt ml@(mEnd, mBeginning) = case stmt of
ExprStmt expr -> void $ compileExpr expr
CondExprStmt cond -> void $ compileCondExpr cond
Assign (Ann ty ref) expr -> do
(ramR, r) <- compileRef ref
(ramE, o) <- compileExpr expr
let copy s = do
mov rdi o
call (External $ Direct s)
mov r rax
asm $ case let t = unFix ty in trace (show t) t of
IntType _ -> mov r o
FloatType _ -> undefined -- TODO
StringType -> copy (mangleFuncName "deepcopy_array")
ArrayType _ _ -> copy (mangleFuncName "deepcopy_array")
SliceType _ -> copy (mangleFuncName "shallowcopy_slice")
StructType _ _ -> copy (mangleFuncName "deepcopy_struct")
Initialize i -> do
let diRef = mov rdi (Internal $ Direct (stringFromSymbol (gidOrigName i) ++ "_ini"))
let directI = lookupIdent i Direct
let indirectI = lookupIdent i (Indirect . Offset 0)
let cEx s i' = do
diRef
call (External $ Direct s)
mov i' rax
case unFix (gidTy i) of
IntType _ -> do
(ram, i') <- indirectI
asm $ mov i' (Immediate $ ImmI 0)
FloatType _ -> undefined -- TODO
StringType ->
asm . cEx (mangleFuncName "new_array") . snd =<< directI
ArrayType _ _ ->
asm . cEx (mangleFuncName "new_array") . snd =<< directI
SliceType _ ->
asm . cEx (mangleFuncName "new_slice") . snd =<< directI
PrintStmt vs -> forM_ vs $ \(Ann ty v) -> do
(ram, o) <- compileRef v
let sty = serializeType ty
asm $ withScratch $ do
mov rdi (Immediate $ ImmI $ fromIntegral sty)
mov rsi o
call (External . Direct $ mangleFuncName "goprint")
ReturnStmt (Just (Ann _ ref)) -> do
(ram, r) <- compileRef ref
asm $ mov rax r
ReturnStmt Nothing -> do
asm $ xor rax rax
IfStmt cond thenBody me -> do
-- sets the flags and gives us the jump variant
j <- compileCondExpr cond
l <- asm newLabel
asm $ jump j (Label l)
-- check for an else clause
mapM_ ($ ml) thenBody
asm $ setLabel l
case me of
Nothing -> pure ()
Just elseBody -> mapM_ ($ ml) elseBody
SwitchStmt
{ switchGuard = mg
, switchCases = cs
, switchDefaultCase = c
} -> do
-- to jump out of the switch
switchEnd <- asm newLabel
-- TODO refactor this (both cases are *essentially* the
                -- same).
case mg of
Just expr -> do
(ramG, g) <- compileExpr expr
forM_ cs $ \(hd, bd) -> do
postCaseBody <- asm newLabel
-- to jump over the case body to
-- the next case head
preCaseBody <- asm newLabel
-- compile the conditions to check
-- to enter this case
forM_ hd $ \(e, ec) -> do
-- TODO investigate whether
-- continue/break can appear in these
-- weird contexts
mapM_ ($ none) ec
(ramE', e') <- compileExpr e
asm $ do
cmp e' g
jump OnEqual (Label preCaseBody)
-- if none of the alternatives in
-- the case head match the guard,
-- then jump past the case body
asm $ do
jump Unconditionally (Label postCaseBody)
setLabel preCaseBody
mapM_ ($ (Just switchEnd, mBeginning)) bd
asm $ do
jump Unconditionally (Label switchEnd)
setLabel postCaseBody
if null c
-- no default case: jump past the switch
then asm $ jump Unconditionally (Label switchEnd)
else do
mapM_ ($ (Just switchEnd, mBeginning)) c
Nothing -> do
forM_ cs $ \(hd, bd) -> do
postCaseBody <- asm newLabel
preCaseBody <- asm newLabel
forM_ hd $ \(e, ec) -> do
mapM_ ($ none) ec
(ramE, e') <- compileExpr e
asm $ do
test e' e'
jump OnNotEqual (Label preCaseBody)
asm $ do
jump Unconditionally (Label postCaseBody)
setLabel preCaseBody
mapM_ ($ (Just switchEnd, mBeginning)) bd
asm $ do
jump Unconditionally (Label switchEnd)
setLabel postCaseBody
-- default case
if null c
then asm $ jump Unconditionally (Label switchEnd)
else do
mapM_ ($ (Just switchEnd, mBeginning)) c
asm $ setLabel switchEnd
ForStmt Nothing body -> do
(forEnd, forStart) <- (,) <$> asm newLabel <*> asm newLabel
mapM_ ($ (Just forEnd, Just forStart)) body
asm $ setLabel forEnd
ForStmt (Just (code, cond)) body -> do
(forEnd, forStart) <- (,) <$> asm newLabel <*> asm newLabel
asm $ setLabel forStart
mapM_ ($ (Just forEnd, Just forStart)) code
j <- compileCondExpr cond
asm $ jump j (Label forEnd)
mapM_ ($ (Just forEnd, Just forStart)) body
asm $ do
jump Unconditionally (Label forStart)
setLabel forEnd
BreakStmt -> maybe
(error "invariant violation")
(asm . jump Unconditionally . Label)
mEnd
ContinueStmt -> maybe
(error "invariant violation")
(asm . jump Unconditionally . Label)
mBeginning
-- | Generates code to compute the integer absolute value of the given
-- 'VirtualOperand' in place.
integerAbs :: VirtualOperand label -> Compiler label (VirtualOperand label)
integerAbs o = do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
s <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t o
mov s o
sar t (Immediate $ ImmI 31)
xor s t
sub s t
pure s
floatingAbs :: VirtualOperand label -> Compiler label (VirtualOperand label)
floatingAbs o = undefined
-- | Generates the code to evaluate an expression. The computed
-- 'VirtualOperand' contains the result of the expression and can be accessed
-- via the returned mode.
compileExpr
:: TyAnnExpr
-> Compiler label (RegisterAccessMode, VirtualOperand label)
compileExpr (Ann ty e) = case e of
Conversion dstTy (Ann srcTy ref) -> do
(ram, o) <- compileRef ref
case (unFix dstTy, unFix srcTy) of
(IntType _, IntType _) -> pure (IntegerMode, o)
(FloatType _, FloatType _) -> pure (FloatingMode, o)
(FloatType _, IntType _) -> do
t <- Register . Direct <$> freshVirtualRegister FloatingMode Extended64
asm $ cvt ScalarDouble SingleInteger t o
pure (FloatingMode, t)
(IntType _, FloatType _) -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ cvt SingleInteger ScalarDouble t o
pure (IntegerMode, t)
_ -> error "Impossible conversion"
Binary v1 op v2 -> do
((ram1, r1), (ram2, r2)) <- (,) <$> compileVal v1 <*> compileVal v2
(ram1,) <$> case op of
Plus -> case ram1 of
IntegerMode -> do
t <- Register . Direct
<$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
add t r2
pure t
FloatingMode -> do
t <- Register . Direct
<$> freshVirtualRegister FloatingMode Extended64
asm $ do
mov t r1
addsse ScalarDouble t r2
pure t
Minus -> case ram1 of
IntegerMode -> do
t <- Register . Direct
<$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
sub t r2
pure t
FloatingMode -> do
t <- Register . Direct
<$> freshVirtualRegister FloatingMode Extended64
asm $ do
mov t r1
subsse ScalarDouble t r2
pure t
Times -> case ram1 of
IntegerMode -> do
t <- Register . Direct
<$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
imul t r2
pure t
FloatingMode -> do
t <- Register . Direct
<$> freshVirtualRegister FloatingMode Extended64
asm $ do
mov t r1
mulsse ScalarDouble t r2
pure t
Divide -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov rax r1 -- low 64 bits of dividend
mov t r2
cqo rdx rax -- sign extend rax into rdx
idiv rdx rax t -- perform the division
mov t rax
pure t
Modulo -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
xor rdx rdx
mov rax r1
mov t r2
cqo rdx rax
idiv rdx rax t
mov t rdx
pure t
ShiftLeft -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
sal t r2
pure t
ShiftRight -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
sar t r2
pure t
BitwiseAnd -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
bwand t r2
pure t
BitwiseOr -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t r1
bwor t r2
pure t
BitwiseAndNot -> do
t1 <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
t2 <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t1 r1
mov t2 r2
neg1 t2
bwand t1 t2
pure t1
BitwiseXor -> do
t1 <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t1 r1
xor t1 r2
pure t1
Unary op v -> case op of
Positive -> do
-- compute the integer absolute value
-- http://stackoverflow.com/questions/2639173/x86-assembly-abs-implementation
(ram, o) <- compileVal v
case ram of
IntegerMode -> (IntegerMode,) <$> integerAbs o
FloatingMode -> (FloatingMode,) <$> floatingAbs o
Negative -> do
(ram, o) <- compileVal v
case ram of
IntegerMode -> do
t <- Register . Direct
<$> freshVirtualRegister IntegerMode Extended64
asm $ do
mov t o
neg2 t
pure (IntegerMode, t)
FloatingMode -> do
t <- Register . Direct
<$> freshVirtualRegister FloatingMode Extended64
asm $ do
mov t o
pxor t t
pure (FloatingMode, t)
BitwiseNot -> do
(ram, o) <- compileVal v
case ram of
FloatingMode -> error "invariant violation: bitwise operation on float"
IntegerMode -> do
asm $ neg1 o
pure (IntegerMode, o)
Ref (Ann _ r) -> compileRef r
Cond c -> do
j <- compileCondExpr c
t <- Register . Direct <$> freshVirtualRegister IntegerMode Low8
asm $ do
mov rax (Immediate $ ImmI 0)
setc (invertFlag j)
$ Register
$ Direct
$ SizedRegister Low8
$ FixedHardwareRegister
$ IntegerHwRegister
$ Rax
mov t rax
pure (IntegerMode, t)
T.Call i vs -> do
-- for functions, the access mode will always be integer
(ram, f) <- lookupIdent i Direct
asm $ scratch Save
prepareCall vs
asm $ call f
asm $ scratch Load
case unFix ty of
VoidType -> pure (IntegerMode, rax)
FloatType _ -> do
t <- Register . Direct <$> freshVirtualRegister FloatingMode Extended64
asm $ movq t (xmm 0)
pure (FloatingMode, t)
_ -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ mov t rax
                pure (IntegerMode, t)
InternalCall name vs -> do
asm $ scratch Save
prepareCall vs
asm $ call (External . Direct $ name)
asm $ scratch Load
case unFix ty of
FloatType _ -> do
t <- Register . Direct <$> freshVirtualRegister FloatingMode Extended64
asm $ movq t (xmm 0)
pure (FloatingMode, t)
_ -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ mov t rax
pure (IntegerMode, t)
-- | Compiles a conditional expression. These translate to comparisons in x86,
-- which set flags, so the only thing that a caller would need to know is which
-- jump variant to invoke in order to perform the correct branch.
compileCondExpr
:: TyAnnCondExpr
-> Compiler label FlagCondition
compileCondExpr e = case e of
CondRef (Ann _ ref) -> do
(ram, r) <- compileRef ref
-- this will always be a boolean, so integer
case ram of
FloatingMode -> error "boolean cannot be floating"
IntegerMode -> do
asm $ test r r
pure OnEqual -- jump if false, so zero
BinCond v1 op v2 -> do
let simpleCompare j ssep ram o1 o2 = case ram of
IntegerMode -> asm (cmp o1 o2) *> pure j
FloatingMode -> do
t <- Register . Direct
<$> freshVirtualRegister FloatingMode Extended64
asm $ do
movq t o1
                        cmpsse ssep ScalarDouble t o2
movq rax t
test rax rax
pure OnEqual
-- don't compile v2 just yet so we can respect short-circuiting
case op of
-- both branches of a logical or must be booleans (i.e. integers)
-- at this stage, so we can disregard handling floats
LogicalOr -> do
true <- asm newLabel
-- compile the first operand
(ram1, o1) <- compileVal v1
when (ram1 == FloatingMode) $ error "boolean cannot be floating"
asm $ do
-- if it's true, jump over the second operand
test o1 o1
jump OnNotEqual (Label true)
-- compile the second operand
(ram2, o2) <- compileVal v2
when (ram2 == FloatingMode) $ error "boolean cannot be floating"
asm $ do
-- set the flags for whether it's true
test o2 o2
asm $ setLabel true
-- at this point, ZF = 1 if either one of the operands is true.
-- Hence to jump into the else branch, we would have to jump on
-- ZF = 0, i.e. OnEqual
pure OnEqual
LogicalAnd -> do
false <- asm newLabel
(ram1, o1) <- compileVal v1
when (ram1 == FloatingMode) $ error "boolean cannot be floating"
asm $ do
test o1 o1
jump OnEqual (Label false)
(ram2, o2) <- compileVal v2
when (ram2 == FloatingMode) $ error "boolean cannot be floating"
asm $ do
test o2 o2
asm $ setLabel false
-- At this point, ZF = 1 if *both* operands are true.
-- Hence, to jump into the else branch, we would have to jump
-- on ZF = 0, i.e. OnEqual.
pure OnEqual
Equal -> do
((ram1, o1), (ram2, o2)) <- (,) <$> compileVal v1 <*> compileVal v2
simpleCompare OnNotEqual SseEqual ram1 o1 o2
NotEqual -> do
((ram1, o1), (ram2, o2)) <- (,) <$> compileVal v1 <*> compileVal v2
simpleCompare OnEqual SseNotEqual ram1 o1 o2
LessThan -> do
((ram1, o1), (ram2, o2)) <- (,) <$> compileVal v1 <*> compileVal v2
simpleCompare (OnNotBelow Signed) SseLessThan ram1 o1 o2
LessThanEqual -> do
((ram1, o1), (ram2, o2)) <- (,) <$> compileVal v1 <*> compileVal v2
simpleCompare (OnAbove Signed) SseLessThanOrEqual ram1 o1 o2
GreaterThan -> do
((ram1, o1), (ram2, o2)) <- (,) <$> compileVal v1 <*> compileVal v2
invertFlag
<$> simpleCompare (OnAbove Signed) SseLessThanOrEqual ram1 o1 o2
GreaterThanEqual -> do
((ram1, o1), (ram2, o2)) <- (,) <$> compileVal v1 <*> compileVal v2
invertFlag
<$> simpleCompare (OnNotBelow Signed) SseLessThan ram1 o1 o2
UnCond op v -> case op of
LogicalNot -> do
(ram, o) <- compileVal v
asm $ test o o
-- need jump to fail if v is false, i.e. the bitwise and works out
-- to zero, so the jump must succeed if the bitwise and is nonzero
pure OnNotEqual
-- | Computes the register class for a given Vigil type.
registerClass :: Type -> RegisterAccessMode
registerClass (Fix ty) = case ty of
-- basic data types, stack-allocated
IntType _ -> IntegerMode
FloatType _ -> FloatingMode
-- heap-allocated complex data
StructType {} -> IntegerMode
ArrayType _ _ -> IntegerMode
StringType -> IntegerMode
-- impossible situations
FuncType {} -> IntegerMode
SliceType _ -> IntegerMode
VoidType -> IntegerMode
BuiltinType _ -> IntegerMode
compileVal
:: TyAnnVal
-> Compiler label (RegisterAccessMode, VirtualOperand label)
compileVal val = case val of
IdentVal ident -> lookupIdent ident (Indirect . Offset 0)
Literal lit -> compileLiteral lit
IdentValD ident -> lookupIdent ident Direct
-- | Compile a literal.
-- The strategy used is to move the literal (as an immediate) into a fresh
-- virtual register and to return the register. The determined mode of the
-- register is also returned so callers may determine whether to use integer or
-- floating point instructions.
compileLiteral
:: TyAnnLiteral
-> Compiler label (RegisterAccessMode, VirtualOperand label)
compileLiteral (Ann _ lit) = case lit of
IntLit n -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ mov t $ Immediate (ImmI $ fromIntegral n)
pure (IntegerMode, t)
FloatLit n -> do
t <- Register . Direct <$> freshVirtualRegister FloatingMode Extended64
asm $ do
pxor t t
mov rax $ Immediate (ImmF n)
movq t rax
pure (FloatingMode, t)
RuneLit n -> do
t <- Register . Direct <$> freshVirtualRegister IntegerMode Extended64
asm $ mov t (Immediate (ImmI $ fromIntegral $ fromEnum n))
pure (IntegerMode, t)
compileRef
:: Ref BasicIdent (Ident ()) TyAnnVal ()
-> Compiler label (RegisterAccessMode, VirtualOperand label)
compileRef r = case r of
ArrayRef i vs -> do
(ram, i') <- lookupIdent i (Indirect . Offset 0)
asm $ mov rax i'
forM_ vs $ \v -> do
(ramV, o') <- compileVal v -- ramV will always be IntegerMode
        -- compileVal will never write to rax, so it's safe to use rax to
        -- store the array pointer
asm $ do
mov rdi rax
mov rsi o'
call (External . Direct $ mangleFuncName "index_array")
t <- freshVirtualRegister IntegerMode Extended64
-- TODO check ultimate elem type for float
asm $ mov (Register $ Direct t) rax
pure (IntegerMode, (Register $ Indirect $ Offset 0 t))
SelectRef i sels -> do
(ram, i') <- lookupIdent i (Indirect . Offset 0)
asm $ mov rax i'
foldl'
(\acc n -> do
(ramO, o) <- acc
v <- freshVirtualRegister IntegerMode Extended64
asm $ do
mov rdi o
mov rsi (Immediate $ ImmI $ fromIntegral n)
call (External . Direct $ "struct_field")
mov (Register . Direct $ v) rax
pure $ (IntegerMode, Register $ Direct v)
)
(pure (IntegerMode, rax))
sels
SliceRef i slis -> do
-- For the first slice of the chain, we need to know if we're slicing
-- an array or a slice.
let meth = case gidTy i of
Fix (ArrayType _ _) -> "slice_array"
Fix (SliceType _) -> "slice_slice"
_ -> error "Unsliceable type"
-- slices are pointers, so this will always be integermode
(ram, i') <- lookupIdent i (Indirect . Offset 0)
o <- compileSliceExpr i' (head slis) meth
(IntegerMode,) <$> foldl'
(\cur idxs -> do
o' <- cur
compileSliceExpr o' idxs "slice_slice"
)
(pure o)
(tail slis)
ValRef val -> compileVal val
-- | Compiles a slice expression from an operand which contains the thing to
-- slice, the indices of the slice (as vals), a string indicating the slice
-- function to call (which depends on the type of the operand).
--
-- The result is a virtual register containing the new slice.
compileSliceExpr
:: Operand SizedVirtualRegister label
-> (Maybe TyAnnVal, Maybe TyAnnVal, Maybe TyAnnVal)
-> String
-> Compiler label (Operand SizedVirtualRegister label1)
compileSliceExpr o idxs func = do
let (m, l, h, b) = unMaybeSliceTriple idxs
(ramL, l') <- compileVal l
(ramH, h') <- compileVal h
(ramB, b') <- compileVal b
v <- freshVirtualRegister IntegerMode Extended64
asm $ do
mov rdi o
mov rsi $ Immediate $ ImmI $ fromIntegral m
mov rdx l'
mov rcx h'
mov r8 b'
call (External . Direct $ func)
mov (Register . Direct $ v) rax
pure $ Register $ Direct v
-- | Transform a slice triple-maybe-index into something that we can use to call
-- _slice_* functions from the runtime. The quadruple returned has the mode and
-- either the given indices or default values.
unMaybeSliceTriple
:: (Maybe TyAnnVal, Maybe TyAnnVal, Maybe TyAnnVal)
-> (Int, TyAnnVal, TyAnnVal, TyAnnVal)
unMaybeSliceTriple v@(ml, mh, mb) =
(mode v, valOrDef ml, valOrDef mh, valOrDef mb)
where
valOrDef :: Maybe TyAnnVal -> TyAnnVal
valOrDef mv = case mv of
Nothing -> Literal $ Ann (intType I8) $ IntLit 0
Just v' -> v'
mode ms = case ms of
(Nothing, Nothing, Nothing) -> 0
(Just _, Nothing, Nothing) -> 1
(Nothing , Just _, Nothing) -> 2
(Just _, Just _, Nothing) -> 3
(Nothing, Just _, Just _) -> 4
(Just _, Just _, Just _) -> 5
_ -> error "Impossible combination of slice indices"
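-- For example (illustrative): a slice written with only a low index,
-- @(Just lo, Nothing, Nothing)@, becomes @(1, lo, zero, zero)@, where @zero@
-- is the default literal @Literal $ Ann (intType I8) $ IntLit 0@.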
prepareCall :: [TyAnnVal] -> Compiler label ()
prepareCall vals = mapM_ (uncurry assign) (reverse $ zip rams' vals) where
assign m v = do
case m of
Nothing -> compileVal v >>= asm . push . snd
Just r -> compileVal v >>= asm . mov (ihw r) . snd
ihw = Register . Direct . SizedRegister Extended64 . FixedHardwareRegister
-- the preferred access mode for each parameter
rams :: [RegisterAccessMode]
rams = registerClass . valType <$> vals
-- the actual hardware register for each parameter, where Nothing means "on
-- the stack"
rams' :: [Maybe HardwareRegister]
rams' = go regInitial rams where
go _ [] = []
go ([], f) (IntegerMode:xs) = Nothing : go ([], f) xs
go (i, []) (FloatingMode:xs) = Nothing : go (i, []) xs
go (i:is, fs) (IntegerMode:xs) = Just i : go (is, fs) xs
go (is, f:fs) (FloatingMode:xs) = Just f : go (is, fs) xs
regInitial =
( IntegerHwRegister <$> [Rdi, Rsi, Rdx, Rcx, R8, R9]
, hwxmm <$> [0..7]
)
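-- For example (illustrative): with argument classes
-- [IntegerMode, FloatingMode, IntegerMode], the first and third arguments are
-- assigned rdi and rsi and the second goes in xmm0; arguments beyond the six
-- integer (or eight xmm) parameter registers get 'Nothing' and are pushed on
-- the stack right-to-left, since 'prepareCall' reverses the list.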
-- | Wraps some code with the function prologue and epilogue.
wrapFunction :: Compiler label () -> Compiler label ()
wrapFunction v = do
asm $ do
push rbp
mov rbp rsp
asm $ prologue Save
v
asm $ prologue Load
asm $ do
mov rsp rbp
pop rbp
ret
deepSerializeType :: Type -> [Int]
deepSerializeType = cata f where
f ty = case ty of
IntType s -> [serializeType $ Fix $ IntType s]
FloatType s -> [serializeType $ Fix $ FloatType s]
StringType -> [7, 1, serializeType $ Fix $ IntType I1]
ArrayType n tyn -> 7:n:tyn
SliceType tyn -> 8:tyn
StructType fields sz -> 9:(length fields):sz:(concat $ map snd fields)
_ -> error "Type cannot be deep-serialized"
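-- For example (illustrative): an array of three 4-byte integers,
-- @ArrayType 3 (IntType I4)@, deep-serializes to @[7, 3, 3]@: the array tag,
-- the length, then the element type's code from 'serializeType'.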
-- | Computes an integer representation of a type
serializeType :: Type -> Int
serializeType t = case unFix t of
IntType s -> case s of
I1 -> 1
I2 -> 2
I4 -> 3
I8 -> 4
FloatType _ -> 5
StringType -> 6
ArrayType _ _ -> 7
SliceType _ -> 8
StructType _ _ -> 9
_ -> 0
| djeik/goto | libgoto/Language/Vigil/Compile.hs | mit | 36,903 | 0 | 33 | 15,528 | 9,374 | 4,532 | 4,842 | 758 | 32 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module LDAP.Classy
( Uid(..)
, UidNumber(..)
, LdapConfig(..)
, LdapError(..)
, AsLdapError(..)
, LdapCredentials(..)
, LdapEnv
, HasLdapConfig(..)
, HasLdapEnv(..)
, findByDn
, search
, searchWithScope
, searchFirst
, searchFirstWithScope
, modifyEntry
, insertEntry
, deleteEntry
, modify
, insert
, delete
, setPassword
, changePassword
, resetPassword
, checkPassword
, bindLdap
, runLdap
, runLdapSimple
, connectLdap
, module LDAP
, module Types
, module Dn
, module AttrTypes
) where
import Prelude (Int, Show, fromIntegral, show)
import Control.Applicative (Applicative, pure, (<$>))
import Control.Category ((.))
import Control.Lens
import Control.Monad ((>>), (>>=))
import Control.Monad.Catch (try)
import Control.Monad.Error.Hoist ((<%!?>))
import Control.Monad.Error.Lens (catching, throwing)
import Control.Monad.Except (ExceptT, MonadError, runExceptT,
throwError)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT, runReaderT)
import Crypto.Password (CharType (..), PasswordFeature (..),
generatePassword)
import Data.Bool (Bool (..), not)
import Data.Either (Either, either)
import Data.Foldable (traverse_)
import Data.Function (($))
import Data.Functor (fmap)
import Data.List (filter, null)
import Data.Maybe (Maybe (..))
import Data.Text (Text, pack)
import Data.Text.Lazy (fromStrict)
import Data.Text.Lens
import Data.Tuple (snd)
import LDAP (LDAP, LDAPEntry (..),
LDAPException (..), LDAPMod (..),
LDAPModOp (..), LDAPScope (..),
SearchAttributes (..))
import qualified LDAP as L
import LDAP.Classy.AttributeType as AttrTypes
import LDAP.Classy.Decode (AsLdapEntryDecodeError,
FromLdapEntry (..),
LdapEntryDecodeError,
ToLdapEntry (..),
_LdapEntryDecodeError)
import LDAP.Classy.Dn as Dn
import LDAP.Classy.Search (LdapSearch, ldapSearchStr)
import LDAP.Classy.SSha (toSSha)
import LDAP.Classy.Types as Types
import Safe (headMay)
import System.IO (IO)
data LdapCredentials = LdapCredentials
{ _ldapCredentialsDn :: Dn
, _ldapCredentialsPassword :: Text
}
makeClassy ''LdapCredentials
data LdapConfig = LdapConfig
{ _ldapConfigHost :: Text
, _ldapConfigPort :: Int
, _ldapConfigBaseDn :: Maybe Dn
, _ldapConfigScope :: LDAPScope
, _ldapConfigCredentials :: Maybe LdapCredentials
}
makeClassy ''LdapConfig
data LdapEnv = LdapEnv
{ _ldapEnvContext :: LDAP
, _ldapEnvConfig :: LdapConfig
}
makeClassy ''LdapEnv
instance HasLdapConfig LdapEnv where
ldapConfig = ldapEnvConfig
data LdapError =
ConnectException LDAPException
| DecodeFailure LdapEntryDecodeError
| BindFailure LDAPException
deriving Show
makeClassyPrisms ''LdapError
instance AsLdapEntryDecodeError LdapError where
_LdapEntryDecodeError = _DecodeFailure . _LdapEntryDecodeError
type CanLdap m c e =
( MonadError e m
, MonadReader c m
, MonadIO m
, AsLdapEntryDecodeError e
, HasLdapEnv c
)
findByDn :: ( CanLdap m c e , AsLdapError e, Applicative m, FromLdapEntry a ) => Dn -> SearchAttributes -> m (Maybe a)
findByDn dn a = do
es <- liftLdap $ \ ldap ->
L.ldapSearch
ldap
(Just (dn^.dnText.from packed))
LdapScopeBase
Nothing
a
False
traverse fromLdapEntry . headMay $ es
searchWithScope
:: ( CanLdap m c e , AsLdapError e, Applicative m, FromLdapEntry a )
=> LdapSearch
-> SearchAttributes
-> Maybe Dn
-> LDAPScope
-> m [a]
searchWithScope q a dn s = do
es <- liftLdap $ \ ldap -> L.ldapSearch ldap (dn^?_Just.dnText.from packed) s (Just qs) a False
traverse fromLdapEntry es
where
qs = ldapSearchStr q
-- TODO: I don't like that the searchAttrs passed in are separate from
-- the FromLdapEntry instance meaning you can change one and
-- easily forget to change the other.
search :: ( CanLdap m c e , AsLdapError e, Applicative m, FromLdapEntry a )
=> LdapSearch
-> SearchAttributes
-> m [a]
search q a = do
dn <- view (ldapEnvConfig.ldapConfigBaseDn)
s <- view (ldapEnvConfig.ldapConfigScope)
searchWithScope q a dn s
searchFirstWithScope :: ( CanLdap m c e , AsLdapError e, Applicative m, FromLdapEntry a )
=> LdapSearch
-> SearchAttributes
-> Maybe Dn
-> LDAPScope
-> m (Maybe a)
searchFirstWithScope q a dn = fmap headMay . searchWithScope q a dn
searchFirst :: ( CanLdap m c e , AsLdapError e, Applicative m, FromLdapEntry a )
=> LdapSearch
-> SearchAttributes
-> m (Maybe a)
searchFirst q = fmap headMay . search q
modify :: (CanLdap m c e, AsLdapError e) => Dn -> [LDAPMod] -> m ()
modify dn mods = liftLdap $ \ ldap -> L.ldapModify ldap (dn^.dnText.from packed) mods
modifyEntry :: (CanLdap m c e, AsLdapError e,ToLdapEntry a) => a -> m ()
modifyEntry a =
modify (toLdapDn a) . L.list2ldm LdapModReplace . toLdapAttrs $ a
insert :: (CanLdap m c e, AsLdapError e) => LDAPEntry -> m ()
insert le = liftLdap $ \ ldap ->
L.ldapAdd ldap (ledn le)
. L.list2ldm LdapModAdd
. filter (not . null . snd)
. leattrs $ le
insertEntry :: (CanLdap m c e, AsLdapError e,ToLdapEntry a) => a -> m ()
insertEntry = insert . toLdapEntry
delete :: (CanLdap m c e, AsLdapError e) => Dn -> m ()
delete dn = liftLdap $ \ ldap -> L.ldapDelete ldap (dn^.dnText.from packed)
deleteEntry :: (CanLdap m c e, AsLdapError e,ToLdapEntry a) => a -> m ()
deleteEntry = delete . toLdapDn . toLdapEntry
setPassword :: (CanLdap m c e, AsLdapError e) => Dn -> Text -> m ()
setPassword dn pw = do
sSha <- liftIO $ toSSha (fromStrict pw)
modify dn [LDAPMod LdapModReplace "userPassword" [show sSha]]
changePassword :: (CanLdap m c e, AsLdapError e,Applicative m) => Dn -> Text -> Text -> m ()
changePassword dn oldPw newPw = do
checkPassword dn oldPw
setPassword dn newPw
resetPassword :: (CanLdap m c e, AsLdapError e,Applicative m) => Dn -> m Text
resetPassword dn = do
pw <- liftIO $ pack <$> generatePassword
[ Length 10
, Include Lowercase
, Include Uppercase
, Include Symbol
, Include Digit
, IncludeAtLeast 1 Symbol
, IncludeAtLeast 1 Digit
, IncludeAtLeast 2 Uppercase
, IncludeAtLeast 3 Uppercase
]
setPassword dn pw
pure pw
checkPassword :: (CanLdap m c e, AsLdapError e,Applicative m) => Dn -> Text -> m ()
checkPassword dn pw = bindLdap dn pw >> bindRootDn
bindLdap :: (CanLdap m c e, AsLdapError e) => Dn -> Text -> m ()
bindLdap d p = catching _ConnectException doBind (throwing _BindFailure)
where
doBind = liftLdap $ \ ldap ->
L.ldapSimpleBind ldap (d^.dnText.from packed) (p^.from packed)
bindRootDn :: (CanLdap m c e, AsLdapError e,Applicative m) => m ()
bindRootDn =
view (ldapEnvConfig.ldapConfigCredentials) >>= traverse_ rootLogin
where
rootLogin (LdapCredentials d p) = bindLdap d p
liftLdap :: (CanLdap m c e, AsLdapError e) => (LDAP -> IO a) -> m a
liftLdap f = view ldapEnvContext >>= tryLdap . f
tryLdap :: (MonadError e m, MonadIO m, AsLdapError e) => IO a -> m a
tryLdap m = (liftIO . try $ m) <%!?> (_ConnectException #)
connectLdap :: (Applicative m, MonadError e m, MonadIO m, AsLdapError e, AsLdapEntryDecodeError e) => LdapConfig -> m LdapEnv
connectLdap ldapC = do
let h = ldapC ^.ldapConfigHost.from packed
let p = ldapC ^.ldapConfigPort.to fromIntegral
ctx <- tryLdap $ L.ldapInit h p
let env = LdapEnv ctx ldapC
doLdap env bindRootDn
pure env
runLdap
:: ( MonadReader c m
, MonadError e m
, MonadIO m
, Applicative m
, AsLdapError e
, AsLdapEntryDecodeError e
, HasLdapConfig c
)
=> ExceptT e (ReaderT LdapEnv IO) a
-> m a
runLdap m = do
ldapC <- view ldapConfig
env <- connectLdap ldapC
doLdap env m
doLdap :: (MonadError a m, MonadIO m, Applicative m) => r -> ExceptT a (ReaderT r IO) b -> m b
doLdap env m' = do
e <- liftIO $ runReaderT (runExceptT m') env
either throwError pure e
runLdapSimple
:: ExceptT LdapError (ReaderT LdapEnv IO) a
-> LdapConfig
-> IO (Either LdapError a)
runLdapSimple m e = runExceptT $ runReaderT (runLdap m) e
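-- Illustrative usage sketch: assuming some @conf :: LdapConfig@ and
-- @someDn :: Dn@ built elsewhere (constructing a 'Dn' is not shown in this
-- module), a password reset can be run as
-- > runLdapSimple (resetPassword someDn) conf :: IO (Either LdapError Text)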
| benkolera/haskell-ldap-classy | LDAP/Classy.hs | mit | 9,454 | 0 | 16 | 2,761 | 2,873 | 1,540 | 1,333 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
import qualified Data.Proxy as Proxy
import qualified Data.Text as Text
import qualified Lackey
import qualified Servant.API as Servant
import qualified Test.Hspec as Hspec
main :: IO ()
main =
Hspec.hspec
. Hspec.parallel
. Hspec.describe "Lackey"
. Hspec.describe "rubyForAPI"
$ do
Hspec.it "supports delete requests" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy (Servant.Delete '[Servant.JSON] ())
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "delete" "" "delete" "" "" False
Hspec.it "supports get requests" $ do
let
api = Proxy.Proxy :: Proxy.Proxy (Servant.Get '[Servant.JSON] ())
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" "" "get" "" "" False
Hspec.it "supports patch requests" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy (Servant.Patch '[Servant.JSON] ())
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "patch" "" "patch" "" "" False
Hspec.it "supports post requests" $ do
let
api = Proxy.Proxy :: Proxy.Proxy (Servant.Post '[Servant.JSON] ())
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "post" "" "post" "" "" False
Hspec.it "supports put requests" $ do
let
api = Proxy.Proxy :: Proxy.Proxy (Servant.Put '[Servant.JSON] ())
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "put" "" "put" "" "" False
Hspec.it "supports alternatives" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.Get '[Servant.JSON] () Servant.:<|> Servant.Delete '[Servant.JSON] ()
)
Lackey.rubyForAPI api `Hspec.shouldBe` Text.concat
[ ruby "get" "" "get" "" "" False
, Text.singleton ';'
, ruby "delete" "" "delete" "" "" False
]
Hspec.it "supports captures" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.Capture "id" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get_by_id" ",id" "get" "#{id}" "" False
Hspec.it "supports query flags" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryFlag "flag" Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby
"get"
",flag: nil"
"get"
"?flag=#{flag}"
""
False
Hspec.it "supports query params" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryParam "param" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby
"get"
",param: nil"
"get"
"?param=#{param}"
""
False
Hspec.it "supports multiple query params" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryParams "params" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby
"get"
",params: nil"
"get"
"?params=#{params}"
""
False
Hspec.it "supports request bodies" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.ReqBody '[Servant.JSON] () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" ",body" "get" "" "" True
Hspec.it "supports request headers" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.Header "cookie" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby
"get"
",cookie: nil"
"get"
""
"\"cookie\"=>cookie"
False
Hspec.it "supports response headers" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
(Servant.Get '[Servant.JSON] (Servant.Headers '[] ()))
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" "" "get" "" "" False
Hspec.it "puts the body param after captures" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.Capture "segment" () Servant.:> Servant.ReqBody '[Servant.JSON] () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby
"get_by_segment"
",segment,body"
"get"
"#{segment}"
""
True
Hspec.it "puts the body param after query params" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryFlag "flag" Servant.:> Servant.ReqBody '[Servant.JSON] () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby
"get"
",flag: nil,body"
"get"
"?flag=#{flag}"
""
True
Hspec.it "sanitizes path segments" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
("A!" Servant.:> Servant.Get '[Servant.JSON] ())
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get_A!" "" "get" "A!" "" False
Hspec.it "sanitizes captures" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.Capture "A!" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get_by_A!" ",a_" "get" "#{a_}" "" False
Hspec.it "sanitizes query flags" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryFlag "A!" Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" ",a_: nil" "get" "?A!=#{a_}" "" False
Hspec.it "sanitizes query params" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryParam "A!" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" ",a_: nil" "get" "?A!=#{a_}" "" False
Hspec.it "sanitizes multiple query params" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.QueryParams "A!" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" ",a_: nil" "get" "?A!=#{a_}" "" False
Hspec.it "sanitizes headers" $ do
let
api =
Proxy.Proxy :: Proxy.Proxy
( Servant.Header "A!" () Servant.:> Servant.Get '[Servant.JSON] ()
)
Lackey.rubyForAPI api
`Hspec.shouldBe` ruby "get" ",a_: nil" "get" "" "\"A!\"=>a_" False
-- Since every generated function has the same structure, it can be abstracted
-- away behind this function.
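-- For reference, the simplest call (derived mechanically from the definition
-- below) produces this Ruby source, shown unescaped:
--
-- > ruby "get" "" "get" "" "" False
--
-- def get(excon)excon.request(:method=>:get,:path=>"/",:headers=>{},:body=>nil)end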
ruby :: String -> String -> String -> String -> String -> Bool -> Text.Text
ruby name params method path headers body = Text.pack
(concat
[ "def "
, name
, "(excon"
, params
, ")"
, "excon.request("
, ":method=>:"
, method
, ","
, ":path=>\"/"
, path
, "\","
, ":headers=>{"
, headers
, "},"
, ":body=>"
, if body then "body" else "nil"
, ")"
, "end"
]
)
| tfausak/lackey | source/test-suite/Main.hs | mit | 8,851 | 0 | 21 | 3,945 | 2,178 | 1,072 | 1,106 | 213 | 2 |
module Parser
( Parser.parse
) where
import Control.Monad.Error (throwError)
import Text.ParserCombinators.Parsec
import Types
parseNumber = do
sign <- option "" (string "-")
value <- try parseFloat <|> parseInt
return $ Number . read $ sign ++ value
where parseInt = many1 digit
parseFloat = do
value <- option "0" (many1 digit)
char '.'
decimal <- many1 digit
return $ value ++ "." ++ decimal
parseString = do
char '"'
str <- many strChars
char '"'
return $ String str
where strChars = noneOf "\""
parseBoolean = do
char '#'
value <- oneOf "tf"
return $ if value == 't' then Boolean True else Boolean False
parseSymbol = do
name <- many (letter <|> digit <|> oneOf "+-*/_\\=!@#$%^&{}?")
return $ Symbol name
parseList = do
char '('
optional spaces
contents <- parseAnyExpr `sepEndBy` (many1 space)
char ')'
return $ fromList contents
parseQuote = do
char '\''
expr <- parseAnyExpr
return $ Pair (Symbol "quote") (Pair expr Null)
parseAnyExpr = try parseList
<|> try parseQuote
<|> try parseNumber
<|> try parseString
<|> try parseBoolean
<|> try parseSymbol
parseExpr = do
optional spaces
expr <- parseAnyExpr
optional spaces
eof
return expr
parse :: String -> ThrowsError LispValue
parse input = case Text.ParserCombinators.Parsec.parse parseExpr "lisp" input of
Right result -> return $ result
Left error -> throwError . ParseError . show $ error
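-- A usage sketch (output shown schematically; the concrete constructors and
-- the error type come from the Types module):
--
-- > parse "'x"       -- quote is expanded: Pair (Symbol "quote") (Pair (Symbol "x") Null)
-- > parse "(+ 1 2.5)" -- a proper list built with fromList; numbers go through read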
| dstruthers/Agate | Parser.hs | mit | 1,572 | 0 | 12 | 440 | 524 | 243 | 281 | 54 | 2 |
module Parser (parse) where
import qualified TailRecGrammar as H
import Lexer
import Control.Monad.Trans.State.Lazy
import Control.Monad
type Parse = State (Token, [Token])
parse :: String -> H.E
parse s = evalState (advance >> e) (undefined, mathLex s)
advance :: Parse ()
advance = do
(_, x:xs) <- get
put (x, xs)
getToken :: Parse Token
getToken = liftM fst $ get
eat :: Token -> Parse ()
eat t = do
t' <- getToken
if t == t' then advance else error ("Failed to parse - found " ++ show t' ++ " expected " ++ show t)
e :: Parse H.E
e = liftM2 H.E t e'
e' :: Parse H.E'
e' = do
tok <- getToken
case tok of
Plus -> eat Plus >> liftM2 H.Plus t e'
Minus -> eat Minus >> liftM2 H.Minus t e'
_ -> return H.ENone
t :: Parse H.T
t = liftM2 H.T f t'
t' :: Parse H.T'
t' = do
tok <- getToken
case tok of
Mul -> eat Mul >> liftM2 H.Mul f t'
Div -> eat Div >> liftM2 H.Div f t'
_ -> return H.TNone
f :: Parse H.F
f = do
tok <- getToken
case tok of
Num x -> eat (Num x) >> return (H.Num x)
LParen -> do
eat LParen
val <- e
eat RParen
return $ H.Paren val
| w-shackleton/MathParse | Parser.hs | gpl-2.0 | 1,131 | 0 | 14 | 311 | 536 | 266 | 270 | 46 | 3 |
{- |
Module : $Header$
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Description :
Instance of class Pretty for hybrid logic
with an arbitrary logic below.
-}
module TopHybrid.Print_AS (printNamedFormula) where
import Common.Doc
import Common.DocUtils
import Common.AS_Annotation
import TopHybrid.AS_TopHybrid
import TopHybrid.TopHybridSign
import Logic.Logic
printNamedFormula :: Named Frm_Wrap -> Doc
printNamedFormula = printAnnoted printFormula . fromLabelledSen
-- the use of the function makeNamed is vacuous; it is only needed
-- to satisfy the types of the print_named function
printFormula :: Frm_Wrap -> Doc
printFormula (Frm_Wrap l f) = case f of
UnderLogic f' -> print_named l $ makeNamed "" f'
f' -> pretty f'
instance (Pretty f) => Pretty (TH_FORMULA f) where
pretty (At n f) = keyword "@" <+> (pretty n) <+> (pretty f)
pretty (Uni n f) = keyword "forall worlds" <+> (pretty n) <+> (pretty f)
pretty (Exist n f) = keyword "exist world" <+> (pretty n) <+> (pretty f)
pretty (UnderLogic f) = keyword "{" <+> (pretty f) <+> (keyword "}")
pretty (Box m f) = keyword "[" <> (pretty m) <> keyword "]" <+> (pretty f)
pretty (Dia m f) = keyword "<" <> (pretty m) <> keyword ">" <+> (pretty f)
pretty (Conjunction f f') = pretty f <+> (keyword "/\\") <+> (pretty f')
pretty (Disjunction f f') = pretty f <+> (keyword "\\/") <+> (pretty f')
pretty (Implication f f') = pretty f <+> (keyword "->") <+> (pretty f')
pretty (BiImplication f f') = pretty f <+> (keyword "<->") <+> (pretty f')
pretty (Here n) = pretty n
pretty (Neg f) = keyword "not" <+> (pretty f)
pretty (Par f) = keyword "(" <+> (pretty f) <+> (keyword ")")
pretty TrueA = keyword "True"
pretty FalseA = keyword "False"
instance (Pretty s) => Pretty (THybridSign s) where
pretty x@(THybridSign _ _ s) =
keyword "Modalities" <+> (pretty $ modies x) $+$
keyword "Nominals" <+> (pretty $ nomies x) $+$
keyword "Under Sig {" $+$ (pretty s) $+$ (keyword "}")
instance (Pretty b) => Pretty (TH_BSPEC b) where
pretty (Bspec x b) = pretty x
$+$ keyword "Under Spec {" $+$
(pretty b)
$+$ keyword "}"
instance Pretty (TH_BASIC_ITEM) where
pretty (Simple_mod_decl x) = keyword "Modalities" <+> (pretty x)
pretty (Simple_nom_decl x) = keyword "Nominals" <+> (pretty x)
instance Pretty Frm_Wrap where
pretty (Frm_Wrap _ f) = pretty f
instance Pretty Spc_Wrap where
pretty (Spc_Wrap _ b f) = pretty b $+$ (pretty f)
instance Pretty Mor where
instance Pretty Sgn_Wrap where
pretty (Sgn_Wrap _ s) = pretty s
pretty (EmptySign) = pretty ()
| nevrenato/Hets_Fork | TopHybrid/Print_AS.hs | gpl-2.0 | 3,014 | 0 | 14 | 873 | 1,012 | 499 | 513 | 50 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./TPTP/Prover/ProverState.hs
Description : Help functions for all automatic theorem provers.
Copyright : (c) Rainer Grabbe
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : needs POSIX
Data structures and initialising functions for Prover state and configurations.
-}
module TPTP.Prover.ProverState ( ProverState (..)
, getAxioms
, insertSentenceIntoProverState
, ioShowTPTPProblem
, tptpProverState
) where
import TPTP.AS
import TPTP.Morphism
import TPTP.Pretty
import TPTP.Sign
import Common.AS_Annotation
import Common.ProofUtils
import Common.Doc
import Logic.Prover
import Data.Data
import Data.List
-- * Data structures
data PProblem = PProblem { identifier :: String
, logicalPart :: PLogicalPart
} deriving (Show, Eq, Ord, Typeable, Data)
data ProverState = ProverState { psLogicalPart :: PLogicalPart
} deriving (Show, Ord, Eq, Data, Typeable)
tptpProverState :: Sign -- ^ TPTP signature
-> [Named Sentence]
-- ^ list of named TPTP sentences containing axioms
-> [FreeDefMorphism Sentence Morphism] -- ^ freeness constraints
-> ProverState
tptpProverState _ sentences _ = ProverState {
psLogicalPart = foldr (flip insertSentence) emptyPLogicalPart axiomList}
where newSentences = prepareSenNames id sentences
axiomList = filter isAxiom newSentences
-- ** proving Logical Parts
{- |
A proving logical part consists of a symbol list, a declaration list, and a
set of formula lists. Support for clause lists and proof lists hasn't
been implemented yet.
-}
data PLogicalPart = PLogicalPart { formulaeList :: [Named Sentence]
} deriving (Show, Eq, Ord, Typeable, Data)
emptyPLogicalPart :: PLogicalPart
emptyPLogicalPart = PLogicalPart { formulaeList = [] }
-- | gets all axioms possibly used in a proof
getAxioms :: ProverState -> [String]
getAxioms = map senAttr . filter isAxiom . formulaeList . psLogicalPart
-- * TPTP specific functions for prover GUI
-- Inserts a named TPTP term into TPTP prover state.
insertSentenceIntoProverState :: ProverState
-- ^ prover state containing initial logical part
-> Named Sentence -- ^ goal to add
-> ProverState
insertSentenceIntoProverState proverState namedSentence =
proverState { psLogicalPart =
insertSentence (psLogicalPart proverState) namedSentence }
{- |
Inserts a Named Sentence (axiom or goal) into a PLogicalPart.
-}
insertSentence :: PLogicalPart -> Named Sentence -> PLogicalPart
insertSentence pLogicalPart newSentence =
pLogicalPart { formulaeList = newSentence : formulaeList pLogicalPart }
{- |
  Generate a TPTP problem, maybe adding a goal.
-}
generateTPTPProblem :: PLogicalPart
-> Maybe (Named Sentence) -> PProblem
generateTPTPProblem pLogicalPart mNewGoal = PProblem
{ identifier = "hets_exported"
, logicalPart = maybe pLogicalPart (insertSentence pLogicalPart) mNewGoal
}
{- |
Pretty printing TPTP goal in TPTP format.
-}
ioShowTPTPProblem :: String -- ^ theory name
-> ProverState -- ^ prover state containing initial logical part
-> Named Sentence -- ^ goal to print
-> [String] -- ^ extra options
-> IO String -- ^ formatted output of the goal
ioShowTPTPProblem theoryName proverState newGoal _ = do
let problem = generateTPTPProblem
(psLogicalPart proverState)
(Just newGoal)
return $ show $ printTPTPProblem theoryName problem
-- Print a newline at the end of the document for good style.
printTPTPProblem :: String -> PProblem -> Doc
printTPTPProblem theoryName problem =
text "% Problem: " <> text (identifier problem)
$+$ text "% generated from the library " <> text theoryName
$+$ vsep (map printNamedSentence $ sortBy sentenceOrder $ formulaeList $
logicalPart problem)
$+$ text ""
where
sentenceOrder :: Named Sentence -> Named Sentence -> Ordering
sentenceOrder s t =
case (formulaRole $ sentence s, formulaRole $ sentence t) of
(Unknown, _) -> LT
(Type, _) -> LT
(Definition, _) -> LT
(Conjecture, _) -> GT
(_, _) -> EQ
| gnn/Hets | TPTP/Prover/ProverState.hs | gpl-2.0 | 4,667 | 0 | 12 | 1,294 | 804 | 435 | 369 | 74 | 5 |
{-# LANGUAGE NoImplicitPrelude #-}
module Data.Maybe where
import Data.Bool
data Maybe a = Just a | Nothing
maybe n _ Nothing = n
maybe _ f (Just x) = f x
isJust Nothing = False
isJust _ = True
isNothing Nothing = True
isNothing _ = False
fromJust (Just x) = x
-- fromJust Nothing = error "Maybe.fromJust: Nothing"
fromMaybe d x = case x of Nothing -> d
Just v -> v
maybeToList Nothing = []
maybeToList (Just x) = [x]
listToMaybe [] = Nothing
listToMaybe (x:_) = Just x
-- listToMaybe = foldr (const . Just) Nothing
-- GHC uses this definition so it can fuse via the foldr/build rule.
-- catMaybes ls = [ x | Just x <- ls]
mapMaybes _ [] = []
mapMaybes f (x:xs) = let rs = mapMaybes f xs
in case f x of Nothing -> rs
Just r -> r:rs
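-- A small worked example of mapMaybes (kept as a comment, since this module is
-- a minimal Prelude replacement with almost nothing in scope):
-- mapMaybes (\ x -> if x > 2 then Just (x * 10) else Nothing) [1,2,3,4] == [30,40]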
| bredelings/BAli-Phy | haskell/Data/Maybe.hs | gpl-2.0 | 828 | 0 | 10 | 260 | 265 | 136 | 129 | 21 | 2 |
{-# LANGUAGE PatternGuards,ViewPatterns #-}
-- | Simplify pass over the rich language:
--
-- * Inlines local non-recursive definitions,
-- * Eliminates known-case:
-- - when the scrutinee expression is a constructor
-- - by inlining/eliminating the scrutinee variable
-- * Beta reduction
--
-- These passes destroy sharing and make the program less efficient.
-- However, they should preserve the semantics (even in the presence of
-- non-terminating programs/bottoms)
--
-- TODO: Inline non-recursive global definitions
-- Polymorphic lets
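--
-- Schematically, the passes listed above do (pseudo-syntax, not the
-- constructors used below):
--
-- > (\ x -> body) arg            ==>  body[arg/x]      -- beta reduction
-- > case C a b of C x y -> rhs   ==>  rhs[a/x, b/y]    -- known case
-- > let f = e in body            ==>  body[e/f]        -- inline non-recursive let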
module HipSpec.Lang.SimplifyRich where
import HipSpec.Lang.Rich
import HipSpec.Lang.Type
simpFuns :: Eq a => [Function a] -> [Function a]
simpFuns = map (simpFun Global)
data Where = Global | Local deriving Eq
simpFun :: Eq a => Where -> Function a -> Function a
simpFun loc (Function f ty b) = Function f ty $ simpExpr $ case b of
-- Sometimes functions look like this
-- f = \ xs -> let g = K[g] in g,
-- then we simply replace it to f = \ xs -> K[f xs]
-- TODO: Polymorphic functions (find examples!)
(collectBinders -> (xs,Let [Function g (Forall [] _) e] (Lcl g' _)))
| g == g'
, Forall tvs inner_ty <- ty
, null tvs || loc == Global ->
let
var = case loc of
Global -> Gbl
Local -> \ a _ _ -> Lcl a inner_ty
in
makeLambda xs
((var f ty (map TyVar tvs) `apply` map (uncurry Lcl) xs // g) e)
_ -> b
simpExpr :: Eq a => Expr a -> Expr a
simpExpr = transformExpr $ \ e0 -> case e0 of
-- Beta reduction
App (Lam x _ body) arg -> simpExpr ((arg // x) body)
-- Known case on a constructor
Case e mx alts
| (Gbl u _ ts,args) <- collectArgs e
, Just (ConPat _ _ _ bs,rhs) <- findAlt u ts alts
-> simpExpr (substMany (maybe id (\ (x,_) -> ((x,e):)) mx (zip (map fst bs) args)) rhs)
Case (Let fns e) x alts -> simpExpr (Let fns (Case e x alts))
Case e x alts -> Case e Nothing
[ (p,simpExpr (removeScrutinee e x alt))
| alt@(p,_) <- alts
]
-- Inlining local non-recursive functions
-- TODO: Handle several functions, handle polymorphic functions (no examples yet)
-- Cannot inline this to several occasions if e contains a let
Let [Function f (Forall [] _) b] e
| not (f `occursIn` b)
, letFree b {- || occurrences f e <= 1 -} -> simpExpr ((b // f) e)
Let fns e -> Let (map (simpFun Local) fns) (simpExpr e)
_ -> e0
-- | Removes the scrutinee variable (and also the expression if it is a variable),
-- by inlining the pattern or the expression again (if it is a Default alt)
removeScrutinee :: Eq a => Expr a -> Maybe (a,Type a) -> Alt a -> Expr a
removeScrutinee e mx (p,rhs) = subst rhs
where
subst_expr = case p of
Default -> e
LitPat l -> Lit l
ConPat u ty ts bs -> apply (Gbl u ty ts) (map (uncurry Lcl) bs)
-- If the scrutinee is just a variable, we inline it too.
  -- This can lead to triggering many known-case eliminations.
subst = substMany . (`zip` repeat subst_expr) . maybe id ((:) . fst) mx $ case e of
Lcl u _ -> [u] -- The variable can only be locally bound by lambda
-- or case and thus is not applied to type args.
_ -> []
| danr/hipspec | src/HipSpec/Lang/SimplifyRich.hs | gpl-3.0 | 3,428 | 0 | 20 | 1,051 | 989 | 512 | 477 | -1 | -1 |
module RayMarch.Field where
import RayMarch.Types
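-- | 'sub' is the usual CSG difference on distance fields: keeping the larger
-- of @f p@ and the negated @g p@ leaves only points that are inside the first
-- field and outside the second (assuming the positive-outside convention).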
sub :: Field -> Field -> Field
sub f g p = f p`max`(-g p) | phi16/RayMarch | RayMarch/Field.hs | gpl-3.0 | 109 | 1 | 8 | 21 | 52 | 29 | 23 | 4 | 1 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
module Jebediah.MIDI.Waldorf.Blofeld
where
import Jebediah.MIDI.Instrument
import Sound.MIDI.Message.Channel (Body)
data Blofeld = Blofeld
blofeld :: Blofeld
blofeld = Blofeld
ctrlNames :: [(Int, String)]
ctrlNames = sorted
[ (27, "Oscillator 1: Range")
]
instance Control Blofeld where
controlNames _ = ctrlNames
en :: Enum cv => String -> cv -> [Body]
en = enumerable blofeld
| mmarx/jebediah | src/Jebediah/MIDI/Waldorf/Blofeld.hs | gpl-3.0 | 464 | 0 | 8 | 98 | 122 | 72 | 50 | 14 | 1 |
-- This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0. If a copy of
-- the MPL was not distributed with this file, You
-- can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.Middleware.Gunzip (gunzip) where
import Control.Applicative
import Control.Exception (throwIO)
import Data.IORef
import Network.HTTP.Types (Header, hContentEncoding)
import Network.Wai (Middleware, Request, RequestBodyLength (ChunkedBody))
import Prelude
import qualified Data.ByteString as S
import qualified Data.Streaming.Zlib as Z
import qualified Network.Wai as Wai
-- | This WAI middleware transparently unzips HTTP request bodies if
-- a request header @Content-Encoding: gzip@ is found.
--
-- Please note that the 'requestBodyLength' is set to 'ChunkedBody'
-- if the body is unzipped since we do not know the uncompressed
-- length yet.
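--
-- A minimal usage sketch (assuming a Warp-served WAI 'Application' named
-- @app@; the name and port are illustrative):
--
-- > import Network.Wai.Handler.Warp (run)
-- >
-- > main :: IO ()
-- > main = run 8080 (gunzip app)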
gunzip :: Middleware
gunzip app rq k
| isGzip rq = prepare >>= flip app k
| otherwise = app rq k
where
prepare = do
r <- newIORef []
i <- Z.initInflate (Z.WindowBits 31)
return $ rq { Wai.requestBody = inflate r i
, Wai.requestBodyLength = ChunkedBody -- FIXME
, Wai.requestHeaders = noGzip (Wai.requestHeaders rq)
}
inflate r i = do
buffered <- readIORef r
case buffered of
[] -> Wai.requestBody rq >>= continue r i
(x:xs) -> writeIORef r xs >> return x
continue r i b =
if S.null b then
return S.empty
else do
f <- toBytes id =<< Z.feedInflate i b
x <- f . (:[]) <$> Z.finishInflate i
case x of
[] -> return S.empty
(y:ys) -> writeIORef r ys >> return y
toBytes front p = p >>= \r -> case r of
Z.PRDone -> return front
Z.PRNext b -> toBytes (front . (:) b) p
Z.PRError e -> throwIO e
isGzip :: Request -> Bool
isGzip = maybe False ("gzip" ==) . lookup hContentEncoding . Wai.requestHeaders
noGzip :: [Header] -> [Header]
noGzip = filter (\(k, v) -> k /= hContentEncoding || v /= "gzip")
| twittner/wai-middleware-gunzip | src/Network/Wai/Middleware/Gunzip.hs | mpl-2.0 | 2,207 | 0 | 15 | 631 | 601 | 318 | 283 | 43 | 6 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.StorageTransfer
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Transfers data from external data sources to a Google Cloud Storage
-- bucket or between Google Cloud Storage buckets.
--
-- /See:/ <https://cloud.google.com/storage/transfer Google Storage Transfer API Reference>
module Network.Google.StorageTransfer
(
-- * Service Configuration
storageTransferService
-- * OAuth Scopes
, cloudPlatformScope
-- * API Declaration
, StorageTransferAPI
-- * Resources
-- ** storagetransfer.getGoogleServiceAccount
, module Network.Google.Resource.StorageTransfer.GetGoogleServiceAccount
-- ** storagetransfer.googleServiceAccounts.get
, module Network.Google.Resource.StorageTransfer.GoogleServiceAccounts.Get
-- ** storagetransfer.transferJobs.create
, module Network.Google.Resource.StorageTransfer.TransferJobs.Create
-- ** storagetransfer.transferJobs.get
, module Network.Google.Resource.StorageTransfer.TransferJobs.Get
-- ** storagetransfer.transferJobs.list
, module Network.Google.Resource.StorageTransfer.TransferJobs.List
-- ** storagetransfer.transferJobs.patch
, module Network.Google.Resource.StorageTransfer.TransferJobs.Patch
-- ** storagetransfer.transferOperations.cancel
, module Network.Google.Resource.StorageTransfer.TransferOperations.Cancel
-- ** storagetransfer.transferOperations.delete
, module Network.Google.Resource.StorageTransfer.TransferOperations.Delete
-- ** storagetransfer.transferOperations.get
, module Network.Google.Resource.StorageTransfer.TransferOperations.Get
-- ** storagetransfer.transferOperations.list
, module Network.Google.Resource.StorageTransfer.TransferOperations.List
-- ** storagetransfer.transferOperations.pause
, module Network.Google.Resource.StorageTransfer.TransferOperations.Pause
-- ** storagetransfer.transferOperations.resume
, module Network.Google.Resource.StorageTransfer.TransferOperations.Resume
-- * Types
-- ** ErrorSummary
, ErrorSummary
, errorSummary
, esErrorCount
, esErrorCode
, esErrorLogEntries
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- ** Schedule
, Schedule
, schedule
, sScheduleEndDate
, sScheduleStartDate
, sStartTimeOfDay
-- ** ObjectConditions
, ObjectConditions
, objectConditions
, ocMinTimeElapsedSinceLastModification
, ocIncludePrefixes
, ocMaxTimeElapsedSinceLastModification
, ocExcludePrefixes
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** Empty
, Empty
, empty
-- ** PauseTransferOperationRequest
, PauseTransferOperationRequest
, pauseTransferOperationRequest
-- ** GoogleServiceAccount
, GoogleServiceAccount
, googleServiceAccount
, gsaAccountEmail
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** Date
, Date
, date
, dDay
, dYear
, dMonth
-- ** UpdateTransferJobRequest
, UpdateTransferJobRequest
, updateTransferJobRequest
, utjrTransferJob
, utjrProjectId
, utjrUpdateTransferJobFieldMask
-- ** TransferCounters
, TransferCounters
, transferCounters
, tcBytesFoundOnlyFromSink
, tcBytesDeletedFromSink
, tcObjectsDeletedFromSource
, tcObjectsFoundFromSource
, tcBytesFailedToDeleteFromSink
, tcBytesFromSourceFailed
, tcBytesCopiedToSink
, tcBytesFoundFromSource
, tcBytesDeletedFromSource
, tcObjectsDeletedFromSink
, tcObjectsFoundOnlyFromSink
, tcBytesFromSourceSkippedBySync
, tcObjectsCopiedToSink
, tcObjectsFromSourceFailed
, tcObjectsFailedToDeleteFromSink
, tcObjectsFromSourceSkippedBySync
-- ** TransferJob
, TransferJob
, transferJob
, tjCreationTime
, tjStatus
, tjSchedule
, tjDeletionTime
, tjName
, tjProjectId
, tjTransferSpec
, tjDescription
, tjLastModificationTime
-- ** GcsData
, GcsData
, gcsData
, gdBucketName
-- ** AwsS3Data
, AwsS3Data
, awsS3Data
, asdBucketName
, asdAwsAccessKey
-- ** HTTPData
, HTTPData
, hTTPData
, httpdListURL
-- ** TimeOfDay'
, TimeOfDay'
, timeOfDay
, todNanos
, todHours
, todMinutes
, todSeconds
-- ** ErrorLogEntry
, ErrorLogEntry
, errorLogEntry
, eleURL
, eleErrorDetails
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- ** TransferOptions
, TransferOptions
, transferOptions
, toDeleteObjectsUniqueInSink
, toDeleteObjectsFromSourceAfterTransfer
, toOverwriteObjectsAlreadyExistingInSink
-- ** TransferOperation
, TransferOperation
, transferOperation
, toStatus
, toCounters
, toStartTime
, toTransferJobName
, toName
, toEndTime
, toProjectId
, toTransferSpec
, toErrorBreakdowns
-- ** TransferSpec
, TransferSpec
, transferSpec
, tsGcsDataSource
, tsObjectConditions
, tsHTTPDataSource
, tsAwsS3DataSource
, tsGcsDataSink
, tsTransferOptions
-- ** ListTransferJobsResponse
, ListTransferJobsResponse
, listTransferJobsResponse
, ltjrNextPageToken
, ltjrTransferJobs
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
-- ** ResumeTransferOperationRequest
, ResumeTransferOperationRequest
, resumeTransferOperationRequest
-- ** AwsAccessKey
, AwsAccessKey
, awsAccessKey
, aakSecretAccessKey
, aakAccessKeyId
) where
import Network.Google.Prelude
import Network.Google.Resource.StorageTransfer.GetGoogleServiceAccount
import Network.Google.Resource.StorageTransfer.GoogleServiceAccounts.Get
import Network.Google.Resource.StorageTransfer.TransferJobs.Create
import Network.Google.Resource.StorageTransfer.TransferJobs.Get
import Network.Google.Resource.StorageTransfer.TransferJobs.List
import Network.Google.Resource.StorageTransfer.TransferJobs.Patch
import Network.Google.Resource.StorageTransfer.TransferOperations.Cancel
import Network.Google.Resource.StorageTransfer.TransferOperations.Delete
import Network.Google.Resource.StorageTransfer.TransferOperations.Get
import Network.Google.Resource.StorageTransfer.TransferOperations.List
import Network.Google.Resource.StorageTransfer.TransferOperations.Pause
import Network.Google.Resource.StorageTransfer.TransferOperations.Resume
import Network.Google.StorageTransfer.Types
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Google Storage Transfer API service.
type StorageTransferAPI =
TransferJobsListResource :<|>
TransferJobsPatchResource
:<|> TransferJobsGetResource
:<|> TransferJobsCreateResource
:<|> GetGoogleServiceAccountResource
:<|> TransferOperationsListResource
:<|> TransferOperationsGetResource
:<|> TransferOperationsPauseResource
:<|> TransferOperationsCancelResource
:<|> TransferOperationsDeleteResource
:<|> TransferOperationsResumeResource
:<|> GoogleServiceAccountsGetResource
| rueshyna/gogol | gogol-storage-transfer/gen/Network/Google/StorageTransfer.hs | mpl-2.0 | 8,047 | 0 | 15 | 1,735 | 793 | 570 | 223 | 190 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BinaryAuthorization.Projects.Attestors.ValidateAttestationOccurrence
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns whether the given Attestation for the given image URI was signed
-- by the given Attestor
--
-- /See:/ <https://cloud.google.com/binary-authorization/ Binary Authorization API Reference> for @binaryauthorization.projects.attestors.validateAttestationOccurrence@.
module Network.Google.Resource.BinaryAuthorization.Projects.Attestors.ValidateAttestationOccurrence
(
-- * REST Resource
ProjectsAttestorsValidateAttestationOccurrenceResource
-- * Creating a Request
, projectsAttestorsValidateAttestationOccurrence
, ProjectsAttestorsValidateAttestationOccurrence
-- * Request Lenses
, pavaoXgafv
, pavaoUploadProtocol
, pavaoAccessToken
, pavaoUploadType
, pavaoPayload
, pavaoAttestor
, pavaoCallback
) where
import Network.Google.BinaryAuthorization.Types
import Network.Google.Prelude
-- | A resource alias for @binaryauthorization.projects.attestors.validateAttestationOccurrence@ method which the
-- 'ProjectsAttestorsValidateAttestationOccurrence' request conforms to.
type ProjectsAttestorsValidateAttestationOccurrenceResource
=
"v1" :>
CaptureMode "attestor"
"validateAttestationOccurrence"
Text
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ValidateAttestationOccurrenceRequest
:> Post '[JSON] ValidateAttestationOccurrenceResponse
-- | Returns whether the given Attestation for the given image URI was signed
-- by the given Attestor
--
-- /See:/ 'projectsAttestorsValidateAttestationOccurrence' smart constructor.
data ProjectsAttestorsValidateAttestationOccurrence =
ProjectsAttestorsValidateAttestationOccurrence'
{ _pavaoXgafv :: !(Maybe Xgafv)
, _pavaoUploadProtocol :: !(Maybe Text)
, _pavaoAccessToken :: !(Maybe Text)
, _pavaoUploadType :: !(Maybe Text)
, _pavaoPayload :: !ValidateAttestationOccurrenceRequest
, _pavaoAttestor :: !Text
, _pavaoCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsAttestorsValidateAttestationOccurrence' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pavaoXgafv'
--
-- * 'pavaoUploadProtocol'
--
-- * 'pavaoAccessToken'
--
-- * 'pavaoUploadType'
--
-- * 'pavaoPayload'
--
-- * 'pavaoAttestor'
--
-- * 'pavaoCallback'
projectsAttestorsValidateAttestationOccurrence
:: ValidateAttestationOccurrenceRequest -- ^ 'pavaoPayload'
-> Text -- ^ 'pavaoAttestor'
-> ProjectsAttestorsValidateAttestationOccurrence
projectsAttestorsValidateAttestationOccurrence pPavaoPayload_ pPavaoAttestor_ =
ProjectsAttestorsValidateAttestationOccurrence'
{ _pavaoXgafv = Nothing
, _pavaoUploadProtocol = Nothing
, _pavaoAccessToken = Nothing
, _pavaoUploadType = Nothing
, _pavaoPayload = pPavaoPayload_
, _pavaoAttestor = pPavaoAttestor_
, _pavaoCallback = Nothing
}
-- | V1 error format.
pavaoXgafv :: Lens' ProjectsAttestorsValidateAttestationOccurrence (Maybe Xgafv)
pavaoXgafv
= lens _pavaoXgafv (\ s a -> s{_pavaoXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pavaoUploadProtocol :: Lens' ProjectsAttestorsValidateAttestationOccurrence (Maybe Text)
pavaoUploadProtocol
= lens _pavaoUploadProtocol
(\ s a -> s{_pavaoUploadProtocol = a})
-- | OAuth access token.
pavaoAccessToken :: Lens' ProjectsAttestorsValidateAttestationOccurrence (Maybe Text)
pavaoAccessToken
= lens _pavaoAccessToken
(\ s a -> s{_pavaoAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pavaoUploadType :: Lens' ProjectsAttestorsValidateAttestationOccurrence (Maybe Text)
pavaoUploadType
= lens _pavaoUploadType
(\ s a -> s{_pavaoUploadType = a})
-- | Multipart request metadata.
pavaoPayload :: Lens' ProjectsAttestorsValidateAttestationOccurrence ValidateAttestationOccurrenceRequest
pavaoPayload
= lens _pavaoPayload (\ s a -> s{_pavaoPayload = a})
-- | Required. The resource name of the Attestor of the occurrence, in the
-- format \`projects\/*\/attestors\/*\`.
pavaoAttestor :: Lens' ProjectsAttestorsValidateAttestationOccurrence Text
pavaoAttestor
= lens _pavaoAttestor
(\ s a -> s{_pavaoAttestor = a})
-- | JSONP
pavaoCallback :: Lens' ProjectsAttestorsValidateAttestationOccurrence (Maybe Text)
pavaoCallback
= lens _pavaoCallback
(\ s a -> s{_pavaoCallback = a})
instance GoogleRequest
ProjectsAttestorsValidateAttestationOccurrence
where
type Rs
ProjectsAttestorsValidateAttestationOccurrence
= ValidateAttestationOccurrenceResponse
type Scopes
ProjectsAttestorsValidateAttestationOccurrence
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsAttestorsValidateAttestationOccurrence'{..}
= go _pavaoAttestor _pavaoXgafv _pavaoUploadProtocol
_pavaoAccessToken
_pavaoUploadType
_pavaoCallback
(Just AltJSON)
_pavaoPayload
binaryAuthorizationService
where go
= buildClient
(Proxy ::
Proxy
ProjectsAttestorsValidateAttestationOccurrenceResource)
mempty
| brendanhay/gogol | gogol-binaryauthorization/gen/Network/Google/Resource/BinaryAuthorization/Projects/Attestors/ValidateAttestationOccurrence.hs | mpl-2.0 | 6,488 | 0 | 16 | 1,352 | 780 | 456 | 324 | 126 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.FloodlightActivities.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one floodlight activity by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.floodlightActivities.get@.
module Network.Google.Resource.DFAReporting.FloodlightActivities.Get
(
-- * REST Resource
FloodlightActivitiesGetResource
-- * Creating a Request
, floodlightActivitiesGet
, FloodlightActivitiesGet
-- * Request Lenses
, fXgafv
, fUploadProtocol
, fAccessToken
, fUploadType
, fProFileId
, fId
, fCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.floodlightActivities.get@ method which the
-- 'FloodlightActivitiesGet' request conforms to.
type FloodlightActivitiesGetResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"floodlightActivities" :>
Capture "id" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] FloodlightActivity
-- | Gets one floodlight activity by ID.
--
-- /See:/ 'floodlightActivitiesGet' smart constructor.
data FloodlightActivitiesGet =
FloodlightActivitiesGet'
{ _fXgafv :: !(Maybe Xgafv)
, _fUploadProtocol :: !(Maybe Text)
, _fAccessToken :: !(Maybe Text)
, _fUploadType :: !(Maybe Text)
, _fProFileId :: !(Textual Int64)
, _fId :: !(Textual Int64)
, _fCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FloodlightActivitiesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fXgafv'
--
-- * 'fUploadProtocol'
--
-- * 'fAccessToken'
--
-- * 'fUploadType'
--
-- * 'fProFileId'
--
-- * 'fId'
--
-- * 'fCallback'
floodlightActivitiesGet
:: Int64 -- ^ 'fProFileId'
-> Int64 -- ^ 'fId'
-> FloodlightActivitiesGet
floodlightActivitiesGet pFProFileId_ pFId_ =
FloodlightActivitiesGet'
{ _fXgafv = Nothing
, _fUploadProtocol = Nothing
, _fAccessToken = Nothing
, _fUploadType = Nothing
, _fProFileId = _Coerce # pFProFileId_
, _fId = _Coerce # pFId_
, _fCallback = Nothing
}
-- | V1 error format.
fXgafv :: Lens' FloodlightActivitiesGet (Maybe Xgafv)
fXgafv = lens _fXgafv (\ s a -> s{_fXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
fUploadProtocol :: Lens' FloodlightActivitiesGet (Maybe Text)
fUploadProtocol
= lens _fUploadProtocol
(\ s a -> s{_fUploadProtocol = a})
-- | OAuth access token.
fAccessToken :: Lens' FloodlightActivitiesGet (Maybe Text)
fAccessToken
= lens _fAccessToken (\ s a -> s{_fAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
fUploadType :: Lens' FloodlightActivitiesGet (Maybe Text)
fUploadType
= lens _fUploadType (\ s a -> s{_fUploadType = a})
-- | User profile ID associated with this request.
fProFileId :: Lens' FloodlightActivitiesGet Int64
fProFileId
= lens _fProFileId (\ s a -> s{_fProFileId = a}) .
_Coerce
-- | Floodlight activity ID.
fId :: Lens' FloodlightActivitiesGet Int64
fId = lens _fId (\ s a -> s{_fId = a}) . _Coerce
-- | JSONP
fCallback :: Lens' FloodlightActivitiesGet (Maybe Text)
fCallback
= lens _fCallback (\ s a -> s{_fCallback = a})
instance GoogleRequest FloodlightActivitiesGet where
type Rs FloodlightActivitiesGet = FloodlightActivity
type Scopes FloodlightActivitiesGet =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient FloodlightActivitiesGet'{..}
= go _fProFileId _fId _fXgafv _fUploadProtocol
_fAccessToken
_fUploadType
_fCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy FloodlightActivitiesGetResource)
mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/FloodlightActivities/Get.hs | mpl-2.0 | 5,101 | 0 | 19 | 1,228 | 821 | 474 | 347 | 113 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Support.AddCommunicationToCase
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds additional customer communication to an AWS Support case. You use the 'CaseId' value to identify the case to add communication to. You can list a set of
-- email addresses to copy on the communication using the 'CcEmailAddresses'
-- value. The 'CommunicationBody' value contains the text of the communication.
--
-- The response indicates the success or failure of the request.
--
-- This operation implements a subset of the features of the AWS Support Center.
--
-- <http://docs.aws.amazon.com/awssupport/latest/APIReference/API_AddCommunicationToCase.html>
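--
-- A request-construction sketch using the lenses below (lens operators as
-- re-exported by the library or taken from Control.Lens; the case ID and
-- address are illustrative):
--
-- > addCommunicationToCase "Please close this case."
-- >     & actcCaseId ?~ "case-12345678910-2013-c4c1d2bf33c5cf47"
-- >     & actcCcEmailAddresses .~ ["[email protected]"]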
module Network.AWS.Support.AddCommunicationToCase
(
-- * Request
AddCommunicationToCase
-- ** Request constructor
, addCommunicationToCase
-- ** Request lenses
, actcAttachmentSetId
, actcCaseId
, actcCcEmailAddresses
, actcCommunicationBody
-- * Response
, AddCommunicationToCaseResponse
-- ** Response constructor
, addCommunicationToCaseResponse
-- ** Response lenses
, actcrResult
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Support.Types
import qualified GHC.Exts
data AddCommunicationToCase = AddCommunicationToCase
{ _actcAttachmentSetId :: Maybe Text
, _actcCaseId :: Maybe Text
, _actcCcEmailAddresses :: List "ccEmailAddresses" Text
, _actcCommunicationBody :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'AddCommunicationToCase' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'actcAttachmentSetId' @::@ 'Maybe' 'Text'
--
-- * 'actcCaseId' @::@ 'Maybe' 'Text'
--
-- * 'actcCcEmailAddresses' @::@ ['Text']
--
-- * 'actcCommunicationBody' @::@ 'Text'
--
addCommunicationToCase :: Text -- ^ 'actcCommunicationBody'
-> AddCommunicationToCase
addCommunicationToCase p1 = AddCommunicationToCase
{ _actcCommunicationBody = p1
, _actcCaseId = Nothing
, _actcCcEmailAddresses = mempty
, _actcAttachmentSetId = Nothing
}
-- | The ID of a set of one or more attachments for the communication to add to
-- the case. Create the set by calling 'AddAttachmentsToSet'
actcAttachmentSetId :: Lens' AddCommunicationToCase (Maybe Text)
actcAttachmentSetId =
lens _actcAttachmentSetId (\s a -> s { _actcAttachmentSetId = a })
-- | The AWS Support case ID requested or returned in the call. The case ID is an
-- alphanumeric string formatted as shown in this example: case-/12345678910-2013-c4c1d2bf33c5cf47/
actcCaseId :: Lens' AddCommunicationToCase (Maybe Text)
actcCaseId = lens _actcCaseId (\s a -> s { _actcCaseId = a })
-- | The email addresses in the CC line of an email to be added to the support
-- case.
actcCcEmailAddresses :: Lens' AddCommunicationToCase [Text]
actcCcEmailAddresses =
lens _actcCcEmailAddresses (\s a -> s { _actcCcEmailAddresses = a })
. _List
-- | The body of an email communication to add to the support case.
actcCommunicationBody :: Lens' AddCommunicationToCase Text
actcCommunicationBody =
lens _actcCommunicationBody (\s a -> s { _actcCommunicationBody = a })
newtype AddCommunicationToCaseResponse = AddCommunicationToCaseResponse
{ _actcrResult :: Maybe Bool
} deriving (Eq, Ord, Read, Show)
-- | 'AddCommunicationToCaseResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'actcrResult' @::@ 'Maybe' 'Bool'
--
addCommunicationToCaseResponse :: AddCommunicationToCaseResponse
addCommunicationToCaseResponse = AddCommunicationToCaseResponse
{ _actcrResult = Nothing
}
-- | True if 'AddCommunicationToCase' succeeds. Otherwise, returns an error.
actcrResult :: Lens' AddCommunicationToCaseResponse (Maybe Bool)
actcrResult = lens _actcrResult (\s a -> s { _actcrResult = a })
instance ToPath AddCommunicationToCase where
toPath = const "/"
instance ToQuery AddCommunicationToCase where
toQuery = const mempty
instance ToHeaders AddCommunicationToCase
instance ToJSON AddCommunicationToCase where
toJSON AddCommunicationToCase{..} = object
[ "caseId" .= _actcCaseId
, "communicationBody" .= _actcCommunicationBody
, "ccEmailAddresses" .= _actcCcEmailAddresses
, "attachmentSetId" .= _actcAttachmentSetId
]
instance AWSRequest AddCommunicationToCase where
type Sv AddCommunicationToCase = Support
type Rs AddCommunicationToCase = AddCommunicationToCaseResponse
request = post "AddCommunicationToCase"
response = jsonResponse
instance FromJSON AddCommunicationToCaseResponse where
parseJSON = withObject "AddCommunicationToCaseResponse" $ \o -> AddCommunicationToCaseResponse
<$> o .:? "result"
| dysinger/amazonka | amazonka-support/gen/Network/AWS/Support/AddCommunicationToCase.hs | mpl-2.0 | 5,757 | 0 | 10 | 1,152 | 664 | 401 | 263 | 77 | 1 |
module Gigasecond (fromDay) where
import Data.Time.Calendar (Day, addDays)
fromDay :: Day -> Day
fromDay = addDays gigasecInDays
where gigasecInDays = floor (1e9 / 86400 :: Float)
| mscoutermarsh/exercism_coveralls | assignments/haskell/gigasecond/example.hs | agpl-3.0 | 182 | 0 | 9 | 28 | 60 | 35 | 25 | 5 | 1 |
-- | Minimize the Rosenbrock function (plus a trivial constraint) using
-- the View-based NLP interface.
-- Unfortunately, at the moment the only types here are (JV ) compound types,
-- so the use of Views isn't fully illustrated.
-- todo: comment up the multiple shooting code as an example
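-- For reference: with the bounds used below (x <= 0.5), the solver should land
-- near x = 0.5, y = x^2 = 0.25, since the unconstrained Rosenbrock minimum at
-- (1, 1) is cut off by the upper bound on x (an expectation, not a checked result).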
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
module Main where
import GHC.Generics ( Generic, Generic1 )
import Data.Vector ( Vector )
import qualified Data.Vector as V
import Casadi.MX ( MX )
import Dyno.View.M ( vcat, vsplit )
import Dyno.Vectorize
import Dyno.View.View
import Dyno.Nlp
import Dyno.NlpUtils
import Dyno.Solvers
data X a = X a a deriving (Functor, Generic, Generic1, Show)
data G a = G a deriving (Functor, Generic, Generic1, Show)
instance Applicative X where {pure = vpure; (<*>) = vapply}
instance Applicative G where {pure = vpure; (<*>) = vapply}
instance Vectorize X
instance Vectorize G
myNlp :: Nlp (JV X) (JV None) (JV G) MX
myNlp = Nlp { nlpFG = fg
, nlpIn =
NlpIn
{ nlpBX = bx
, nlpBG = bg
, nlpX0 = x0
, nlpP = catJV None
, nlpLamX0 = Nothing
, nlpLamG0 = Nothing
}
, nlpScaleF = Nothing
, nlpScaleX = Nothing
, nlpScaleG = Nothing
}
where
x0 :: J (JV X) (V.Vector Double)
x0 = catJV $ X (-8) (-8)
bx :: J (JV X) (Vector Bounds)
bx = catJV $
X (Just (-21), Just 0.5)
(Just (-2), Just 2)
bg :: J (JV G) (Vector Bounds)
bg = catJV $ G (Just (-10), Just 10)
fg :: J (JV X) MX -> J (JV None) MX -> (S MX, J (JV G) MX)
fg xy _ = (f, vcat g)
where
f = (1-x)**2 + 100*(y - x**2)**2
g = G x
X x y = vsplit xy
main :: IO ()
main = do
opt <- solveNlp "basic_nlp" ipoptSolver myNlp Nothing
print opt
| ghorn/dynobud | dynobud/examples/BasicNlp.hs | lgpl-3.0 | 1,930 | 0 | 14 | 595 | 668 | 371 | 297 | 50 | 1 |
{-# LANGUAGE TypeFamilies #-}
-- | define a map from Node -> View, to keep track of what's in view.
-- not much in here, but I expect it'll expand in the future.
module Jaek.UI.Views (
ViewMap
,View (..)
,ViewChange (..)
,Zoom (..)
,isWaveView
,iMap
,mapFromTree
,getView
,updateMap
,slideX
,slideY
,zoom
,px2sampleCount
,sampleCount2px
)
where
import Jaek.Base
import Jaek.StreamExpr (getDur)
import Jaek.Tree
import qualified Data.HashMap.Strict as M
import Data.List (foldl')
import Data.Maybe
import Data.Tree
import Data.VectorSpace
import Control.Arrow ((***))
type ViewMap = M.HashMap TreePath View
data ViewChange =
NewDoc
| AddSrc
| MdNode
| ModView View
deriving (Eq, Show)
-- | Information about what's currently in view...
data View =
FullView !Double !Double !Double !Double -- ^ xSize, ySize, xOff, yOff
| WaveView !SampleCount !Duration -- ^ streamOff, streamDur
deriving (Eq, Show)
isWaveView :: View -> Bool
isWaveView (WaveView _ _) = True
isWaveView _ = False
updateMap :: TreeZip -> ViewChange -> ViewMap -> ViewMap
updateMap _z NewDoc _m = iMap
updateMap zp AddSrc mp = addWaveNode zp mp
updateMap zp MdNode mp = addWaveNode zp mp
updateMap zp (ModView view) mp = changeWaveNode zp view mp
iMap :: ViewMap
iMap = mapFromTree $ fromZipper iZip
-- | Return either the view for a node, or the default view
getView :: ViewMap -> HTree -> View
getView m (Node node _) = fromMaybe def $ M.lookup (nodePath node) m
where
def
| node == Root = FullView 1 1 0 0
| otherwise = WaveView 0 $ foldl' max 0 . map getDur $ getExprs' node
-- | Create a new map with a default view for everything in the tree.
mapFromTree :: HTree -> ViewMap
mapFromTree = M.fromList . map uf . flatten
where
uf Root = ([], FullView 1 1 0 0)
uf node = (nodePath node,
WaveView 0 $ foldl' max 0 . map getDur $ getExprs' node)
addWaveNode :: TreeZip -> ViewMap -> ViewMap
addWaveNode tz mp = M.insert (liftT nodePath cur) (WaveView 0 srcdur) mp
where
cur = hole tz
srcdur = foldl' max 0 . map getDur $ getExprs cur
changeWaveNode :: TreeZip -> View -> ViewMap -> ViewMap
changeWaveNode tz v = M.adjust (const v) (liftT nodePath $ hole tz)
slideX :: Double -> View -> View
slideX dist (FullView xs ys xOff yOff) = FullView xs ys (xOff+dist) yOff
slideX dist (WaveView off dur) = WaveView nOff dur
where
nOff = off + round (dist * fI dur)
slideY :: Double -> View -> View
slideY dist (FullView xs ys xOff yOff) = FullView xs ys xOff (yOff+dist)
slideY _ waveView = waveView
-- | Zoom factor.
-- 0 < z < 1 -> zoom in
-- z == 1 -> unchanged
-- z > 1 -> zoom out
data Zoom = Zoom Double deriving (Eq, Show, Ord)
zoom :: Zoom -> View -> View
zoom (Zoom zf) (FullView xs ys xOff yOff) =
let ((xOff', yOff'), (xs', ys')) = zoom' zf (xOff, yOff) (xs,ys)
in FullView xs' ys' xOff' yOff'
zoom (Zoom zf) (WaveView off dur) =
let (off', dur') = (round *** round) $ zoom' zf (fI off :: Double) (fI dur)
in WaveView off' dur'
zoom' :: (VectorSpace v, Scalar v ~ Double) => Scalar v -> v -> v -> (v,v)
zoom' zf p0 d0 =
let d1 = zf *^ d0
p1 = p0 ^+^ (0.5 * (1-zf)) *^ d0
in (p1,d1)
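-- Worked example of the definitions above (a factor of 0.5 halves the visible
-- duration around the view's midpoint, and 2.0 undoes it):
--
--   zoom (Zoom 0.5) (WaveView 0 1000)  == WaveView 250 500
--   zoom (Zoom 2.0) (WaveView 250 500) == WaveView 0 1000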
px2sampleCount :: (Int, Int) -> View -> Double -> SampleCount
px2sampleCount (winX, _winY) (WaveView off dur) x =
off + round (fI dur * (x / fI winX))
px2sampleCount (winX, _winY) (FullView xs _ xOff _) x =
round $ xOff + (xs-xOff) * (x / fI winX)
sampleCount2px :: (Int, Int) -> View -> SampleCount -> Double
sampleCount2px (winX, _winY) (WaveView off dur) x =
fI winX * (fI (x - off) / fI dur)
sampleCount2px (winX, _winY) (FullView xs _ xOff _) x =
fI winX * ((fI x - xOff) / (xs-xOff))
| JohnLato/jaek | src/Jaek/UI/Views.hs | lgpl-3.0 | 3,820 | 0 | 14 | 942 | 1,444 | 759 | 685 | 104 | 2 |
--
-- テスト方法
--
-- ghci> :load Test.hs
-- [2 of 2] Compiling Test ( Test.hs, interpreted )
-- Ok, modules loaded: Test, Hangman.
-- ghci> mapM_ runTestTT test_mostFreqPatternByLetter
module Test
(
test_mostFreqPatternByLetter
, test_patternByLetter
, test_replaceAt
, test_replaceAtByPattern
, test_reduceByPattern
, test_mathchesPattern
, test_cheatHangMan
, test_local
, test_local2
) where
import Test.HUnit
import Hangman
import Data.List( group, sort, maximumBy)
test_cheatHangMan =
[
TestCase(assertEqual "cheatHangMan"
(["jujus", "justs", "jutty", "rusts", "rusty", "rutty", "strut", "sturt", "truss", "trust", "tryst", "tutty", "tutus", "usury", "wurst", "xysts", "yurts"], "*****")
(cheatHangMan ["jujus", "justs", "jutty", "rusts", "rusty", "rutty", "strut", "sturt", "truss", "trust", "tryst", "tutty", "tutus", "usury", "wurst", "xysts", "yurts"] "*****" 'q'))
, TestCase(assertEqual "cheatHangMan"
True
(let wordList' = removeWordsOfWrongLength (length "*****") ["jujus", "justs", "jutty", "rusts", "rusty", "rutty", "strut", "sturt", "truss", "trust", "tryst", "tutty", "tutus", "usury", "wurst", "xysts", "yurts"]
(maxPattern, patternCount) = mostFreqPatternByLetter wordList' 'q'
in (countWordWithoutLetter wordList' 'q') > patternCount))
, TestCase(assertEqual "cheatHangMan"
(length ["jujus", "justs", "jutty", "rusts", "rusty", "rutty", "strut", "sturt", "truss", "trust", "tryst", "tutty", "tutus", "usury", "wurst", "xysts", "yurts"])
(countWordWithoutLetter ["jujus", "justs", "jutty", "rusts", "rusty", "rutty", "strut", "sturt", "truss", "trust", "tryst", "tutty", "tutus", "usury", "wurst", "xysts", "yurts"] 'q'))
]
test_mathchesPattern =
[
TestCase(assertEqual "matchesPattern"
False
(matchesPattern "abcaafg" ('a',[0])))
]
test_reduceByPattern =
[
TestCase(assertEqual "reduceByPattern"
["abcdefg", "abcdefg"]
(reduceByPattern ('a',[0, 3]) [
"abcaefg"
, "abcdefg"
, "abcaefg"
, "abcdefg"]))
, TestCase(assertEqual "reduceByPattern"
["abcaefg", "abcaafg"]
(reduceByPattern ('a',[0]) [
"abcaefg"
, "abcdefg"
, "abcaafg"
, "abcdef@"]))
]
test_replaceAtByPattern =
[
TestCase(assertEqual "replaceAtByPattern"
"@b@defg"
(replaceAtByPattern "abcdefg" ('@',[0, 2])))
, TestCase(assertEqual "replaceAtByPattern"
"@bcdefg"
(replaceAtByPattern "abcdefg" ('@', [0])))
]
test_replaceAt =
[
TestCase(assertEqual "replaceAt"
"ab@defg"
(replaceAt "abcdefg" '@' 2))
, TestCase(assertEqual "replaceAt"
"@bcdefg"
(replaceAt "abcdefg" '@' 0))
]
compfunc = (\l r -> compare (length l) (length r))
maxPatterns [] = []
maxPatterns xs = maximumBy compfunc $ (group . sort) xs {-[ [0,3],[0,3],[0,3],[0,3],.. -}
test_local =
[
TestCase(assertEqual "local"
[[0],[0]]
(maxPatterns $ patternByLetter 'a' [ "acceb" , "accdb" , "bccab", "b" ] ))
]
test_local2 =
[
TestCase(assertEqual "local"
2
(length $ maxPatterns $ patternByLetter 'a' [ "acceb" , "accdb" , "bccab", "b" ] ))
]
test_patternByLetter =
[
TestCase(assertEqual "patternByLetter"
[[0],[0],[3],[]]
(patternByLetter 'a' [ "acceb" , "accdb" , "bccab", "b" ]))
, TestCase(assertEqual "patternByLetter"
[[0, 3],[0, 3],[3],[]]
(patternByLetter 'a' [ "accab" , "accab" , "bccab" , "b"]))
]
test_mostFreqPatternByLetter =
[
TestCase(assertEqual "mostFreqPatternByLetter"
([0], 2)
(mostFreqPatternByLetter [ "acceb" , "accdb" , "bccab", "b" ] 'a'))
, TestCase(assertEqual "mostFreqPatternByLetter"
([0, 3], 2)
(mostFreqPatternByLetter [ "accab" , "accab" , "bccab" , "b"] 'a'))
, TestCase(assertEqual "mostFreqPatternByLetter"
([], 0)
(mostFreqPatternByLetter ["jujus", "justs", "jutty", "rusts", "rusty", "rutty", "strut", "sturt", "truss", "trust", "tryst", "tutty", "tutus", "usury", "wurst", "xysts", "yurts"] 'q'))
]
| yamanobori-old/CheatHangman | Test.hs | unlicense | 4,528 | 0 | 16 | 1,268 | 1,221 | 721 | 500 | 96 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Language.Gamma.Parser where
import Control.Applicative
import Control.Lens
import Data.Proxy
import qualified Data.HashSet as HashSet
import Text.Trifecta
import Text.Parser.Token.Style
import Language.Gamma.Types
import Language.Gamma.Parser.Commented
data GammaStyle
type GammaParser a = Commented GammaStyle Parser a
instance CommentedStyle (GammaStyle) where
commentedStyle _ = javaCommentStyle & commentNesting .~ True
ids = emptyIdents & styleReserved .~ HashSet.fromList ["let", "return", "cint", "()"]
ops = emptyOps & styleReserved .~ HashSet.fromList ["=", ":"]
pSym = ident ids
pDecl :: GammaParser (GammaDecl () ())
pDecl = (reserve ids "let" *> decltype) <*> (reserve ops "=" *> pExpr <* symbol ";")
where decltype = try (FunDecl () <$> pSym <*> parens (commaSep pBind) <*> option Nothing (Just <$> (reserve ops ":" *> pType)))
<|> VarDecl () <$> pBind
pBind = TypeBind () <$> try (pSym <* reserve ops ":") <*> pType
<|> PlainBind () <$> pSym
pStmt = DeclStmt () <$> pDecl
<|> ExprStmt () <$> (pExpr <* symbol ";")
pType = PrimType () <$> pPrimType
pPrimType = CInt <$ reserve ids "cint"
<|> Unit <$ reserve ids "()"
pExpr = try (ApplyExpr () <$> pExprAtom <*> parens (commaSep pExpr))
<|> pExprAtom
pExprAtom = LitExpr () <$> pLit
<|> SymExpr () <$> pSym
<|> parens pExpr
<|> braces ( CompoundExpr () <$> many (try pStmt) <*> optionalRet)
where optionalRet = maybe (LitExpr () UnitLit) id <$> optional pExpr
pLit = IntLit <$> integer'
<|> UnitLit <$ reserve ids "()"
| agrif/gammac | src/Language/Gamma/Parser.hs | apache-2.0 | 1,637 | 0 | 17 | 338 | 576 | 297 | 279 | -1 | -1 |
module Foundation where
import Database.Persist.Sql (ConnectionPool, runSqlPool)
import Import.NoFoundation
import Text.Hamlet (hamletFile)
-- Used only when in "auth-dummy-login" setting is enabled.
import Yesod.Auth.Dummy
import Yesod.Auth.OpenId (authOpenId, IdentifierType (Claimed))
import qualified Yesod.Core.Unsafe as Unsafe
import Yesod.Core.Types (Logger)
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.Fay
import qualified Data.CaseInsensitive as CI
import qualified Data.Text.Encoding as TE
-- | The foundation datatype for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
{ appSettings :: AppSettings
, appStatic :: Static -- ^ Settings for static file serving.
, appConnPool :: ConnectionPool -- ^ Database connection pool.
, appHttpManager :: Manager
, appLogger :: Logger
, appFayCommandHandler :: CommandHandler App
}
data MenuItem = MenuItem
{ menuItemLabel :: Text
, menuItemRoute :: Route App
, menuItemAccessCallback :: Bool
}
data MenuTypes
= NavbarLeft MenuItem
| NavbarRight MenuItem
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the following documentation
-- for an explanation for this split:
-- http://www.yesodweb.com/book/scaffolding-and-the-site-template#scaffolding-and-the-site-template_foundation_and_application_modules
--
-- This function also generates the following type synonyms:
-- type Handler = HandlerT App IO
-- type Widget = WidgetT App IO ()
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | A convenient synonym for creating forms.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
-- Controls the base of generated URLs. For more information on modifying,
-- see: https://github.com/yesodweb/yesod/wiki/Overriding-approot
approot = ApprootRequest $ \app req ->
case appRoot $ appSettings app of
Nothing -> getApprootText guessApproot app req
Just root -> root
-- Store session data on the client in encrypted cookies,
-- default session idle timeout is 120 minutes
makeSessionBackend _ = Just <$> defaultClientSessionBackend
120 -- timeout in minutes
"config/client_session_key.aes"
-- Yesod Middleware allows you to run code before and after each handler function.
-- The defaultYesodMiddleware adds the response header "Vary: Accept, Accept-Language" and performs authorization checks.
-- Some users may also want to add the defaultCsrfMiddleware, which:
-- a) Sets a cookie with a CSRF token in it.
-- b) Validates that incoming write requests include that token in either a header or POST parameter.
-- To add it, chain it together with the defaultMiddleware: yesodMiddleware = defaultYesodMiddleware . defaultCsrfMiddleware
-- For details, see the CSRF documentation in the Yesod.Core.Handler module of the yesod-core package.
yesodMiddleware = defaultYesodMiddleware
defaultLayout widget = do
master <- getYesod
mmsg <- getMessage
muser <- maybeAuthPair
mcurrentRoute <- getCurrentRoute
-- Get the breadcrumbs, as defined in the YesodBreadcrumbs instance.
(title, parents) <- breadcrumbs
-- Define the menu items of the header.
let menuItems =
[ NavbarLeft $ MenuItem
{ menuItemLabel = "Home"
, menuItemRoute = HomeR
, menuItemAccessCallback = True
}
, NavbarLeft $ MenuItem
{ menuItemLabel = "Contact"
, menuItemRoute = ContactR
, menuItemAccessCallback = True
}
, NavbarLeft $ MenuItem
{ menuItemLabel = "Profile"
, menuItemRoute = ProfileR
, menuItemAccessCallback = isJust muser
}
, NavbarRight $ MenuItem
{ menuItemLabel = "Login"
, menuItemRoute = AuthR LoginR
, menuItemAccessCallback = isNothing muser
}
, NavbarRight $ MenuItem
{ menuItemLabel = "Logout"
, menuItemRoute = AuthR LogoutR
, menuItemAccessCallback = isJust muser
}
]
let navbarLeftMenuItems = [x | NavbarLeft x <- menuItems]
let navbarRightMenuItems = [x | NavbarRight x <- menuItems]
let navbarLeftFilteredMenuItems = [x | x <- navbarLeftMenuItems, menuItemAccessCallback x]
let navbarRightFilteredMenuItems = [x | x <- navbarRightMenuItems, menuItemAccessCallback x]
-- We break up the default layout into two components:
-- default-layout is the contents of the body tag, and
-- default-layout-wrapper is the entire page. Since the final
-- value passed to hamletToRepHtml cannot be a widget, this allows
-- you to use normal widget features in default-layout.
pc <- widgetToPageContent $ do
addStylesheet $ StaticR css_bootstrap_css
$(widgetFile "default-layout")
withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
-- The page to be redirected to when authentication is required.
authRoute _ = Just $ AuthR LoginR
-- Routes not requiring authentication.
isAuthorized (AuthR _) _ = return Authorized
isAuthorized HomeR _ = return Authorized
isAuthorized ContactR _ = return Authorized
isAuthorized FaviconR _ = return Authorized
isAuthorized RobotsR _ = return Authorized
isAuthorized (StaticR _) _ = return Authorized
isAuthorized (FaySiteR _) _ = return Authorized
isAuthorized ProfileR _ = isAuthenticated
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent ext mime content = do
master <- getYesod
let staticDir = appStaticDir $ appSettings master
addStaticContentExternal
Right
genFileName
staticDir
(StaticR . flip StaticRoute [])
ext
mime
content
where
-- Generate a unique filename based on the content itself
genFileName lbs = "autogen-" ++ base64md5 lbs
-- What messages should be logged. The following includes all messages when
-- in development, and warnings and errors in production.
shouldLog app _source level =
appShouldLogAll (appSettings app)
|| level == LevelWarn
|| level == LevelError
makeLogger = return . appLogger
-- Provide proper Bootstrap styling for default displays, like
-- error pages
defaultMessageWidget title body = $(widgetFile "default-message-widget")
instance YesodJquery App
instance YesodFay App where
fayRoute = FaySiteR
yesodFayCommand render command = do
master <- getYesod
appFayCommandHandler master render command
-- Define breadcrumbs.
instance YesodBreadcrumbs App where
breadcrumb HomeR = return ("Home", Nothing)
breadcrumb (AuthR _) = return ("Login", Just HomeR)
breadcrumb ProfileR = return ("Profile", Just HomeR)
breadcrumb _ = return ("home", Nothing)
-- How to run database actions.
instance YesodPersist App where
type YesodPersistBackend App = SqlBackend
runDB action = do
master <- getYesod
runSqlPool action $ appConnPool master
instance YesodPersistRunner App where
getDBRunner = defaultGetDBRunner appConnPool
instance YesodAuth App where
type AuthId App = UserId
-- Where to send a user after successful login
loginDest _ = HomeR
-- Where to send a user after logout
logoutDest _ = HomeR
-- Override the above two destinations when a Referer: header is present
redirectToReferer _ = True
authenticate creds = runDB $ do
x <- getBy $ UniqueUser $ credsIdent creds
case x of
Just (Entity uid _) -> return $ Authenticated uid
Nothing -> Authenticated <$> insert User
{ userIdent = credsIdent creds
, userPassword = Nothing
}
-- You can add other plugins like Google Email, email or OAuth here
authPlugins app = [authOpenId Claimed []] ++ extraAuthPlugins
-- Enable authDummy login if enabled.
where extraAuthPlugins = [authDummy | appAuthDummyLogin $ appSettings app]
authHttpManager = getHttpManager
-- | Access function to determine if a user is logged in.
isAuthenticated :: Handler AuthResult
isAuthenticated = do
muid <- maybeAuthId
return $ case muid of
Nothing -> Unauthorized "You must login to access this page"
Just _ -> Authorized
instance YesodAuthPersist App
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
-- Useful when writing code that is re-usable outside of the Handler context.
-- An example is background jobs that send email.
-- This can also be useful for writing code that works across multiple Yesod applications.
instance HasHttpManager App where
getHttpManager = appHttpManager
unsafeHandler :: App -> Handler a -> IO a
unsafeHandler = Unsafe.fakeHandlerGetLogger appLogger
-- Note: Some functionality previously present in the scaffolding has been
-- moved to documentation in the Wiki. Following are some hopefully helpful
-- links:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
-- https://github.com/yesodweb/yesod/wiki/Serve-static-files-from-a-separate-domain
-- https://github.com/yesodweb/yesod/wiki/i18n-messages-in-the-scaffolding
| Reyu/CanidComics | Foundation.hs | apache-2.0 | 10,709 | 0 | 16 | 2,782 | 1,528 | 824 | 704 | -1 | -1 |
-- | Miscellaneous general functions and Show, Eq, and Ord instances for PortID
{-# LANGUAGE FlexibleInstances, UndecidableInstances, StandaloneDeriving #-}
module Database.MongoDB.Internal.Util where
import Control.Applicative (Applicative(..), (<$>))
import Network (PortID(..))
import Data.UString as U (cons, append)
import Data.Bits (Bits, (.|.))
import Data.Bson
import Data.ByteString.Lazy as S (ByteString, length, append, hGet)
import System.IO (Handle)
import System.IO.Error (mkIOError, eofErrorType)
import Control.Exception (assert)
import Control.Monad.Error
import Control.Arrow (left)
import qualified Data.ByteString as BS (ByteString, unpack)
import Data.Word (Word8)
import Numeric (showHex)
import System.Random.Shuffle (shuffle')
import System.Random (newStdGen)
import Data.List as L (length)
deriving instance Show PortID
deriving instance Eq PortID
deriving instance Ord PortID
-- | MonadIO with extra Applicative and Functor superclasses
class (MonadIO m, Applicative m, Functor m) => MonadIO' m
instance (MonadIO m, Applicative m, Functor m) => MonadIO' m
shuffle :: [a] -> IO [a]
-- ^ Randomly shuffle items in list
shuffle list = shuffle' list (L.length list) <$> newStdGen
loop :: (Functor m, Monad m) => m (Maybe a) -> m [a]
-- ^ Repeatedly execute action, collecting results, until it returns Nothing
loop act = act >>= maybe (return []) (\a -> (a :) <$> loop act)
untilSuccess :: (MonadError e m, Error e) => (a -> m b) -> [a] -> m b
-- ^ Apply action to elements one at a time until one succeeds. Throw last error if all fail. Throw 'strMsg' error if list is empty.
untilSuccess = untilSuccess' (strMsg "empty untilSuccess")
untilSuccess' :: (MonadError e m) => e -> (a -> m b) -> [a] -> m b
-- ^ Apply action to elements one at a time until one succeeds. Throw last error if all fail. Throw given error if list is empty
untilSuccess' e _ [] = throwError e
untilSuccess' _ f (x : xs) = catchError (f x) (\e -> untilSuccess' e f xs)
whenJust :: (Monad m) => Maybe a -> (a -> m ()) -> m ()
whenJust mVal act = maybe (return ()) act mVal
liftIOE :: (MonadIO m) => (e -> e') -> ErrorT e IO a -> ErrorT e' m a
-- ^ lift IOE monad to ErrorT monad over some MonadIO m
liftIOE f = ErrorT . liftIO . fmap (left f) . runErrorT
runIOE :: ErrorT IOError IO a -> IO a
-- ^ Run action while catching explicit error and rethrowing in IO monad
runIOE (ErrorT action) = action >>= either ioError return
updateAssocs :: (Eq k) => k -> v -> [(k, v)] -> [(k, v)]
-- ^ Change or insert value of key in association list
updateAssocs key valu assocs = case back of [] -> (key, valu) : front; _ : back' -> front ++ (key, valu) : back'
where (front, back) = break ((key ==) . fst) assocs
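-- Worked examples (illustrative):
--   updateAssocs 'b' 9 [('a',1),('b',2),('c',3)] == [('a',1),('b',9),('c',3)]
--   updateAssocs 'z' 9 [('a',1)] == [('z',9),('a',1)]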
bitOr :: (Bits a) => [a] -> a
-- ^ bit-or all numbers together
bitOr = foldl (.|.) 0
(<.>) :: UString -> UString -> UString
-- ^ Concat first and second together with period in between. Eg. @\"hello\" \<.\> \"world\" = \"hello.world\"@
a <.> b = U.append a (cons '.' b)
true1 :: Label -> Document -> Bool
-- ^ Is field's value a 1 or True (MongoDB uses both Int and Bools for truth values). Error if field not in document or field not a Num or Bool.
true1 k doc = case valueAt k doc of
Bool b -> b
Float n -> n == 1
Int32 n -> n == 1
Int64 n -> n == 1
_ -> error $ "expected " ++ show k ++ " to be Num or Bool in " ++ show doc
hGetN :: Handle -> Int -> IO ByteString
-- ^ Read N bytes from handle, blocking until all N bytes are read. If EOF is reached before N bytes then raise EOF exception.
hGetN h n = assert (n >= 0) $ do
bytes <- hGet h n
let x = fromEnum $ S.length bytes
if x >= n then return bytes
else if x == 0 then ioError (mkIOError eofErrorType "hGetN" (Just h) Nothing)
else S.append bytes <$> hGetN h (n - x)
byteStringHex :: BS.ByteString -> String
-- ^ Hexadecimal string representation of a byte string. Each byte yields two hexadecimal characters.
byteStringHex = concatMap byteHex . BS.unpack
byteHex :: Word8 -> String
-- ^ Two char hexadecimal representation of byte
byteHex b = (if b < 16 then ('0' :) else id) (showHex b "")
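-- Illustrative values:
--   byteHex 10 == "0a", byteHex 255 == "ff"
--   byteStringHex of the two bytes 0x00 0xff == "00ff"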
| mongodb/mongoDB-haskell | Database/MongoDB/Internal/Util.hs | apache-2.0 | 4,076 | 4 | 14 | 774 | 1,315 | 710 | 605 | -1 | -1 |
module HSH.ShellStateSpec (spec) where
import Test.Hspec
import Test.QuickCheck
import Control.Monad.State
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Maybe
import HSH.MonitoredDirectory
import HSH.ShellState
genChar :: Gen Char
genChar = elements ['\0' .. '\xff']
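-- | Generate an arbitrary name that is not already bound in the given shell
-- state's environment.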
genUndefinedEnvVar :: ShellState -> Gen String
genUndefinedEnvVar ShellState{envVars = env} =
listOf genChar `suchThat` (\str -> not (str `Set.member` definedEnvVars))
where
definedEnvVars = Set.fromList $ Map.keys env
spec :: Spec
spec = do
describe "setEnv" $
it "sets an environment variable" $ property $
\name val -> setEnv name val defaultShellState{envVars = Map.empty} == defaultShellState{envVars = Map.singleton name val}
describe "getEnv" $ do
it "retrieves environment variables set by setEnv" $ property $
\name val -> getEnv name (setEnv name val defaultShellState) == Just val
it "returns Nothing if an environment variable is unset" $ forAll (genUndefinedEnvVar defaultShellState) $
\name -> isNothing $ getEnv name defaultShellState
describe "shellPrompt" $ do
it "has a sensible default in case the PROMPT env var is unset." $
shellPrompt defaultShellState { envVars = Map.empty } `shouldBe` "Prompt Undefined > "
it "bases the shell prompt on what is in the environment" $ property $
\promptstr -> shellPrompt (setEnv "PROMPT" promptstr defaultShellState) == promptstr ++ " "
describe "resolveExecutable" $ do
let testShellState = defaultShellState {
pathDirs = [
MonitoredDirectory "/sbin" 0 $ Map.singleton "s.bad" $ QualifiedFilePath "/sbin/s.bad",
MonitoredDirectory "/bin" 0 $ Map.singleton "thecheat" $ QualifiedFilePath "/bin/thecheat"
]
}
it "returns the original command if the lookup table is empty" $
resolveExecutable defaultShellState "foobar" `shouldBe` "foobar"
it "returns the original command if no command is found in the lookup table" $
resolveExecutable testShellState "foobar" `shouldBe` "foobar"
it "returns the fully-qualified path for a command if it is found in the lookup table" $
resolveExecutable testShellState "thecheat" `shouldBe` "/bin/thecheat"
| jessekempf/hsh | test/HSH/ShellStateSpec.hs | bsd-2-clause | 2,240 | 0 | 19 | 433 | 539 | 274 | 265 | 41 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QSize.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:31
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QSize (
QqqSize(..), QqSize(..)
,QqqSize_nf(..), QqSize_nf(..)
,qSize_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
class QqqSize x1 where
qqSize :: x1 -> IO (QSize ())
class QqSize x1 where
qSize :: x1 -> IO (QSize ())
instance QqSize (()) where
qSize ()
= withQSizeResult $
qtc_QSize
foreign import ccall "qtc_QSize" qtc_QSize :: IO (Ptr (TQSize ()))
instance QqqSize ((QSize t1)) where
qqSize (x1)
= withQSizeResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSize1 cobj_x1
foreign import ccall "qtc_QSize1" qtc_QSize1 :: Ptr (TQSize t1) -> IO (Ptr (TQSize ()))
instance QqSize ((Size)) where
qSize (x1)
= withQSizeResult $
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QSize2 csize_x1_w csize_x1_h
foreign import ccall "qtc_QSize2" qtc_QSize2 :: CInt -> CInt -> IO (Ptr (TQSize ()))
instance QqSize ((Int, Int)) where
qSize (x1, x2)
= withQSizeResult $
qtc_QSize3 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSize3" qtc_QSize3 :: CInt -> CInt -> IO (Ptr (TQSize ()))
class QqqSize_nf x1 where
qqSize_nf :: x1 -> IO (QSize ())
class QqSize_nf x1 where
qSize_nf :: x1 -> IO (QSize ())
instance QqSize_nf (()) where
qSize_nf ()
= withObjectRefResult $
qtc_QSize
instance QqqSize_nf ((QSize t1)) where
qqSize_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSize1 cobj_x1
instance QqSize_nf ((Size)) where
qSize_nf (x1)
= withObjectRefResult $
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QSize2 csize_x1_w csize_x1_h
instance QqSize_nf ((Int, Int)) where
qSize_nf (x1, x2)
= withObjectRefResult $
qtc_QSize3 (toCInt x1) (toCInt x2)
instance QqqboundedTo (QSize a) ((QSize t1)) (IO (QSize ())) where
qqboundedTo x0 (x1)
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSize_boundedTo cobj_x0 cobj_x1
foreign import ccall "qtc_QSize_boundedTo" qtc_QSize_boundedTo :: Ptr (TQSize a) -> Ptr (TQSize t1) -> IO (Ptr (TQSize ()))
instance QqboundedTo (QSize a) ((Size)) (IO (Size)) where
qboundedTo x0 (x1)
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QSize_boundedTo_qth cobj_x0 csize_x1_w csize_x1_h csize_ret_w csize_ret_h
foreign import ccall "qtc_QSize_boundedTo_qth" qtc_QSize_boundedTo_qth :: Ptr (TQSize a) -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance QqqexpandedTo (QSize a) ((QSize t1)) (IO (QSize ())) where
qqexpandedTo x0 (x1)
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSize_expandedTo cobj_x0 cobj_x1
foreign import ccall "qtc_QSize_expandedTo" qtc_QSize_expandedTo :: Ptr (TQSize a) -> Ptr (TQSize t1) -> IO (Ptr (TQSize ()))
instance QqexpandedTo (QSize a) ((Size)) (IO (Size)) where
qexpandedTo x0 (x1)
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QSize_expandedTo_qth cobj_x0 csize_x1_w csize_x1_h csize_ret_w csize_ret_h
foreign import ccall "qtc_QSize_expandedTo_qth" qtc_QSize_expandedTo_qth :: Ptr (TQSize a) -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance Qqheight (QSize a) (()) (IO (Int)) where
qheight x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_height cobj_x0
foreign import ccall "qtc_QSize_height" qtc_QSize_height :: Ptr (TQSize a) -> IO CInt
instance QqisEmpty (QSize a) (()) where
qisEmpty x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_isEmpty cobj_x0
foreign import ccall "qtc_QSize_isEmpty" qtc_QSize_isEmpty :: Ptr (TQSize a) -> IO CBool
instance QqisNull (QSize a) (()) where
qisNull x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_isNull cobj_x0
foreign import ccall "qtc_QSize_isNull" qtc_QSize_isNull :: Ptr (TQSize a) -> IO CBool
instance QqisValid (QSize ()) (()) where
qisValid x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_isValid cobj_x0
foreign import ccall "qtc_QSize_isValid" qtc_QSize_isValid :: Ptr (TQSize a) -> IO CBool
instance QqisValid (QSizeSc a) (()) where
qisValid x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_isValid cobj_x0
instance Qqscale (QSize a) ((Int, Int, AspectRatioMode)) where
qscale x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_scale1 cobj_x0 (toCInt x1) (toCInt x2) (toCLong $ qEnum_toInt x3)
foreign import ccall "qtc_QSize_scale1" qtc_QSize_scale1 :: Ptr (TQSize a) -> CInt -> CInt -> CLong -> IO ()
instance Qqqscale (QSize a) ((QSize t1, AspectRatioMode)) where
qqscale x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSize_scale cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QSize_scale" qtc_QSize_scale :: Ptr (TQSize a) -> Ptr (TQSize t1) -> CLong -> IO ()
instance Qqscale (QSize a) ((Size, AspectRatioMode)) where
qscale x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withCSize x1 $ \csize_x1_w csize_x1_h ->
qtc_QSize_scale_qth cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QSize_scale_qth" qtc_QSize_scale_qth :: Ptr (TQSize a) -> CInt -> CInt -> CLong -> IO ()
instance QqsetHeight (QSize a) ((Int)) where
qsetHeight x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_setHeight cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSize_setHeight" qtc_QSize_setHeight :: Ptr (TQSize a) -> CInt -> IO ()
instance QqsetWidth (QSize a) ((Int)) where
qsetWidth x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_setWidth cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSize_setWidth" qtc_QSize_setWidth :: Ptr (TQSize a) -> CInt -> IO ()
instance Qqtranspose (QSize a) (()) where
qtranspose x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_transpose cobj_x0
foreign import ccall "qtc_QSize_transpose" qtc_QSize_transpose :: Ptr (TQSize a) -> IO ()
instance Qqwidth (QSize a) (()) (IO (Int)) where
qwidth x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_width cobj_x0
foreign import ccall "qtc_QSize_width" qtc_QSize_width :: Ptr (TQSize a) -> IO CInt
qSize_delete :: QSize a -> IO ()
qSize_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSize_delete cobj_x0
foreign import ccall "qtc_QSize_delete" qtc_QSize_delete :: Ptr (TQSize a) -> IO ()
| uduki/hsQt | Qtc/Core/QSize.hs | bsd-2-clause | 7,033 | 0 | 13 | 1,304 | 2,478 | 1,275 | 1,203 | -1 | -1 |
module Drasil.SWHS.References (citations, bueche1986, incroperaEtAl2007, koothoor2013, lightstone2012,
parnasClements1986, parnas1972, parnasClements1984, smithLai2005) where
import Language.Drasil
import Data.Drasil.People (jBueche, fIncropera, dDewitt, tBergman, aLavine,
mLightstone)
import Data.Drasil.Citations (koothoor2013, parnasClements1986, smithLai2005, parnas1972, parnasClements1984)
----------------------------
-- Section 9 : References --
----------------------------
citations :: BibRef
citations = [bueche1986, incroperaEtAl2007, koothoor2013, lightstone2012, parnasClements1986,
smithLai2005, parnas1972, parnasClements1984]
bueche1986, incroperaEtAl2007, lightstone2012 :: Citation
bueche1986 = cBookA [jBueche]
"Introduction to Physics for Scientists"
"McGraw Hill" 1986
[edition 4, address "New York City, New York"]
"bueche1986"
incroperaEtAl2007 = cBookA [fIncropera, dDewitt, tBergman, aLavine]
"Fundamentals of Heat and Mass Transfer"
"John Wiley and Sons" 2007
[edition 6, address "Hoboken, New Jersey"]
"incroperaEtAl2007"
lightstone2012 = cMisc [
author [mLightstone],
title "Derivation of tank/pcm model",
year 2012,
note "From Marilyn Lightstone's Personal Notes"]
"lightstone2012"
| JacquesCarette/literate-scientific-software | code/drasil-example/Drasil/SWHS/References.hs | bsd-2-clause | 1,255 | 0 | 8 | 163 | 248 | 150 | 98 | 26 | 1 |
import Distribution.Simple
import Distribution.Simple.PreProcess
import Distribution.Simple.Utils
import Distribution.PackageDescription
import Distribution.Simple.LocalBuildInfo
import Data.Char
import System.Exit
import System.IO
import System.Process
import System.Directory
main = let hooks = simpleUserHooks
lhs2TeX = ("lhs", pplhs2TeX)
in defaultMainWithHooks hooks { hookedPreProcessors = lhs2TeX:knownSuffixHandlers }
lhs2TeXcustom :: String
lhs2TeXcustom = "lhs2TeX/custom.fmt"
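-- | Preprocess a .lhs source by prepending the custom lhs2TeX format file and
-- piping the result through @lhs2TeX --newcode@.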
pplhs2TeX :: BuildInfo -> LocalBuildInfo -> PreProcessor
pplhs2TeX build local =
PreProcessor {
platformIndependent = True,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
notice verbosity (inFile ++ " is being preprocessed to " ++ outFile)
header <- readFile lhs2TeXcustom
notice verbosity ("read contents from " ++ lhs2TeXcustom)
source <- readFile inFile
(Just hin, Just hout, _, _) <- createProcess (proc "lhs2TeX" ["--newcode"])
{ std_in = CreatePipe
, std_out = CreatePipe }
hPutStr hin header
notice verbosity ("dumped file " ++ lhs2TeXcustom ++ " in " ++ outFile)
hPutStr hin source
notice verbosity ("dumped file " ++ inFile ++ " in " ++ outFile)
target <- hGetContents hout
notice verbosity "done"
writeFile outFile target
}
| carlostome/uu-parsinglib-tyerr | Setup.hs | bsd-3-clause | 1,460 | 0 | 16 | 373 | 351 | 181 | 170 | 34 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-----------------------------------------------------------------------------
-- |
-- Module : Hadron.Run.FanOut
-- Copyright : Soostone Inc, 2015
-- License : BSD3
--
-- Maintainer : Ozgun Ataman
-- Stability : experimental
--
-- Haskell-native ability to stream output to multiple files in Hadoop
-- Streaming.
----------------------------------------------------------------------------
module Hadron.Run.FanOut
( FanOut
, mkFanOut
, fanWrite
, fanClose
, fanCloseAll
, FanOutSink
, sinkFanOut
, sequentialSinkFanout
, fanStats
) where
-------------------------------------------------------------------------------
import Control.Concurrent.MVar
import Control.Lens
import Control.Monad
import Control.Monad.Trans
import qualified Data.ByteString.Char8 as B
import Data.Conduit
import qualified Data.Conduit.List as C
import qualified Data.Map.Strict as M
import Data.Monoid
import System.FilePath.Lens
import System.IO
-------------------------------------------------------------------------------
import Hadron.Utils
-------------------------------------------------------------------------------
-- | An open file handle
data FileHandle = FileHandle {
_fhHandle :: !Handle
, _fhFin :: IO ()
, _fhPath :: !FilePath
, _fhCount :: !Int
, _fhPendingCount :: !Int
}
makeLenses ''FileHandle
-- | Concurrent multi-output manager.
data FanOut = FanOut {
_fanFiles :: MVar (M.Map FilePath FileHandle)
, _fanCreate :: FilePath -> IO (Handle, IO ())
}
makeLenses ''FanOut
data FileChunk = FileChunk {
_chunkOrig :: !FilePath
, _chunkTarget :: !FilePath
, _chunkCnt :: !Int
}
makeLenses ''FileChunk
-------------------------------------------------------------------------------
-- | Make a new fanout manager that will use given process creator.
-- Process is expected to pipe its stdin into the desired location.
mkFanOut
:: (FilePath -> IO (Handle, IO ()))
-- ^ Open a handle for a given target path
-> IO FanOut
mkFanOut f = FanOut <$> newMVar M.empty <*> pure f
-------------------------------------------------------------------------------
-- | Write into a file. A new process will be spawned if this is the
-- first time writing into this file.
fanWrite :: FanOut -> FilePath -> B.ByteString -> IO ()
fanWrite fo fp bs = modifyMVar_ (fo ^. fanFiles) go
where
go !m | Just fh <- M.lookup fp m = do
B.hPut (fh ^. fhHandle) bs
let newCount = fh ^. fhPendingCount + B.length bs
upFun <- case (newCount >= chunk) of
True -> do
hFlush (fh ^. fhHandle)
return $ fhPendingCount .~ 0
False -> return $ fhPendingCount .~ newCount
return $! M.insert fp (fh & upFun . (fhCount %~ (+1))) m
go !m = do
(r, p) <- (fo ^. fanCreate) fp
go $! M.insert fp (FileHandle r p fp 0 0) m
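    -- Per-file flush threshold: buffered writes are flushed to the handle once
    -- at least this many bytes are pending for the file.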
chunk = 1024 * 4
-------------------------------------------------------------------------------
closeHandle :: FileHandle -> IO ()
closeHandle fh = do
hFlush $ fh ^. fhHandle
hClose $ fh ^. fhHandle
fh ^. fhFin
-------------------------------------------------------------------------------
-- | Close a specific file.
fanClose :: FanOut -> FilePath -> IO ()
fanClose fo fp = modifyMVar_ (fo ^. fanFiles) $ \ m -> case m ^. at fp of
Nothing -> return m
Just fh -> do
closeHandle fh
return $! m & at fp .~ Nothing
-------------------------------------------------------------------------------
-- | Close all files. The same FanOut can be used after this, which
-- would spawn new processes to write into files.
fanCloseAll :: FanOut -> IO ()
fanCloseAll fo = modifyMVar_ (fo ^. fanFiles) $ \m -> do
forM_ (M.toList m) $ \ (_fp, fh) -> closeHandle fh
return M.empty
-------------------------------------------------------------------------------
-- | Grab # of writes into each file so far.
fanStats :: FanOut -> IO (M.Map FilePath Int)
fanStats fo = do
m <- modifyMVar (fo ^. fanFiles) $ \ m -> return (m,m)
return $ M.map (^. fhCount) m
-------------------------------------------------------------------------------
-- | Sink a stream into 'FanOut'.
sinkFanOut :: FanOutSink
sinkFanOut dispatch conv fo = C.foldM go 0
where
go !i a = do
bs <- conv a
liftIO (fanWrite fo (dispatch a) bs)
return $! i + 1
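-- An illustrative (hypothetical) use, assuming the caller supplies some
-- @target :: a -> FilePath@ and @render :: a -> m ByteString@:
--
-- > C.sourceList xs $$ sinkFanOut target render fo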
-------------------------------------------------------------------------------
-- | A fanout that keeps only a single file open at a time. Each time
-- the target filename changes, this will close/finalize the file and
-- start the new file.
sequentialSinkFanout :: FanOutSink
sequentialSinkFanout dispatch conv fo =
liftM fst $ C.foldM go (0, Nothing)
where
go (!i, !chunk0) a = do
bs <- conv a
let fp = dispatch a
let goNew = do
tk <- liftIO (randomToken 16)
let fp' = fp & basename %~ (<> "_" <> tk)
liftIO $ fanWrite fo fp' bs
return $! (i+1, Just (FileChunk fp fp' (B.length bs)))
case chunk0 of
Nothing -> goNew
Just c@FileChunk{..} -> case fp == _chunkOrig of
False -> do
liftIO $ fanClose fo _chunkTarget
goNew
True -> do
liftIO $ fanWrite fo _chunkTarget bs
return $! (i+1, Just $! c & chunkCnt %~ (+ (B.length bs)))
type FanOutSink = forall a m. MonadIO m => (a -> FilePath) -> (a -> m B.ByteString) -> FanOut -> Consumer a m Int
-------------------------------------------------------------------------------
test :: IO ()
test = do
fo <- mkFanOut
(\ fp -> (,) <$> openFile fp AppendMode <*> pure (return ()))
fanWrite fo "test1" "foo"
fanWrite fo "test1" "bar"
fanWrite fo "test1" "tak"
print =<< fanStats fo
fanCloseAll fo
fanWrite fo "test1" "tak"
| fpinsight/hadron | src/Hadron/Run/FanOut.hs | bsd-3-clause | 6,338 | 0 | 23 | 1,618 | 1,526 | 787 | 739 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
import Database.Persist
import Database.Persist.Sqlite
import Model
main :: IO ()
main = runSqlite ":memory:" $ runMigration migrateAll
| amir/ghc-dashboard | src/Migrate.hs | bsd-3-clause | 174 | 1 | 6 | 24 | 44 | 22 | 22 | 6 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module QuickCover.GroupRec where
import Data.Function (on)
import Data.Set as Set
concatSet :: Ord a => Set (Set a) -> Set a
concatSet = Set.foldr Set.union Set.empty
splitBy :: Ord a => (a -> a -> Bool) -> [a] -> Set [a]
splitBy _ [] = empty
splitBy eq (x : xs) = (x : takeWhile (not . eq x) xs)
`insert`
splitBy eq (dropWhile (not . eq x) xs)
groupRecBy :: Ord a => (a -> a -> Bool) -> [a] -> Set [a]
groupRecBy eq = concatSet . Set.map subgroup . splitBy eq
where
subgroup [] = error "Impossible!"
subgroup (x:[]) = Set.map (x:) (singleton [])
subgroup (x:xs) = Set.map (x:) (groupRecBy eq xs)
groupRec :: (Ord a, Ord b) => [(a,b)] -> Set [(a,b)]
groupRec = groupRecBy ((==) `on` fst)
prop_splitNotEmpty :: [(Int, Int)] -> Bool
prop_splitNotEmpty x = [] `notMember` splitBy (((==) `on` fst)) x | shayan-najd/QuickCover | QuickCover/GroupRec.hs | bsd-3-clause | 965 | 0 | 11 | 255 | 455 | 246 | 209 | 21 | 3 |
-- !!! Testing the IO module
module Main where
import IO
-- assume we've got a Directory impl
import Directory ( removeFile )
exHandler :: (IOError -> Bool)
-> IO ()
-> IO ()
exHandler pred x = x `catch` (\ err -> if pred err then
putStrLn "got expected IO error"
else
print err)
-- should fail with isDoesNotExistError
io1 :: IO ()
io1 = exHandler isDoesNotExistError $ do
h <- openFile "some_non_existing_file" ReadMode
hClose h
return ()
io2 :: IO ()
io2 = exHandler (const False) $ do
h <- openFile "some_non_existing_file" WriteMode
hClose h
io1
removeFile "some_non_existing_file"
return ()
io3 :: IO ()
io3 = exHandler (const False) $ do
h <- openFile "some_non_existing_file" ReadWriteMode
hClose h
io1
removeFile "some_non_existing_file"
return ()
-- testing whether hGetChar returns EOF error.
io4 :: IO ()
io4 = exHandler (const False) $ do
writeFile "io4_test_file" "ab"
h <- openFile "io4_test_file" ReadMode
exHandler isEOFError (loop h)
hClose h
removeFile "io4_test_file"
where
loop h = do
x <- hGetChar h
loop h
-- repeated hCloses is no longer an error
io5 :: IO ()
io5 = exHandler (const False) $ do
h <- openFile "io5_test_file" WriteMode
hClose h
exHandler isIllegalOperation (hClose h)
removeFile "io5_test_file" `catch` (\ _ -> return ())
return ()
-- hFileSize test
io6 :: IO ()
io6 = exHandler (const False) $ do
writeFile "io6_test_file" "abcde"
h <- openFile "io6_test_file" ReadMode
sz <- hFileSize h
hClose h
putStrLn ("File size: " ++ show sz)
removeFile "io6_test_file"
return ()
-- hIsEOF test
io7 :: IO ()
io7 = exHandler (const False) $ do
writeFile "io7_test_file" "abcde"
h <- openFile "io7_test_file" ReadMode
exHandler isEOFError (loop h)
hClose h
removeFile "io7_test_file"
return ()
where
loop h = do
x <- hGetChar h
flg <- hIsEOF h
print (x,flg)
loop h
-- handle buffering
io8 :: IO ()
io8 = exHandler (const False) $ do
h <- openFile "io8_test_file" WriteMode
buf0 <- hGetBuffering h
hSetBuffering h NoBuffering
buf <- hGetBuffering h
print buf
hSetBuffering h LineBuffering
buf <- hGetBuffering h
print buf
hSetBuffering h (BlockBuffering Nothing)
buf <- hGetBuffering h
print (buf==buf0)
hSetBuffering h (BlockBuffering (Just 23))
buf <- hGetBuffering h
print buf
hClose h
removeFile "io8_test_file"
return ()
-- flushing
io9 :: IO ()
io9 = exHandler (const False) $ do
h <- openFile "io9_test_file" WriteMode
hFlush h
hPutChar h 'a'
hFlush h
hClose h
h <- openFile "io9_test_file" ReadMode
exHandler (isIllegalOperation) $ hFlush h
ch <- hGetChar h
exHandler (isIllegalOperation) $ hFlush h
hClose h
print ch
removeFile "io9_test_file"
return ()
-- getting/setting file posns.
io10 :: IO ()
io10 = exHandler (const False) $ do
writeFile "io10_test_file" "abcdefg"
h <- openFile "io10_test_file" ReadMode
hGetChar h >>= print
hGetChar h >>= print
pos <- hGetPosn h
hGetChar h >>= print
hSetPosn pos
hGetChar h >>= print
hClose h
removeFile "io10_test_file"
return ()
| FranklinChen/Hugs | tests/libs/ioTest1.hs | bsd-3-clause | 3,165 | 10 | 12 | 725 | 1,157 | 509 | 648 | 114 | 2 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE BangPatterns #-}
module Language.Clojure.Cost where
import Language.Clojure.AST
import Language.Clojure.Lang
costUsingl :: Usingl u -> Int
{-# INLINE costUsingl #-}
costUsingl (UString u) = 1
costUsingl (USep u) = 1
costUsingl (UCollTy u) = 1
costUsingl (UFormTy u) = 1
costUsingl (UTag u) = 1
costUsingl (UExpr e) = costExpr e
costUsingl (USepExprList sel) = costSepExprList sel
costUsingl (UTerm t) = 2
costExpr :: Expr -> Int
{-# INLINE costExpr #-}
costExpr (Special fty e _) = 2 + costExpr e
costExpr (Dispatch e _) = 1 + costExpr e
costExpr (Collection cty sel _) = 2 + costSepExprList sel
costExpr (Term t _) = 3
costExpr (Comment s _) = 2
costExpr (Seq e1 e2 _) = 1 + costExpr e1 + costExpr e2
costExpr (Empty _) = 1
costSepExprList :: SepExprList -> Int
{-# INLINE costSepExprList #-}
costSepExprList (Nil _) = 1
costSepExprList (Cons e sep sel _) = 2 + costExpr e + costSepExprList sel
| nazrhom/vcs-clojure | src/Language/Clojure/Cost.hs | bsd-3-clause | 938 | 0 | 7 | 169 | 371 | 188 | 183 | 28 | 1 |
{-|
Module : Data.Boltzmann.System.Tuner.Algebraic
Description : Interface utilities with the Paganini tuner.
Copyright : (c) Maciej Bendkowski, 2017-2021
License : BSD3
Maintainer : [email protected]
Stability : experimental
General utilities managing the IO interface between Boltzmann Brain
and the Paganini tuner script.
-}
module Data.Boltzmann.System.Tuner.Algebraic
( writeSpecification
, paramSystem
, toPSpec
)
where
import Control.Monad
import System.IO
import qualified Data.Map.Strict as M
import Numeric.LinearAlgebra hiding ( size )
import Data.MultiSet ( MultiSet )
import qualified Data.MultiSet as B
import qualified Data.Set as S
import Data.Maybe
import Data.Boltzmann.System
import Data.Boltzmann.Internal.Utils
import Data.Boltzmann.Internal.Tuner
-- | Writes the system specification into the given
-- file handle. In paricular, to Paganini's standard
-- input handle.
writeSpecification :: System Int -> Handle -> IO ()
writeSpecification sys hout = do
let freqs = frequencies sys
let seqs = seqTypes sys
let spec = toPSpec sys
-- # of equations and frequencies
writeListLn hout [numTypes spec + numSeqTypes spec, length freqs]
-- vector of frequencies
writeListLn hout freqs
-- type specifications
let find' x = x `S.findIndex` M.keysSet (defs sys)
foldM_ (typeSpecification hout find' spec) 0 (M.elems $ defs sys)
-- sequence specifications
mapM_ (seqSpecification hout find' spec) seqs
frequencies :: System Int -> [Int]
frequencies sys = concatMap (mapMaybe frequency) ((M.elems . defs) sys)
toPSpec :: System Int -> PSpec
toPSpec sys = PSpec { numFreqs = d, numTypes = ts, numSeqTypes = ss }
where
ts = size sys
d = length $ frequencies sys
ss = length $ seqTypes sys
typeSpecification
:: Handle -> (String -> Int) -> PSpec -> Int -> [Cons Int] -> IO Int
typeSpecification hout find' spec idx cons = do
let n = length cons
hPrint hout n -- # of constructors
foldM (consSpecification hout find' spec) idx cons
consSpecification
:: Handle -> (String -> Int) -> PSpec -> Int -> Cons Int -> IO Int
consSpecification hout find' spec idx cons = do
let (vec, idx') = sparseConsVec find' spec idx cons
writeListLn hout vec -- constructor specification
return idx'
occurrences :: Cons a -> (MultiSet String, MultiSet String)
occurrences cons = occurrences' (B.empty, B.empty) $ args cons
occurrences'
:: (MultiSet String, MultiSet String)
-> [Arg]
-> (MultiSet String, MultiSet String)
occurrences' (ts, sts) [] = (ts, sts)
occurrences' (ts, sts) (Type s : xs) = occurrences' (s `B.insert` ts, sts) xs
occurrences' (ts, sts) (List s : xs) = occurrences' (ts, s `B.insert` sts) xs
-- | Prepends a pair to a list if its
-- first component is positive.
sparsePrepend :: (Num a, Ord a) => (a, b) -> [(a, b)] -> [(a, b)]
sparsePrepend x xs | fst x > 0 = x : xs
| otherwise = xs
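-- e.g. sparsePrepend (2, 7) [] == [(2, 7)], while sparsePrepend (0, 7) [] == []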
prependWeight :: (Num a, Ord a, Num b) => Cons a -> [(a, b)] -> [(a, b)]
prependWeight cons = sparsePrepend (weight cons, 0)
prependFreq :: (Num a, Ord a, Num b) => Cons a -> b -> [(a, b)] -> [(a, b)]
prependFreq cons idx xs
  | isJust (frequency cons) = sparsePrepend (weight cons, 1 + idx) xs -- note: 1 + idx offsets for z
  | otherwise = xs
sparseTypeVec :: Num a => (b -> a) -> a -> [(b, c)] -> [(c, a)]
sparseTypeVec _ _ [] = []
sparseTypeVec find' offset ((t, n) : xs) =
(n, offset + find' t) : sparseTypeVec find' offset xs
sparseConsVec
:: (String -> Int) -> PSpec -> Int -> Cons Int -> ([(Int, Int)], Int)
sparseConsVec find' spec idx cons =
let (tocc, socc) = occurrences cons
us = numFreqs spec
-- sparse type representation
tv = sparseTypeVec find' (1 + us) (B.toOccurList tocc)
ts = numTypes spec
-- sparse sequence representation
sv = sparseTypeVec find' (1 + us + ts) (B.toOccurList socc)
-- prepend weight and frequency
xs = prependWeight cons (prependFreq cons idx $ tv ++ sv)
in case frequency cons of
Just _ -> (xs, idx + 1)
Nothing -> (xs, idx)
seqSpecification :: Handle -> (String -> Int) -> PSpec -> String -> IO ()
seqSpecification hout find' spec st = do
hPrint hout (2 :: Int) -- # of constructors
writeListLn hout ([] :: [Int]) -- empty sequence constructor
let offset = 1 + numFreqs spec
writeListLn
hout
[ (1, offset + find' st) :: (Int, Int)
, (1, offset + numTypes spec + find' st)
] -- cons constructor
evalExp
:: System Int
-> Double
-> Vector Double
-> [Double]
-> Cons Int
-> (Double, [Double])
evalExp sys rho ts us exp' =
let w = weight exp'
xs = args exp'
exp'' = (rho ^^ w) * product (map (evalA sys ts) xs)
in case frequency exp' of
Nothing -> (exp'', us)
Just _ -> (head us ^^ w * exp'', tail us)
computeExp
:: System Int
-> Double
-> Vector Double
-> [Double]
-> Double
-> Double
-> [Cons Int]
-> ([Cons Double], [Double])
computeExp _ _ _ us _ _ [] = ([], us)
computeExp sys rho ts us tw _ (e : es) = (e { weight = w' / tw } : es', us'')
where
(es', us'') = computeExp sys rho ts us' tw 0 es
(w' , us' ) = evalExp sys rho ts us e
computeProb
:: System Int
-> Double
-> Vector Double
-> [Double]
-> [(String, [Cons Int])]
-> [(String, [Cons Double])]
computeProb _ _ _ _ [] = []
computeProb sys rho ts us ((t, cons) : tys) = (t, cons') : tys'
where
(cons', us') = computeExp sys rho ts us (value t sys ts) 0 cons
tys' = computeProb sys rho ts us' tys
paramSystem
:: System Int -> Double -> Vector Double -> [Double] -> PSystem Double
paramSystem sys rho ts us = sys'
where
types' = computeProb sys rho ts us (M.toList $ defs sys)
sys' = PSystem { system = sys { defs = M.fromList types' }
, values = ts
, param = rho
, weights = sys
}
| maciej-bendkowski/boltzmann-brain | Data/Boltzmann/System/Tuner/Algebraic.hs | bsd-3-clause | 6,181 | 0 | 14 | 1,694 | 2,243 | 1,184 | 1,059 | 134 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
--------------------------------------------------------------------------
-- |
-- Module : Data.Cretheus
-- Copyright : (C) 2014 Zalora SEA
-- License : BSD3
-- Maintainer : Julian K. Arni <[email protected]>
-- Stability : experimental
--
--
-- Drop in replacement for 'aeson', with a little extra functionality. In
-- particular, 'cretheus' allows you to get JSON schemas from your FromJSON
-- instances [0].
--
-- Depends on, rather than forks, the 'aeson' library, and either
-- re-exports aeson's functions directly, or proxies them under a thin
-- layer of transformations. You can therefore pick a version of aeson by
-- just adding it to your cabal file, and not have to wait for bug-fixes
-- upstream to land here.
--
-- *How it works:*
--
-- @cretheus@ has its own, reified version of aesons's @Parser@ type. Thus,
-- when you define a @FromJSON@ instance using the appropriate
-- combinators, @cretheus@ can apply @parseJSON@ to dummy values with each
-- of the possible outermost constructors, collect the results, and
-- generate schemas from those.
--
-- [0] Modulo some restrictions. See 'mkSchema'. These restrictions
-- influence whether a JSON schema can be created, but not whether the
-- aeson-functionality still works.
--------------------------------------------------------------------------
module Data.Cretheus where
import Control.Monad
import Control.Monad.Operational
import Control.Applicative
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as A
import qualified Data.ByteString.Lazy as BS
import Data.Data
import Data.Foldable
import Data.Function (on)
import Data.Monoid
import Data.Text hiding (singleton)
import Data.Typeable
import Data.Scientific
import Debug.Trace
-- Schema
data Schema' where
Required :: Text -> TypeRep -> Schema'
Optional :: Text -> TypeRep -> Schema'
Default :: Text -> String -> TypeRep -> Schema'
And :: Schema' -> Schema' -> Schema'
Or :: Schema' -> Schema' -> Schema'
Nil :: Schema' -- Leaf that doesn't parse
Empty :: Schema' -- Leaf that parses with no further requirements
deriving Show
instance A.ToJSON Schema' where
toJSON x = A.toJSON $ addToSchema (SchemaH [] []) x
data SchemaH = SchemaH [(Text, A.Value)] [Text]
instance A.ToJSON SchemaH where
toJSON (SchemaH xs reqs) =
A.object [ "type" A..= ("object"::String)
, "properties" A..= A.object xs
, "required" A..= reqs
]
addToSchema :: SchemaH -> Schema' -> SchemaH
addToSchema (SchemaH xs reqs) (Required field trep) =
SchemaH ((field, A.object ["type" A..= show trep]):xs) (field:reqs)
addToSchema (SchemaH xs reqs) (Optional field trep) =
SchemaH ((field, A.object ["type" A..= show trep]):xs) reqs
addToSchema (SchemaH xs reqs) (Default field def trep) =
SchemaH ((field, A.object ["type" A..= show trep, "default" A..= def]):xs) reqs
addToSchema sch (And s1 s2) = addToSchema (addToSchema sch s2) s1
addToSchema sch Empty = sch
addToSchema sch Nil = sch
-- | Make a schema from a reified parser. Assumes that only the
-- @cretheus@-provided combinators (and instance methods) are used on the
-- inner-value of 'parseJSON''s first argument (the 'Value').
mkSchema :: forall a. Typeable a => Parser a -> Schema'
mkSchema (_ :.: txt) = Required txt $ typeOf (undefined::a)
mkSchema (_ :.:? txt) = Optional txt $ typeOf (undefined::a)
mkSchema (a :.!= def) = case mkSchemaM a of
Optional txt typ -> Default txt (show def) typ
Required txt typ -> Default txt (show def) typ
Optional txt typ `And` x -> Default txt (show def) typ `And` x
z -> trace ("Here:\t" ++ show z ++ "\n") z
mkSchema Mzero = Nil
{- mkSchema (Mplus a b) = mkSchema a `Or` mkSchema b -}
mkSchema (WithText _ f _) = mkSchemaM (f undefined)
mkSchemaM :: Typeable a => ParserM a -> Schema'
mkSchemaM = eval . view
where
-- We need to pattern match on the constructors to bring the
-- typeable constraint into scope
eval :: Typeable a => ProgramView Parser a -> Schema'
eval (x@(v :.: t) :>>= f) = mkSchema x `And` mkSchemaM (f undefined)
eval (x@(v :.:? t) :>>= f) = mkSchema x `And` mkSchemaM (f undefined)
eval (x@(v :.!= t) :>>= f) = mkSchema x `And` mkSchemaM (f undefined)
eval (x@Mzero :>>= f) = mkSchema x `And` mkSchemaM (f undefined)
eval (x@(WithText{}) :>>= f) = mkSchema x `And` mkSchemaM (f undefined)
eval (Return x) = Empty
-- Compat
(.:) :: (Typeable a, FromJSON a) => A.Object -> Text -> ParserM a
x .: t = singleton $ x :.: t
(.:?) :: (Typeable b, FromJSON b) => A.Object -> Text -> ParserM (Maybe b)
x .:? t = singleton $ x :.:? t
(.!=) :: (Typeable a, Show a) => ParserM (Maybe a) -> a -> ParserM a
p .!= t = singleton $ p :.!= t
withText :: (Typeable a) => String -> (Text -> ParserM a) -> Value -> ParserM a
withText s f v = singleton $ WithText s f v
-- Non-strict version of 'A.Value'. We need this so we can call 'parseJSON'
-- with undefined.
data Value = Object A.Object
| Array A.Array
| String Text
| Number Scientific
| Bool Bool
| Null
deriving (Eq, Show, Typeable, Data)
interpretV :: Value -> A.Value
interpretV (Object !v) = A.Object v
interpretV (Array !v) = A.Array v
interpretV (String !v) = A.String v
interpretV (Number !v) = A.Number v
interpretV (Bool !v) = A.Bool v
interpretV Null = A.Null
interpretV' :: A.Value -> Value
interpretV' (A.Object v) = Object v
interpretV' (A.Array v) = Array v
interpretV' (A.String v) = String v
interpretV' (A.Number v) = Number v
interpretV' (A.Bool v) = Bool v
interpretV' A.Null = Null
class FromJSON a where
parseJSON :: Value -> ParserM a
instance FromJSON a => A.FromJSON a where
parseJSON = interpretM . parseJSON . interpretV'
-- It'll be important to keep track of which fields may be evaluated, and
-- which may not. In essence, we want all arguments that may be coming
-- from inside the outer constructor of the `Value` passed to `parseJSON`
-- to not be used - e.g., the first argument of `(:.:)`.
data Parser a where
(:.:) :: (Typeable a, FromJSON a) => A.Object -> Text -> Parser a
(:.:?) :: (Typeable b, FromJSON b) => A.Object -> Text -> Parser (Maybe b)
(:.!=) :: (Typeable a, Show a) => ParserM (Maybe a) -> a -> Parser a
Mzero :: Typeable a => Parser a
Mplus :: ParserM a -> ParserM a -> Parser a
WithText :: Typeable a => String -> (Text -> ParserM a) -> Value -> Parser a
type ParserM a = Program Parser a
interpretP :: Parser a -> A.Parser a
interpretP (v :.: t) = v A..: t
interpretP (v :.:? t) = v A..:? t
interpretP (v :.!= t) = interpretM v A..!= t
interpretP Mzero = mzero
interpretP (WithText s f v) = A.withText s (interpretM . f) (interpretV v)
interpretP (Mplus a b) = interpretM a `mplus` interpretM b -- cover the remaining constructor via aeson's MonadPlus
-- | Convert a 'ParserM' to an aeson 'A.Parser'.
-- Currently this is used to interface with all 'aeson' functions, but
-- that suffers from a (probably avoidable) performance penalty.
interpretM :: ParserM a -> A.Parser a
interpretM = interpretWithMonad interpretP
data Tree f a = Node a
| Tree (f (Tree f a))
deriving (Foldable, Functor)
-- | Pass values with all possible outermost constructors to 'parseJSON',
-- and collect the results.
--
fillOut :: FromJSON a => a -> Tree [] (ParserM a)
fillOut _ = Tree
[ Node $ parseJSON (Array u)
, Node $ parseJSON (Object u)
, Node $ parseJSON (String u)
, Node $ parseJSON (Number u)
, Node $ parseJSON (Bool u)
, Node $ parseJSON Null
]
where u = error "I wasn't meant to be evaluated!"
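-- Mapping 'mkSchemaM' over the collected parsers (as done for 't1'' below)
-- yields one candidate schema per outermost 'Value' constructor.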
----Test------------------------------------------------------------------
instance FromJSON String where
parseJSON = withText "String" (pure . unpack)
instance FromJSON a => FromJSON (Maybe a) where
parseJSON Null = pure Nothing
parseJSON a = Just <$> parseJSON a
data Test1 = Test1 { field1 :: String
, field2 :: String
} deriving (Show, Typeable)
-- 'A.decode' still works...
t1 :: Maybe Test1
t1 = A.decode "{\"field1\": \"Field 1\", \"field2\": \"Field 2\"}"
t1' :: BS.ByteString
t1' = (A.encode . mkSchemaM <$> (toList $ fillOut undefined::[ParserM Test1])) !! 1
instance FromJSON Test1 where
parseJSON (Object v) = do
f1 <- (++ "a suffix") <$> v .: "field1"
f2 <- (v .:? "field2") .!= "default-string"
let f1' = "a prefix" ++ f1
return (Test1 f1' f2)
parseJSON _ = singleton Mzero
| haskell-servant/cretheus | Data/Cretheus.hs | bsd-3-clause | 8,995 | 0 | 12 | 2,088 | 2,568 | 1,356 | 1,212 | 151 | 6 |
import System.Environment (getArgs)
import Data.List.Split (splitOn)
import Data.List (intercalate)
revgrou :: [Int] -> Int -> [Int]
revgrou xs y | length xs < y = xs
| otherwise = reverse (take y xs) ++ revgrou (drop y xs) y
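-- e.g. revgrou [1,2,3,4,5] 2 == [2,1,4,3,5]  (a trailing group shorter than
-- the chunk size is left unreversed)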
revgroup :: [String] -> [Int]
revgroup [xs, ys] = revgrou (map read $ splitOn "," xs) (read ys)
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (intercalate "," . map show . revgroup . splitOn ";") $ lines input
| nikai3d/ce-challenges | moderate/reverse_groups.hs | bsd-3-clause | 528 | 0 | 14 | 133 | 246 | 123 | 123 | 13 | 1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1998
This module contains definitions for the IdInfo for things that
have a standard form, namely:
- data constructors
- record selectors
- method and superclass selectors
- primitive operations
-}
{-# LANGUAGE CPP #-}
module MkId (
mkDictFunId, mkDictFunTy, mkDictSelId, mkDictSelRhs,
mkPrimOpId, mkFCallId,
wrapNewTypeBody, unwrapNewTypeBody,
wrapFamInstBody, unwrapFamInstScrut,
wrapTypeFamInstBody, wrapTypeUnbranchedFamInstBody, unwrapTypeFamInstScrut,
unwrapTypeUnbranchedFamInstScrut,
DataConBoxer(..), mkDataConRep, mkDataConWorkId,
-- And some particular Ids; see below for why they are wired in
wiredInIds, ghcPrimIds,
unsafeCoerceName, unsafeCoerceId, realWorldPrimId,
voidPrimId, voidArgId,
nullAddrId, seqId, lazyId, lazyIdKey,
coercionTokenId, magicDictId, coerceId,
-- Re-export error Ids
module PrelRules
) where
#include "HsVersions.h"
import Rules
import TysPrim
import TysWiredIn
import PrelRules
import Type
import FamInstEnv
import Coercion
import TcType
import MkCore
import CoreUtils ( exprType, mkCast )
import CoreUnfold
import Literal
import TyCon
import CoAxiom
import Class
import NameSet
import VarSet
import Name
import PrimOp
import ForeignCall
import DataCon
import Id
import IdInfo
import Demand
import CoreSyn
import Unique
import UniqSupply
import PrelNames
import BasicTypes hiding ( SuccessFlag(..) )
import Util
import Pair
import DynFlags
import Outputable
import FastString
import ListSetOps
import Data.Maybe ( maybeToList )
{-
************************************************************************
* *
\subsection{Wired in Ids}
* *
************************************************************************
Note [Wired-in Ids]
~~~~~~~~~~~~~~~~~~~
There are several reasons why an Id might appear in the wiredInIds:
(1) The ghcPrimIds are wired in because they can't be defined in
    Haskell at all, although they can be defined in Core. They have
compulsory unfoldings, so they are always inlined and they have
no definition site. Their home module is GHC.Prim, so they
also have a description in primops.txt.pp, where they are called
'pseudoops'.
(2) The 'error' function, eRROR_ID, is wired in because we don't yet have
a way to express in an interface file that the result type variable
is 'open'; that is can be unified with an unboxed type
    [The interface file format now carries such information, but there's
no way yet of expressing at the definition site for these
error-reporting functions that they have an 'open'
result type. -- sof 1/99]
(3) Other error functions (rUNTIME_ERROR_ID) are wired in (a) because
    the desugarer generates code that mentions them directly, and
(b) for the same reason as eRROR_ID
(4) lazyId is wired in because the wired-in version overrides the
strictness of the version defined in GHC.Base
In cases (2-4), the function has a definition in a library module, and
can be called; but the wired-in version means that the details are
never read from that module's interface file; instead, the full definition
is right here.
-}
wiredInIds :: [Id]
wiredInIds
= [lazyId, dollarId, oneShotId]
++ errorIds -- Defined in MkCore
++ ghcPrimIds
-- These Ids are exported from GHC.Prim
ghcPrimIds :: [Id]
ghcPrimIds
= [ -- These can't be defined in Haskell, but they have
-- perfectly reasonable unfoldings in Core
realWorldPrimId,
voidPrimId,
unsafeCoerceId,
nullAddrId,
seqId,
magicDictId,
coerceId,
proxyHashId
]
{-
************************************************************************
* *
\subsection{Data constructors}
* *
************************************************************************
The wrapper for a constructor is an ordinary top-level binding that evaluates
any strict args, unboxes any args that are going to be flattened, and calls
the worker.
We're going to build a constructor that looks like:
data (Data a, C b) => T a b = T1 !a !Int b
T1 = /\ a b ->
\d1::Data a, d2::C b ->
\p q r -> case p of { p ->
case q of { q ->
Con T1 [a,b] [p,q,r]}}
Notice that
* d2 is thrown away --- a context in a data decl is used to make sure
one *could* construct dictionaries at the site the constructor
is used, but the dictionary isn't actually used.
* We have to check that we can construct Data dictionaries for
the types a and Int. Once we've done that we can throw d1 away too.
* We use (case p of q -> ...) to evaluate p, rather than "seq" because
all that matters is that the arguments are evaluated. "seq" is
very careful to preserve evaluation order, which we don't need
to be here.
You might think that we could simply give constructors some strictness
info, like PrimOps, and let CoreToStg do the let-to-case transformation.
But we don't do that because in the case of primops and functions strictness
is a *property* not a *requirement*. In the case of constructors we need to
do something active to evaluate the argument.
Making an explicit case expression allows the simplifier to eliminate
it in the (common) case where the constructor arg is already evaluated.
Note [Wrappers for data instance tycons]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the case of data instances, the wrapper also applies the coercion turning
the representation type into the family instance type to cast the result of
the wrapper. For example, consider the declarations
data family Map k :: * -> *
data instance Map (a, b) v = MapPair (Map a (Pair b v))
The tycon to which the datacon MapPair belongs gets a unique internal
name of the form :R123Map, and we call it the representation tycon.
In contrast, Map is the family tycon (accessible via
tyConFamInst_maybe). A coercion allows you to move between
representation and family type. It is accessible from :R123Map via
tyConFamilyCoercion_maybe and has kind
Co123Map a b v :: {Map (a, b) v ~ :R123Map a b v}
The wrapper and worker of MapPair get the types
-- Wrapper
  $WMapPair :: forall a b v. Map a (Pair b v) -> Map (a, b) v
$WMapPair a b v = MapPair a b v `cast` sym (Co123Map a b v)
-- Worker
  MapPair :: forall a b v. Map a (Pair b v) -> :R123Map a b v
This coercion is conditionally applied by wrapFamInstBody.
It's a bit more complicated if the data instance is a GADT as well!
data instance T [a] where
T1 :: forall b. b -> T [Maybe b]
Hence we translate to
-- Wrapper
$WT1 :: forall b. b -> T [Maybe b]
$WT1 b v = T1 (Maybe b) b (Maybe b) v
`cast` sym (Co7T (Maybe b))
-- Worker
T1 :: forall c b. (c ~ Maybe b) => b -> :R7T c
-- Coercion from family type to representation type
Co7T a :: T [a] ~ :R7T a
Note [Newtype datacons]
~~~~~~~~~~~~~~~~~~~~~~~
The "data constructor" for a newtype should always be vanilla. At one
point this wasn't true, because the newtype arising from
class C a => D a
looked like
newtype T:D a = D:D (C a)
so the data constructor for T:C had a single argument, namely the
predicate (C a). But now we treat that as an ordinary argument, not
part of the theta-type, so all is well.
************************************************************************
* *
\subsection{Dictionary selectors}
* *
************************************************************************
Selecting a field for a dictionary. If there is just one field, then
there's nothing to do.
Dictionary selectors may get nested forall-types. Thus:
class Foo a where
op :: forall b. Ord b => a -> b -> b
Then the top-level type for op is
op :: forall a. Foo a =>
forall b. Ord b =>
a -> b -> b
This is unlike ordinary record selectors, which have all the for-alls
at the outside. When dealing with classes it's very convenient to
recover the original type signature from the class op selector.
-}
mkDictSelId :: Name -- Name of one of the *value* selectors
-- (dictionary superclass or method)
-> Class -> Id
mkDictSelId name clas
= mkGlobalId (ClassOpId clas) name sel_ty info
where
tycon = classTyCon clas
sel_names = map idName (classAllSelIds clas)
new_tycon = isNewTyCon tycon
[data_con] = tyConDataCons tycon
tyvars = dataConUnivTyVars data_con
arg_tys = dataConRepArgTys data_con -- Includes the dictionary superclasses
val_index = assoc "MkId.mkDictSelId" (sel_names `zip` [0..]) name
sel_ty = mkForAllTys tyvars (mkFunTy (mkClassPred clas (mkTyVarTys tyvars))
(getNth arg_tys val_index))
base_info = noCafIdInfo
`setArityInfo` 1
`setStrictnessInfo` strict_sig
info | new_tycon
= base_info `setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` mkInlineUnfolding (Just 1) (mkDictSelRhs clas val_index)
-- See Note [Single-method classes] in TcInstDcls
-- for why alwaysInlinePragma
| otherwise
= base_info `setSpecInfo` mkSpecInfo [rule]
-- Add a magic BuiltinRule, but no unfolding
-- so that the rule is always available to fire.
-- See Note [ClassOp/DFun selection] in TcInstDcls
n_ty_args = length tyvars
-- This is the built-in rule that goes
-- op (dfT d1 d2) ---> opT d1 d2
rule = BuiltinRule { ru_name = fsLit "Class op " `appendFS`
occNameFS (getOccName name)
, ru_fn = name
, ru_nargs = n_ty_args + 1
, ru_try = dictSelRule val_index n_ty_args }
-- The strictness signature is of the form U(AAAVAAAA) -> T
-- where the V depends on which item we are selecting
-- It's worth giving one, so that absence info etc is generated
-- even if the selector isn't inlined
strict_sig = mkClosedStrictSig [arg_dmd] topRes
arg_dmd | new_tycon = evalDmd
| otherwise = mkManyUsedDmd $
mkProdDmd [ if name == sel_name then evalDmd else absDmd
| sel_name <- sel_names ]
mkDictSelRhs :: Class
-> Int -- 0-indexed selector among (superclasses ++ methods)
-> CoreExpr
mkDictSelRhs clas val_index
= mkLams tyvars (Lam dict_id rhs_body)
where
tycon = classTyCon clas
new_tycon = isNewTyCon tycon
[data_con] = tyConDataCons tycon
tyvars = dataConUnivTyVars data_con
arg_tys = dataConRepArgTys data_con -- Includes the dictionary superclasses
the_arg_id = getNth arg_ids val_index
pred = mkClassPred clas (mkTyVarTys tyvars)
dict_id = mkTemplateLocal 1 pred
arg_ids = mkTemplateLocalsNum 2 arg_tys
rhs_body | new_tycon = unwrapNewTypeBody tycon (map mkTyVarTy tyvars) (Var dict_id)
| otherwise = Case (Var dict_id) dict_id (idType the_arg_id)
[(DataAlt data_con, arg_ids, varToCoreExpr the_arg_id)]
-- varToCoreExpr needed for equality superclass selectors
-- sel a b d = case x of { MkC _ (g:a~b) _ -> CO g }
dictSelRule :: Int -> Arity -> RuleFun
-- Tries to persuade the argument to look like a constructor
-- application, using exprIsConApp_maybe, and then selects
-- from it
-- sel_i t1..tk (D t1..tk op1 ... opm) = opi
--
dictSelRule val_index n_ty_args _ id_unf _ args
| (dict_arg : _) <- drop n_ty_args args
, Just (_, _, con_args) <- exprIsConApp_maybe id_unf dict_arg
= Just (getNth con_args val_index)
| otherwise
= Nothing
{-
************************************************************************
* *
Data constructors
* *
************************************************************************
-}
mkDataConWorkId :: Name -> DataCon -> Id
mkDataConWorkId wkr_name data_con
| isNewTyCon tycon
= mkGlobalId (DataConWrapId data_con) wkr_name nt_wrap_ty nt_work_info
| otherwise
= mkGlobalId (DataConWorkId data_con) wkr_name alg_wkr_ty wkr_info
where
tycon = dataConTyCon data_con
----------- Workers for data types --------------
alg_wkr_ty = dataConRepType data_con
wkr_arity = dataConRepArity data_con
wkr_info = noCafIdInfo
`setArityInfo` wkr_arity
`setStrictnessInfo` wkr_sig
`setUnfoldingInfo` evaldUnfolding -- Record that it's evaluated,
-- even if arity = 0
wkr_sig = mkClosedStrictSig (replicate wkr_arity topDmd) (dataConCPR data_con)
-- Note [Data-con worker strictness]
-- Notice that we do *not* say the worker is strict
-- even if the data constructor is declared strict
-- e.g. data T = MkT !(Int,Int)
-- Why? Because the *wrapper* is strict (and its unfolding has case
-- expressions that do the evals) but the *worker* itself is not.
-- If we pretend it is strict then when we see
-- case x of y -> $wMkT y
-- the simplifier thinks that y is "sure to be evaluated" (because
-- $wMkT is strict) and drops the case. No, $wMkT is not strict.
--
-- When the simplifier sees a pattern
-- case e of MkT x -> ...
-- it uses the dataConRepStrictness of MkT to mark x as evaluated;
-- but that's fine... dataConRepStrictness comes from the data con
-- not from the worker Id.
----------- Workers for newtypes --------------
(nt_tvs, _, nt_arg_tys, _) = dataConSig data_con
res_ty_args = mkTyVarTys nt_tvs
nt_wrap_ty = dataConUserType data_con
nt_work_info = noCafIdInfo -- The NoCaf-ness is set by noCafIdInfo
`setArityInfo` 1 -- Arity 1
`setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` newtype_unf
id_arg1 = mkTemplateLocal 1 (head nt_arg_tys)
newtype_unf = ASSERT2( isVanillaDataCon data_con &&
isSingleton nt_arg_tys, ppr data_con )
-- Note [Newtype datacons]
mkCompulsoryUnfolding $
mkLams nt_tvs $ Lam id_arg1 $
wrapNewTypeBody tycon res_ty_args (Var id_arg1)
dataConCPR :: DataCon -> DmdResult
dataConCPR con
| isDataTyCon tycon -- Real data types only; that is,
-- not unboxed tuples or newtypes
, isVanillaDataCon con -- No existentials
, wkr_arity > 0
, wkr_arity <= mAX_CPR_SIZE
= if is_prod then vanillaCprProdRes (dataConRepArity con)
else cprSumRes (dataConTag con)
| otherwise
= topRes
where
is_prod = isProductTyCon tycon
tycon = dataConTyCon con
wkr_arity = dataConRepArity con
mAX_CPR_SIZE :: Arity
mAX_CPR_SIZE = 10
-- We do not treat very big tuples as CPR-ish:
-- a) for a start we get into trouble because there aren't
-- "enough" unboxed tuple types (a tiresome restriction,
-- but hard to fix),
-- b) more importantly, big unboxed tuples get returned mainly
-- on the stack, and are often then allocated in the heap
-- by the caller. So doing CPR for them may in fact make
-- things worse.
{-
-------------------------------------------------
-- Data constructor representation
--
-- This is where we decide how to wrap/unwrap the
-- constructor fields
--
--------------------------------------------------
-}
type Unboxer = Var -> UniqSM ([Var], CoreExpr -> CoreExpr)
-- Unbox: bind rep vars by decomposing src var
data Boxer = UnitBox | Boxer (TvSubst -> UniqSM ([Var], CoreExpr))
-- Box: build src arg using these rep vars
newtype DataConBoxer = DCB ([Type] -> [Var] -> UniqSM ([Var], [CoreBind]))
-- Bind these src-level vars, returning the
-- rep-level vars to bind in the pattern
mkDataConRep :: DynFlags -> FamInstEnvs -> Name -> DataCon -> UniqSM DataConRep
mkDataConRep dflags fam_envs wrap_name data_con
| not wrapper_reqd
= return NoDataConRep
| otherwise
= do { wrap_args <- mapM newLocal wrap_arg_tys
; wrap_body <- mk_rep_app (wrap_args `zip` dropList eq_spec unboxers)
initial_wrap_app
; let wrap_id = mkGlobalId (DataConWrapId data_con) wrap_name wrap_ty wrap_info
wrap_info = noCafIdInfo
`setArityInfo` wrap_arity
-- It's important to specify the arity, so that partial
-- applications are treated as values
`setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` wrap_unf
`setStrictnessInfo` wrap_sig
-- We need to get the CAF info right here because TidyPgm
-- does not tidy the IdInfo of implicit bindings (like the wrapper)
-- so it will not make sure that the CAF info is sane
wrap_sig = mkClosedStrictSig wrap_arg_dmds (dataConCPR data_con)
wrap_arg_dmds = map mk_dmd (dropList eq_spec wrap_bangs)
mk_dmd str | isBanged str = evalDmd
| otherwise = topDmd
-- The Cpr info can be important inside INLINE rhss, where the
-- wrapper constructor isn't inlined.
-- And the argument strictness can be important too; we
-- may not inline a constructor when it is partially applied.
-- For example:
-- data W = C !Int !Int !Int
-- ...(let w = C x in ...(w p q)...)...
-- we want to see that w is strict in its two arguments
wrap_unf = mkInlineUnfolding (Just wrap_arity) wrap_rhs
wrap_tvs = (univ_tvs `minusList` map fst eq_spec) ++ ex_tvs
wrap_rhs = mkLams wrap_tvs $
mkLams wrap_args $
wrapFamInstBody tycon res_ty_args $
wrap_body
; return (DCR { dcr_wrap_id = wrap_id
, dcr_boxer = mk_boxer boxers
, dcr_arg_tys = rep_tys
, dcr_stricts = rep_strs
, dcr_bangs = dropList ev_tys wrap_bangs }) }
where
(univ_tvs, ex_tvs, eq_spec, theta, orig_arg_tys, _) = dataConFullSig data_con
res_ty_args = substTyVars (mkTopTvSubst eq_spec) univ_tvs
tycon = dataConTyCon data_con -- The representation TyCon (not family)
wrap_ty = dataConUserType data_con
ev_tys = eqSpecPreds eq_spec ++ theta
all_arg_tys = ev_tys ++ orig_arg_tys
orig_bangs = map mk_pred_strict_mark ev_tys ++ dataConStrictMarks data_con
wrap_arg_tys = theta ++ orig_arg_tys
wrap_arity = length wrap_arg_tys
-- The wrap_args are the arguments *other than* the eq_spec
-- Because we are going to apply the eq_spec args manually in the
-- wrapper
(wrap_bangs, rep_tys_w_strs, wrappers)
= unzip3 (zipWith (dataConArgRep dflags fam_envs) all_arg_tys orig_bangs)
(unboxers, boxers) = unzip wrappers
(rep_tys, rep_strs) = unzip (concat rep_tys_w_strs)
wrapper_reqd = not (isNewTyCon tycon) -- Newtypes have only a worker
&& (any isBanged orig_bangs -- Some forcing/unboxing
-- (includes eq_spec)
|| isFamInstTyCon tycon) -- Cast result
initial_wrap_app = Var (dataConWorkId data_con)
`mkTyApps` res_ty_args
`mkVarApps` ex_tvs
`mkCoApps` map (mkReflCo Nominal . snd) eq_spec
-- Dont box the eq_spec coercions since they are
-- marked as HsUnpack by mk_dict_strict_mark
mk_boxer :: [Boxer] -> DataConBoxer
mk_boxer boxers = DCB (\ ty_args src_vars ->
do { let ex_vars = takeList ex_tvs src_vars
subst1 = mkTopTvSubst (univ_tvs `zip` ty_args)
subst2 = extendTvSubstList subst1 ex_tvs
(mkTyVarTys ex_vars)
; (rep_ids, binds) <- go subst2 boxers (dropList ex_tvs src_vars)
; return (ex_vars ++ rep_ids, binds) } )
go _ [] src_vars = ASSERT2( null src_vars, ppr data_con ) return ([], [])
go subst (UnitBox : boxers) (src_var : src_vars)
= do { (rep_ids2, binds) <- go subst boxers src_vars
; return (src_var : rep_ids2, binds) }
go subst (Boxer boxer : boxers) (src_var : src_vars)
= do { (rep_ids1, arg) <- boxer subst
; (rep_ids2, binds) <- go subst boxers src_vars
; return (rep_ids1 ++ rep_ids2, NonRec src_var arg : binds) }
go _ (_:_) [] = pprPanic "mk_boxer" (ppr data_con)
mk_rep_app :: [(Id,Unboxer)] -> CoreExpr -> UniqSM CoreExpr
mk_rep_app [] con_app
= return con_app
mk_rep_app ((wrap_arg, unboxer) : prs) con_app
= do { (rep_ids, unbox_fn) <- unboxer wrap_arg
; expr <- mk_rep_app prs (mkVarApps con_app rep_ids)
; return (unbox_fn expr) }
-------------------------
newLocal :: Type -> UniqSM Var
newLocal ty = do { uniq <- getUniqueM
; return (mkSysLocal (fsLit "dt") uniq ty) }
-------------------------
dataConArgRep
:: DynFlags
-> FamInstEnvs
-> Type -> HsBang
-> ( HsBang -- Like input but with HsUnpackFailed if necessary
, [(Type, StrictnessMark)] -- Rep types
, (Unboxer, Boxer) )
dataConArgRep _ _ arg_ty HsNoBang
= (HsNoBang, [(arg_ty, NotMarkedStrict)], (unitUnboxer, unitBoxer))
dataConArgRep _ _ arg_ty (HsUserBang _ False) -- No '!'
= (HsNoBang, [(arg_ty, NotMarkedStrict)], (unitUnboxer, unitBoxer))
dataConArgRep dflags fam_envs arg_ty
(HsUserBang unpk_prag True) -- {-# UNPACK #-} !
| not (gopt Opt_OmitInterfacePragmas dflags) -- Don't unpack if -fomit-iface-pragmas
-- Don't unpack if we aren't optimising; rather arbitrarily,
-- we use -fomit-iface-pragmas as the indication
, let mb_co = topNormaliseType_maybe fam_envs arg_ty
-- Unwrap type families and newtypes
arg_ty' = case mb_co of { Just (_,ty) -> ty; Nothing -> arg_ty }
, isUnpackableType fam_envs arg_ty'
, (rep_tys, wrappers) <- dataConArgUnpack arg_ty'
, case unpk_prag of
Nothing -> gopt Opt_UnboxStrictFields dflags
|| (gopt Opt_UnboxSmallStrictFields dflags
&& length rep_tys <= 1) -- See Note [Unpack one-wide fields]
Just unpack_me -> unpack_me
= case mb_co of
Nothing -> (HsUnpack Nothing, rep_tys, wrappers)
Just (co,rep_ty) -> (HsUnpack (Just co), rep_tys, wrapCo co rep_ty wrappers)
| otherwise -- Record the strict-but-no-unpack decision
= strict_but_not_unpacked arg_ty
dataConArgRep _ _ arg_ty HsStrict
= strict_but_not_unpacked arg_ty
dataConArgRep _ _ arg_ty (HsUnpack Nothing)
| (rep_tys, wrappers) <- dataConArgUnpack arg_ty
= (HsUnpack Nothing, rep_tys, wrappers)
dataConArgRep _ _ _ (HsUnpack (Just co))
| let co_rep_ty = pSnd (coercionKind co)
, (rep_tys, wrappers) <- dataConArgUnpack co_rep_ty
= (HsUnpack (Just co), rep_tys, wrapCo co co_rep_ty wrappers)
strict_but_not_unpacked :: Type -> (HsBang, [(Type,StrictnessMark)], (Unboxer, Boxer))
strict_but_not_unpacked arg_ty
= (HsStrict, [(arg_ty, MarkedStrict)], (seqUnboxer, unitBoxer))
-------------------------
wrapCo :: Coercion -> Type -> (Unboxer, Boxer) -> (Unboxer, Boxer)
wrapCo co rep_ty (unbox_rep, box_rep) -- co :: arg_ty ~ rep_ty
= (unboxer, boxer)
where
unboxer arg_id = do { rep_id <- newLocal rep_ty
; (rep_ids, rep_fn) <- unbox_rep rep_id
; let co_bind = NonRec rep_id (Var arg_id `Cast` co)
; return (rep_ids, Let co_bind . rep_fn) }
boxer = Boxer $ \ subst ->
do { (rep_ids, rep_expr)
<- case box_rep of
UnitBox -> do { rep_id <- newLocal (TcType.substTy subst rep_ty)
; return ([rep_id], Var rep_id) }
Boxer boxer -> boxer subst
; let sco = substCo (tvCvSubst subst) co
; return (rep_ids, rep_expr `Cast` mkSymCo sco) }
------------------------
seqUnboxer :: Unboxer
seqUnboxer v = return ([v], \e -> Case (Var v) v (exprType e) [(DEFAULT, [], e)])
unitUnboxer :: Unboxer
unitUnboxer v = return ([v], \e -> e)
unitBoxer :: Boxer
unitBoxer = UnitBox
-------------------------
dataConArgUnpack
:: Type
-> ( [(Type, StrictnessMark)] -- Rep types
, (Unboxer, Boxer) )
dataConArgUnpack arg_ty
| Just (tc, tc_args) <- splitTyConApp_maybe arg_ty
, Just con <- tyConSingleAlgDataCon_maybe tc
-- NB: check for an *algebraic* data type
-- A recursive newtype might mean that
-- 'arg_ty' is a newtype
, let rep_tys = dataConInstArgTys con tc_args
= ASSERT( isVanillaDataCon con )
( rep_tys `zip` dataConRepStrictness con
,( \ arg_id ->
do { rep_ids <- mapM newLocal rep_tys
; let unbox_fn body
= Case (Var arg_id) arg_id (exprType body)
[(DataAlt con, rep_ids, body)]
; return (rep_ids, unbox_fn) }
, Boxer $ \ subst ->
do { rep_ids <- mapM (newLocal . TcType.substTy subst) rep_tys
; return (rep_ids, Var (dataConWorkId con)
`mkTyApps` (substTys subst tc_args)
`mkVarApps` rep_ids ) } ) )
| otherwise
= pprPanic "dataConArgUnpack" (ppr arg_ty)
-- An interface file specified Unpacked, but we couldn't unpack it
isUnpackableType :: FamInstEnvs -> Type -> Bool
-- True if we can UNPACK the argument type
-- See Note [Recursive unboxing]
-- We look "deeply" inside rather than relying on the DataCons
-- we encounter on the way, because otherwise we might well
-- end up relying on ourselves!
isUnpackableType fam_envs ty
| Just (tc, _) <- splitTyConApp_maybe ty
, Just con <- tyConSingleAlgDataCon_maybe tc
, isVanillaDataCon con
= ok_con_args (unitNameSet (getName tc)) con
| otherwise
= False
where
ok_arg tcs (ty, bang) = not (attempt_unpack bang) || ok_ty tcs norm_ty
where
norm_ty = topNormaliseType fam_envs ty
ok_ty tcs ty
| Just (tc, _) <- splitTyConApp_maybe ty
, let tc_name = getName tc
= not (tc_name `elemNameSet` tcs)
&& case tyConSingleAlgDataCon_maybe tc of
Just con | isVanillaDataCon con
-> ok_con_args (tcs `extendNameSet` getName tc) con
_ -> True
| otherwise
= True
ok_con_args tcs con
= all (ok_arg tcs) (dataConOrigArgTys con `zip` dataConStrictMarks con)
-- NB: dataConStrictMarks gives the *user* request;
-- We'd get a black hole if we used dataConRepBangs
attempt_unpack (HsUnpack {}) = True
attempt_unpack (HsUserBang (Just unpk) bang) = bang && unpk
attempt_unpack (HsUserBang Nothing bang) = bang -- Be conservative
attempt_unpack HsStrict = False
attempt_unpack HsNoBang = False
{-
Note [Unpack one-wide fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The flag UnboxSmallStrictFields ensures that any field that can
(safely) be unboxed to a word-sized unboxed field, should be so unboxed.
For example:
data A = A Int#
newtype B = B A
data C = C !B
data D = D !C
data E = E !()
data F = F !D
data G = G !F !F
All of these should have an Int# as their representation, except
G which should have two Int#s.
However
data T = T !(S Int)
data S a = S !a
Here we can represent T with an Int#.
Note [Recursive unboxing]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data R = MkR {-# UNPACK #-} !S Int
data S = MkS {-# UNPACK #-} !Int
The representation arguments of MkR are the *representation* arguments
of S (plus Int); the rep args of MkS are Int#. This is all fine.
But be careful not to try to unbox this!
data T = MkT {-# UNPACK #-} !T Int
Because then we'd get an infinite number of arguments.
Here is a more complicated case:
data S = MkS {-# UNPACK #-} !T Int
data T = MkT {-# UNPACK #-} !S Int
Each of S and T must decide independently whether to unpack
and they had better not both say yes. So they must both say no.
Also behave conservatively when there is no UNPACK pragma
data T = MkT !T Int
with -funbox-strict-fields or -funbox-small-strict-fields
we need to behave as if there was an UNPACK pragma there.
But it's the *argument* type that matters. This is fine:
data S = MkS S !Int
because Int is non-recursive.
Note [Unpack equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have a GADT with a constructor C :: (a~[b]) => b -> T a
we definitely want that equality predicate *unboxed* so that it
takes no space at all. This is easily done: just give it
an UNPACK pragma. The rest of the unpack/repack code does the
heavy lifting. This one line makes every GADT take a word less
space for each equality predicate, so it's pretty important!
-}
mk_pred_strict_mark :: PredType -> HsBang
mk_pred_strict_mark pred
| isEqPred pred = HsUnpack Nothing -- Note [Unpack equality predicates]
| otherwise = HsNoBang
{-
************************************************************************
* *
Wrapping and unwrapping newtypes and type families
* *
************************************************************************
-}
wrapNewTypeBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
-- The wrapper for the data constructor for a newtype looks like this:
-- newtype T a = MkT (a,Int)
-- MkT :: forall a. (a,Int) -> T a
-- MkT = /\a. \(x:(a,Int)). x `cast` sym (CoT a)
-- where CoT is the coercion TyCon associated with the newtype
--
-- The call (wrapNewTypeBody T [a] e) returns the
-- body of the wrapper, namely
-- e `cast` (CoT [a])
--
-- If a coercion constructor is provided in the newtype, then we use
-- it, otherwise the wrap/unwrap are both no-ops
--
-- If we are dealing with a newtype *instance*, we have a second coercion
-- identifying the family instance with the constructor of the newtype
-- instance. This coercion is applied in any case (ie, composed with the
-- coercion constructor of the newtype or applied by itself).
wrapNewTypeBody tycon args result_expr
= ASSERT( isNewTyCon tycon )
wrapFamInstBody tycon args $
mkCast result_expr (mkSymCo co)
where
co = mkUnbranchedAxInstCo Representational (newTyConCo tycon) args
-- When unwrapping, we do *not* apply any family coercion, because this will
-- be done via a CoPat by the type checker. We have to do it this way as
-- computing the right type arguments for the coercion requires more than just
-- a splitting operation (cf, TcPat.tcConPat).
unwrapNewTypeBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
unwrapNewTypeBody tycon args result_expr
= ASSERT( isNewTyCon tycon )
mkCast result_expr (mkUnbranchedAxInstCo Representational (newTyConCo tycon) args)
-- If the type constructor is a representation type of a data instance, wrap
-- the expression into a cast adjusting the expression type, which is an
-- instance of the representation type, to the corresponding instance of the
-- family instance type.
-- See Note [Wrappers for data instance tycons]
wrapFamInstBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
wrapFamInstBody tycon args body
| Just co_con <- tyConFamilyCoercion_maybe tycon
= mkCast body (mkSymCo (mkUnbranchedAxInstCo Representational co_con args))
| otherwise
= body
-- Same as `wrapFamInstBody`, but for type family instances, which are
-- represented by a `CoAxiom`, and not a `TyCon`
wrapTypeFamInstBody :: CoAxiom br -> Int -> [Type] -> CoreExpr -> CoreExpr
wrapTypeFamInstBody axiom ind args body
= mkCast body (mkSymCo (mkAxInstCo Representational axiom ind args))
wrapTypeUnbranchedFamInstBody :: CoAxiom Unbranched -> [Type] -> CoreExpr -> CoreExpr
wrapTypeUnbranchedFamInstBody axiom
= wrapTypeFamInstBody axiom 0
unwrapFamInstScrut :: TyCon -> [Type] -> CoreExpr -> CoreExpr
unwrapFamInstScrut tycon args scrut
| Just co_con <- tyConFamilyCoercion_maybe tycon
= mkCast scrut (mkUnbranchedAxInstCo Representational co_con args) -- data instances only
| otherwise
= scrut
unwrapTypeFamInstScrut :: CoAxiom br -> Int -> [Type] -> CoreExpr -> CoreExpr
unwrapTypeFamInstScrut axiom ind args scrut
= mkCast scrut (mkAxInstCo Representational axiom ind args)
unwrapTypeUnbranchedFamInstScrut :: CoAxiom Unbranched -> [Type] -> CoreExpr -> CoreExpr
unwrapTypeUnbranchedFamInstScrut axiom
= unwrapTypeFamInstScrut axiom 0
{-
************************************************************************
* *
\subsection{Primitive operations}
* *
************************************************************************
-}
mkPrimOpId :: PrimOp -> Id
mkPrimOpId prim_op
= id
where
(tyvars,arg_tys,res_ty, arity, strict_sig) = primOpSig prim_op
ty = mkForAllTys tyvars (mkFunTys arg_tys res_ty)
name = mkWiredInName gHC_PRIM (primOpOcc prim_op)
(mkPrimOpIdUnique (primOpTag prim_op))
(AnId id) UserSyntax
id = mkGlobalId (PrimOpId prim_op) name ty info
info = noCafIdInfo
`setSpecInfo` mkSpecInfo (maybeToList $ primOpRules name prim_op)
`setArityInfo` arity
`setStrictnessInfo` strict_sig
`setInlinePragInfo` neverInlinePragma
-- We give PrimOps a NOINLINE pragma so that we don't
-- get silly warnings from Desugar.dsRule (the inline_shadows_rule
-- test) about a RULE conflicting with a possible inlining
-- cf Trac #7287
-- For each ccall we manufacture a separate CCallOpId, giving it
-- a fresh unique, a type that is correct for this particular ccall,
-- and a CCall structure that gives the correct details about calling
-- convention etc.
--
-- The *name* of this Id is a local name whose OccName gives the full
-- details of the ccall, type and all. This means that the interface
-- file reader can reconstruct a suitable Id
mkFCallId :: DynFlags -> Unique -> ForeignCall -> Type -> Id
mkFCallId dflags uniq fcall ty
= ASSERT( isEmptyVarSet (tyVarsOfType ty) )
-- A CCallOpId should have no free type variables;
-- so that substitutions won't substitute over it
mkGlobalId (FCallId fcall) name ty info
where
occ_str = showSDoc dflags (braces (ppr fcall <+> ppr ty))
-- The "occurrence name" of a ccall is the full info about the
-- ccall; it is encoded, but may have embedded spaces etc!
name = mkFCallName uniq occ_str
info = noCafIdInfo
`setArityInfo` arity
`setStrictnessInfo` strict_sig
(_, tau) = tcSplitForAllTys ty
(arg_tys, _) = tcSplitFunTys tau
arity = length arg_tys
strict_sig = mkClosedStrictSig (replicate arity evalDmd) topRes
{-
************************************************************************
* *
\subsection{DictFuns and default methods}
* *
************************************************************************
Note [Dict funs and default methods]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Dict funs and default methods are *not* ImplicitIds. Their definition
involves user-written code, so we can't figure out their strictness etc
based on fixed info, as we can for constructors and record selectors (say).
NB: See also Note [Exported LocalIds] in Id
-}
mkDictFunId :: Name -- Name to use for the dict fun;
-> [TyVar]
-> ThetaType
-> Class
-> [Type]
-> Id
-- Implements the DFun Superclass Invariant (see TcInstDcls)
-- See Note [Dict funs and default methods]
mkDictFunId dfun_name tvs theta clas tys
= mkExportedLocalId (DFunId n_silent is_nt)
dfun_name
dfun_ty
where
is_nt = isNewTyCon (classTyCon clas)
(n_silent, dfun_ty) = mkDictFunTy tvs theta clas tys
mkDictFunTy :: [TyVar] -> ThetaType -> Class -> [Type] -> (Int, Type)
mkDictFunTy tvs theta clas tys
= (length silent_theta, dfun_ty)
where
dfun_ty = mkSigmaTy tvs (silent_theta ++ theta) (mkClassPred clas tys)
silent_theta
| null tvs, null theta
= []
| otherwise
= filterOut discard $
substTheta (zipTopTvSubst (classTyVars clas) tys)
(classSCTheta clas)
-- See Note [Silent Superclass Arguments]
discard pred = any (`eqPred` pred) theta
-- See the DFun Superclass Invariant in TcInstDcls
{-
************************************************************************
* *
\subsection{Un-definable}
* *
************************************************************************
These Ids can't be defined in Haskell. They could be defined in
unfoldings in the wired-in GHC.Prim interface file, but we'd have to
ensure that they were definitely, definitely inlined, because there is
no curried identifier for them. That's what mkCompulsoryUnfolding
does. If we had a way to get a compulsory unfolding from an interface
file, we could do that, but we don't right now.
unsafeCoerce# isn't so much a PrimOp as a phantom identifier, that
just gets expanded into a type coercion wherever it occurs. Hence we
add it as a built-in Id with an unfolding here.
The type variables we use here are "open" type variables: this means
they can unify with both unlifted and lifted types. Hence we provide
another gun with which to shoot yourself in the foot.
-}
lazyIdName, unsafeCoerceName, nullAddrName, seqName,
realWorldName, voidPrimIdName, coercionTokenName,
magicDictName, coerceName, proxyName, dollarName, oneShotName :: Name
unsafeCoerceName = mkWiredInIdName gHC_PRIM (fsLit "unsafeCoerce#") unsafeCoerceIdKey unsafeCoerceId
nullAddrName = mkWiredInIdName gHC_PRIM (fsLit "nullAddr#") nullAddrIdKey nullAddrId
seqName = mkWiredInIdName gHC_PRIM (fsLit "seq") seqIdKey seqId
realWorldName = mkWiredInIdName gHC_PRIM (fsLit "realWorld#") realWorldPrimIdKey realWorldPrimId
voidPrimIdName = mkWiredInIdName gHC_PRIM (fsLit "void#") voidPrimIdKey voidPrimId
lazyIdName = mkWiredInIdName gHC_MAGIC (fsLit "lazy") lazyIdKey lazyId
coercionTokenName = mkWiredInIdName gHC_PRIM (fsLit "coercionToken#") coercionTokenIdKey coercionTokenId
magicDictName = mkWiredInIdName gHC_PRIM (fsLit "magicDict") magicDictKey magicDictId
coerceName = mkWiredInIdName gHC_PRIM (fsLit "coerce") coerceKey coerceId
proxyName = mkWiredInIdName gHC_PRIM (fsLit "proxy#") proxyHashKey proxyHashId
dollarName = mkWiredInIdName gHC_BASE (fsLit "$") dollarIdKey dollarId
oneShotName = mkWiredInIdName gHC_MAGIC (fsLit "oneShot") oneShotKey oneShotId
dollarId :: Id -- Note [dollarId magic]
dollarId = pcMiscPrelId dollarName ty
(noCafIdInfo `setUnfoldingInfo` unf)
where
fun_ty = mkFunTy alphaTy openBetaTy
ty = mkForAllTys [alphaTyVar, openBetaTyVar] $
mkFunTy fun_ty fun_ty
unf = mkInlineUnfolding (Just 2) rhs
[f,x] = mkTemplateLocals [fun_ty, alphaTy]
rhs = mkLams [alphaTyVar, openBetaTyVar, f, x] $
App (Var f) (Var x)
------------------------------------------------
-- proxy# :: forall a. Proxy# a
proxyHashId :: Id
proxyHashId
= pcMiscPrelId proxyName ty
(noCafIdInfo `setUnfoldingInfo` evaldUnfolding) -- Note [evaldUnfoldings]
where
ty = mkForAllTys [kv, tv] (mkProxyPrimTy k t)
kv = kKiVar
k = mkTyVarTy kv
tv:_ = tyVarList k
t = mkTyVarTy tv
------------------------------------------------
-- unsafeCoerce# :: forall a b. a -> b
unsafeCoerceId :: Id
unsafeCoerceId
= pcMiscPrelId unsafeCoerceName ty info
where
info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` mkCompulsoryUnfolding rhs
ty = mkForAllTys [openAlphaTyVar,openBetaTyVar]
(mkFunTy openAlphaTy openBetaTy)
[x] = mkTemplateLocals [openAlphaTy]
rhs = mkLams [openAlphaTyVar,openBetaTyVar,x] $
Cast (Var x) (mkUnsafeCo openAlphaTy openBetaTy)
------------------------------------------------
nullAddrId :: Id
-- nullAddr# :: Addr#
-- The reason it is here is that we don't provide
-- a way to write this literal in Haskell.
nullAddrId = pcMiscPrelId nullAddrName addrPrimTy info
where
info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` mkCompulsoryUnfolding (Lit nullAddrLit)
------------------------------------------------
seqId :: Id -- See Note [seqId magic]
seqId = pcMiscPrelId seqName ty info
where
info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` mkCompulsoryUnfolding rhs
`setSpecInfo` mkSpecInfo [seq_cast_rule]
ty = mkForAllTys [alphaTyVar,betaTyVar]
(mkFunTy alphaTy (mkFunTy betaTy betaTy))
-- NB argBetaTyVar; see Note [seqId magic]
[x,y] = mkTemplateLocals [alphaTy, betaTy]
rhs = mkLams [alphaTyVar,betaTyVar,x,y] (Case (Var x) x betaTy [(DEFAULT, [], Var y)])
-- See Note [Built-in RULES for seq]
seq_cast_rule = BuiltinRule { ru_name = fsLit "seq of cast"
, ru_fn = seqName
, ru_nargs = 4
, ru_try = match_seq_of_cast
}
match_seq_of_cast :: RuleFun
-- See Note [Built-in RULES for seq]
match_seq_of_cast _ _ _ [Type _, Type res_ty, Cast scrut co, expr]
= Just (Var seqId `mkApps` [Type (pFst (coercionKind co)), Type res_ty,
scrut, expr])
match_seq_of_cast _ _ _ _ = Nothing
------------------------------------------------
lazyId :: Id -- See Note [lazyId magic]
lazyId = pcMiscPrelId lazyIdName ty info
where
info = noCafIdInfo
ty = mkForAllTys [alphaTyVar] (mkFunTy alphaTy alphaTy)
oneShotId :: Id -- See Note [The oneShot function]
oneShotId = pcMiscPrelId oneShotName ty info
where
info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` mkCompulsoryUnfolding rhs
ty = mkForAllTys [alphaTyVar, betaTyVar] (mkFunTy fun_ty fun_ty)
fun_ty = mkFunTy alphaTy betaTy
[body, x] = mkTemplateLocals [fun_ty, alphaTy]
x' = setOneShotLambda x
rhs = mkLams [alphaTyVar, betaTyVar, body, x'] $ Var body `App` Var x
--------------------------------------------------------------------------------
magicDictId :: Id -- See Note [magicDictId magic]
magicDictId = pcMiscPrelId magicDictName ty info
where
info = noCafIdInfo `setInlinePragInfo` neverInlinePragma
ty = mkForAllTys [alphaTyVar] alphaTy
--------------------------------------------------------------------------------
coerceId :: Id
coerceId = pcMiscPrelId coerceName ty info
where
info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
`setUnfoldingInfo` mkCompulsoryUnfolding rhs
eqRTy = mkTyConApp coercibleTyCon [liftedTypeKind, alphaTy, betaTy]
eqRPrimTy = mkTyConApp eqReprPrimTyCon [liftedTypeKind, alphaTy, betaTy]
ty = mkForAllTys [alphaTyVar, betaTyVar] $
mkFunTys [eqRTy, alphaTy] betaTy
[eqR,x,eq] = mkTemplateLocals [eqRTy, alphaTy, eqRPrimTy]
rhs = mkLams [alphaTyVar, betaTyVar, eqR, x] $
mkWildCase (Var eqR) eqRTy betaTy $
[(DataAlt coercibleDataCon, [eq], Cast (Var x) (CoVarCo eq))]
{-
Note [dollarId magic]
~~~~~~~~~~~~~~~~~~~~~
The only reason that ($) is wired in is so that its type can be
forall (a:*, b:Open). (a->b) -> a -> b
That is, the return type can be unboxed. E.g. this is OK
foo $ True where foo :: Bool -> Int#
because ($) doesn't inspect or move the result of the call to foo.
See Trac #8739.
There is a special typing rule for ($) in TcExpr, so the type of ($)
isn't looked at there, BUT Lint subsequently (and rightly) complains
if sees ($) applied to Int# (say), unless we give it a wired-in type
as we do here.
Note [Unsafe coerce magic]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We define a *primitive*
GHC.Prim.unsafeCoerce#
and then in the base library we define the ordinary function
Unsafe.Coerce.unsafeCoerce :: forall (a:*) (b:*). a -> b
unsafeCoerce x = unsafeCoerce# x
Notice that unsafeCoerce has a civilized (albeit still dangerous)
polymorphic type, whose type args have kind *. So you can't use it on
unboxed values (unsafeCoerce 3#).
In contrast unsafeCoerce# is even more dangerous because you *can* use
it on unboxed things, (unsafeCoerce# 3#) :: Int. Its type is
forall (a:OpenKind) (b:OpenKind). a -> b
Note [seqId magic]
~~~~~~~~~~~~~~~~~~
'GHC.Prim.seq' is special in several ways.
a) Its second arg can have an unboxed type
x `seq` (v +# w)
Hence its second type variable has ArgKind
b) Its fixity is set in LoadIface.ghcPrimIface
c) It has quite a bit of desugaring magic.
See DsUtils.lhs Note [Desugaring seq (1)] and (2) and (3)
d) There is some special rule handling: Note [User-defined RULES for seq]
e) See Note [Typing rule for seq] in TcExpr.
Note [User-defined RULES for seq]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Roman found situations where he had
case (f n) of _ -> e
where he knew that f (which was strict in n) would terminate if n did.
Notice that the result of (f n) is discarded. So it makes sense to
transform to
case n of _ -> e
Rather than attempt some general analysis to support this, I've added
enough support that you can do this using a rewrite rule:
RULE "f/seq" forall n. seq (f n) e = seq n e
You write that rule. When GHC sees a case expression that discards
its result, it mentally transforms it to a call to 'seq' and looks for
a RULE. (This is done in Simplify.rebuildCase.) As usual, the
correctness of the rule is up to you.
To make this work, we need to be careful that the magical desugaring
done in Note [seqId magic] item (c) is *not* done on the LHS of a rule.
Or rather, we arrange to un-do it, in DsBinds.decomposeRuleLhs.
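Written as an actual source-level pragma, the rule above would look roughly
like this (f is whatever hypothetical strict function you have in mind; note
that both n and e must be quantified):
    {-# RULES "f/seq" forall n e. seq (f n) e = seq n e #-}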
Note [Built-in RULES for seq]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We also have the following built-in rule for seq
seq (x `cast` co) y = seq x y
This eliminates unnecessary casts and also allows other seq rules to
match more often. Notably,
seq (f x `cast` co) y --> seq (f x) y
and now a user-defined rule for seq (see Note [User-defined RULES for seq])
may fire.
Note [lazyId magic]
~~~~~~~~~~~~~~~~~~~
lazy :: forall a?. a? -> a? (i.e. works for unboxed types too)
Used to lazify pseq: pseq a b = a `seq` lazy b
Also, no strictness: by being a built-in Id, all the info about lazyId comes from here,
not from GHC.Base.hi. This is important, because the strictness
analyser will spot it as strict!
Also no unfolding in lazyId: it gets "inlined" by a HACK in CorePrep.
It's very important to do this inlining *after* unfoldings are exposed
in the interface file. Otherwise, the unfolding for (say) pseq in the
interface file will not mention 'lazy', so if we inline 'pseq' we'll totally
miss the very thing that 'lazy' was there for in the first place.
See Trac #3259 for a real world example.
lazyId is defined in GHC.Base, so we don't *have* to inline it. If it
appears un-applied, we'll end up just calling it.
Note [The oneShot function]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the context of making left-folds fuse somewhat okish (see ticket #7994
and Note [Left folds via right fold]) it was determined that it would be useful
if library authors could explicitly tell the compiler that a certain lambda is
called at most once. The oneShot function allows that.
Like most magic functions it has a compulsory unfolding, so there is no need
for a real definition somewhere. We have one in GHC.Magic for the convenience
of putting the documentation there.
It uses `setOneShotLambda` on the lambda's binder. That is the whole magic:
A typical call looks like
oneShot (\y. e)
after unfolding the definition `oneShot = \f \x[oneshot]. f x` we get
(\f \x[oneshot]. f x) (\y. e)
--> \x[oneshot]. ((\y.e) x)
--> \x[oneshot] e[x/y]
which is what we want.
It is only effective if this bit survives as long as possible and makes it into
the interface in unfoldings (See Note [Preserve OneShotInfo]). Also see
https://ghc.haskell.org/trac/ghc/wiki/OneShot.
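A hedged usage sketch (myFoldl is hypothetical, though GHC.List uses
essentially this pattern to define foldl via foldr): the lambda over the
accumulator is the one claimed to be called at most once.
    myFoldl :: (b -> a -> b) -> b -> [a] -> b
    myFoldl f z xs = foldr (\x k -> oneShot (\acc -> k (f acc x))) id xs z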
Note [magicDictId magic]
~~~~~~~~~~~~~~~~~~~~~~~~~
The identifier `magicDict` is just a place-holder, which is used to
implement a primitive that we cannot define in Haskell but we can write
in Core. It is declared with a place-holder type:
magicDict :: forall a. a
The intention is that the identifier will be used in a very specific way,
to create dictionaries for classes with a single method. Consider a class
like this:
class C a where
f :: T a
We are going to use `magicDict`, in conjunction with a built-in Prelude
rule, to cast values of type `T a` into dictionaries for `C a`. To do
this, we define a function like this in the library:
data WrapC a b = WrapC (C a => Proxy a -> b)
withT :: (C a => Proxy a -> b)
-> T a -> Proxy a -> b
withT f x y = magicDict (WrapC f) x y
The purpose of `WrapC` is to avoid having `f` instantiated.
Also, it avoids impredicativity, because `magicDict`'s type
cannot be instantiated with a forall. The field of `WrapC` contains
a `Proxy` parameter which is used to link the type of the constraint,
`C a`, with the type of the `Wrap` value being made.
Next, we add a built-in Prelude rule (see prelude/PrelRules.hs),
which will replace the RHS of this definition with the appropriate
definition in Core. The rewrite rule works as follows:
magicDict@t (wrap@a@b f) x y
---->
f (x `cast` co a) y
The `co` coercion is the newtype-coercion extracted from the type-class.
The type class is obtained by looking at the type of wrap.
-------------------------------------------------------------
@realWorld#@ used to be a magic literal, @void#@. If things get
nasty as-is, change it back to a literal (@Literal@).
voidArgId is a Local Id used simply as an argument in functions
where we just want an arg to avoid having a thunk of unlifted type.
E.g.
x = \ void :: Void# -> (# p, q #)
This comes up in strictness analysis
Note [evaldUnfoldings]
~~~~~~~~~~~~~~~~~~~~~~
The evaldUnfolding makes it look that some primitive value is
evaluated, which in turn makes Simplify.interestingArg return True,
which in turn makes INLINE things applied to said value likely to be
inlined.
-}
realWorldPrimId :: Id -- :: State# RealWorld
realWorldPrimId = pcMiscPrelId realWorldName realWorldStatePrimTy
(noCafIdInfo `setUnfoldingInfo` evaldUnfolding -- Note [evaldUnfoldings]
`setOneShotInfo` stateHackOneShot)
voidPrimId :: Id -- Global constant :: Void#
voidPrimId = pcMiscPrelId voidPrimIdName voidPrimTy
(noCafIdInfo `setUnfoldingInfo` evaldUnfolding) -- Note [evaldUnfoldings]
voidArgId :: Id -- Local lambda-bound :: Void#
voidArgId = mkSysLocal (fsLit "void") voidArgIdKey voidPrimTy
coercionTokenId :: Id -- :: () ~ ()
coercionTokenId -- Used to replace Coercion terms when we go to STG
= pcMiscPrelId coercionTokenName
(mkTyConApp eqPrimTyCon [liftedTypeKind, unitTy, unitTy])
noCafIdInfo
pcMiscPrelId :: Name -> Type -> IdInfo -> Id
pcMiscPrelId name ty info
= mkVanillaGlobalWithInfo name ty info
-- We lie and say the thing is imported; otherwise, we get into
-- a mess with dependency analysis; e.g., core2stg may heave in
-- random calls to GHCbase.unpackPS__. If GHCbase is the module
-- being compiled, then it's just a matter of luck if the definition
-- will be in "the right place" to be in scope.
| bitemyapp/ghc | compiler/basicTypes/MkId.hs | bsd-3-clause | 54,258 | 0 | 21 | 14,840 | 7,139 | 3,887 | 3,252 | 556 | 6 |
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.PeekPoke
-- Copyright : (c) Sven Panne 2003
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This is a purely internal module with peek- and poke-related utilities.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.PeekPoke (
poke1, poke2, poke3, poke4,
peek1, peek2, peek3, peek4
) where
import Foreign.Ptr ( Ptr )
import Foreign.Storable ( Storable(peekElemOff,pokeElemOff) )
--------------------------------------------------------------------------------
-- The implementation is a little bit verbose/redundant, but seems to generate
-- better code than mapM/zipWithM_.
--------------------------------------------------------------------------------
{-# INLINE poke1 #-}
poke1 :: Storable a => Ptr a -> a -> IO ()
poke1 ptr x =
pokeElemOff ptr 0 x
{-# INLINE poke2 #-}
poke2 :: Storable a => Ptr a -> a -> a -> IO ()
poke2 ptr x y = do
pokeElemOff ptr 0 x
pokeElemOff ptr 1 y
{-# INLINE poke3 #-}
poke3 :: Storable a => Ptr a -> a -> a -> a -> IO ()
poke3 ptr x y z = do
pokeElemOff ptr 0 x
pokeElemOff ptr 1 y
pokeElemOff ptr 2 z
{-# INLINE poke4 #-}
poke4 :: Storable a => Ptr a -> a -> a -> a -> a -> IO ()
poke4 ptr x y z w = do
pokeElemOff ptr 0 x
pokeElemOff ptr 1 y
pokeElemOff ptr 2 z
pokeElemOff ptr 3 w
--------------------------------------------------------------------------------
{-# INLINE peek1 #-}
peek1 :: Storable a => (a -> b) -> Ptr a -> IO b
peek1 f ptr = do
x <- peekElemOff ptr 0
return $ f x
{-# INLINE peek2 #-}
peek2 :: Storable a => (a -> a -> b) -> Ptr a -> IO b
peek2 f ptr = do
x <- peekElemOff ptr 0
y <- peekElemOff ptr 1
return $ f x y
{-# INLINE peek3 #-}
peek3 :: Storable a => (a -> a -> a -> b) -> Ptr a -> IO b
peek3 f ptr = do
x <- peekElemOff ptr 0
y <- peekElemOff ptr 1
z <- peekElemOff ptr 2
return $ f x y z
{-# INLINE peek4 #-}
peek4 :: Storable a => (a -> a -> a -> a -> b) -> Ptr a -> IO b
peek4 f ptr = do
x <- peekElemOff ptr 0
y <- peekElemOff ptr 1
z <- peekElemOff ptr 2
w <- peekElemOff ptr 3
return $ f x y z w
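--------------------------------------------------------------------------------
-- A minimal usage sketch (not part of this module): round-tripping two values
-- through a temporary buffer with poke2/peek2. 'allocaArray' is assumed to be
-- imported from Foreign.Marshal.Array.
--
-- > roundTrip2 :: Double -> Double -> IO (Double, Double)
-- > roundTrip2 x y = allocaArray 2 $ \p -> do
-- >    poke2 p x y
-- >    peek2 (,) p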
| OS2World/DEV-UTIL-HUGS | libraries/Graphics/Rendering/OpenGL/GL/PeekPoke.hs | bsd-3-clause | 2,400 | 0 | 12 | 523 | 741 | 365 | 376 | 53 | 1 |
module Problems.Problem14
( getSolution
) where
-- This is very slow: every seed recomputes its whole Collatz chain from scratch.
getSolution :: Int
getSolution = snd $ maximum $ map (\n -> (findLengthSimple n, n)) [1 .. 1000000]
findLengthSimple :: Int -> Int
findLengthSimple 1 = 1
findLengthSimple seed = 1 + findLengthSimple (nextCollatz seed)
nextCollatz :: Int -> Int
nextCollatz n = if n `mod` 2 == 0 then n `div` 2 else (3 * n) + 1
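-- A hedged alternative (not used above): the same chain length computed with a
-- strict accumulator instead of building "1 + (1 + ...)" thunks. A real
-- speed-up for getSolution would additionally require memoisation
-- (e.g. via Data.Array), which is not sketched here.
findLengthAcc :: Int -> Int
findLengthAcc = go 1
  where
    go acc 1 = acc
    go acc n = (go $! acc + 1) (nextCollatz n)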
| sarangj/eulerhs | src/Problems/Problem14.hs | bsd-3-clause | 393 | 0 | 10 | 82 | 148 | 83 | 65 | 9 | 2 |
--------------------------------------------------------------------
-- |
-- Module : Language.While.Parser
--
-- Provides parsing of while-language code.
-- Supports reading either a file or stdin, resulting in an AST.
module Language.While.Parser (loadFile, loadStdin) where
import Control.Applicative ((<$>), (<*))
import Control.Monad (liftM)
import Language.While.Types
import Text.Parsec
import Text.Parsec.Expr
import Text.Parsec.Indent
import Text.Parsec.Language (GenLanguageDef)
import Text.Parsec.String
import qualified Text.Parsec.Token as P
-- | Parse the specified file and return either failure or the program.
loadFile :: FilePath -> IO (Either String Stm)
loadFile path = parseString "Failed to parse file; " <$> readFile path
-- | Parse stdin and return either failure or the program.
loadStdin :: IO (Either String Stm)
loadStdin = parseString "Failed to parse stdin; " <$> getContents
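-- A minimal usage sketch (the surrounding @main@ is hypothetical and would
-- live outside this module):
--
-- > main :: IO ()
-- > main = loadStdin >>= either putStrLn (const (putStrLn "parse OK"))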
-- | Parse the supplied string and return either failure or the program.
parseString errMsg input =
case parseResult of
Left err -> Left $ errMsg ++ show err
Right res -> Right res
where
parseResult = runIndent "" $ runParserT program () "" input
-- | Parse a binary operation.
binaryOp name fun = Infix body
where
body = reservedOp name >> return fun
-- | Parse a prefix operation, e.g. negation.
prefixOp name fun = Prefix $ reservedOp name >> return fun
-- | Parse a whole program, as a series of statements.
program = do
whiteSpace
st <- semiSep consecutive
return $ foldr1 Scomp . concat $ st
-- | Parse a single statement, optionally followed by another one.
-- This is needed because of the way while-statements are parsed.
consecutive = do
s1 <- statement
(s1:) <$> option [] (liftM return statement)
-- | Parse a program statement.
statement
= try stmAssignment
<|> try stmTryCatch
<|> try stmSkip
<|> try stmIf
<|> try stmWhile <* whiteSpace
<|> parens stmWhile <* whiteSpace
-- | Parse an arithmetic atom.
arithmeticAtom
= Numeral <$> integer
<|> Variable <$> identifier
<|> parens arithmeticExpr
-- | Table of supported arithmetic operations.
arithmeticOperation =
[ [binaryOp "*" Amul AssocLeft]
, [binaryOp "+" Aadd AssocLeft, binaryOp "-" Asub AssocLeft , binaryOp "/" Adiv AssocLeft ]
]
-- | Parse an arithmetic expression, consisting of parentheses and the supported operators.
arithmeticExpr = buildExpressionParser arithmeticOperation arithmeticAtom
-- | Since Bexp members operate over different domains,
-- there is some boxing/unboxing being done with WrapAtom.
data WrapAtom
= BexpW Bexp
| AexpW Aexp
-- | Parse a boolean atom.
booleanAtom
= (try (symbol "true") >> truthVal Btrue)
<|> (try (symbol "false") >> truthVal Bfalse)
<|> (try $ AexpW <$> arithmeticExpr)
<|> parens booleanExpr'
where truthVal = return . BexpW
-- | Table of supported boolean operations.
booleanOperation =
[ [prefixOp "!" bneg]
, [binaryOp "=" beq AssocLeft
, binaryOp "<=" bleq AssocLeft
, binaryOp "^" band AssocLeft ]
]
where
bneg (BexpW b) = BexpW $ Bneg b
beq (AexpW a1) (AexpW a2) = BexpW $ Beq a1 a2
bleq (AexpW a1) (AexpW a2) = BexpW $ Bleq a1 a2
band (BexpW b1) (BexpW b2) = BexpW $ Band b1 b2
-- | Parse a boolean expression.
booleanExpr = do
result <- booleanExpr'
case result of
(BexpW val) -> return val
_ -> error "Parse error: failed to extract boolean"
booleanExpr' = buildExpressionParser booleanOperation booleanAtom
-- | Assignment statement.
stmAssignment = do
var <- identifier
symbol ":="
expr <- arithmeticExpr
return $ Sass var expr
-- | Skip statement.
stmSkip = symbol "skip" >> return Sskip
-- | If-then-else statement.
stmIf = do
symbol "if"
check <- booleanExpr
symbol "then"
s1 <- program
symbol "else"
s2 <- program
return $ Sif check s1 s2
-- | While statement.
stmWhile = do
symbol "while"
check <- booleanExpr
symbol "do"
prog <- liftM (foldr1 Scomp) . block $ do
s <- statement
optional semi
return s
return $ Swhile check prog
-- | Try-catch statement.
stmTryCatch = do
symbol "try"
s1 <- program
symbol "catch"
s2 <- program
return $ Stry s1 s2
identifier = P.identifier whileLexer
integer = P.integer whileLexer
parens = P.parens whileLexer
reservedOp = P.reservedOp whileLexer
semi = P.semi whileLexer
symbol = P.symbol whileLexer
semiSep = P.semiSep whileLexer
whiteSpace = P.whiteSpace whileLexer
whileLexer = P.makeTokenParser whileStyle
whileStyle :: Monad m => GenLanguageDef String u m
whileStyle = P.LanguageDef
{ P.commentStart = ""
, P.commentEnd = ""
, P.commentLine = "#"
, P.nestedComments = True
, P.identStart = letter <|> char '_'
, P.identLetter = alphaNum <|> oneOf "_'"
, P.opStart = P.opLetter whileStyle
, P.opLetter = oneOf ":!#$%&*+./<=>?@\\^|-~"
, P.reservedOpNames= []
, P.reservedNames = []
, P.caseSensitive = True
}
| davnils/while-lang-parser | Language/While/Parser.hs | bsd-3-clause | 4,911 | 0 | 12 | 983 | 1,296 | 659 | 637 | 119 | 2 |
----------------------------------------------------------------
-- Application module
-- GUI scripts (HScript)
-- Utilities for the JavaScript language generator
----------------------------------------------------------------
module WebUI.Scripts.JavaScript.HJSUtils
( smartTrim, smartTrimStr
, smartTab , smartTabStr
, ujs
, upjs
) where
-- Module imports
import Prelude as PRL
import System.IO.Unsafe (unsafePerformIO)
import Data.Char (toLower)
import Data.String.Utils (strip)
import qualified Data.Text.Lazy as DTL
import Data.Int
import WebUI.Scripts.HScript
import WebUI.Scripts.JavaScript.HJSTypes
import WebUI.Scripts.JavaScript.HJSBuilder
-- | Smart trimming of code text after Julius (String version)
smartTrimStr :: String
-> String
smartTrimStr txt = DTL.unpack $ smartTrim $ DTL.pack txt
-- | Smart trimming of code text after Julius
smartTrim :: DTL.Text
-> DTL.Text
smartTrim txt =
let linesTxt = DTL.lines txt in
DTL.unlines $ dropWhole linesTxt $ findCount linesTxt maxCounter
where
findCount :: [DTL.Text] -> Int64 -> Int64
findCount (x:xs) counter = let len = lenSP x counter in
if len < counter
then findCount xs len
else findCount xs counter
findCount [] counter = if counter == maxCounter
then 0
else counter
maxCounter :: Int64
maxCounter = 1000000
lenSP :: DTL.Text -> Int64 -> Int64
lenSP txt counter = let res = lenSP_ txt counter in if res < 0
then 0
else res
lenSP_ :: DTL.Text -> Int64 -> Int64
lenSP_ txt counter = if (DTL.length txt) == 0 || (DTL.length txt) == (DTL.length $ DTL.takeWhile (==' ') txt)
then counter
else DTL.length $ DTL.takeWhile (==' ') txt
dropWhole :: [DTL.Text] -> Int64 -> [DTL.Text]
dropWhole (x:xs) count = if (DTL.length x) > count
then (DTL.drop count x):(dropWhole xs count)
else (dropWhole xs count)
dropWhole [] _ = []
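-- A rough behavioural sketch (inferred from the code above, not taken from
-- tests): the common leading indentation of the non-blank lines is removed.
--
-- > smartTrimStr "    foo\n      bar\n" == "foo\n  bar\n"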
-- | Smart code indentation (String version)
smartTabStr :: String
-> String
-> String
smartTabStr txt tab = DTL.unpack $ smartTab (DTL.pack txt) (DTL.pack tab)
-- | Smart code indentation
smartTab :: DTL.Text
-> DTL.Text
-> DTL.Text
smartTab txt tab =
let linesTxt = DTL.lines txt in
DTL.unlines $ tabWhole linesTxt $ tab
where
tabWhole :: [DTL.Text] -> DTL.Text -> [DTL.Text]
tabWhole (x:xs) tb = if (DTL.length x) > 0
then (DTL.append tb x):(tabWhole xs tb)
else x:(tabWhole xs tb)
tabWhole [] _ = []
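-- A rough behavioural sketch (inferred from the code above): every non-empty
-- line gets the given prefix, empty lines are left untouched.
--
-- > smartTabStr "a\n\nb\n" "  " == "  a\n\n  b\n"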
-- | Unpack a value from the monad transformer, using the default
-- configuration and an empty initial state
ujs :: HSL HLangJS HLangJS
-> HLangJS
ujs srcHSL =
unsafePerformIO $ do
(a, s, log) <- runRWST srcHSL defaultHBConfig HLangJS
return a
-- | Unpack a value from the monad transformer, using the supplied
-- configuration and initial state
upjs :: HSL HLangJS HLangJS
-> HBConfig
-> HLangJS
-> HLangJS
upjs srcHSL hbConf st =
unsafePerformIO $ do
(a, s, log) <- runRWST srcHSL hbConf st
return a
| iqsf/HFitUI | src/WebUI/Scripts/JavaScript/HJSUtils.hs | bsd-3-clause | 4,235 | 0 | 13 | 1,507 | 906 | 486 | 420 | 74 | 8 |
{-# LANGUAGE CPP #-}
#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 702)
{-# LANGUAGE Trustworthy #-}
#endif
{-# LANGUAGE DeriveDataTypeable #-}
module Data.IterIO.Inum
(-- * Base types
Inum, Onum
-- * Concatenation and fusing operators
, (|$), (.|$), cat, lcat, (|.), (.|)
-- * Exception functions
, inumCatch, inumFinally, inumOnException
, resumeI, verboseResumeI
-- * Simple enumerator construction function
-- $mkInumIntro
, ResidHandler, CtlHandler
, mkInumC, mkInum, mkInumP
, inumBracket
-- * Utilities
, pullupResid
, noCtl, passCtl, consCtl, mkCtl, mkFlushCtl
, runIterM, runIterMC, runInum
-- * Some basic Inums
, inumNop, inumNull, inumPure, enumPure, inumRepeat
, inumTee
-- * Enumerator construction from Codecs
, Codec, runCodec, runCodecC
-- * Enumerator construction monad
-- $mkInumMIntro
, InumM, mkInumM, mkInumAutoM
, setCtlHandler, setAutoEOF, setAutoDone
, addCleanup, withCleanup
, ifeed, ifeed1, ipipe, irun, irepeat, ipopresid, idone
) where
import Prelude hiding (null)
import Control.Exception (Exception(..))
import Control.Monad
import Control.Monad.Trans
import Data.Maybe
import Data.Monoid
import Data.Typeable
import System.Environment (getProgName)
import System.IO
import Data.IterIO.Iter
import Data.IterIO.Trans
--
-- Enumerator types
--
-- | The type of an /iterator-enumerator/, which transcodes data from
-- some input type @tIn@ to some output type @tOut@. An @Inum@ acts
-- as an 'Iter' when consuming data, then acts as an enumerator when
-- feeding transcoded data to another 'Iter'.
--
-- At a high level, one can think of an @Inum@ as a function from
-- 'Iter's to 'IterR's, where an @Inum@'s input and output types are
-- different. A simpler-seeming alternative to @Inum@ might have
-- been:
--
-- > type Inum' tIn tOut m a = Iter tOut m a -> Iter tIn m a
--
-- In fact, given an @Inum@ object @inum@, it is possible to construct
-- a function of type @Inum'@ with @(inum '.|')@. But sometimes one
-- might like to concatenate @Inum@s. For instance, consider a
-- network protocol that changes encryption or compression modes
-- midstream. Transcoding is done by @Inum@s. To change transcoding
-- methods after applying an @Inum@ to an iteratee requires the
-- ability to \"pop\" the iteratee back out of the @Inum@ so as to be
-- able to hand it to another @Inum@. @Inum@'s return type (@Iter tIn
-- m (IterR tOut m a)@ as opposed to @Iter tIn m a@) allows the
-- monadic bind operator '>>=' to accomplish this popping in
-- conjunction with the 'tryRI' and 'reRunIter' functions.
--
-- All @Inum@s must obey the following two rules.
--
-- 1. /An/ @Inum@ /may never feed a chunk with the EOF flag set to/
-- /its target/ 'Iter'. Instead, upon receiving EOF, the @Inum@
-- should simply return the state of the inner 'Iter' (this is how
-- \"popping\" the iteratee back out works--If the @Inum@ passed
-- the EOF through to the 'Iter', the 'Iter' would stop requesting
-- more input and could not be handed off to a new @Inum@).
--
-- 2. /An/ @Inum@ /must always return the state of its target/ 'Iter'.
-- This is true even when the @Inum@ fails, and is why the 'Fail'
-- state contains a @'Maybe' a@ field.
--
-- In addition to returning when it receives an EOF or fails, an
-- @Inum@ should return when the target 'Iter' returns a result or
-- fails. An @Inum@ may also unilaterally return the state of the
-- iteratee at any earlier point, for instance if it has reached some
-- logical message boundary (e.g., many protocols finish processing
-- headers upon reading a blank line).
--
-- @Inum@s are generally constructed with one of the 'mkInum' or
-- 'mkInumM' functions, which hide most of the error handling details
-- and ensure the above rules are obeyed. Most @Inum@s are
-- polymorphic in the last type, @a@, in order to work with iteratees
-- returning any type. There isn't much reason for an @Inum@ to care
-- about the type @a@. Had this module used the Rank2Types Haskell
-- extension, it would define @Inum@ as:
--
-- > type Inum tIn tOut m = forall a. Iter tOut m a
-- > -> Iter tIn m (IterR tOut m a)
type Inum tIn tOut m a = Iter tOut m a -> Iter tIn m (IterR tOut m a)
-- | An @Onum t m a@ is just an 'Inum' in which the input is
-- @()@--i.e., @'Inum' () t m a@--so that there is no meaningful input
-- data to transcode. Such an enumerator is called an
-- /outer enumerator/, because it must produce the data it feeds to
-- 'Iter's by either executing actions in monad @m@, or from its own
-- internal pure state (as for 'enumPure').
--
-- As with 'Inum's, an @Onum@ should under no circumstances ever feed
-- a chunk with the EOF bit set to its 'Iter' argument. When the
-- @Onum@ runs out of data, it must simply return the current state of
-- the 'Iter'. This way more data from another source can still be
-- fed to the iteratee, as happens when enumerators are concatenated
-- with the 'cat' function.
--
-- @Onum@s should generally be constructed using the 'mkInum' or
-- 'mkInumM' function, just like 'Inum's, the only difference being
-- that for an @Onum@ the input type is @()@, so executing 'Iter's to
-- consume input will be of little use.
type Onum t m a = Inum () t m a
-- Concatenation and fusing functions
-- | Run an 'Onum' on an 'Iter'. This is the main way of actually
-- executing IO with 'Iter's. @|$@ is a type-restricted version of
-- the following code, in which @inum@ must be an 'Onum':
--
-- @
-- inum |$ iter = 'run' (inum .| iter)
-- infixr 2 |$
-- @
(|$) :: (ChunkData t, Monad m) => Onum t m a -> Iter t m a -> m a
(|$) inum iter = run (inum .| iter)
infixr 2 |$
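-- A minimal usage sketch (assuming the 'enumFile' and 'stdoutI' combinators
-- referenced below, with @L@ a qualified import of Data.ByteString.Lazy; the
-- chunk type is pinned down with an annotation):
--
-- > catFile :: FilePath -> IO ()
-- > catFile path = enumFile path |$ (stdoutI :: Iter L.ByteString IO ())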
-- | @.|$@ is a variant of '|$' that allows you to apply an 'Onum'
-- from within an 'Iter' monad. This is often useful in conjunction
-- with 'enumPure', if you want to parse at some coarse-granularity
-- (such as lines), and then re-parse the contents of some
-- coarser-grained parse unit. For example:
--
-- > rawcommand <- lineI
-- > command <- enumPure rawcommand .|$ parseCommandI
-- > return Request { cmd = command, rawcmd = rawcommand }
--
-- @.|$@ has the same fixity as @|$@, namely:
--
-- > infixr 2 .|$
--
-- Note the important distinction between @(.|$)@ and @('.|')@.
-- @(.|$)@ runs an 'Onum' and does not touch the current input, while
-- ('.|') pipes the current input through an 'Inum'. For instance, to
-- send the contents of a file to standard output (regardless of the
-- current input), you must say @'enumFile' \".signature\" .|$
-- 'stdoutI'@. But to take the current input, compress it, and send
-- the result to standard output, you must use '.|', as in @'inumGzip'
-- '.|' 'stdoutI'@.
--
-- As suggested by the types, @enum .|$ iter@ is sort of equivalent to
-- @'lift' (enum |$ iter)@, except that the latter will call 'throw'
-- on failures, causing language-level exceptions that cannot be
-- caught within the outer 'Iter'. Thus, it is better to use @.|$@
-- than @'lift' (... '|$' ...)@, though in the less general case of
-- the IO monad, @enum .|$ iter@ is equivalent to @'liftIO' (enum '|$'
-- iter)@ as illustrated by the following examples:
--
-- > -- Catches exception, because .|$ propagates failure through the outer
-- > -- Iter Monad, where it can still be caught.
-- > apply1 :: IO String
-- > apply1 = enumPure "test1" |$ iter `catchI` handler
-- > where
-- > iter = enumPure "test2" .|$ fail "error"
-- > handler (SomeException _) _ = return "caught error"
-- >
-- > -- Does not catch error. |$ turns the Iter failure into a language-
-- > -- level exception, which can only be caught in the IO Monad.
-- > apply2 :: IO String
-- > apply2 = enumPure "test1" |$ iter `catchI` handler
-- > where
-- > iter = lift (enumPure "test2" |$ fail "error")
-- > handler (SomeException _) _ = return "caught error"
-- >
-- > -- Catches the exception, because liftIO uses the IO catch function to
-- > -- turn language-level exceptions into monadic Iter failures. (By
-- > -- contrast, lift works in any Monad, so cannot do this in apply2.)
-- > -- This example illustrates how liftIO is not equivalent to lift.
-- > apply3 :: IO String
-- > apply3 = enumPure "test1" |$ iter `catchI` handler
-- > where
-- > iter = liftIO (enumPure "test2" |$ fail "error")
-- > handler (SomeException _) _ = return "caught error"
(.|$) :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Onum tOut m a -> Iter tOut m a -> Iter tIn m a
(.|$) enum iter = runI (enum .| iter)
infixr 2 .|$
-- | Concatenate the outputs of two enumerators. For example,
-- @'enumFile' \"file1\" \`cat\` 'enumFile' \"file2\"@ produces an
-- 'Onum' that outputs the concatenation of files \"file1\" and
-- \"file2\". Unless the first 'Inum' fails, @cat@ always invokes the
-- second 'Inum', as the second 'Inum' may have monadic side-effects
-- that must be executed even when the 'Iter' has already finished.
-- See 'lcat' if you want to stop when the 'Iter' no longer requires
-- input. If you want to continue executing even in the event of an
-- 'InumFail' condition, you can wrap the first 'Inum' with
-- 'inumCatch' and invoke 'resumeI' from within the exception handler.
--
-- @cat@ (and 'lcat', described below) are useful in right folds.
-- Say, for instance, that @files@ is a list of files you wish to
-- concatenate. You can use a construct such as:
--
-- @
-- catFiles :: ('MonadIO' m) => ['FilePath'] -> 'Onum' 'L.ByteString' m a
-- catFiles files = 'foldr' ('cat' . 'enumFile') 'inumNull' files
-- @
--
-- Note the use of 'inumNull' as the starting value for 'foldr'. This
-- is not to be confused with 'inumNop'. 'inumNull' acts as a no-op
-- for concatenation, producing no output analogously to
-- @\/dev\/null@. By contrast 'inumNop' is the no-op for fusing (see
-- '|.' and '.|' below) because it passes all data through untouched.
--
-- @cat@ has fixity:
--
-- > infixr 3 `cat`
cat :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Inum tIn tOut m a -- ^
-> Inum tIn tOut m a
-> Inum tIn tOut m a
cat a b iter = tryRI (runInum a iter) >>= either reRunIter (b . reRunIter)
-- Note this was carefully constructed to preserve the return value in
-- errors. Something like: cat a b iter = a iter >>= b . reRunIter
-- would turn a @('Fail' e ('Just' r) c)@ result from @a@ into
-- @('Fail' e 'Nothing' c)@; since the input and output types of >>=
-- do not have to be the same, >>= must convert error results to
-- 'Nothing'.
infixr 3 `cat`
-- | Lazy cat. Like 'cat', except that it does not run the second
-- 'Inum' if the 'Iter' is no longer active after completion of the
-- first 'Inum'. Also has fixity @infixr 3 \`lcat\`@.
lcat :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Inum tIn tOut m a -- ^
-> Inum tIn tOut m a
-> Inum tIn tOut m a
lcat a b iter = tryRI (runInum a iter) >>= either reRunIter check
where check r = if isIterActive r then b $ reRunIter r else return r
infixr 3 `lcat`
-- | Transforms the result of an 'Inum' into the result of the 'Iter'
-- that it contains. Used by '|.' and '.|' to collapse their result
-- types.
--
-- Note that because the input type of the inner 'Iter', @tMid@, gets
-- squeezed out of the return type, @joinR@ will feed an EOF to the
-- inner 'Iter' if it is still active. This is what ensures that
-- active 'Iter's end up seeing an EOF, even though 'Inum's themselves
-- are never supposed to feed an EOF to the underlying 'Iter'. All
-- 'Iter's in right-hand arguments of '.|' and '|.' get fed an EOF by
-- @joinR@ (if they don't finish on their own), while the outermost
-- 'Inum' is fed an EOF by the 'run' function (or by '|$' which
-- invokes 'run' internally).
joinR :: (ChunkData tIn, ChunkData tMid, Monad m) =>
IterR tIn m (IterR tMid m a)
-> IterR tIn m a
joinR (Done i c) = runIterR (runR i) c
joinR (Fail e Nothing c) = Fail e Nothing c
--
-- Note that 'runR' in the following function is serving two purposes,
-- one of them subtle. The obvious purpose is to preserve the state
-- of the non-failed target 'Iter' when an 'Inum' has failed.
-- However, a subtler, more important purpose is to guarantee that all
-- (non-failed) 'Iter's eventually receive EOF even when 'Inum's fail.
-- This is critical for things like EOF transmission and file
-- descriptor closing, and is how functions such as 'pairFinalizer'
-- can make sense.
joinR (Fail e (Just i) c) = flip onDoneR (runR i) $ \r ->
case r of
Done a _ -> Fail e (Just a) c
Fail e' a _ -> Fail e' a c
_ -> error "joinR"
joinR _ = error "joinR: not done"
-- | Left-associative pipe operator. Fuses two 'Inum's when the
-- output type of the first 'Inum' is the same as the input type of
-- the second. More specifically, if @inum1@ transcodes type @tIn@ to
-- @tOut@ and @inum2@ transcodes @tOut@ to @tOut2@, then @inum1
-- |. inum2@ produces a new 'Inum' that transcodes from @tIn@ to
-- @tOut2@.
--
-- Typically types @i@ and @iR@ are @'Iter' tOut2 m a@ and @'IterR'
-- tOut2 m a@, respectively, in which case the second argument and
-- result of @|.@ are also 'Inum's.
--
-- This function is equivalent to:
--
-- @
-- outer |. inner = \\iter -> outer '.|' inner iter
-- infixl 4 |.
-- @
--
-- But if you like point-free notation, think of it as @outer |. inner
-- = (outer '.|') . inner@, or better yet @(|.) = (.) . ('.|')@.
(|.) :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Inum tIn tOut m iR -- ^
-> (i -> Iter tOut m iR)
-> (i -> Iter tIn m iR)
(|.) outer inner = \iter -> onDone joinR $ outer $ inner iter
infixl 4 |.
-- | Right-associative pipe operator. Fuses an 'Inum' that transcodes
-- @tIn@ to @tOut@ with an 'Iter' taking input type @tOut@ to produce
-- an 'Iter' taking input type @tIn@. If the 'Iter' is still active
-- when the 'Inum' terminates (either normally or through an
-- exception), then @.|@ sends it an EOF.
--
-- Has fixity:
--
-- > infixr 4 .|
(.|) :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Inum tIn tOut m a -- ^
-> Iter tOut m a
-> Iter tIn m a
(.|) inum iter = onDone joinR $ inum iter
infixr 4 .|
--
-- Exception functions
--
-- | Catches errors thrown by an 'Inum', or a set of fused 'Inum's.
-- Note that only errors in 'Inum's that are lexically within the
-- scope of the argument to 'inumCatch' will be caught. For example:
--
-- > inumBad :: (ChunkData t, Monad m) => Inum t t m a
-- > inumBad = mkInum $ fail "inumBad"
-- >
-- > skipError :: (ChunkData tIn, MonadIO m) =>
-- > SomeException
-- > -> IterR tIn m (IterR tOut m a)
-- > -> Iter tIn m (IterR tOut m a)
-- > skipError e iter = do
-- > liftIO $ hPutStrLn stderr $ "skipping error: " ++ show e
-- > resumeI iter
-- >
-- > -- Throws an exception, because inumBad was fused outside the argument
-- > -- to inumCatch.
-- > test1 :: IO ()
-- > test1 = inumCatch (enumPure "test") skipError |. inumBad |$ nullI
-- >
-- > -- Does not throw an exception, because inumBad fused within the
-- > -- argument to inumCatch.
-- > test2 :: IO ()
-- > test2 = inumCatch (enumPure "test" |. inumBad) skipError |$ nullI
-- >
-- > -- Again no exception, because inumCatch is wrapped around inumBad.
-- > test3 :: IO ()
-- > test3 = enumPure "test" |. inumCatch inumBad skipError |$ nullI
--
-- Note that @\`inumCatch\`@ has the default infix precedence (@infixl
-- 9 \`inumCatch\`@), which binds more tightly than any concatenation
-- or fusing operators.
--
-- As noted for 'catchI', exception handlers receive both the
-- exception thrown and the failed 'IterR'. Particularly in the case
-- of @inumCatch@, it is important to re-throw exceptions by
-- re-executing the failed 'Iter' with 'reRunIter', not passing the
-- exception itself to 'throwI'. That way, if the exception is
-- re-caught, 'resumeI' will continue to work properly. For example,
-- to copy two files to standard output and ignore file not found
-- errors but re-throw any other kind of error, you could use the
-- following:
--
-- @
-- resumeTest :: IO ()
-- resumeTest = doFile \"file1\" ``cat`` doFile \"file2\" |$ 'stdoutI'
-- where
-- doFile path = inumCatch (`enumFile'` path) $ \\err r ->
-- if 'isDoesNotExistError' err
-- then 'verboseResumeI' r
-- else 'reRunIter' r
-- @
--
inumCatch :: (Exception e, ChunkData tIn, Monad m) =>
Inum tIn tOut m a
-- ^ 'Inum' that might throw an exception
-> (e -> IterR tIn m (IterR tOut m a) -> Iter tIn m (IterR tOut m a))
-- ^ Exception handler
-> Inum tIn tOut m a
inumCatch enum handler iter = catchI (enum iter) check
where check e r@(Fail _ (Just _) _) = handler e r
check _ r = reRunIter r
-- | Execute some cleanup action when an 'Inum' finishes.
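-- For example, a hypothetical enumerator that reads from an already
-- open 'Handle' @h@ and guarantees the handle gets closed when the
-- enumerator finishes might be sketched as follows (assuming the
-- 'enumHandle' primitive from "Data.IterIO.ListLike"):
--
-- > enumCloseHandle h = enumHandle h `inumFinally` liftIO (hClose h)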
inumFinally :: (ChunkData tIn, Monad m) =>
Inum tIn tOut m a -> Iter tIn m b -> Inum tIn tOut m a
inumFinally inum cleanup iter = inum iter `finallyI` cleanup
-- | Execute some cleanup action if an 'Inum' fails. Does not execute
-- the action if the 'Iter' (or some inner 'Inum') fails. Has the
-- same scoping rules as 'inumCatch'.
inumOnException :: (ChunkData tIn, Monad m) =>
Inum tIn tOut m a -> Iter tIn m b -> Inum tIn tOut m a
inumOnException inum cleanup iter = inum iter `onExceptionI` cleanup
-- | Used in an exception handler, after an 'Inum' failure, to resume
-- processing of the 'Iter' by the next enumerator in a 'cat'ed
-- series. See 'inumCatch' for an example.
resumeI :: (ChunkData tIn, Monad m) =>
IterR tIn m (IterR tOut m a) -> Iter tIn m (IterR tOut m a)
resumeI (Fail _ (Just a) _) = return a
resumeI _ = error "resumeI: not an Inum failure"
-- | Like 'resumeI', but if the 'Iter' is resumable, also prints an
-- error message to standard error before resuming.
verboseResumeI :: (ChunkData tIn, MonadIO m) =>
IterR tIn m (IterR tOut m a) -> Iter tIn m (IterR tOut m a)
verboseResumeI (Fail e (Just a) _) = do
liftIO $ do prog <- liftIO getProgName
hPutStrLn stderr $ prog ++ ": " ++ show e
return a
verboseResumeI _ = error "verboseResumeI: not an Inum failure"
--
-- Control handlers
--
-- | A @ResidHandler@ specifies how to handle residual data in an
-- 'Inum'. Typically, when an 'Inum' finishes executing, there are
-- two kinds of residual data. First, the 'Inum' itself (in its role
-- as an iteratee) may have left some unconsumed data. Second, the
-- target 'Iter' being fed by the 'Inum' may have some residual data,
-- and this data may be of a different type. A @ResidHandler@ allows
-- this residual data to be adjusted by untranslating the residual
-- data of the target 'Iter' and sticking the result back into the
-- `Inum`'s residual data.
--
-- The two most common @ResidHandler@s are 'pullupResid' (to pull the
-- target `Iter`'s residual data back up to the 'Inum' as is), and
-- 'id' (to do no adjustment of residual data).
--
-- @ResidHandler@s are used by the 'mkInumC' function, and by the
-- 'passCtl' 'CtlHandler'.
type ResidHandler tIn tOut = (tIn, tOut) -> (tIn, tOut)
withResidHandler :: ResidHandler tIn tOut
-> Chunk tOut
-> (Chunk tOut -> Iter tIn mIn a)
-> Iter tIn mIn a
withResidHandler adjust (Chunk tOut0 eofOut) cont =
Iter $ \(Chunk tIn0 eofIn) ->
case adjust (tIn0, tOut0) of
(tIn, tOut) -> runIter (cont $ Chunk tOut eofOut) $ Chunk tIn eofIn
-- | A control handler maps control requests to 'IterR' results.
-- Generally the type parameter @m1@ is @'Iter' t' m@.
type CtlHandler m1 t m a = CtlArg t m a -> m1 (IterR t m a)
-- | Reject all control requests.
noCtl :: (Monad m1) => CtlHandler m1 t m a
noCtl (CtlArg _ n c) = return $ runIter (n CtlUnsupp) c
-- | Pass all control requests through to the enclosing 'Iter' monad.
-- The 'ResidHandler' argument says how to adjust residual data: in
-- case some enclosing 'CtlHandler' decides to flush pending input
-- data, it is advisable to un-translate any data in the output type
-- @tOut@ back to the input type @tIn@.
passCtl :: (Monad mIn) =>
ResidHandler tIn tOut
-> CtlHandler (Iter tIn mIn) tOut m a
passCtl adj (CtlArg a n c0) = withResidHandler adj c0 runn
where runn c = do mcr <- safeCtlI a
return $ runIter (n mcr) c
-- | Create a 'CtlHandler' given a function of a particular control
-- argument type and a fallback 'CtlHandler' to run if the argument
-- type does not match. @consCtl@ is used to chain handlers, with the
-- rightmost handler being either 'noCtl' or 'passCtl'.
--
-- For example, to create a control handler that implements seek on
-- @'SeekC'@ requests, returns the size of the file on @'SizeC'@
-- requests, and passes everything else out to the enclosing
-- enumerator (if any), you could use the following:
--
-- @
-- fileCtl :: (ChunkData t, MonadIO m) => Handle -> CtlHandler (Iter () m) t m a
-- fileCtl h = ('mkFlushCtl' $ \(SeekC mode pos) -> liftIO (hSeek h mode pos))
-- \`consCtl\` ('mkCtl' $ \SizeC -> liftIO (hFileSize h))
-- \`consCtl\` 'passCtl' 'id'
-- @
--
-- Has fixity:
--
-- > infixr 9 `consCtl`
consCtl :: (CtlCmd carg cres, ChunkData tIn, Monad mIn) =>
(carg -> (cres -> Iter t m a) -> Chunk t
-> Iter tIn mIn (IterR t m a))
-> CtlHandler (Iter tIn mIn) t m a
-> CtlHandler (Iter tIn mIn) t m a
consCtl fn fallback ca@(CtlArg a0 n c) = maybe (fallback ca) runfn $ cast a0
where runfn a = fn a (n . CtlDone . fromJust . cast) c
`catchI` \e _ -> return $ runIter (n $ CtlFail e) c
infixr 9 `consCtl`
-- | Make a control function suitable for use as the first argument to
-- 'consCtl'.
mkCtl :: (CtlCmd carg cres, Monad m1) =>
(carg -> Iter t1 m1 cres)
-> carg -> (cres -> Iter t m a) -> Chunk t -> Iter t1 m1 (IterR t m a)
mkCtl f a n c = do cres <- f a; return $ runIter (n cres) c
-- | Like 'mkCtl', except that it flushes all input and clears the EOF
-- flag in both 'Iter' monads after executing the control function.
mkFlushCtl :: (CtlCmd carg cres, Monad mIn, ChunkData tIn, ChunkData t) =>
(carg -> Iter tIn mIn cres)
-> carg -> (cres -> Iter t m a) -> Chunk t
-> Iter tIn mIn (IterR t m a)
mkFlushCtl f a n _ = do cres <- onDone (flip setResid mempty) $ f a
return $ runIter (n cres) mempty
--
-- Basic tools
--
-- $mkInumIntro
--
-- The 'mkInum' function allows you to create stateless 'Inum's out of
-- simple transcoding 'Iter's. As an example, suppose you are
-- processing a list of @L.ByteString@s representing packets, and want
-- to concatenate them all into one continuous stream of bytes. You
-- could implement an 'Inum' called @inumConcat@ to do this as
-- follows:
--
-- #mkInumExample#
--
-- @
--iterConcat :: (Monad m) => 'Iter' [L.ByteString] m L.ByteString
--iterConcat = L.concat ``liftM`` 'dataI'
--
--inumConcat :: (Monad m) => 'Inum' [L.ByteString] L.ByteString m a
--inumConcat = 'mkInum' iterConcat
-- @
--
-- | Like 'runIterMC', but only for 'IterM'--may return 'IterC'.
runIterM :: (Monad m, MonadTrans mt, Monad (mt m)) =>
Iter t m a -> Chunk t -> mt m (IterR t m a)
runIterM iter c = check $ runIter iter c
where check (IterM m) = lift m >>= check
check r = return r
runIterRMC :: (Monad m) =>
CtlHandler (Iter tIn m) tOut m a
-> IterR tOut m a -> Iter tIn m (IterR tOut m a)
runIterRMC ch = check
where check (IterM m) = lift m >>= check
check (IterC ca) = ch ca >>= check
check r = return r
-- | Run an 'Iter' just like 'runIter', but then keep stepping the
-- result for as long as it is in the 'IterM' or 'IterC' state (using
-- the supplied 'CtlHandler' for 'IterC' states). 'Inum's should
-- generally use this function or 'runIterM' in preference to
-- 'runIter', as it is convenient if 'Inum's avoid ever returning
-- 'IterR's in the 'IterM' state.
runIterMC :: (Monad m) =>
CtlHandler (Iter tIn m) tOut m a
-> Iter tOut m a -> Chunk tOut -> Iter tIn m (IterR tOut m a)
runIterMC ch iter c = runIterRMC ch $ runIter iter c
-- | Takes an 'Inum' that might return 'IterR's in the 'IterM' state
-- (which is considered impolite--see 'runIterMC') and transforms it
-- into an 'Inum' that never returns 'IterR's in the 'IterM' state.
runInum :: (ChunkData tIn, Monad m) =>
Inum tIn tOut m a -> Inum tIn tOut m a
runInum inum = onDone check . inum
where
check (Done (IterM m) c) = IterM $ m >>= \r -> return $ check $ Done r c
check r = r
-- | Create a stateless 'Inum' from a \"codec\" 'Iter' that transcodes
-- the input type to the output type. The codec is invoked repeatedly
-- until one of the following occurs:
--
-- 1. The input is at an EOF marker AND the codec returns 'null'
-- data. ('Onum's are always fed EOF, but other 'Inum's might
-- have reason to return 'mempty' data.)
--
-- 2. The codec throws an exception. If the exception is an EOF
-- exception--thrown either by 'throwEOFI', or by some IO action
-- inside 'liftIO'--this is considered normal termination, and is
-- the normal way for a codec to cause the 'Inum' to return. If
-- the exception is of any other type, then the 'Inum' will
-- further propagate the exception as an 'Inum' failure.
--
-- 3. The underlying target 'Iter' either returns a result or throws
-- an exception.
--
-- @mkInumC@ requires two other arguments before the codec. First, a
-- 'ResidHandler' allows residual data to be adjusted between the
-- input and output 'Iter' monads. Second, a 'CtlHandler' specifies a
-- handler for control requests. For example, to pass up control
-- requests and ensure no residual data is lost when the 'Inum' is
-- fused to an 'Iter', the @inumConcat@ function given previously for
-- 'mkInum' at <#mkInumExample> could be re-written:
--
-- > inumConcat :: (Monad m) => Inum [L.ByteString] L.ByteString m a
-- > inumConcat = mkInumC reList (passCtl reList) iterConcat
-- > where reList (a, b) = (b:a, mempty)
mkInumC :: (ChunkData tIn, ChunkData tOut, Monad m) =>
ResidHandler tIn tOut
-- ^ Adjust residual data (use 'id' for no adjustment)
-> CtlHandler (Iter tIn m) tOut m a
-- ^ Handle control requests (use 'noCtl' or 'passCtl' if
-- 'Inum' shouldn't implement any specific control functions).
-> Iter tIn m tOut
-- ^ Generate transcoded data chunks
-> Inum tIn tOut m a
mkInumC adj ch codec iter0 = doIter iter0
where
doIter iter = tryEOFI codec >>= maybe (return $ IterF iter) (doInput iter)
doInput iter input = do
r <- runIterMC ch iter (Chunk input False)
eof <- Iter $ \c@(Chunk t eof) -> Done (eof && null t) c
case r of
(IterF i) | not (eof && null input) -> doIter i
_ | isIterActive r -> return r
_ -> withResidHandler adj (getResid r) $ return . setResid r
-- | Create an 'Inum' based on an 'Iter' that transcodes the input to
-- the output type. This is a simplified version of 'mkInumC' that
-- rejects all control requests and does not adjust residual data.
--
-- > mkInum = mkInumC id noCtl
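--
-- As a further sketch (assuming imports of "Data.ByteString.Lazy.Char8"
-- as @L8@ and 'Data.Char.toUpper'), an 'Inum' that upper-cases a lazy
-- 'L.ByteString' stream could be written:
--
-- > inumToUpper :: (Monad m) => Inum L.ByteString L.ByteString m a
-- > inumToUpper = mkInum (liftM (L8.map toUpper) dataI)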
mkInum :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Iter tIn m tOut -> Inum tIn tOut m a
mkInum = mkInumC id noCtl
-- | A simplified version of 'mkInum' that passes all control requests
-- to enclosing enumerators. It requires a 'ResidHandler' to describe
-- how to adjust residual data. (E.g., use 'pullupResid' when @tIn@
-- and @tOut@ are the same type.)
--
-- > mkInumP adj = mkInumC adj (passCtl adj)
mkInumP :: (ChunkData tIn, ChunkData tOut, Monad m) =>
ResidHandler tIn tOut -> Iter tIn m tOut -> Inum tIn tOut m a
mkInumP adj = mkInumC adj (passCtl adj)
-- | @pullupResid (a, b) = (mappend a b, mempty)@. See 'ResidHandler'.
pullupResid :: (ChunkData t) => (t, t) -> (t, t)
pullupResid (a, b) = (mappend a b, mempty)
-- | Bracket an 'Inum' with a start and end function, which can be
-- used to acquire and release a resource, much like the IO monad's
-- @'bracket'@ function. For example:
--
-- > enumFile :: (MonadIO m, ChunkData t, LL.ListLikeIO t e) =>
-- > FilePath -> Onum t m a
-- > enumFile path = inumBracket (liftIO $ openBinaryFile path ReadMode)
-- > (liftIO . hClose)
-- > enumHandle
inumBracket :: (ChunkData tIn, Monad m) =>
Iter tIn m b
-- ^ Computation to run first
-> (b -> Iter tIn m c)
-- ^ Computation to run last
-> (b -> Inum tIn tOut m a)
-- ^ Inum to bracket
-> Inum tIn tOut m a
inumBracket start end inum iter = tryFI start >>= check
where check (Left e) = Iter $ Fail e (Just $ IterF iter) . Just
check (Right b) = inum b iter `finallyI` end b
--
-- Basic Inums
--
-- | @inumNop@ passes all data through to the underlying 'Iter'. It
-- acts as a no-op when fused to other 'Inum's with '|.' or when fused
-- to 'Iter's with '.|'.
--
-- @inumNop@ is particularly useful for conditionally fusing 'Inum's
-- together. Even though most 'Inum's are polymorphic in the return
-- type, this library does not use the Rank2Types extension, which
-- means any given 'Inum' must have a specific return type. Here is
-- an example of incorrect code:
--
-- @
-- let enum = if debug then base_enum '|.' 'inumStderr' else base_enum -- Error
-- @
--
-- This doesn't work because @base_enum@ cannot have the same type as
-- @(base_enum |. inumStderr)@. Instead, you can use the following:
--
-- @
-- let enum = base_enum '|.' if debug then 'inumStderr' else inumNop
-- @
inumNop :: (ChunkData t, Monad m) => Inum t t m a
inumNop = mkInumP pullupResid dataI
-- | @inumNull@ feeds empty data to the underlying 'Iter'. It pretty
-- much acts as a no-op when concatenated to other 'Inum's with 'cat'
-- or 'lcat'.
--
-- There may be cases where @inumNull@ is required to avoid deadlock.
-- In an expression such as @enum '|$' iter@, if @enum@ immediately
-- blocks waiting for some event, and @iter@ immediately starts out
-- triggering that event before reading any input, then to break the
-- deadlock you can re-write the code as @cat inumNull enum '|$'
-- iter@.
inumNull :: (ChunkData tOut, Monad m) => Inum tIn tOut m a
inumNull = inumPure mempty
-- | Feed pure data to an 'Iter'.
inumPure :: (Monad m) => tOut -> Inum tIn tOut m a
inumPure t iter = runIterM iter $ chunk t
-- | Type-restricted version of 'inumPure'.
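--
-- For example, a minimal sketch that runs an 'Iter' over a fixed
-- 'String', with no IO involved in producing the input:
--
-- > hello :: IO String
-- > hello = enumPure "hello, world" |$ dataI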
enumPure :: (Monad m) => tOut -> Onum tOut m a
enumPure = inumPure
-- | Repeat an 'Inum' until the input receives an EOF condition, the
-- 'Iter' no longer requires input, or the 'Iter' is in an unhandled
-- 'IterC' state (which presumably will continue to be unhandled by
-- the same 'Inum', so no point in executing it again).
inumRepeat :: (ChunkData tIn, Monad m) =>
Inum tIn tOut m a -> Inum tIn tOut m a
inumRepeat inum iter0 = do
er <- tryRI $ runInum inum iter0
stop <- atEOFI
case (stop, er) of
(False, Right (IterF iter)) -> inumRepeat inum iter
(_, Right r) -> return r
(_, Left r) -> reRunIter r
--
-- Codec-based Inum creation
--
-- | A @Codec@ produces some input to feed to an 'Iter', and
-- optionally returns an 'Inum' that will produce the rest of the
-- input. The function 'runCodec' can be used to build an 'Inum' out
-- of a 'Codec'. Using 'runCodec' is much simpler than 'mkInumM', but
-- more expressive than 'mkInum'. For example, a possible
-- implementation of 'mkInum' would be:
--
-- @
-- mkInum :: ('ChunkData' tIn, 'ChunkData' tOut, 'Monad' m) =>
-- 'Iter' tIn m tOut -> 'Inum' tIn tOut m a
-- mkInum trans = inum
-- where inum = 'runCodec' 'id' $
-- 'tryEOFI' trans >>= 'maybe' (return (mempty, Nothing)) doinput
-- doinput input = do
-- eof <- if null input then return False else 'atEOFI'
-- return (input, if eof then Nothing else Just inum)
-- @
type Codec tIn tOut m a = Iter tIn m (tOut, Maybe (Inum tIn tOut m a))
-- | A generalized version of 'runCodec' that allows a 'CtlHandler' to
-- be specified.
--
-- @
-- runCodec adj = runCodecC adj (passCtl adj)
-- @
runCodecC :: (ChunkData tIn, ChunkData tOut, Monad m) =>
ResidHandler tIn tOut
-> CtlHandler (Iter tIn m) tOut m a
-> Codec tIn tOut m a
-> Inum tIn tOut m a
runCodecC adj ch codec iter = do
(tOut, minum) <- codec
r <- runIterMC ch iter $ chunk tOut
case (minum, r) of
(Just inum, IterF i) -> inum i
_ | isIterActive r -> return r
_ -> withResidHandler adj (getResid r) $ return . setResid r
-- | Build an 'Inum' from a 'Codec'.
runCodec :: (ChunkData tIn, ChunkData tOut, Monad m) =>
ResidHandler tIn tOut -> Codec tIn tOut m a -> Inum tIn tOut m a
runCodec adj = runCodecC adj (passCtl adj)
--
-- Complex Inum creation
--
{- $mkInumMIntro
Complex 'Inum's that need state and non-trivial control flow can be
constructed using the 'mkInumM' function to produce an 'Inum' out of a
computation in the 'InumM' monad. The 'InumM' monad implicitly keeps
track of the state of the 'Iter' to which the 'Inum' is feeding data,
which we call the \"target\" 'Iter'.
'InumM' is an 'Iter' monad, and so can consume input by invoking
ordinary 'Iter' actions. However, to keep track of the state of the
target 'Iter', 'InumM' wraps its inner monadic type with an
'IterStateT' transformer. Specifically, when creating an enumerator
of type @'Inum' tIn tOut m a@, the 'InumM' action is of a type like
@'Iter' tIn ('IterStateT' (InumState ...) m) ()@. That means that to
execute actions of type @'Iter' tIn m a@ that are not polymorphic in
@m@, you have to transform them with the 'liftI' function.
Output can be fed to the target 'Iter' by means of the 'ifeed'
function. As an example, here is another version of the @inumConcat@
function given previously for 'mkInum' at <#mkInumExample>:
@
inumConcat :: (Monad m) => 'Inum' [L.ByteString] L.ByteString m a
inumConcat = 'mkInumM' loop
where loop = do
'Chunk' t eof <- 'chunkI'
done <- 'ifeed' $ L.concat t
if not (eof || done)
then loop
else do resid <- 'ipopresid'
'ungetI' [resid]
@
There are several points to note about this function. It reads data
in 'Chunk's using 'chunkI', rather than just inputting data with
'dataI'. The choice of 'chunkI' rather than 'dataI' allows
@inumConcat@ to see the @eof@ flag and know when there is no more
input. 'chunkI' also avoids throwing an 'IterEOF' exception on end of
file, as 'dataI' would. In contrast to 'mkInum', which gracefully
interprets 'IterEOF' exceptions as an exit request, 'mkInumM' by
default treats such exceptions as an 'Inum' failure.
As previously mentioned, data is fed to the target 'Iter', which here
is of type @'Iter' L.ByteString m a@, using 'ifeed'. 'ifeed' returns
a 'Bool' that is @'True'@ when the 'Iter' is no longer active. This
brings us to another point--there is no implicit looping or
repetition. We explicitly loop via a tail-recursive call to @loop@ so
long as the @eof@ flag is clear and 'ifeed' returned @'False'@
indicating the target 'Iter' has not finished.
What happens when @eof@ or @done@ is set? One possibility is to do
nothing. This is often correct. Falling off the end of the 'InumM'
do-block causes the 'Inum' to return the current state of the 'Iter'.
However, it may be that the 'Inum' has been fused to the target
'Iter', in which case any left-over residual data fed to, but not
consumed by, the target 'Iter' will be discarded. We may instead want
to put the data back onto the input stream. The 'ipopresid' function
extracts any left-over data from the target 'Iter', while 'ungetI'
places data back in the input stream. Since here the input stream is
a list of @L.ByteString@s, we have to place @resid@ in a list. (After
doing this, the list element boundaries may be different, but all the
input bytes will be there.) Note that the version of @inumConcat@
implemented with 'mkInum' at <#mkInumExample> does not have this
input-restoring feature.
The code above looks much clumsier than the version based on 'mkInum',
but several of these steps can be made implicit. There is an
/AutoEOF/ flag, controllable with the 'setAutoEOF' function, that
causes 'IterEOF' exceptions to produce normal termination of the
'Inum', rather than failure (just as 'mkInum' handles such
exceptions). Another flag, /AutoDone/, is controllable with the
'setAutoDone' function and causes the 'Inum' to exit immediately when
the underlying 'Iter' is no longer active (i.e., the 'ifeed' function
returns @'True'@). Both of these flags are set at once by the
'mkInumAutoM' function, which yields the following simpler
implementation of @inumConcat@:
@
inumConcat = 'mkInumAutoM' $ do 'addCleanup' $ 'ipopresid' >>= 'ungetI' . (: [])
loop
where loop = do
t <- 'dataI' -- AutoEOF flag will handle IterEOF err
'ifeed' $ L.concat t -- AutoDone flag will catch True result
loop
@
The 'addCleanup' function registers actions that should always be
executed when the 'Inum' finishes. Here we use it to place residual
data from the target 'Iter' back into the `Inum`'s input stream.
Finally, there is a function 'irepeat' that automatically sets the
/AutoEOF/ and /AutoDone/ flags and then loops forever on an 'InumM'
computation. Using 'irepeat' to simplify further, we have:
@
'inumConcat' = 'mkInumM' $ 'withCleanup' ('ipopresid' >>= 'ungetI' . (: [])) $
'irepeat' $ 'dataI' >>= 'ifeed' . L.concat
@
'withCleanup', demonstrated here, is a variant of 'addCleanup' that
cleans up after a particular action, rather than at the end of the
`Inum`'s whole execution. (At the outermost level, as used here,
`withCleanup`'s effects are identical to `addCleanup`'s.)
In addition to 'ifeed', the 'ipipe' function invokes a different
'Inum' from within the 'InumM' monad, piping its output directly to
the target 'Iter'. As an example, consider an 'Inum' that processes a
mail message and appends a signature line, implemented as follows:
@
inumAddSig :: (Monad m) => 'Inum' L.ByteString L.ByteString m a
inumAddSig = 'mkInumM' $ do
'ipipe' 'inumNop'
'ifeed' $ L8.pack \"\\n--\\nSent from my Haskell interpreter.\\n\"
@
Here we start by using 'inumNop' to \"pipe\" all input to the target
'Iter' unmodified. On reading an end of file, 'inumNop' returns, at
which point we use 'ifeed' to append our signature.
A similar function 'irun' runs an 'Onum' (or 'Inum' of a different
type) on the target 'Iter'. For instance, to read the signature from
a file called @\".signature\"@, one could use:
@
inumAddSig :: ('MonadIO' m) => 'Inum' L.ByteString L.ByteString m a
inumAddSig = 'mkInumM' $ do
'ipipe' 'inumNop'
'irun' $ 'enumFile' \".signature\"
@
Of course, these examples are a bit contrived. An even simpler
implementation is:
@
inumAddSig = 'inumNop' ``cat`` 'runI' . 'enumFile' \".signature\"
@
The @.@ between 'runI' and @'enumFile'@ is because 'Inum's are
functions from 'Iter's to 'IterR's; we want to apply 'runI' to the
result of applying @'enumFile' \".signature\"@ to an 'Iter'. Spelled
out, the type of @'enumFile'@ is:
@
enumFile :: (MonadIO m, ChunkData t, ListLikeIO t e) =>
FilePath
-> 'Iter' t m a
          -> 'Iter' () m ('IterR' t m a)
@
-}
-- | Internal data structure for the 'InumM' monad's state.
data InumState tIn tOut m a = InumState {
insAutoEOF :: !Bool
, insAutoDone :: !Bool
, insCtl :: !(CtlHandler (Iter tIn m) tOut m a)
, insIter :: !(IterR tOut m a)
, insCleanup :: !(InumM tIn tOut m a ())
, insCleaning :: !Bool
}
defaultInumState :: (ChunkData tIn, Monad m) => InumState tIn tOut m a
defaultInumState = InumState {
insAutoEOF = False
, insAutoDone = False
, insCtl = noCtl
, insIter = IterF $ Iter $ const $ error "insIter"
, insCleanup = return ()
, insCleaning = False
}
-- | An InumState that passes all control messages up and pulls up all
-- residual data at the end. Requires the input and output types to
-- be the same.
nopInumState :: (ChunkData t, Monad m) => InumState t t m a
nopInumState = s
where s = (defaultInumState `asTypeOf` s) {
insCtl = passCtl pullupResid
, insCleanup = ipopresid >>= ungetI }
-- | A monad in which to define the actions of an @'Inum' tIn tOut m
-- a@. Note @InumM tIn tOut m a@ is a 'Monad' of kind @* -> *@, where
-- @a@ is the (almost always parametric) return type of the 'Inum'. A
-- fifth type argument is required for monadic computations of kind
-- @*@, e.g.:
--
-- > seven :: InumM tIn tOut m a Int
-- > seven = return 7
--
-- Another important thing to note about the 'InumM' monad, as
-- described in the documentation for 'mkInumM', is that you must call
-- @'lift'@ twice to execute actions in monad @m@, and you must use
-- the 'liftI' function to execute actions in monad @'Iter' t m a@.
type InumM tIn tOut m a = Iter tIn (IterStateT (InumState tIn tOut m a) m)
-- | Set the control handler an 'Inum' should use from within an
-- 'InumM' computation. (The default is 'noCtl'.)
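--
-- For example, an 'Inum' whose input and output types are the same can
-- forward control requests to the enclosing enumerator with:
--
-- > setCtlHandler (passCtl pullupResid)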
setCtlHandler :: (ChunkData tIn, Monad m) =>
CtlHandler (Iter tIn m) tOut m a
-> InumM tIn tOut m a ()
setCtlHandler ch = imodify $ \s -> s { insCtl = ch }
-- | Set the /AutoEOF/ flag within an 'InumM' computation. If this
-- flag is 'True', handle 'IterEOF' exceptions like a normal but
-- immediate termination of the 'Inum'. If this flag is @'False'@
-- (the default), then 'IterEOF' exceptions must be manually caught or
-- they will terminate the thread.
setAutoEOF :: (ChunkData tIn, Monad m) => Bool -> InumM tIn tOut m a ()
setAutoEOF val = imodify $ \s -> s { insAutoEOF = val }
-- | Set the /AutoDone/ flag within an 'InumM' computation. When
-- @'True'@, the 'Inum' will immediately terminate as soon as the
-- 'Iter' it is feeding enters a non-active state (i.e., 'Done' or a
-- failure state). If this flag is @'False'@ (the default), the
-- 'InumM' computation will need to monitor the results of the
-- 'ifeed', 'ipipe', and 'irun' functions to ensure the 'Inum'
-- terminates when one of these functions returns @'False'@.
setAutoDone :: (ChunkData tIn, Monad m) => Bool -> InumM tIn tOut m a ()
setAutoDone val = imodify $ \s -> s { insAutoDone = val }
-- | Like imodify, but throws an error if the insCleaning flag is set.
ncmodify :: (ChunkData tIn, Monad m) =>
(InumState tIn tOut m a -> InumState tIn tOut m a)
-> InumM tIn tOut m a ()
ncmodify fn = imodify $ \s -> if insCleaning s
then error "illegal call within Cleanup function"
else fn s
-- | Add a cleanup action to be executed when the 'Inum' finishes, or,
-- if used in conjunction with the 'withCleanup' function, when the
-- innermost enclosing 'withCleanup' action finishes.
addCleanup :: (ChunkData tIn, Monad m) =>
InumM tIn tOut m a () -> InumM tIn tOut m a ()
addCleanup clean = ncmodify $ \s -> s { insCleanup = clean >> insCleanup s }
-- | Run an 'InumM' with some cleanup action in effect. The cleanup
-- action specified will be executed when the main action returns,
-- whether normally, through an exception, because of the /AutoDone/
-- or /AutoEOF/ flags, or because 'idone' is invoked.
--
-- Note @withCleanup@ also defines the scope of actions added by the
-- 'addCleanup' function. In other words, given a call such as
-- @withCleanup cleaner1 main@, if @main@ invokes @'addCleanup'
-- cleaner2@, then both @cleaner1@ and @cleaner2@ will be executed
-- upon @main@'s return, even if the overall 'Inum' has not finished
-- yet.
withCleanup :: (ChunkData tIn, Monad m) =>
InumM tIn tOut m a () -- ^ Cleanup action
-> InumM tIn tOut m a b -- ^ Main action to execute
-> InumM tIn tOut m a b
withCleanup clean action = do
old <- igets insCleanup
ncmodify $ \s -> s { insCleanup = clean }
action `finallyI` do
newclean <- igets insCleanup
imodify $ \s -> s { insCleanup = old }
newclean
-- | Convert an 'InumM' computation into an 'Inum', given some
-- @'InumState'@ to run on.
runInumM :: (ChunkData tIn, ChunkData tOut, Monad m) =>
InumM tIn tOut m a b
-- ^ Monadic computation defining the 'Inum'.
-> InumState tIn tOut m a
-- ^ State to run on
-> Iter tIn m (IterR tOut m a)
runInumM inumm s0 = do
(err1, s1) <- getErr =<< runIterStateT inumm s0
(err2, s2) <- getErr =<< runIterStateT (insCleanup s1)
s1 { insAutoDone = False, insCleaning = True }
let r = insIter s2
Iter $ maybe (Done r) (\e -> Fail e (Just r) . Just) $ mplus err2 err1
where
getErr (Fail (IterEOFErr _) _ _, s) | insAutoEOF s = return (Nothing, s)
getErr (Fail e _ _, s) = return (Just e, s)
getErr (_, s) = return (Nothing, s)
-- | A variant of 'mkInumM' that sets /AutoEOF/ and /AutoDone/ to
-- 'True' by default. (Equivalent to calling @'setAutoEOF' 'True' >>
-- 'setAutoDone' 'True'@ as the first thing inside 'mkInumM'.)
mkInumAutoM :: (ChunkData tIn, ChunkData tOut, Monad m) =>
InumM tIn tOut m a b -> Inum tIn tOut m a
mkInumAutoM inumm iter0 =
runInumM inumm defaultInumState { insIter = IterF iter0
, insAutoEOF = True
, insAutoDone = True
}
-- | Build an 'Inum' out of an 'InumM' computation. If you run
-- 'mkInumM' inside the @'Iter' tIn m@ monad (i.e., to create an
-- enumerator of type @'Inum' tIn tOut m a@), then the 'InumM'
-- computation will be in a Monad of type @'Iter' tIn tm@ where @tm@ is
-- a transformed version of @m@. This has the following two
-- consequences:
--
-- - If you wish to execute actions in monad @m@ from within your
-- 'InumM' computation, you will have to apply @'lift'@ twice (as
-- in @'lift' $ 'lift' action_in_m@) rather than just once.
--
-- - If you need to execute actions in the @'Iter' t m@ monad, you
-- will have to lift them with the 'liftI' function.
--
-- The 'InumM' computation you construct can feed output of type
-- @tOut@ to the target 'Iter' (which is implicitly contained in the
-- monad state), using the 'ifeed', 'ipipe', and 'irun' functions.
mkInumM :: (ChunkData tIn, ChunkData tOut, Monad m) =>
InumM tIn tOut m a b -> Inum tIn tOut m a
mkInumM inumm iter0 =
runInumM inumm defaultInumState { insIter = IterF iter0 }
-- | Used from within the 'InumM' monad to feed data to the target
-- 'Iter'. Returns @'False'@ if the target 'Iter' is still active and
-- @'True'@ if the iter has finished and the 'Inum' should also
-- return. (If the @autoDone@ flag is @'True'@, then @ifeed@,
-- @ipipe@, and @irun@ will never actually return @'True'@, but
-- instead just immediately run cleanup functions and exit the
-- 'Inum' when the target 'Iter' stops being active.)
ifeed :: (ChunkData tIn, ChunkData tOut, Monad m) =>
tOut -> InumM tIn tOut m a Bool
ifeed = ipipe . inumPure
-- | A variant of 'ifeed' that throws an exception of type 'IterEOF'
-- if the data being fed is 'null'. Convenient when reading input
-- with a function (such as "Data.ListLike"'s @hGet@) that returns 0
-- bytes instead of throwing an EOF exception to indicate end of file.
-- For instance, the main loop of @'enumFile'@ could be implemented
-- as:
--
-- @
-- 'irepeat' $ 'liftIO' ('LL.hGet' h 'defaultChunkSize') >>= 'ifeed1'
-- @
ifeed1 :: (ChunkData tIn, ChunkData tOut, Monad m) =>
tOut -> InumM tIn tOut m a Bool
ifeed1 dat = if null dat then throwEOFI "ifeed1" else ifeed dat
-- | Apply another 'Inum' to the target 'Iter' from within the 'InumM'
-- monad. As with 'ifeed', returns @'True'@ when the 'Iter' is
-- finished.
--
-- Note that the applied 'Inum' must handle all control requests. (In
-- other words, ones it passes on are not caught by whatever handler
-- is installed by 'setCtlHandler', but if the 'Inum' returns the
-- 'IterR' in the 'IterC' state, as 'inumPure' does, then requests
-- will be handled.)
ipipe :: (ChunkData tIn, ChunkData tOut, Monad m) =>
Inum tIn tOut m a -> InumM tIn tOut m a Bool
ipipe inum = do
s <- iget
r <- tryRI (liftI (inum $ reRunIter $ insIter s)) >>= getIter
>>= liftI . runIterRMC (insCtl s)
iput s { insIter = r }
let done = not $ isIterActive r
if done && insAutoDone s then idone else return done
where
getIter (Right i) = return i
getIter (Left r@(Fail _ (Just i) _)) = do
imodify $ \s -> s { insIter = i }
reRunIter r
getIter (Left r) = reRunIter r
-- | Apply an 'Onum' (or 'Inum' of an arbitrary, unused input type) to
-- the 'Iter' from within the 'InumM' monad. As with 'ifeed', returns
-- @'True'@ when the 'Iter' is finished.
irun :: (ChunkData tAny, ChunkData tIn, ChunkData tOut, Monad m) =>
Inum tAny tOut m a -> InumM tIn tOut m a Bool
irun onum = ipipe $ runI . onum
-- | Repeats an action until the 'Iter' is done or an EOF error is
-- thrown. (Also stops if a different kind of exception is thrown, in
-- which case the exception propagates further and may cause the
-- 'Inum' to fail.) @irepeat@ sets both the /AutoEOF/ and
-- /AutoDone/ flags to @'True'@.
irepeat :: (ChunkData tIn, Monad m) =>
InumM tIn tOut m a b -> InumM tIn tOut m a ()
irepeat action = do
imodify $ \s -> s { insAutoEOF = True, insAutoDone = True }
let loop = action >> loop in loop
-- | If the target 'Iter' being fed by the 'Inum' is no longer active
-- (i.e., if it is in the 'Done' state or in an error state), this
-- function pops the residual data out of the 'Iter' and returns it.
-- If the target is in any other state, returns 'mempty'.
ipopresid :: (ChunkData tIn, ChunkData tOut, Monad m) =>
InumM tIn tOut m a tOut
ipopresid = do
s <- iget
case insIter s of
r | isIterActive r -> return mempty
| otherwise -> do let (Chunk t _) = getResid r
iput s { insIter = setResid r mempty }
return t
-- | Immediately perform a successful exit from an 'InumM' monad,
-- terminating the 'Inum' and returning the current state of the
-- target 'Iter'. Can be used to end an 'irepeat' loop. (Use
-- @'throwI' ...@ for an unsuccessful exit.)
idone :: (ChunkData tIn, Monad m) => InumM tIn tOut m a b
idone = setAutoEOF True >> throwEOFI "idone"
-- | An 'Inum' that acts like 'inumNop', except that before passing
-- data on, it feeds a copy to a \"tee\" 'Iter' (by analogy with the
-- Unix @tee@ utility), which may, for instance, transform and log the
-- data.
--
-- The tee `Iter`'s return value is ignored. If the tee 'Iter'
-- returns before an EOF is received and before the target 'Iter' has
-- finished processing input, then @inumTee@ will continue to pass
-- data to the target 'Iter'. However, if the tee 'Iter' fails, then
-- this will cause @inumTee@ to fail immediately.
--
-- As an example, one could implement something close to
-- @'inumStderr'@ (from "Data.IterIO.ListLike") as follows:
--
-- > inumStderr = inumTee $ handleI stderr
--
-- (Except note that the real @'inumStderr'@ does not close its file
-- descriptor, while the above implementation will send an EOF to
-- @'handleI'@, causing @stderr@ to be closed.)
inumTee :: (ChunkData t, Monad m) =>
Iter t m b -> Inum t t m a
inumTee tee0 iter0 = runInumM (chunk0I >>= loop tee0)
nopInumState { insIter = IterF iter0 }
where chunk0I = Iter $ \c@(Chunk _ eof) -> Done c (Chunk mempty eof)
loop tee c = liftI (runIterMC (passCtl pullupResid) tee c) >>= feed c
feed (Chunk d False) (IterF tee) = do
done <- ifeed d `onExceptionI` liftI (runI tee)
if done then liftI (runI tee) >> return () else chunkI >>= loop tee
feed (Chunk d True) (IterF _) = ifeed d >> return ()
feed _ (Fail r _ c) = reRunIter $ Fail r Nothing c
feed (Chunk d eof) (Done _ _) = do
done <- ifeed d
unless (done || eof) $ ipipe inumNop >> return ()
feed _ _ = error "inumTee"
| scslab/iterIO | Data/IterIO/Inum.hs | bsd-3-clause | 53,177 | 0 | 18 | 12,602 | 7,321 | 4,030 | 3,291 | 360 | 6 |
{-# LANGUAGE ScopedTypeVariables #-}
module Pipe where
import TestMain
import Helper
import AST hiding (Process)
import Render
import Control.Distributed.Process
import Control.Distributed.Process.Serializable
import Data.Typeable.Internal
import GHC.Int
test_pipe :: IO ()
test_pipe = mytest pipe
pipe :: Process ()
pipe = do
self <- getSelfPid
n <- getRandInRange 1 100
head <- init_pipe (\x -> x+1) n self
send head (0 :: Int)
sink
init_pipe :: (Int -> Int) -> Int -> ProcessId -> Process ProcessId
init_pipe _ 0 next = return next
init_pipe f n next = do
new <- spawnLocal (pipe_node f next)
pid <- init_pipe f (n-1) new
return pid
sink :: Process ()
sink = receiveWait [match (\(stat :: Int) -> say (show stat))]
pipe_node :: (Int -> Int) -> ProcessId -> Process ()
pipe_node f next =
do
receiveWait [match handler]
where handler (msg :: Int) = do send next (f msg)
pipe_node f next
pipe_config :: Config ()
pipe_config = Config {
cTypes = [],
cSets = [],
cUnfold = [],
cProcs = []
}
| abakst/symmetry | checker/src/cloud-haskell-tests/Pipe.hs | mit | 1,096 | 0 | 12 | 319 | 399 | 207 | 192 | 38 | 1 |
{-# LANGUAGE ExplicitForAll #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Database.Persist.Sql.Orphan.PersistUnique
()
where
import Control.Exception (throwIO)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Reader (ask)
import qualified Data.Conduit.List as CL
import Data.Function (on)
import Data.List (nubBy)
import qualified Data.Text as T
import Data.Foldable (toList)
import Database.Persist
import Database.Persist.Class.PersistUnique (defaultUpsertBy, defaultPutMany, persistUniqueKeyValues)
import Database.Persist.Sql.Types.Internal
import Database.Persist.Sql.Raw
import Database.Persist.Sql.Orphan.PersistStore (withRawQuery)
import Database.Persist.Sql.Util (dbColumns, parseEntityValues, updatePersistValue, mkUpdateText')
instance PersistUniqueWrite SqlBackend where
upsertBy uniqueKey record updates = do
conn <- ask
let refCol n = T.concat [connEscapeTableName conn t, ".", n]
let mkUpdateText = mkUpdateText' (connEscapeFieldName conn) refCol
case connUpsertSql conn of
Just upsertSql -> case updates of
[] -> defaultUpsertBy uniqueKey record updates
_:_ -> do
let upds = T.intercalate "," $ map mkUpdateText updates
sql = upsertSql t (persistUniqueToFieldNames uniqueKey) upds
vals = map toPersistValue (toPersistFields record)
++ map updatePersistValue updates
++ unqs uniqueKey
x <- rawSql sql vals
return $ head x
Nothing -> defaultUpsertBy uniqueKey record updates
where
t = entityDef $ Just record
unqs uniqueKey' = concatMap persistUniqueToValues [uniqueKey']
deleteBy uniq = do
conn <- ask
let sql' = sql conn
vals = persistUniqueToValues uniq
rawExecute sql' vals
where
t = entityDef $ dummyFromUnique uniq
go = toList . fmap snd . persistUniqueToFieldNames
go' conn x = connEscapeFieldName conn x `mappend` "=?"
sql conn =
T.concat
[ "DELETE FROM "
, connEscapeTableName conn t
, " WHERE "
, T.intercalate " AND " $ map (go' conn) $ go uniq]
putMany [] = return ()
putMany rsD = do
let uKeys = persistUniqueKeys . head $ rsD
case uKeys of
[] -> insertMany_ rsD
_ -> go
where
go = do
let rs = nubBy ((==) `on` persistUniqueKeyValues) (reverse rsD)
let ent = entityDef rs
let nr = length rs
let toVals r = map toPersistValue $ toPersistFields r
conn <- ask
case connPutManySql conn of
(Just mkSql) -> rawExecute (mkSql ent nr) (concatMap toVals rs)
Nothing -> defaultPutMany rs
instance PersistUniqueWrite SqlWriteBackend where
deleteBy uniq = withBaseBackend $ deleteBy uniq
upsert rs us = withBaseBackend $ upsert rs us
putMany rs = withBaseBackend $ putMany rs
instance PersistUniqueRead SqlBackend where
getBy uniq = do
conn <- ask
let sql =
T.concat
[ "SELECT "
, T.intercalate "," $ toList $ dbColumns conn t
, " FROM "
, connEscapeTableName conn t
, " WHERE "
, sqlClause conn]
uvals = persistUniqueToValues uniq
withRawQuery sql uvals $
do row <- CL.head
case row of
Nothing -> return Nothing
Just [] -> error "getBy: empty row"
Just vals ->
case parseEntityValues t vals of
Left err ->
liftIO $ throwIO $ PersistMarshalError err
Right r -> return $ Just r
where
sqlClause conn =
T.intercalate " AND " $ map (go conn) $ toFieldNames' uniq
go conn x = connEscapeFieldName conn x `mappend` "=?"
t = entityDef $ dummyFromUnique uniq
toFieldNames' = toList . fmap snd . persistUniqueToFieldNames
instance PersistUniqueRead SqlReadBackend where
getBy uniq = withBaseBackend $ getBy uniq
instance PersistUniqueRead SqlWriteBackend where
getBy uniq = withBaseBackend $ getBy uniq
dummyFromUnique :: Unique v -> Maybe v
dummyFromUnique _ = Nothing
| paul-rouse/persistent | persistent/Database/Persist/Sql/Orphan/PersistUnique.hs | mit | 4,653 | 0 | 23 | 1,664 | 1,191 | 596 | 595 | 103 | 1 |
import Data.List
main :: IO ()
main = print (primes !! 10000)  -- the 10001st prime (Project Euler problem 7)
primes :: [Int]
primes = filter (isPrime) generator
generator :: [Int]
generator = (2::Int):(3::Int) : (concat [[6*k-1, 6*k+1] | k <- [1..]])
-- Trial division up to the square root; 2 is special-cased because the
-- parity check below would otherwise exclude it from the results.
isPrime :: Int -> Bool
isPrime 2 = True
isPrime x = odd x && length (factors x) == 2
factors :: Int -> [Int]
factors a = factor_aux a [2..sq]
where sq = floor . sqrt . fromIntegral $ a
factor_aux :: Int -> [Int] -> [Int]
factor_aux a [] = [a,1]
factor_aux a (h:t) = if a `mod` h == 0 then h:(factors $ quot a h)
else factor_aux a t
| jrgdiz/euler | 7.hs | mit | 593 | 0 | 11 | 144 | 335 | 182 | 153 | 16 | 2 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Main
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Main (main) where
import Test.Tasty
import Test.AWS.KMS
import Test.AWS.KMS.Internal
main :: IO ()
main = defaultMain $ testGroup "KMS"
[ testGroup "tests" tests
, testGroup "fixtures" fixtures
]
| fmapfmapfmap/amazonka | amazonka-kms/test/Main.hs | mpl-2.0 | 522 | 0 | 8 | 103 | 76 | 47 | 29 | 9 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Kinesis.MergeShards
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Merges two adjacent shards in a stream and combines them into a single
-- shard to reduce the stream\'s capacity to ingest and transport data. Two
-- shards are considered adjacent if the union of the hash key ranges for
-- the two shards form a contiguous set with no gaps. For example, if you
-- have two shards, one with a hash key range of 276...381 and the other
-- with a hash key range of 382...454, then you could merge these two
-- shards into a single shard that would have a hash key range of
-- 276...454. After the merge, the single child shard receives data for all
-- hash key values covered by the two parent shards.
--
-- 'MergeShards' is called when there is a need to reduce the overall
-- capacity of a stream because of excess capacity that is not being used.
-- You must specify the shard to be merged and the adjacent shard for a
-- stream. For more information about merging shards, see
-- <http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-sdk-java-resharding-merge.html Merge Two Shards>
-- in the /Amazon Kinesis Developer Guide/.
--
-- If the stream is in the 'ACTIVE' state, you can call 'MergeShards'. If a
-- stream is in the 'CREATING', 'UPDATING', or 'DELETING' state,
-- 'MergeShards' returns a 'ResourceInUseException'. If the specified
-- stream does not exist, 'MergeShards' returns a
-- 'ResourceNotFoundException'.
--
-- You can use DescribeStream to check the state of the stream, which is
-- returned in 'StreamStatus'.
--
-- 'MergeShards' is an asynchronous operation. Upon receiving a
-- 'MergeShards' request, Amazon Kinesis immediately returns a response and
-- sets the 'StreamStatus' to 'UPDATING'. After the operation is completed,
-- Amazon Kinesis sets the 'StreamStatus' to 'ACTIVE'. Read and write
-- operations continue to work while the stream is in the 'UPDATING' state.
--
-- You use DescribeStream to determine the shard IDs that are specified in
-- the 'MergeShards' request.
--
-- If you try to operate on too many streams in parallel using
-- CreateStream, DeleteStream, 'MergeShards' or SplitShard, you will
-- receive a 'LimitExceededException'.
--
-- 'MergeShards' has limit of 5 transactions per second per account.
--
-- /See:/ <http://docs.aws.amazon.com/kinesis/latest/APIReference/API_MergeShards.html AWS API Reference> for MergeShards.
module Network.AWS.Kinesis.MergeShards
(
-- * Creating a Request
mergeShards
, MergeShards
-- * Request Lenses
, msStreamName
, msShardToMerge
, msAdjacentShardToMerge
-- * Destructuring the Response
, mergeShardsResponse
, MergeShardsResponse
) where
import Network.AWS.Kinesis.Types
import Network.AWS.Kinesis.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents the input for 'MergeShards'.
--
-- /See:/ 'mergeShards' smart constructor.
data MergeShards = MergeShards'
{ _msStreamName :: !Text
, _msShardToMerge :: !Text
, _msAdjacentShardToMerge :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'MergeShards' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'msStreamName'
--
-- * 'msShardToMerge'
--
-- * 'msAdjacentShardToMerge'
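--
-- For example, a hypothetical request that merges two adjacent shards
-- of a stream named @my-stream@ could be constructed as follows (the
-- shard IDs shown are placeholders):
--
-- > mergeShards "my-stream" "shardId-000000000000" "shardId-000000000001"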
mergeShards
:: Text -- ^ 'msStreamName'
-> Text -- ^ 'msShardToMerge'
-> Text -- ^ 'msAdjacentShardToMerge'
-> MergeShards
mergeShards pStreamName_ pShardToMerge_ pAdjacentShardToMerge_ =
MergeShards'
{ _msStreamName = pStreamName_
, _msShardToMerge = pShardToMerge_
, _msAdjacentShardToMerge = pAdjacentShardToMerge_
}
-- | The name of the stream for the merge.
msStreamName :: Lens' MergeShards Text
msStreamName = lens _msStreamName (\ s a -> s{_msStreamName = a});
-- | The shard ID of the shard to combine with the adjacent shard for the
-- merge.
msShardToMerge :: Lens' MergeShards Text
msShardToMerge = lens _msShardToMerge (\ s a -> s{_msShardToMerge = a});
-- | The shard ID of the adjacent shard for the merge.
msAdjacentShardToMerge :: Lens' MergeShards Text
msAdjacentShardToMerge = lens _msAdjacentShardToMerge (\ s a -> s{_msAdjacentShardToMerge = a});
instance AWSRequest MergeShards where
type Rs MergeShards = MergeShardsResponse
request = postJSON kinesis
response = receiveNull MergeShardsResponse'
instance ToHeaders MergeShards where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("Kinesis_20131202.MergeShards" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON MergeShards where
toJSON MergeShards'{..}
= object
(catMaybes
[Just ("StreamName" .= _msStreamName),
Just ("ShardToMerge" .= _msShardToMerge),
Just
("AdjacentShardToMerge" .= _msAdjacentShardToMerge)])
instance ToPath MergeShards where
toPath = const "/"
instance ToQuery MergeShards where
toQuery = const mempty
-- | /See:/ 'mergeShardsResponse' smart constructor.
data MergeShardsResponse =
MergeShardsResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'MergeShardsResponse' with the minimum fields required to make a request.
--
mergeShardsResponse
:: MergeShardsResponse
mergeShardsResponse = MergeShardsResponse'
| fmapfmapfmap/amazonka | amazonka-kinesis/gen/Network/AWS/Kinesis/MergeShards.hs | mpl-2.0 | 6,225 | 0 | 12 | 1,279 | 598 | 373 | 225 | 79 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Ambiata.Cli.Json (
toAddress
, toAccess
, toSecret
, toToken'
, toText
, addressToText
) where
import Data.Aeson
import Data.Aeson.Types
import Data.Text.Encoding
import P
import Network.AWS.Data
import Mismi.S3
import Mismi.S3.Amazonka
toAddress :: Value -> Parser Address
toAddress (String s) =
maybe (fail "s3_path must be a valid s3 address") pure $ addressFromText s
toAddress _ = fail ("s3_path must be a string and a valid s3 address")
toAccess :: Value -> Parser AccessKey
toAccess (String s) =
pure $ AccessKey (encodeUtf8 s)
toAccess _ = fail ("aws_key must be a string")
toSecret :: Value -> Parser SecretKey
toSecret (String s) =
pure $ SecretKey (encodeUtf8 s)
toSecret _ = fail ("aws_secret must be a string")
toToken' :: Value -> Parser SessionToken
toToken' (String s) =
pure $ SessionToken (encodeUtf8 s)
toToken' _ = fail ("aws_session_token must be a string")
| ambiata/tatooine-cli | src/Ambiata/Cli/Json.hs | apache-2.0 | 1,107 | 0 | 8 | 269 | 284 | 151 | 133 | 33 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module TemplateHaskellCodeExecFFI where
import Language.Haskell.TH.Syntax (lift)
import Language.Haskell.TH
import ForeignImport (pureFfiDep)
foo = $([|$(lift . pureFfiDep $ 4)|])
| robinp/haskell-indexer | haskell-indexer-backend-ghc/testdata/basic/TemplateHaskellCodeExecFFI.hs | apache-2.0 | 215 | 0 | 6 | 24 | 43 | 29 | 14 | 6 | 1 |
module CustomSql where
import Database.HaskellDB
import Database.HaskellDB.DBLayout
import Database.HaskellDB.Query
import Database.HaskellDB.PrimQuery
import System.Time
--
-- Fields for getting results of a given type
--
data Timefield = Timefield
instance FieldTag Timefield where fieldName _ = "timefield"
timefield = mkAttr Timefield :: Attr Timefield CalendarTime
data Intfield = Intfield
instance FieldTag Intfield where fieldName _ = "intfield"
intfield = mkAttr Intfield :: Attr Intfield Int
data Boolfield = Boolfield
instance FieldTag Boolfield where fieldName _ = "boolfield"
boolfield = mkAttr Boolfield :: Attr Boolfield Bool
--
-- Utilities
--
binop :: String -> Expr a -> Expr b -> Expr c
binop op (Expr e1) (Expr e2) = Expr (BinExpr (OpOther op) e1 e2)
--
-- Custom sql operators
--
now :: Expr CalendarTime
now = Expr (ConstExpr (OtherLit "NOW()"))
last_insert_id :: Expr Int
last_insert_id = Expr (ConstExpr (OtherLit "LAST_INSERT_ID()"))
ilike :: Expr String -> Expr String -> Expr Bool
ilike = binop "ILIKE"
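--
-- A hedged example of extending this module (illustrative): 'binop' makes it
-- easy to wrap further backend-specific operators; "REGEXP" is assumed here
-- and is not part of the original file.
--
regexp :: Expr String -> Expr String -> Expr Bool
regexp = binop "REGEXP"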
| m4dc4p/haskelldb | test/old/CustomSql.hs | bsd-3-clause | 1,042 | 0 | 9 | 166 | 313 | 165 | 148 | 23 | 1 |
-- | Dynamically lookup up values from modules and loading them.
module DynamicLoading (
#ifdef GHCI
-- * Force loading information
forceLoadModuleInterfaces,
forceLoadNameModuleInterface,
forceLoadTyCon,
-- * Finding names
lookupRdrNameInModule,
-- * Loading values
getValueSafely,
lessUnsafeCoerce
#endif
) where
#ifdef GHCI
import Linker ( linkModule, getHValue )
import SrcLoc ( noSrcSpan )
import Finder ( findImportedModule, cannotFindModule )
import DriverPhases ( HscSource(HsSrcFile) )
import TcRnDriver ( getModuleInterface )
import TcRnMonad ( initTc, initIfaceTcRn )
import LoadIface ( loadUserInterface )
import RdrName ( RdrName, Provenance(..), ImportSpec(..), ImpDeclSpec(..)
, ImpItemSpec(..), mkGlobalRdrEnv, lookupGRE_RdrName, gre_name )
import RnNames ( gresFromAvails )
import PrelNames ( iNTERACTIVE )
import DynFlags
import HscTypes ( HscEnv(..), FindResult(..), ModIface(..), lookupTypeHscEnv )
import TypeRep ( TyThing(..), pprTyThingCategory )
import Type ( Type, eqType )
import TyCon ( TyCon )
import Name ( Name, nameModule_maybe )
import Id ( idType )
import Module ( Module, ModuleName )
import Panic ( GhcException(..), throwGhcException )
import FastString
import ErrUtils
import Outputable
import Exception
import Data.Maybe ( mapMaybe )
import GHC.Exts ( unsafeCoerce# )
-- | Force the interfaces for the given modules to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadModuleInterfaces :: HscEnv -> SDoc -> [Module] -> IO ()
forceLoadModuleInterfaces hsc_env doc modules
= (initTc hsc_env HsSrcFile False iNTERACTIVE $ initIfaceTcRn $ mapM_ (loadUserInterface False doc) modules) >> return ()
-- | Force the interface for the module containing the name to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadNameModuleInterface :: HscEnv -> SDoc -> Name -> IO ()
forceLoadNameModuleInterface hsc_env reason name = do
let name_modules = mapMaybe nameModule_maybe [name]
forceLoadModuleInterfaces hsc_env reason name_modules
-- | Load the 'TyCon' associated with the given name, come hell or high water. Fails if:
--
-- * The interface could not be loaded
-- * The name is not that of a 'TyCon'
-- * The name did not exist in the loaded module
forceLoadTyCon :: HscEnv -> Name -> IO TyCon
forceLoadTyCon hsc_env con_name = do
forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of loadTyConTy")) con_name
mb_con_thing <- lookupTypeHscEnv hsc_env con_name
case mb_con_thing of
Nothing -> throwCmdLineErrorS $ missingTyThingError con_name
Just (ATyCon tycon) -> return tycon
Just con_thing -> throwCmdLineErrorS $ wrongTyThingError con_name con_thing
-- | Loads the value corresponding to a 'Name' if that value has the given 'Type'. This only provides limited safety
-- in that it is up to the user to ensure that this type corresponds to the type at which the return value is used!
--
-- If the value found was not of the correct type, returns @Nothing@. Any other condition results in an exception:
--
-- * If we could not load the name's module
-- * If the thing being loaded is not a value
-- * If the Name does not exist in the module
-- * If the link failed
getValueSafely :: HscEnv -> Name -> Type -> IO (Maybe a)
getValueSafely hsc_env val_name expected_type = do
forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of getValueSafely")) val_name
-- Now look up the names for the value and type constructor in the type environment
mb_val_thing <- lookupTypeHscEnv hsc_env val_name
case mb_val_thing of
Nothing -> throwCmdLineErrorS $ missingTyThingError val_name
Just (AnId id) -> do
-- Check the value type in the interface against the type recovered from the type constructor
-- before finally casting the value to the type we assume corresponds to that constructor
if expected_type `eqType` idType id
then do
-- Link in the module that contains the value, if it has such a module
case nameModule_maybe val_name of
Just mod -> do linkModule hsc_env mod
return ()
Nothing -> return ()
                    -- Find the value that we just linked in and cast it given that we have proved its type
hval <- getHValue hsc_env val_name
value <- lessUnsafeCoerce (hsc_dflags hsc_env) "getValueSafely" hval
return $ Just value
else return Nothing
Just val_thing -> throwCmdLineErrorS $ wrongTyThingError val_name val_thing
-- | Coerce a value as usual, but:
--
-- 1) Evaluate it immediately to get a segfault early if the coercion was wrong
--
-- 2) Wrap it in some debug messages at verbosity 3 or higher so we can see what happened
-- if it /does/ segfault
lessUnsafeCoerce :: DynFlags -> String -> a -> IO b
lessUnsafeCoerce dflags context what = do
debugTraceMsg dflags 3 $ (ptext $ sLit "Coercing a value in") <+> (text context) <> (ptext $ sLit "...")
output <- evaluate (unsafeCoerce# what)
debugTraceMsg dflags 3 $ ptext $ sLit "Successfully evaluated coercion"
return output
-- | Finds the 'Name' corresponding to the given 'RdrName' in the context of the 'ModuleName'. Returns @Nothing@ if no
-- such 'Name' could be found. Any other condition results in an exception:
--
-- * If the module could not be found
-- * If we could not determine the imports of the module
lookupRdrNameInModule :: HscEnv -> ModuleName -> RdrName -> IO (Maybe Name)
lookupRdrNameInModule hsc_env mod_name rdr_name = do
-- First find the package the module resides in by searching exposed packages and home modules
found_module <- findImportedModule hsc_env mod_name Nothing
case found_module of
Found _ mod -> do
-- Find the exports of the module
(_, mb_iface) <- getModuleInterface hsc_env mod
case mb_iface of
Just iface -> do
-- Try and find the required name in the exports
let decl_spec = ImpDeclSpec { is_mod = mod_name, is_as = mod_name
, is_qual = False, is_dloc = noSrcSpan }
provenance = Imported [ImpSpec decl_spec ImpAll]
env = mkGlobalRdrEnv (gresFromAvails provenance (mi_exports iface))
case lookupGRE_RdrName rdr_name env of
[gre] -> return (Just (gre_name gre))
[] -> return Nothing
_ -> panic "lookupRdrNameInModule"
Nothing -> throwCmdLineErrorS $ hsep [ptext (sLit "Could not determine the exports of the module"), ppr mod_name]
err -> throwCmdLineErrorS $ cannotFindModule dflags mod_name err
where
dflags = hsc_dflags hsc_env
wrongTyThingError :: Name -> TyThing -> SDoc
wrongTyThingError name got_thing = hsep [ptext (sLit "The name"), ppr name, ptext (sLit "is not that of a value but rather a"), pprTyThingCategory got_thing]
missingTyThingError :: Name -> SDoc
missingTyThingError name = hsep [ptext (sLit "The name"), ppr name, ptext (sLit "is not in the type environment: are you sure it exists?")]
throwCmdLineErrorS :: SDoc -> IO a
throwCmdLineErrorS = throwCmdLineError . showSDoc
throwCmdLineError :: String -> IO a
throwCmdLineError = throwGhcException . CmdLineError
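-- A hedged usage sketch (illustrative only): a plugin-style loader would
-- typically chain 'lookupRdrNameInModule' and 'getValueSafely'; the names
-- @mod_name@, @rdr_name@ and @expected_ty@ below are assumptions.
--
-- > do mb_name <- lookupRdrNameInModule hsc_env mod_name rdr_name
-- >    case mb_name of
-- >      Nothing   -> return Nothing
-- >      Just name -> getValueSafely hsc_env name expected_ty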
#endif
| mcmaniac/ghc | compiler/main/DynamicLoading.hs | bsd-3-clause | 7,988 | 0 | 24 | 2,090 | 1,398 | 743 | 655 | 1 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Yesod.WebSockets
( -- * Core API
WebSocketsT
, webSockets
, receiveData
, sendTextData
, sendBinaryData
-- * Conduit API
, sourceWS
, sinkWSText
, sinkWSBinary
-- * Async helpers
, race
, race_
, concurrently
, concurrently_
) where
import qualified Control.Concurrent.Async as A
import Control.Monad (forever, void, when)
import Control.Monad.IO.Class (MonadIO (liftIO))
import Control.Monad.Trans.Control (control)
import Control.Monad.Trans.Control (MonadBaseControl (liftBaseWith, restoreM))
import Control.Monad.Trans.Reader (ReaderT (ReaderT, runReaderT))
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Network.Wai.Handler.WebSockets as WaiWS
import qualified Network.WebSockets as WS
import qualified Yesod.Core as Y
-- | A transformer for a WebSockets handler.
--
-- Since 0.1.0
type WebSocketsT = ReaderT WS.Connection
-- | Attempt to run a WebSockets handler. This function first checks if the
-- client initiated a WebSockets connection and, if so, runs the provided
-- application, short-circuiting the rest of your handler. If the client did
-- not request a WebSockets connection, the rest of your handler will be called
-- instead.
--
-- Since 0.1.0
webSockets :: (Y.MonadBaseControl IO m, Y.MonadHandler m) => WebSocketsT m () -> m ()
webSockets inner = do
req <- Y.waiRequest
when (WaiWS.isWebSocketsReq req) $
Y.sendRawResponseNoConduit
$ \src sink -> control $ \runInIO -> WaiWS.runWebSockets
WS.defaultConnectionOptions
(WaiWS.getRequestHead req)
(\pconn -> do
conn <- WS.acceptRequest pconn
WS.forkPingThread conn 30
runInIO $ runReaderT inner conn)
src
sink
-- | Receive a piece of data from the client.
--
-- Since 0.1.0
receiveData :: (MonadIO m, WS.WebSocketsData a) => WebSocketsT m a
receiveData = ReaderT $ liftIO . WS.receiveData
-- | Send a textual message to the client.
--
-- Since 0.1.0
sendTextData :: (MonadIO m, WS.WebSocketsData a) => a -> WebSocketsT m ()
sendTextData x = ReaderT $ liftIO . flip WS.sendTextData x
-- | Send a binary message to the client.
--
-- Since 0.1.0
sendBinaryData :: (MonadIO m, WS.WebSocketsData a) => a -> WebSocketsT m ()
sendBinaryData x = ReaderT $ liftIO . flip WS.sendBinaryData x
-- | A @Source@ of WebSockets data from the user.
--
-- Since 0.1.0
sourceWS :: (MonadIO m, WS.WebSocketsData a) => C.Producer (WebSocketsT m) a
sourceWS = forever $ Y.lift receiveData >>= C.yield
-- | A @Sink@ for sending textual data to the user.
--
-- Since 0.1.0
sinkWSText :: (MonadIO m, WS.WebSocketsData a) => C.Consumer a (WebSocketsT m) ()
sinkWSText = CL.mapM_ sendTextData
-- | A @Sink@ for sending binary data to the user.
--
-- Since 0.1.0
sinkWSBinary :: (MonadIO m, WS.WebSocketsData a) => C.Consumer a (WebSocketsT m) ()
sinkWSBinary = CL.mapM_ sendBinaryData
-- | Generalized version of 'A.race'.
--
-- Since 0.1.0
race :: MonadBaseControl IO m => m a -> m b -> m (Either a b)
race x y = liftBaseWith (\run -> A.race (run x) (run y))
>>= either (fmap Left . restoreM) (fmap Right . restoreM)
-- | Generalized version of 'A.race_'.
--
-- Since 0.1.0
race_ :: MonadBaseControl IO m => m a -> m b -> m ()
race_ x y = void $ race x y
-- | Generalized version of 'A.concurrently'. Note that if your underlying
-- monad has some kind of mutable state, the state from the second action will
-- overwrite the state from the first.
--
-- Since 0.1.0
concurrently :: MonadBaseControl IO m => m a -> m b -> m (a, b)
concurrently x y = do
(resX, resY) <- liftBaseWith $ \run -> A.concurrently (run x) (run y)
x' <- restoreM resX
y' <- restoreM resY
return (x', y')
-- | Run two actions concurrently (like 'A.concurrently'), but discard their
-- results and any modified monadic state.
--
-- Since 0.1.0
concurrently_ :: MonadBaseControl IO m => m a -> m b -> m ()
concurrently_ x y = void $ liftBaseWith $ \run -> A.concurrently (run x) (run y)
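-- A hedged usage sketch (illustrative only): a route handler that echoes
-- every message back to the client. @getEchoR@, the use of "Data.Text" and
-- the surrounding Yesod site are assumptions, not part of this module.
--
-- > getEchoR :: Handler ()
-- > getEchoR = webSockets $ forever $ do
-- >     msg <- receiveData
-- >     sendTextData ("echo: " <> msg :: Text)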
| ygale/yesod | yesod-websockets/Yesod/WebSockets.hs | mit | 4,363 | 0 | 18 | 1,056 | 1,062 | 584 | 478 | 68 | 1 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 1.0//EN"
"http://java.sun.com/products/javahelp/helpset_1_0.dtd">
<?SecureFTP this is data for SecureFTP ?>
<helpset version="1.0">
<!-- title -->
<title>Aide sur Secure FTP</title>
<!-- maps -->
<maps>
<homeID>intro</homeID>
<mapref location="Map_fr.jhm"/>
</maps>
<!-- views -->
<view>
<name>TOC</name>
<label>Contenu de l'aide</label>
<type>javax.help.TOCView</type>
<data>SecureFTPTOC_fr.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>SecureFTPIndex_fr.xml</data>
</view>
<view>
<name>Search</name>
<label>Recherche</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch_fr
</data>
</view>
  <presentation default="true">
<size width="700" height="550" />
<location x="100" y="100" />
</presentation>
</helpset>
| robinpowell24/secureftp | src/com/glub/secureftp/client/resources/help/SecureFTP_fr.hs | apache-2.0 | 1,133 | 88 | 48 | 249 | 423 | 220 | 203 | -1 | -1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module T16133 where
import Data.Kind
import Language.Haskell.TH hiding (Type)
data P (a :: k) = MkP
$([d| f :: Int
f = $(varE 'id `appTypeE` conT ''Int `appE` litE (integerL 42))
type P' = $(conT ''P `appKindT` conT ''Type) |])
| sdiehl/ghc | testsuite/tests/th/T16133.hs | bsd-3-clause | 304 | 0 | 6 | 64 | 48 | 32 | 16 | 9 | 0 |
module Data.IP.Mask where
import Data.Bits
import Data.IP.Addr
import Data.Word
maskIPv4 :: Int -> IPv4
maskIPv4 len =
IP4 $ complement $ 0xffffffff `shift` (-len)
maskIPv6 :: Int -> IPv6
maskIPv6 len =
IP6 $ toIP6Addr $ bimapTup complement $
(0xffffffffffffffff, 0xffffffffffffffff) `shift128` (-len)
where
bimapTup f (x,y) = (f x, f y)
shift128 :: (Word64, Word64) -> Int -> (Word64, Word64)
shift128 x i
| i < 0 = x `shiftR128` (-i)
| i > 0 = x `shiftL128` i
| otherwise = x
shiftL128 :: (Word64, Word64) -> Int -> (Word64, Word64)
shiftL128 (h, l) i =
( (h `shiftL` i) .|. (l `shift` (i - 64) ), (l `shiftL` i))
shiftR128 :: (Word64, Word64) -> Int -> (Word64, Word64)
shiftR128 (h, l) i =
(h `shiftR` i, (l `shiftR` i) .|. h `shift` (64 - i) )
fromIP6Addr :: IPv6Addr -> (Word64, Word64)
fromIP6Addr (w3, w2, w1, w0) =
( (fromIntegral w3 `shiftL` 32) .|. fromIntegral w2
, (fromIntegral w1 `shiftL` 32) .|. fromIntegral w0
)
toIP6Addr :: (Word64, Word64) -> IPv6Addr
toIP6Addr (h, l) =
( fromIntegral $ (h `shiftR` 32) .&. m
, fromIntegral $ h .&. m
, fromIntegral $ (l `shiftR` 32) .&. m
, fromIntegral $ l .&. m
)
where m = 0xffffffff
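-- A hedged usage sketch (illustrative only; the shown results follow from the
-- shift-and-complement encoding above):
--
-- > maskIPv4 24   -- 255.255.255.0
-- > maskIPv4 16   -- 255.255.0.0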
| kazu-yamamoto/iproute | Data/IP/Mask.hs | bsd-3-clause | 1,231 | 0 | 10 | 300 | 563 | 325 | 238 | 34 | 1 |
{-# OPTIONS_JHC -fno-prelude -fffi -funboxed-values #-}
-- | helper routines for deriving(Enum) instances
-- these routines help out the compiler when
-- deriving enums.
module Jhc.Inst.PrimEnum(
enum_succ,
enum_pred,
enum_fromTo,
enum_fromThen,
enum_fromThenTo,
enum_toEnum,
enum_from
,ix_range
,ix_index
) where
import Jhc.Basics
import Jhc.Int
{-# INLINE enum_toEnum, enum_succ, enum_pred, enum_fromTo, enum_fromThen, enum_fromThenTo, enum_from #-}
enum_toEnum :: (Enum__ -> a) -> Int__ -> Int -> a
enum_toEnum box max int = case unboxInt int of
int_ -> case int_ `bits32UGt` max of
1# -> toEnumError
0# -> box (intToEnum int_)
enum_succ :: (Enum__ -> a) -> (a -> Enum__) -> Enum__ -> a -> a
enum_succ box debox max e = case debox e of
e_ -> case e_ `enumEq` max of
0# -> box (enumInc e_)
1# -> succError
enum_pred :: (Enum__ -> a) -> (a -> Enum__) -> a -> a
enum_pred box debox e = case debox e of
e_ -> case e_ `enumEq` 0# of
0# -> box (enumDec e_)
1# -> predError
enum_from :: (Enum__ -> a) -> (a -> Enum__) -> Enum__ -> a -> [a]
enum_from box debox max x = case debox x of
x_ -> f x_ where
f x = case x `enumGt` max of
0# -> box x:f (enumInc x)
1# -> []
enum_fromTo :: (Enum__ -> a) -> (a -> Enum__) -> a -> a -> [a]
enum_fromTo box debox x y = case debox y of
y_ -> enum_from box debox y_ x
enum_fromThen :: (Enum__ -> a) -> (a -> Enum__) -> Enum__ -> a -> a -> [a]
enum_fromThen box debox max x y = case debox x of
x_ -> case debox y of
y_ -> case x_ `enumGt` y_ of
0# -> enum_fromThenToUp' box x_ y_ max
1# -> enum_fromThenToDown' box x_ y_ 0#
enum_fromThenTo :: (Enum__ -> a) -> (a -> Enum__) -> a -> a -> a -> [a]
enum_fromThenTo box debox x y z = case debox x of
x_ -> case debox y of
y_ -> case debox z of
z_ -> case x_ `enumGt` y_ of
0# -> enum_fromThenToUp' box x_ y_ z_
1# -> enum_fromThenToDown' box x_ y_ z_
enum_fromThenToUp' :: (Enum__ -> a) -> Enum__ -> Enum__ -> Enum__ -> [a]
enum_fromThenToUp' box x y z = case y `enumSub` x of
inc -> let f x = case x `enumGt` z of
0# -> box x:f (x `enumAdd` inc)
1# -> []
in f x
enum_fromThenToDown' :: (Enum__ -> a) -> Enum__ -> Enum__ -> Enum__ -> [a]
enum_fromThenToDown' box x y z = case y `enumSub` x of
inc -> let f x = case x `enumLt` z of
0# -> box x:f (x `enumAdd` inc)
1# -> []
in f x
ix_range :: (Enum__ -> a) -> (a -> Enum__) -> (a,a) -> [a]
ix_range box debox (x,y) = enum_fromTo box debox x y
ix_index :: (Enum__ -> a) -> (a -> Enum__) -> (a,a) -> a -> Int
ix_index box debox (x,y) v = case debox v of
v_ -> case debox x of
x_ -> case debox y of
            y_ -> case x_ `enumLte` v_ of
1# -> case v_ `enumLte` y_ of
1# -> boxInt (enumToInt (v_ `enumSub` x_))
0# -> ixIndexError
0# -> ixIndexError
ix_inRange :: (Enum__ -> a) -> (a -> Enum__) -> (a,a) -> a -> Bool
ix_inRange box debox (x,y) v = case debox v of
v_ -> case debox x of
x_ -> case debox y of
y_ -> case v_ `enumGte` x_ of
1# -> boxBool (v_ `enumLte` y_)
0# -> boxBool 0#
foreign import primitive "box" boxBool :: Bool__ -> Bool
foreign import primitive "Add" enumAdd :: Enum__ -> Enum__ -> Enum__
foreign import primitive "Eq" enumEq :: Enum__ -> Enum__ -> Bool__
foreign import primitive "Gt" enumGt :: Enum__ -> Enum__ -> Bool__
foreign import primitive "Gte" enumGte :: Enum__ -> Enum__ -> Bool__
foreign import primitive "Lt" enumLt :: Enum__ -> Enum__ -> Bool__
foreign import primitive "Lte" enumLte :: Enum__ -> Enum__ -> Bool__
foreign import primitive "Sub" enumSub :: Enum__ -> Enum__ -> Enum__
foreign import primitive "U2U" enumToInt :: Enum__ -> Int__
foreign import primitive "U2U" intToEnum :: Int__ -> Enum__
foreign import primitive "UGt" bits32UGt :: Bits32_ -> Bits32_ -> Bool__
foreign import primitive "decrement" enumDec :: Enum__ -> Enum__
foreign import primitive "error.Ix.Index: out of range" ixIndexError :: a
foreign import primitive "error.pred: out of range" predError :: a
foreign import primitive "error.succ: out of range" succError :: a
foreign import primitive "error.toEnum: out of range" toEnumError :: a
foreign import primitive "increment" enumInc :: Enum__ -> Enum__
| hvr/jhc | lib/jhc/Jhc/Inst/PrimEnum.hs | mit | 4,686 | 17 | 24 | 1,420 | 1,529 | 826 | 703 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>Call Home Add-On</title>
<maps>
<homeID>callhome</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/callhome/src/main/javahelp/org/zaproxy/addon/callhome/resources/help_sr_CS/helpset_sr_CS.hs | apache-2.0 | 966 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Polymorphism where
foo :: a -> a -> a
foo = undefined
foo' :: forall a. a -> a -> a
foo' = undefined
bar :: a -> b -> (a, b)
bar = undefined
bar' :: forall a b. a -> b -> (a, b)
bar' = undefined
baz :: a -> (a -> [a -> a] -> b) -> b
baz = undefined
baz' :: forall a b. a -> (a -> [a -> a] -> b) -> b
baz' = undefined
quux :: a -> (forall a. a -> a) -> a
quux x f = f x
quux' :: forall a. a -> (forall a. a -> a) -> a
quux' x f = f x
num :: Num a => a -> a -> a
num = undefined
num' :: forall a. Num a => a -> a -> a
num' = undefined
eq :: (Eq a, Eq b) => [a] -> [b] -> (a, b)
eq = undefined
eq' :: forall a b. (Eq a, Eq b) => [a] -> [b] -> (a, b)
eq' = undefined
mon :: Monad m => (a -> m a) -> m a
mon = undefined
mon' :: forall m a. Monad m => (a -> m a) -> m a
mon' = undefined
norf :: a -> (forall a. Ord a => a -> a) -> a
norf x f = x
norf' :: forall a. a -> (forall a. Ord a => a -> a) -> a
norf' x f = x
plugh :: forall a. a -> a
plugh x = x :: a
thud :: forall a b. (a -> b) -> a -> (a, b)
thud f x =
(x :: a, y) :: (a, b)
where
y = (f :: a -> b) x :: b
| sdiehl/ghc | testsuite/tests/hiefile/should_compile/hie007.hs | bsd-3-clause | 1,168 | 0 | 12 | 347 | 682 | 380 | 302 | 41 | 1 |
{-# LANGUAGE RebindableSyntax #-}
module Bug where
data Maybe a = Just a | Nothing
foo :: [Maybe a] -> [a]
foo xs = [ x | Just x <- xs ]
| olsner/ghc | testsuite/tests/rebindable/T11216A.hs | bsd-3-clause | 140 | 0 | 8 | 35 | 58 | 33 | 25 | 5 | 1 |
{-# LANGUAGE DataKinds #-}
module T13025 where
import T13025a
type MyRec = Rec '[ '("A",Int), '("B",Int), '("C",Int) ]
getC :: MyRec -> Int
getC = getField (Proxy::Proxy '("C",Int))
doubleC :: MyRec -> MyRec
doubleC r = setC (2 * (getC r)) r
where setC = set . (Field :: Int -> Field '("C",Int))
main :: IO ()
main = print (getC (Field 1 :& Field 2 :& Field 3 :& Nil :: MyRec))
| olsner/ghc | testsuite/tests/simplCore/should_compile/T13025.hs | bsd-3-clause | 384 | 0 | 12 | 79 | 204 | 113 | 91 | 11 | 1 |
-- ImageToVector.hs
-- Compiles to a library.
-- This library contains utility functions for loading, converting, and
-- manipulating images.
module ImageToVector
( loadImage
, loadImageToVector
, loadImages
, loadImagesToVectors
, imageToVector
, chop
, changeResolution
, vectorToImage
) where
import Data.Packed.Vector
import Vision.Image hiding (map)
import Vision.Primitive
import Vision.Primitive.Shape
import Vision.Image.Storage.DevIL (Autodetect (..), load)
import Data.Word (Word8)
imageToVector :: RGB -> Vector Double
imageToVector = fromList . concatMap f . toList . manifestVector
where
f :: RGBPixel -> [Double]
f (RGBPixel r g b) = [fromIntegral r, fromIntegral g, fromIntegral b]
vectorToImage :: Size -> Vector Double -> RGB
vectorToImage s v = kludgeImg s
. toList . mapVector (fromIntegral . floor) $ v
kludgeImg :: Size -> [Word8] -> RGB
kludgeImg s xs = Manifest s $ fromList $ take k $ pxls (xs ++ repeat 0)
where
k = let (Z :. w :. h) = s in w * h
pxls (x:y:z:rest) = RGBPixel x y z : pxls rest
changeResolution :: Int -> Int -> RGB -> RGB
changeResolution w h img = resize NearestNeighbor (Z :. w :. h) img
loadImageToVector :: Int -> Int -> FilePath -> IO (Maybe (Vector Double))
loadImageToVector w h path = do
img <- load Autodetect path
case img of
Left err -> do
--putStrLn $ "Error loading image:" ++ path
--print err
return Nothing
Right rgb -> do
return $ Just (imageToVector $ changeResolution w h rgb)
loadImagesToVectors :: Int -> Int -> [FilePath] -> IO [Maybe (Vector Double)]
loadImagesToVectors w h = mapM (loadImageToVector w h)
loadImage :: FilePath -> IO(Maybe RGB)
loadImage path = do
img <- load Autodetect path
case img of
Left err -> do
putStrLn $ "Error loading image:" ++ path
print err
return Nothing
Right rgb -> do
return $ Just rgb
loadImages :: [FilePath] -> IO [Maybe RGB]
loadImages = mapM loadImage
imageDim :: RGB -> (Int,Int)
imageDim img = (w,h)
where (Z :. h :. w) = manifestSize img
rects :: Int -> Int -> RGB -> [Rect]
rects w h img = (Rect 0 0 w h):(next 0 0)
where
next x y = if (x + w >= ((fst . imageDim) img)) && (y + h >= ((snd . imageDim) img))
then []
else if x + w >= ((fst . imageDim) img)
then (Rect 0 (y + h) w h):(next 0 (y+h))
else (Rect (x + w) y w h):(next (x+w) y)
--takes an rgb image and chops it into w by h chunks
chop :: Int -> Int -> RGB -> [RGB]
chop w h img = map ((flip crop) img) (rects w h img)
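-- A hedged usage sketch (illustrative only; the file name and tile size are
-- assumptions):
--
-- > do mimg <- loadImage "photo.png"
-- >    case mimg of
-- >      Nothing  -> putStrLn "could not load image"
-- >      Just img -> print (length (chop 32 32 img))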
| IsToomersCornerBeingRolledRightNow/principalComponentAnalysisViaSingularValueDecomposition | ImageToVector.hs | mit | 2,625 | 0 | 16 | 679 | 1,039 | 537 | 502 | 62 | 3 |
module Main where
minIdx :: [Int] -> Int -> Int
minIdx (x:[]) idx = idx
minIdx (x:xs) idx = if x < minimum xs then idx else (minIdx xs (idx + 1))
maxIdx :: [Int] -> Int -> Int
maxIdx (x:[]) idx = idx
maxIdx (x:xs) idx = if x > maximum xs then idx else (maxIdx xs (idx + 1))
swap :: [Int] -> Int -> [Int]
swap [] _ = []
swap (x:xs) 0 = xs
swap (x:xs) a = take (a-1) xs ++ [x] ++ drop a xs
solutionMin :: [Int] -> Int -> Int
solutionMin [] a = a
solutionMin l a = let min = minIdx l 0; cnt = if min > 0 then a + 1 else a in solutionMin (swap l min) cnt
solutionMax :: [Int] -> Int -> Int
solutionMax [] a = a
solutionMax l a = let max = maxIdx l 0; cnt = if max > 0 then a + 1 else a in solutionMax (swap l max) cnt
solution :: [Int] -> Int
solution [] = 0
solution l = minimum ((solutionMin l 0):(solutionMax l 0):[])
--isSorted :: (Int -> Int -> Bool) -> [Int] -> Bool
--isSorted f [] = True
--isSorted f (_:[]) = True
--isSorted f (x:y:xs) = (f x y) && isSorted f (y:xs)
--isDecSorted xs = isSorted (<) xs
--isAscSorted xs = isSorted (>) xs
main :: IO ()
main = do
print $ minIdx [3, 2, 4] 0
print $ minIdx [1, 2, 3] 0
print $ minIdx [5, 3, 2] 0
print $ swap [3,5,2,6,1] 0
print $ swap [3,5,2,6,1] 1
print $ swap [3,5,2,6,1] 2
print $ swap [3,5,2,6,1] 3
print $ swap [3,5,2,6,1] 4
-- print $ isDecSorted [3,5,2,6,1]
-- print $ isDecSorted [1,2,3,4]
-- print $ isAscSorted [3,5,2,6,1]
-- print $ isAscSorted [4,3,2,1]
-- print $ isAscSorted [1,2,3,4]
print $ maxIdx [3,5,2,6,1] 0
print $ solutionMin [3,5,2,6,1] 0
print $ solutionMax [3,5,2,6,1] 0
print $ solution [3,5,2,6,1] | funfunStudy/algorithm | haskell/src/main/haskell/lilysHomework/Main.hs | mit | 1,614 | 0 | 10 | 367 | 833 | 451 | 382 | 34 | 2 |
{-# LANGUAGE Rank2Types #-}
module GA where
import System.Random
import Data.List
import Data.Maybe
import Control.Monad.Random
type GA a g = GeneticAlgorithm a -> g -> [a] -> ([a],g)
data GeneticAlgorithm a = GeneticAlgorithm {
fitness :: a -> Float,
mutate :: RandomGen g => (a,g) -> (a,g),
cross :: RandomGen g => g -> a -> a -> (a,g),
generate :: MonadRandom m => m a,
distance :: a -> a -> Int}
generations :: (RandomGen g, Num n, Enum n) => GeneticAlgorithm a -> GA a g -> n -> ([a],g) -> ([a],g)
generations algo ga gens gen = foldl (\(x,g) _ -> ga algo g x) gen [1..gens]
generations' algo ga gens rng popSize = generations algo ga gens (g0 algo rng popSize)
g0 algo rng popSize = foldl (\(xs,g) _ -> let (x,g') = (runRand (generate algo) g) in (x:xs,g')) ([],rng) [1..popSize]
--get best individual and mutate the winner of the roulette
geneticAlgorithm1 :: RandomGen g => GeneticAlgorithm a -> g -> [a] -> ([a],g)
geneticAlgorithm1 algo rng ls = newGen ls rng
where totalFitness xs = sum . map (fitness algo) $ xs
newGen xs g = let (xs',g') = rR xs g; (xs'',g'') = mL xs' g' in ((bestIndividual algo xs) : xs'', g'')
rR xs g = repeatedRoulette algo g xs 1 (totalFitness xs) (popSize-1)
popSize = length ls
mL xs g = foldl (\(ys,g') y -> let (y',g'') = mutate algo (y,g') in (y':ys,g'')) ([],g) xs
--keep best individual, get winners of roulette, cross them, then mutate the results
geneticAlgorithm2 algo rng ls = newGen ls rng
where totalFitness xs = sum . map (fitness algo) $ xs
popSize = length ls
rR xs g = repeatedRoulette algo g xs 1 (totalFitness xs) (popSize-1)
cL (x:xs) g = foldl (\(y'':ys,g') y -> let (y',g'') = cross algo g' y y'' in (y':y'':ys,g'')) ([x],g) xs
mL xs g = foldl (\(ys,g') y -> let (y',g'') = mutate algo (y,g') in (y':ys,g'')) ([],g) xs
gL g n = foldl (\(xs,g') _ -> let (x',g'') = (runRand (generate algo) g') in (x':xs,g'')) ([],g) [1..n]
dropCount = floor $ (fromIntegral popSize) / 10
newGen xs g = let (xs',g') = rR xs g
(xs'', g'') = cL xs' g'
(xs''', g''') = mL xs'' g''
(ds, g'''') = gL g''' dropCount
xs'''' = ds ++ drop dropCount xs'''
in ((bestIndividual algo xs) : xs'''', g'''')
bestIndividual algo xs = maximumBy (\a b -> fitness algo a `compare` fitness algo b) xs
repeatedRoulette :: (RandomGen g, Num n, Eq n) => GeneticAlgorithm a -> g -> [a] -> Float -> Float -> n -> ([a],g)
repeatedRoulette algo rng ls minV maxV count = runRand (loop count []) rng
where r' = roulette algo
loop n acc
| n == 0 = return acc
| otherwise = do
v <- getRandomR (minV,maxV)
loop (n-1) (r' v ls : acc)
roulette :: GeneticAlgorithm a -> Float -> [a] -> a
roulette algo maxV = (\(_,x) -> fromJust x) . foldl helper (0.0,Nothing)
where helper (sumV,x) y
| sumV < maxV = (fitness algo y + sumV,Just y)
| otherwise = (sumV,x)
--overselection ::Num n => GeneticAlgorithm a -> [a] -> ([a] -> n -> [a]) -> n -> n -> [a]
overselection algo ls selectionFunc groupFraction endcount = selectionFunc bestIndividuals
where bestIndividuals = take takeCount $ sortBy (\x y -> fitness algo x `compare` fitness algo y) ls
takeCount = floor ((fromIntegral $ length ls) / groupFraction)
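-- A hedged usage sketch (illustrative only): running one of the GA variants
-- for a fixed number of generations; @myAlgo@ stands for a fully populated
-- 'GeneticAlgorithm' record and is an assumption.
--
-- > let (finalPop, rng') = generations' myAlgo geneticAlgorithm2 100 rng 50
-- > in bestIndividual myAlgo finalPop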
| Stratege/Coevolutionary-Neural-Network | GA.hs | mit | 3,390 | 46 | 18 | 862 | 1,632 | 865 | 767 | 55 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Writer.Formats.Ltl
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein ([email protected])
--
-- Transforms a specification to a pure LTL formula.
--
-----------------------------------------------------------------------------
module Writer.Formats.Ltl where
-----------------------------------------------------------------------------
import Config
import Simplify
import Data.Error
import Data.Specification
import Writer.Eval
import Writer.Data
import Writer.Utils
-----------------------------------------------------------------------------
-- | pure LTL operator configuration.
opConfig
:: OperatorConfig
opConfig = OperatorConfig
{ tTrue = "true"
, fFalse = "false"
, opNot = UnaryOp "!" 1
, opAnd = BinaryOp "&&" 2 AssocLeft
, opOr = BinaryOp "||" 3 AssocLeft
, opImplies = BinaryOp "->" 4 AssocRight
, opEquiv = BinaryOp "<->" 4 AssocRight
, opNext = UnaryOp "X" 1
, opPrevious = UnaryOp "Y" 1
, opFinally = UnaryOp "F" 1
, opGlobally = UnaryOp "G" 1
, opHistorically = UnaryOp "H" 1
, opOnce = UnaryOp "O" 1
, opUntil = BinaryOp "U" 6 AssocRight
, opRelease = BinaryOp "R" 7 AssocLeft
, opWeak = BinaryOp "W" 5 AssocRight
, opSince = BinaryOp "S" 8 AssocRight
, opTriggered = BinaryOp "T" 9 AssocLeft
}
-----------------------------------------------------------------------------
-- | LTL writer.
writeFormat
:: Configuration -> Specification -> Either Error String
writeFormat c s = do
(es,ss,rs,as,is,gs) <- eval c s
fml0 <- merge es ss rs as is gs
fml1 <- simplify (adjust c opConfig) fml0
printFormula opConfig (outputMode c) (quoteMode c) fml1
-----------------------------------------------------------------------------
| reactive-systems/syfco | src/lib/Writer/Formats/Ltl.hs | mit | 1,989 | 0 | 10 | 454 | 393 | 219 | 174 | 36 | 1 |
import Data.List
coFactors :: Int -> [Int]
coFactors n = filter (test n) [1..(intSqrt n)] -- generates factors up to sqrt n
where intSqrt = floor . sqrt . fromIntegral
test n x = mod n x == 0
factors :: Int -> [Int]
factors n = (fmap (div n) fac) ++ fac
where fac = coFactors n
prime n = (length $ factors n) <= 2
largestPrime' :: Int -> [Int]
largestPrime' n = dropWhile (not . prime) (reverse $ sort $ factors n)
largestPrime :: Int -> Int
largestPrime n = head $ largestPrime' n
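-- A hedged usage sketch (the concrete input is the standard Project Euler 3
-- value and is an assumption here, since this file defines no main):
--
-- > main :: IO ()
-- > main = print (largestPrime 600851475143)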
| johnprock/euler | p3.hs | mit | 498 | 1 | 9 | 114 | 225 | 115 | 110 | 13 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module ViewModels.NameValuePairViewModel where
import Data.Text
import Data.Data
data NameValuePairVM = NameValuePairVM {
name :: Text,
value :: Text
} deriving (Data, Typeable) | itsuart/fdc_archivist | src/ViewModels/NameValuePairViewModel.hs | mit | 239 | 0 | 8 | 33 | 49 | 30 | 19 | 8 | 0 |
module Data.Unique.Global
( getUnique
) where
import Data.Word
import Data.IORef
import System.IO.Unsafe
import Data.Unique.Internal
global :: IORef Word
global = unsafePerformIO $ newIORef 0
{-# NOINLINE global #-}
getUnique :: a -> IO (Unique a)
getUnique x = do
i <- readIORef global
writeIORef global $ i + 1
return $! MkUnique i x
{-# INLINE getUnique #-}
| treep/data-unique | Data/Unique/Global.hs | mit | 376 | 0 | 9 | 73 | 118 | 62 | 56 | 15 | 1 |