_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
deb0916a5489f327ef991ba9a1003a47df921f57de832692b78c35945aa7f97f | conjure-cp/conjure | Table.hs | {-# LANGUAGE DeriveGeneric, DeriveDataTypeable, DeriveFunctor, DeriveTraversable, DeriveFoldable #-}
module Conjure.Language.Expression.Op.Table where
import Conjure.Prelude
import Conjure.Language.Expression.Op.Internal.Common
import qualified Data.Aeson as JSON -- aeson
import qualified Data.HashMap.Strict as M -- unordered-containers
import qualified Data.Vector as V -- vector
-- | AST node for the @table@ operator.  The two fields are its two
--   operands; from 'typeOf' below, both must be int-valued lists/matrices
--   (a row of variables and a collection of allowed rows --
--   NOTE(review): operand roles inferred, confirm against Conjure docs).
data OpTable x = OpTable x x
deriving (Eq, Ord, Show, Data, Functor, Traversable, Foldable, Typeable, Generic)
-- Serialisation, hashing and JSON round-tripping are all derived generically.
instance Serialize x => Serialize (OpTable x)
instance Hashable x => Hashable (OpTable x)
instance ToJSON x => ToJSON (OpTable x) where toJSON = genericToJSON jsonOptions
instance FromJSON x => FromJSON (OpTable x) where parseJSON = genericParseJSON jsonOptions
-- Type checking: the first operand must itself be an int list/matrix, the
-- second a list/matrix whose elements are int lists/matrices.  The whole
-- expression has type Bool.
instance (TypeOf x, Pretty x) => TypeOf (OpTable x) where
typeOf p@(OpTable vars table) = do
tyVars <- typeOf vars
tyTable <- typeOf table
let
-- Shared failure path: report the expression and both operand types.
tyError = raiseTypeError $ vcat [ pretty p
, pretty vars <+> "has type" <+> pretty tyVars
, pretty table <+> "has type" <+> pretty tyTable
]
-- Accept only int-valued lists and matrices.
listLike (TypeList TypeInt{}) = return ()
listLike (TypeMatrix _ TypeInt{}) = return ()
listLike _ = tyError
-- First operand: a single row of ints.
listLike tyVars
-- Second operand: a collection whose elements are rows of ints.
case tyTable of
TypeList inner -> listLike inner
TypeMatrix _ inner -> listLike inner
_ -> tyError
return TypeBool
-- No algebraic simplification is defined for @table@.
instance SimplifyOp OpTable x where
simplifyOp _ = na "simplifyOp{OpTable}"
-- Renders as @table(a, b)@.
instance Pretty x => Pretty (OpTable x) where
prettyPrec _ (OpTable a b) = "table" <> prettyList prParens "," [a, b]
-- Symmetry-breaking description: a JSON object tagged "OpTable" whose two
-- children are explicitly flagged as symmetric.
instance (VarSymBreakingDescription x, ExpressionLike x) => VarSymBreakingDescription (OpTable x) where
varSymBreakingDescription (OpTable a b) = JSON.Object $ M.fromList
[ ("type", JSON.String "OpTable")
, ("children", JSON.Array $ V.fromList [varSymBreakingDescription a, varSymBreakingDescription b])
, ("symmetricChildren", JSON.Bool True)
]
| null | https://raw.githubusercontent.com/conjure-cp/conjure/dd5a27df138af2ccbbb970274c2b8f22ac6b26a0/src/Conjure/Language/Expression/Op/Table.hs | haskell | aeson
unordered-containers
vector | # LANGUAGE DeriveGeneric , DeriveDataTypeable , DeriveFunctor , , DeriveFoldable #
module Conjure.Language.Expression.Op.Table where
import Conjure.Prelude
import Conjure.Language.Expression.Op.Internal.Common
data OpTable x = OpTable x x
deriving (Eq, Ord, Show, Data, Functor, Traversable, Foldable, Typeable, Generic)
instance Serialize x => Serialize (OpTable x)
instance Hashable x => Hashable (OpTable x)
instance ToJSON x => ToJSON (OpTable x) where toJSON = genericToJSON jsonOptions
instance FromJSON x => FromJSON (OpTable x) where parseJSON = genericParseJSON jsonOptions
instance (TypeOf x, Pretty x) => TypeOf (OpTable x) where
typeOf p@(OpTable vars table) = do
tyVars <- typeOf vars
tyTable <- typeOf table
let
tyError = raiseTypeError $ vcat [ pretty p
, pretty vars <+> "has type" <+> pretty tyVars
, pretty table <+> "has type" <+> pretty tyTable
]
listLike (TypeList TypeInt{}) = return ()
listLike (TypeMatrix _ TypeInt{}) = return ()
listLike _ = tyError
listLike tyVars
case tyTable of
TypeList inner -> listLike inner
TypeMatrix _ inner -> listLike inner
_ -> tyError
return TypeBool
instance SimplifyOp OpTable x where
simplifyOp _ = na "simplifyOp{OpTable}"
instance Pretty x => Pretty (OpTable x) where
prettyPrec _ (OpTable a b) = "table" <> prettyList prParens "," [a, b]
instance (VarSymBreakingDescription x, ExpressionLike x) => VarSymBreakingDescription (OpTable x) where
varSymBreakingDescription (OpTable a b) = JSON.Object $ M.fromList
[ ("type", JSON.String "OpTable")
, ("children", JSON.Array $ V.fromList [varSymBreakingDescription a, varSymBreakingDescription b])
, ("symmetricChildren", JSON.Bool True)
]
|
f9b9a222b57310aff9311d1c36d5b31f327918dad44adde1f6ef96f143286302 | craigfe/sink | ppx_deriving_phantom.ml | open Ppxlib
(** Given a module type of the form:
{[
module type S1 = sig
type 'a t
val map : ('a -> 'b) -> 'a t -> 'b t
end
[@@deriving phantom]
]}
generate a new module type in which [t] has an extra phantom type parameter
that is unified across all occurrences:
{[
(** Equal to {!S1} but with an additional phantom type parameter. *)
module type S2 = sig
type ('a, 'phan) t
val map : ('a -> 'b) -> ('a, 'phan) t -> ('b, 'phan) t
end
]} *)
(* [add_phantom_parameter_to (module A) id] builds an AST mapper that, for
   the type named [id] inside the traversed fragment:
   - appends an invariant ['phantom] parameter to its declaration, and
   - appends the same variable to every constructor occurrence [... id],
   so the phantom parameter is unified across all occurrences. *)
let add_phantom_parameter_to (module A : Ast_builder.S) id =
let tvar = A.ptyp_var "phantom" in
object
inherit Ast_traverse.map as super
(* Extend the declaration of [id] with the extra type parameter. *)
method! type_declaration t =
let t = super#type_declaration t in
if String.equal t.ptype_name.txt id then
{ t with ptype_params = t.ptype_params @ [ (tvar, Invariant) ] }
else t
(* Extend every type expression of the form [... id] with the extra
   argument. *)
method! core_type t =
let t = super#core_type t in
match t.ptyp_desc with
| Ptyp_constr (l, vars) when l.txt = Lident id ->
{ t with ptyp_desc = Ptyp_constr (l, vars @ [ tvar ]) }
| _ -> t
end
(* [split_at_index i s] cuts [s] into its first [i] characters and the
   remainder. *)
let split_at_index : int -> string -> string * string =
 fun i s ->
  let left = String.sub s 0 i in
  let right = String.sub s i (String.length s - i) in
  (left, right)

(* [split_integer_suffix s] separates the longest run of trailing ASCII
   digits from [s]: [(prefix, Some n)] when at least one digit is present,
   [(s, None)] otherwise.  Like the original, it indexes the last character
   unconditionally, so it raises on the empty string. *)
let split_integer_suffix s : string * int option =
  let is_digit c = '0' <= c && c <= '9' in
  let last = String.length s - 1 in
  if not (is_digit s.[last]) then (s, None)
  else begin
    (* Walk left past every trailing digit. *)
    let i = ref last in
    while !i >= 0 && is_digit s.[!i] do
      decr i
    done;
    let prefix, suffix = split_at_index (!i + 1) s in
    (prefix, Some (int_of_string suffix))
  end
(** Parse as large a suffix as possible of a given string as an integer, then
    apply the given function to the result (if at least one digit was parsed). *)
let map_integer_suffix : (int option -> int) -> string -> string =
 fun f s ->
  (* Recombine the untouched prefix with the transformed suffix. *)
  match split_integer_suffix s with
  | prefix, suffix -> Printf.sprintf "%s%d" prefix (f suffix)
(* Deriver body: from a [module type Sn = sig ... end [@@deriving phantom]]
   item, emit a new module type whose name has its integer suffix
   incremented (or [1] appended), whose [t] gained a phantom parameter, and
   which carries a doc attribute plus, optionally, a [@@deriving ...]
   attribute built from the [subderiving] argument. *)
let add_phantom_parameter ~loc ~path:_
{ pmtd_name; pmtd_type; pmtd_attributes = _; _ } subderiving =
let (module A) = Ast_builder.make loc in
let open A in
(* Name bump: "S1" -> "S2", "S" -> "S1", etc. *)
let pmtd_name =
pmtd_name.txt
|> map_integer_suffix (function Some i -> i + 1 | None -> 1)
|> Located.mk
(* Rewrite every declaration and occurrence of [t] in the signature. *)
and pmtd_type =
pmtd_type
|> Option.map (add_phantom_parameter_to (module A) "t")#module_type
and pmtd_attributes =
(* Doc attribute pointing back at the source module type. *)
let doc =
attribute ~name:(Located.mk "ocaml.doc")
~payload:
(PStr
[
pstr_eval
(estring
(Format.sprintf
"Equal to {!%s} but with an additional type parameter"
pmtd_name.txt))
[];
])
in
match subderiving with
| None -> [ doc ]
| Some e ->
(* Re-attach the user-requested deriving to the generated type. *)
[
attribute ~name:(Located.mk "deriving")
~payload:(PStr [ pstr_eval e [] ]);
doc;
]
in
[ pstr_modtype { pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc = A.loc } ]
(* Register the deriver under the name "phantom", accepting an optional
   [subderiving] expression argument.  NOTE(review): the OCaml value is
   called [branded] while the registered name is "phantom" -- presumably a
   leftover from a rename; harmless but worth confirming. *)
let branded : Deriving.t =
let open Deriving in
let args = Args.(empty +> arg "subderiving" __) in
add
~str_module_type_decl:(Generator.make args add_phantom_parameter)
"phantom"
| null | https://raw.githubusercontent.com/craigfe/sink/c5431edfa1b06f1a09845a481c4afcb3e92f0667/src/ppx_deriving_phantom/ppx_deriving_phantom.ml | ocaml | * Given a module type of the form:
{[
module type S1 = sig
type 'a t
val map : ('a -> 'b) -> 'a t -> 'b t
end
[@@deriving phantom]
]}
generate a new module type in which [t] has an extra phantom type parameter
that is unified across all occurrences:
{[
(** Equal to {!S1} but with an additional phantom type parameter.
The whole string is an integer | open Ppxlib
module type S2 = sig
type ('a, 'phan) t
val map : ('a -> 'b) -> ('a, 'phan) t -> ('b, 'phan) t
end
]} *)
let add_phantom_parameter_to (module A : Ast_builder.S) id =
let tvar = A.ptyp_var "phantom" in
object
inherit Ast_traverse.map as super
method! type_declaration t =
let t = super#type_declaration t in
if String.equal t.ptype_name.txt id then
{ t with ptype_params = t.ptype_params @ [ (tvar, Invariant) ] }
else t
method! core_type t =
let t = super#core_type t in
match t.ptyp_desc with
| Ptyp_constr (l, vars) when l.txt = Lident id ->
{ t with ptyp_desc = Ptyp_constr (l, vars @ [ tvar ]) }
| _ -> t
end
let split_at_index : int -> string -> string * string =
fun i s -> (String.sub s 0 i, String.sub s i (String.length s - i))
let split_integer_suffix s : string * int option =
let last = String.length s - 1 in
let digit_at_index i =
let c = s.[i] in
48 <= Char.code c && Char.code c < 58
in
let rec aux = function
| n when digit_at_index n -> aux (n - 1)
| n -> split_at_index (n + 1) s
in
if digit_at_index last then
let prefix, suffix = aux last in
(prefix, Some (int_of_string suffix))
else (s, None)
* as large a suffix as possible of a given string as an integer , then
apply the given function to the result ( if at least one digit was parsed ) .
apply the given function to the result (if at least one digit was parsed). *)
let map_integer_suffix : (int option -> int) -> string -> string =
fun f s ->
let prefix, suffix = split_integer_suffix s in
prefix ^ string_of_int (f suffix)
let add_phantom_parameter ~loc ~path:_
{ pmtd_name; pmtd_type; pmtd_attributes = _; _ } subderiving =
let (module A) = Ast_builder.make loc in
let open A in
let pmtd_name =
pmtd_name.txt
|> map_integer_suffix (function Some i -> i + 1 | None -> 1)
|> Located.mk
and pmtd_type =
pmtd_type
|> Option.map (add_phantom_parameter_to (module A) "t")#module_type
and pmtd_attributes =
let doc =
attribute ~name:(Located.mk "ocaml.doc")
~payload:
(PStr
[
pstr_eval
(estring
(Format.sprintf
"Equal to {!%s} but with an additional type parameter"
pmtd_name.txt))
[];
])
in
match subderiving with
| None -> [ doc ]
| Some e ->
[
attribute ~name:(Located.mk "deriving")
~payload:(PStr [ pstr_eval e [] ]);
doc;
]
in
[ pstr_modtype { pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc = A.loc } ]
let branded : Deriving.t =
let open Deriving in
let args = Args.(empty +> arg "subderiving" __) in
add
~str_module_type_decl:(Generator.make args add_phantom_parameter)
"phantom"
|
b8ff912a93ac70a146b04b74edded351ddc9f736318bf387668211a6a71cc0b5 | deadpendency/deadpendency | WriteChecksGitHubC.hs | module Common.Effect.GitHub.WriteChecks.Carrier.WriteChecksGitHubC
( WriteChecksGitHubIOC (..),
)
where
import Common.Effect.AppEventEmit.AppEventEmit
import Common.Effect.AppEventEmit.Model.AppEventAdditional
import Common.Effect.AppEventEmit.Model.AppEventMessage
import Common.Effect.GitHub.InstallationAuth.InstallationAuth
import Common.Effect.GitHub.WriteChecks.Backend.CreateCheckRunBackend
import Common.Effect.GitHub.WriteChecks.Backend.UpdateCheckRunBackend
import Common.Effect.GitHub.WriteChecks.Model.CheckRunUpdateResult
import Common.Effect.GitHub.WriteChecks.WriteChecks (WriteChecks (..))
import Common.Model.Error.CommonError
import Common.Model.Error.WriteChecksError
import Control.Algebra (Algebra (..), Has, (:+:) (..))
import Control.Effect.Throw (Throw, liftEither)
-- | Carrier interpreting the 'WriteChecks' effect against the real GitHub
--   API; a plain newtype wrapper over the underlying monad.
newtype WriteChecksGitHubIOC m a = WriteChecksGitHubIOC {runWriteChecksGitHubIOC :: m a}
deriving newtype (Functor, Applicative, Monad, MonadIO)
-- Effect handler: 'CreateCheckRun' authenticates per-installation and calls
-- the GitHub create endpoint; 'UpdateCheckRun' reuses the existing
-- installation auth and calls the update endpoint.  Both log start/finish
-- app events and rethrow backend errors via 'liftEither'.  Other effects
-- are forwarded to the underlying carrier.
instance
( Algebra sig m,
MonadIO m,
Has AppEventEmit sig m,
Has (Throw CommonError) sig m,
Has (Throw WriteChecksError) sig m,
Has InstallationAuth sig m
) =>
Algebra (WriteChecks :+: sig) (WriteChecksGitHubIOC m)
where
alg hdl sig ctx = case sig of
(L (CreateCheckRun request)) -> do
emitAppEventInfoA (AppEventMessage "Started: Create check run") (AppEventAdditional request)
-- Fresh auth for the installation named in the request.
let installId = request ^. #_appInstallationId
ghAuth <- obtainInstallationAuth installId
eitherCreateCheckRunResult <- liftIO $ githubCreateCheckRun ghAuth request
liftedErrorResult <- liftEither eitherCreateCheckRunResult
emitAppEventInfoA (AppEventMessage "Finished: Create check run") (AppEventAdditional liftedErrorResult)
WriteChecksGitHubIOC $ pure (ctx $> liftedErrorResult)
(L (UpdateCheckRun request)) -> do
-- currently the checkRunOutput can be huge, so can't always log it
emitAppEventInfoA (AppEventMessage "Started: Update check run") (AppEventAdditional $ request & #_checkRunOutput .~ Nothing)
ghAuth <- existingInstallationAuth
eitherUpdateCheckRunResult <- liftIO $ githubUpdateCheckRun ghAuth request
liftedCheckRun <- liftEither eitherUpdateCheckRunResult
let result = CheckRunUpdateResult liftedCheckRun
emitAppEventInfoA (AppEventMessage "Finished: Update check run") (AppEventAdditional result)
WriteChecksGitHubIOC $ pure (ctx $> result)
-- Non-WriteChecks effects: unwrap, delegate, rewrap.
(R other) -> WriteChecksGitHubIOC $ alg (runWriteChecksGitHubIOC . hdl) other ctx
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common/src/Common/Effect/GitHub/WriteChecks/Carrier/WriteChecksGitHubC.hs | haskell | currently the checkRunOutput can be huge, so can't always log it | module Common.Effect.GitHub.WriteChecks.Carrier.WriteChecksGitHubC
( WriteChecksGitHubIOC (..),
)
where
import Common.Effect.AppEventEmit.AppEventEmit
import Common.Effect.AppEventEmit.Model.AppEventAdditional
import Common.Effect.AppEventEmit.Model.AppEventMessage
import Common.Effect.GitHub.InstallationAuth.InstallationAuth
import Common.Effect.GitHub.WriteChecks.Backend.CreateCheckRunBackend
import Common.Effect.GitHub.WriteChecks.Backend.UpdateCheckRunBackend
import Common.Effect.GitHub.WriteChecks.Model.CheckRunUpdateResult
import Common.Effect.GitHub.WriteChecks.WriteChecks (WriteChecks (..))
import Common.Model.Error.CommonError
import Common.Model.Error.WriteChecksError
import Control.Algebra (Algebra (..), Has, (:+:) (..))
import Control.Effect.Throw (Throw, liftEither)
newtype WriteChecksGitHubIOC m a = WriteChecksGitHubIOC {runWriteChecksGitHubIOC :: m a}
deriving newtype (Functor, Applicative, Monad, MonadIO)
instance
( Algebra sig m,
MonadIO m,
Has AppEventEmit sig m,
Has (Throw CommonError) sig m,
Has (Throw WriteChecksError) sig m,
Has InstallationAuth sig m
) =>
Algebra (WriteChecks :+: sig) (WriteChecksGitHubIOC m)
where
alg hdl sig ctx = case sig of
(L (CreateCheckRun request)) -> do
emitAppEventInfoA (AppEventMessage "Started: Create check run") (AppEventAdditional request)
let installId = request ^. #_appInstallationId
ghAuth <- obtainInstallationAuth installId
eitherCreateCheckRunResult <- liftIO $ githubCreateCheckRun ghAuth request
liftedErrorResult <- liftEither eitherCreateCheckRunResult
emitAppEventInfoA (AppEventMessage "Finished: Create check run") (AppEventAdditional liftedErrorResult)
WriteChecksGitHubIOC $ pure (ctx $> liftedErrorResult)
(L (UpdateCheckRun request)) -> do
emitAppEventInfoA (AppEventMessage "Started: Update check run") (AppEventAdditional $ request & #_checkRunOutput .~ Nothing)
ghAuth <- existingInstallationAuth
eitherUpdateCheckRunResult <- liftIO $ githubUpdateCheckRun ghAuth request
liftedCheckRun <- liftEither eitherUpdateCheckRunResult
let result = CheckRunUpdateResult liftedCheckRun
emitAppEventInfoA (AppEventMessage "Finished: Update check run") (AppEventAdditional result)
WriteChecksGitHubIOC $ pure (ctx $> result)
(R other) -> WriteChecksGitHubIOC $ alg (runWriteChecksGitHubIOC . hdl) other ctx
|
03d6e090dff7598d1623e27259b9920a0a45d2660427a96c08cd70846b26a901 | nandor/llir-ocaml | t210-setfield0.ml | TEST
include tool-ocaml-lib
flags = "-w a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
include tool-ocaml-lib
flags = "-w a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
(* Bytecode regression test for SETFIELD0: store into a record's first
   (mutable) field, then check the store is observed by a subsequent read.
   Raises [Not_found] if the mutation was lost. *)
open Lib;;
(* Record with a single mutable int field. *)
type t = {
mutable a : int;
};;
let x = {a = 7} in
x.a <- 11;
(* The write above must be visible here. *)
if x.a <> 11 then raise Not_found;
x
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 CONSTINT 7
11 MAKEBLOCK1 0
13 PUSHCONSTINT 11
15 PUSHACC1
16 SETFIELD0
17 CONSTINT 11
19 PUSHACC1
20 GETFIELD0
21 NEQ
22 BRANCHIFNOT 29
24 Not_found
26 MAKEBLOCK1 0
28 RAISE
29 ACC0
30 POP 1
32 ATOM0
33 SETGLOBAL T210 - setfield0
35 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 CONSTINT 7
11 MAKEBLOCK1 0
13 PUSHCONSTINT 11
15 PUSHACC1
16 SETFIELD0
17 CONSTINT 11
19 PUSHACC1
20 GETFIELD0
21 NEQ
22 BRANCHIFNOT 29
24 GETGLOBAL Not_found
26 MAKEBLOCK1 0
28 RAISE
29 ACC0
30 POP 1
32 ATOM0
33 SETGLOBAL T210-setfield0
35 STOP
**)
| null | https://raw.githubusercontent.com/nandor/llir-ocaml/9c019f15c444e30c825b1673cbe827e0497868fe/testsuite/tests/tool-ocaml/t210-setfield0.ml | ocaml | TEST
include tool - ocaml - lib
flags = " -w a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
type t = {
mutable a : int;
};;
let x = {a = 7} in
x.a <- 11;
if x.a <> 11 then raise Not_found;
x
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 CONSTINT 7
11 MAKEBLOCK1 0
13 PUSHCONSTINT 11
15 PUSHACC1
16 SETFIELD0
17 CONSTINT 11
19 PUSHACC1
20 GETFIELD0
21 NEQ
22 BRANCHIFNOT 29
24 Not_found
26 MAKEBLOCK1 0
28 RAISE
29 ACC0
30 POP 1
32 ATOM0
33 SETGLOBAL T210 - setfield0
35 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 CONSTINT 7
11 MAKEBLOCK1 0
13 PUSHCONSTINT 11
15 PUSHACC1
16 SETFIELD0
17 CONSTINT 11
19 PUSHACC1
20 GETFIELD0
21 NEQ
22 BRANCHIFNOT 29
24 GETGLOBAL Not_found
26 MAKEBLOCK1 0
28 RAISE
29 ACC0
30 POP 1
32 ATOM0
33 SETGLOBAL T210-setfield0
35 STOP
**)
|
|
85cc2c9d27eb6ff0aefc8b840ebba9a06c8e28e6dd838b21c561cab11c3b3fd0 | pmatos/racket-binaryen | common.rkt | #lang typed/racket/base
;; ---------------------------------------------------------------------------------------------------
(provide fits-int? fits-uint?)
;; ---------------------------------------------------------------------------------------------------
(: fits-int? (-> Integer Integer Boolean))
;; True when n is representable as a signed two's-complement integer of
;; `bits` bits, i.e. -2^(bits-1) <= n <= 2^(bits-1) - 1.
;; Fix: the lower bound must be negated.  As written the test was
;; 2^(bits-1) <= n <= 2^(bits-1) - 1, an empty range, so the predicate
;; always returned #f.
(define (fits-int? n bits)
  (<= (- (expt 2 (- bits 1)))
      n
      (- (expt 2 (- bits 1)) 1)))
(: fits-uint? (-> Integer Integer Boolean))
;; True when n is representable as an unsigned integer of `bits` bits,
;; i.e. 0 <= n < 2^bits.
(define (fits-uint? n bits)
  (and (>= n 0)
       (< n (expt 2 bits))))
| null | https://raw.githubusercontent.com/pmatos/racket-binaryen/3535a9400b20f5e64d799bb70916d85466802e6d/common.rkt | racket | ---------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------- | #lang typed/racket/base
(provide fits-int? fits-uint?)
(: fits-int? (-> Integer Integer Boolean))
(define (fits-int? n bits)
(<= (expt 2 (- bits 1))
n
(- (expt 2 (- bits 1)) 1)))
(: fits-uint? (-> Integer Integer Boolean))
(define (fits-uint? n bits)
(<= 0
n
(- (expt 2 bits) 1)))
|
a9415545b21a8f13beb2b94fb9c3742a374079f395d67d04c0aa690e000fbe8f | Zulu-Inuoe/clution | keep-alive-stream.lisp | (in-package :cl-user)
;;; Gray-stream wrappers that expose a bounded view of an underlying input
;;; stream while leaving the connection open for reuse.  The chunked
;;; variant's CR/LF state machine below suggests these delimit one HTTP
;;; message body -- NOTE(review): inferred from the package name and state
;;; machine; confirm against dexador's callers.
(defpackage dexador.keep-alive-stream
(:use :cl)
(:import-from :trivial-gray-streams
:fundamental-input-stream
:stream-read-byte
:stream-read-sequence
:stream-element-type
:open-stream-p)
(:import-from :alexandria
:xor)
(:export :make-keep-alive-stream
:keep-alive-stream
:keep-alive-chunked-stream))
(in-package :dexador.keep-alive-stream)
;; Input stream that reads at most END octets from the wrapped STREAM
;; (content-length style delimiting).
(defclass keep-alive-stream (fundamental-input-stream)
((stream :type stream
:initarg :stream
:initform (error ":stream is required")
:accessor keep-alive-stream-stream)
;; Remaining byte budget; decremented by STREAM-READ-SEQUENCE below.
(end :initarg :end
:initform nil
:accessor keep-alive-stream-end)))
;; Variant delimited by a CR LF CR LF terminator instead of a byte count.
;; STATE tracks progress through that four-octet sequence:
;; -1 = nothing matched, 0..2 = partial match, 3 = terminator seen (EOF).
(defclass keep-alive-chunked-stream (keep-alive-stream)
((state :type fixnum
:initarg :state
:initform -1)))
;; Wrap STREAM in one of the two delimiting streams.  Exactly one of
;; END / CHUNKED must be supplied (enforced by the XOR assertion).
(defun make-keep-alive-stream (stream &key end chunked)
(assert (xor end chunked))
(if chunked
(make-instance 'keep-alive-chunked-stream :stream stream)
(make-instance 'keep-alive-stream :stream stream :end end)))
;; Read one octet while tracking progress through the CR LF CR LF
;; terminator; once the full sequence has been seen (state 3), further
;; reads return :eof.  The terminator octets themselves are still handed
;; to the caller, and any non-matching octet resets the scan to -1.
(defmethod stream-read-byte ((stream keep-alive-chunked-stream))
(block nil
(when (= (slot-value stream 'state) 3)
(return :eof))
(let ((byte (read-byte (keep-alive-stream-stream stream) nil nil)))
(unless byte
(return :eof))
(with-slots (state) stream
(ecase state
;; Nothing matched yet
(-1
(when (= byte (char-code #\Return))
(setf state 0)))
;; Read CR
(0
(if (= byte (char-code #\Newline))
(setf state 1)
(setf state -1)))
;; Read CRLF
(1
(if (= byte (char-code #\Return))
(setf state 2)
(setf state -1)))
;; Read CRLFCR
(2
(if (= byte (char-code #\Newline))
(setf state 3)
(setf state -1)))))
(return byte))))
;; Fill SEQUENCE from the wrapped stream, never past the remaining END
;; budget; the budget is decremented by the number of octets actually read
;; (READ-SEQUENCE returns the index of the first element not filled, so
;; that count is n - start).
(defmethod stream-read-sequence ((stream keep-alive-stream) sequence start end &key)
(declare (optimize speed))
(let* ((to-read (min (- end start) (keep-alive-stream-end stream)))
(n (read-sequence sequence (keep-alive-stream-stream stream)
:start start
:end (+ start to-read))))
(decf (keep-alive-stream-end stream) (- n start))
n))
;; Byte-at-a-time fill so the terminator scan in STREAM-READ-BYTE runs for
;; every octet.  On a full read returns END.
;; NOTE(review): on early EOF this returns (max 0 (1- i)), one less than
;; the conventional READ-SEQUENCE result of i (first index not filled) --
;; confirm whether the off-by-one is intentional.
(defmethod stream-read-sequence ((stream keep-alive-chunked-stream) sequence start end &key)
(declare (optimize speed))
(loop for i from start below end
for byte = (read-byte stream nil nil)
if byte
do (setf (aref sequence i) byte)
else
do (return (max 0 (1- i)))
finally (return i)))
;; Octet streams.
(defmethod stream-element-type ((stream keep-alive-stream))
'(unsigned-byte 8))
;; Openness and closing delegate to the wrapped stream; CLOSE is a no-op
;; when the wrapped stream is already closed.
(defmethod open-stream-p ((stream keep-alive-stream))
(open-stream-p (keep-alive-stream-stream stream)))
(defmethod close ((stream keep-alive-stream) &key abort)
(with-slots (stream) stream
(when (open-stream-p stream)
(close stream :abort abort))))
| null | https://raw.githubusercontent.com/Zulu-Inuoe/clution/b72f7afe5f770ff68a066184a389c23551863f7f/cl-clution/qlfile-libs/dexador-20171130-git/src/keep-alive-stream.lisp | lisp | Read CR
Read CRLF
Read CRLFCR | (in-package :cl-user)
(defpackage dexador.keep-alive-stream
(:use :cl)
(:import-from :trivial-gray-streams
:fundamental-input-stream
:stream-read-byte
:stream-read-sequence
:stream-element-type
:open-stream-p)
(:import-from :alexandria
:xor)
(:export :make-keep-alive-stream
:keep-alive-stream
:keep-alive-chunked-stream))
(in-package :dexador.keep-alive-stream)
(defclass keep-alive-stream (fundamental-input-stream)
((stream :type stream
:initarg :stream
:initform (error ":stream is required")
:accessor keep-alive-stream-stream)
(end :initarg :end
:initform nil
:accessor keep-alive-stream-end)))
(defclass keep-alive-chunked-stream (keep-alive-stream)
((state :type fixnum
:initarg :state
:initform -1)))
(defun make-keep-alive-stream (stream &key end chunked)
(assert (xor end chunked))
(if chunked
(make-instance 'keep-alive-chunked-stream :stream stream)
(make-instance 'keep-alive-stream :stream stream :end end)))
(defmethod stream-read-byte ((stream keep-alive-chunked-stream))
(block nil
(when (= (slot-value stream 'state) 3)
(return :eof))
(let ((byte (read-byte (keep-alive-stream-stream stream) nil nil)))
(unless byte
(return :eof))
(with-slots (state) stream
(ecase state
(-1
(when (= byte (char-code #\Return))
(setf state 0)))
(0
(if (= byte (char-code #\Newline))
(setf state 1)
(setf state -1)))
(1
(if (= byte (char-code #\Return))
(setf state 2)
(setf state -1)))
(2
(if (= byte (char-code #\Newline))
(setf state 3)
(setf state -1)))))
(return byte))))
(defmethod stream-read-sequence ((stream keep-alive-stream) sequence start end &key)
(declare (optimize speed))
(let* ((to-read (min (- end start) (keep-alive-stream-end stream)))
(n (read-sequence sequence (keep-alive-stream-stream stream)
:start start
:end (+ start to-read))))
(decf (keep-alive-stream-end stream) (- n start))
n))
(defmethod stream-read-sequence ((stream keep-alive-chunked-stream) sequence start end &key)
(declare (optimize speed))
(loop for i from start below end
for byte = (read-byte stream nil nil)
if byte
do (setf (aref sequence i) byte)
else
do (return (max 0 (1- i)))
finally (return i)))
(defmethod stream-element-type ((stream keep-alive-stream))
'(unsigned-byte 8))
(defmethod open-stream-p ((stream keep-alive-stream))
(open-stream-p (keep-alive-stream-stream stream)))
(defmethod close ((stream keep-alive-stream) &key abort)
(with-slots (stream) stream
(when (open-stream-p stream)
(close stream :abort abort))))
|
4cc9190d1325f06cfa28886ff4bb855defb620739a85acbe1f3d6581b5898bc4 | kelamg/HtDP2e-workthrough | ex125.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex125) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
;; Exercise 125: three candidate structure definitions.  The third is kept
;; deliberately malformed to illustrate an illegal form (it would not run);
;; the explanations follow below.
(define-struct oops []) ; legal
(define-struct child [parents dob date]) ; legal
(define-struct (child person) [dob date]) ; illegal
;; (define-struct oops []) is legal because it follows the syntax rules for
;; a structure definition. Even though it has no variables in the enclosing
;; parenthesis, it is still syntactically legal.
;; (define-struct child [parents dob date]) is legal because it uses the
;; define-struct keyword, followed by a variable name, and a sequence of 3
;; variable names enclosed within square brackets. Square brackets or
;; parentheses can be used here legally in BSL.
;; (define-struct (child person) [dob date]) is illegal because it
;; defines a function call in place of the structure's variable name
legal
legal
illegal
(define-struct oops []) is legal because it follows the syntax rules for
a structure definition. Even though it has no variables in the enclosing
parenthesis, it is still syntactically legal.
variable names enclosed within square brackets. Square brackets or
defines a function call in place of the strcture's variable name | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex125) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
( define - struct child [ parents date ] ) is legal because it uses the
define - struct keyword , followed by a variable name , and a sequence of 3
parentheses can be used here legally in
( define - struct ( child person ) [ dob date ] ) is illegal because it |
3f8377ca558cf164b46da3b4faec20c9132bd8c5d76c9c11b33630d3d9ee623b | janestreet/async_kernel | throttled.ml | open! Core
open! Import
open! Deferred_std
module Deferred = Deferred1
module Counting_semaphore : sig
type t
val wait_to_acquire_job_token : t -> unit Deferred.t
val release_job_token : t -> unit
val abort : t -> unit
val create : max_concurrent_jobs:int -> t
end = struct
type t =
{ mutable max_concurrent_jobs : int
; mutable waiter : unit Ivar.t option
; mutable aborted : bool
}
let wait_to_acquire_job_token ({ max_concurrent_jobs; waiter; aborted } as t) =
match aborted with
| true -> Deferred.never ()
| false ->
if max_concurrent_jobs > 0
then (
t.max_concurrent_jobs <- max_concurrent_jobs - 1;
Deferred.return ())
else (
assert (Option.is_none waiter);
let ivar = Ivar.create () in
t.waiter <- Some ivar;
Ivar.read ivar)
;;
let release_job_token ({ max_concurrent_jobs; waiter; aborted = _ } as t) =
match waiter with
| Some ivar ->
Ivar.fill ivar ();
t.waiter <- None
| None -> t.max_concurrent_jobs <- max_concurrent_jobs + 1
;;
let abort t =
t.aborted <- true;
t.waiter <- None
;;
let create ~max_concurrent_jobs =
{ max_concurrent_jobs; waiter = None; aborted = false }
;;
end
module T = struct
type 'a t =
{ compute :
Execution_context.t -> Counting_semaphore.t -> ('a Deferred.t -> unit) -> unit
}
[@@unboxed]
let return x = { compute = (fun _ _ k -> k (return x)) }
let map =
`Custom
(fun t ~f ->
{ compute =
(fun exec_ctx semaphore k ->
t.compute exec_ctx semaphore (fun d ->
k
(let%map result = d in
f result)))
})
;;
let apply t_f t =
{ compute =
(fun exec_ctx semaphore k ->
t_f.compute exec_ctx semaphore (fun df ->
t.compute exec_ctx semaphore (fun dv ->
k
(let%bind f = df in
let%map v = dv in
f v))))
}
;;
end
include T
include Applicative.Make (T)
let enqueue' scheduler ctx f =
let ivar = Ivar.create () in
Scheduler.enqueue scheduler ctx (fun () -> upon (f ()) (Ivar.fill ivar)) ();
Ivar.read ivar
;;
let job f =
{ compute =
(fun exec_ctx semaphore k ->
Deferred.upon (Counting_semaphore.wait_to_acquire_job_token semaphore) (fun () ->
k
(enqueue' (Scheduler.t ()) exec_ctx (fun () ->
let%map a = f () in
Counting_semaphore.release_job_token semaphore;
a))))
}
;;
let run t ~max_concurrent_jobs =
let semaphore = Counting_semaphore.create ~max_concurrent_jobs in
The name is set to the empty string in order to prevent [ Monitor.send_exn ]
from appending information about this monitor to the exceptions we forward .
This matters because we want behavior to [ Throttle ] and not break
existing tests .
from appending information about this monitor to the exceptions we forward.
This matters because we want simliar behavior to [Throttle] and not break
existing tests. *)
let monitor = Monitor.create ~name:"" () in
let parent_monitor = Monitor.current () in
Monitor.detach_and_iter_errors monitor ~f:(fun err ->
Counting_semaphore.abort semaphore;
Monitor.send_exn parent_monitor err);
let exec_ctx =
Execution_context.create_like
~monitor
(Scheduler.current_execution_context (Scheduler.t ()))
in
let ivar = Ivar.create () in
t.compute exec_ctx semaphore (fun r -> Deferred.upon r (Ivar.fill ivar));
Ivar.read ivar
;;
let of_thunk thunk =
{ compute =
(fun exec_ctx semaphore k ->
let t = thunk () in
t.compute exec_ctx semaphore k)
}
;;
let ( *> ) t1 t2 =
{ compute =
(fun exec_ctx semaphore k ->
t1.compute exec_ctx semaphore (fun d1 ->
t2.compute exec_ctx semaphore (fun d2 ->
k
(let%bind () = d1 in
d2))))
}
;;
let both_unit = ( *> )
| null | https://raw.githubusercontent.com/janestreet/async_kernel/5807f6d4ef415408e8ec5afe74cdff5d27f277d4/src/throttled.ml | ocaml | open! Core
open! Import
open! Deferred_std
module Deferred = Deferred1
module Counting_semaphore : sig
type t
val wait_to_acquire_job_token : t -> unit Deferred.t
val release_job_token : t -> unit
val abort : t -> unit
val create : max_concurrent_jobs:int -> t
end = struct
type t =
{ mutable max_concurrent_jobs : int
; mutable waiter : unit Ivar.t option
; mutable aborted : bool
}
let wait_to_acquire_job_token ({ max_concurrent_jobs; waiter; aborted } as t) =
match aborted with
| true -> Deferred.never ()
| false ->
if max_concurrent_jobs > 0
then (
t.max_concurrent_jobs <- max_concurrent_jobs - 1;
Deferred.return ())
else (
assert (Option.is_none waiter);
let ivar = Ivar.create () in
t.waiter <- Some ivar;
Ivar.read ivar)
;;
let release_job_token ({ max_concurrent_jobs; waiter; aborted = _ } as t) =
match waiter with
| Some ivar ->
Ivar.fill ivar ();
t.waiter <- None
| None -> t.max_concurrent_jobs <- max_concurrent_jobs + 1
;;
let abort t =
t.aborted <- true;
t.waiter <- None
;;
let create ~max_concurrent_jobs =
{ max_concurrent_jobs; waiter = None; aborted = false }
;;
end
module T = struct
type 'a t =
{ compute :
Execution_context.t -> Counting_semaphore.t -> ('a Deferred.t -> unit) -> unit
}
[@@unboxed]
let return x = { compute = (fun _ _ k -> k (return x)) }
let map =
`Custom
(fun t ~f ->
{ compute =
(fun exec_ctx semaphore k ->
t.compute exec_ctx semaphore (fun d ->
k
(let%map result = d in
f result)))
})
;;
let apply t_f t =
{ compute =
(fun exec_ctx semaphore k ->
t_f.compute exec_ctx semaphore (fun df ->
t.compute exec_ctx semaphore (fun dv ->
k
(let%bind f = df in
let%map v = dv in
f v))))
}
;;
end
include T
include Applicative.Make (T)
let enqueue' scheduler ctx f =
let ivar = Ivar.create () in
Scheduler.enqueue scheduler ctx (fun () -> upon (f ()) (Ivar.fill ivar)) ();
Ivar.read ivar
;;
let job f =
{ compute =
(fun exec_ctx semaphore k ->
Deferred.upon (Counting_semaphore.wait_to_acquire_job_token semaphore) (fun () ->
k
(enqueue' (Scheduler.t ()) exec_ctx (fun () ->
let%map a = f () in
Counting_semaphore.release_job_token semaphore;
a))))
}
;;
let run t ~max_concurrent_jobs =
let semaphore = Counting_semaphore.create ~max_concurrent_jobs in
The name is set to the empty string in order to prevent [ Monitor.send_exn ]
from appending information about this monitor to the exceptions we forward .
This matters because we want behavior to [ Throttle ] and not break
existing tests .
from appending information about this monitor to the exceptions we forward.
This matters because we want simliar behavior to [Throttle] and not break
existing tests. *)
let monitor = Monitor.create ~name:"" () in
let parent_monitor = Monitor.current () in
Monitor.detach_and_iter_errors monitor ~f:(fun err ->
Counting_semaphore.abort semaphore;
Monitor.send_exn parent_monitor err);
let exec_ctx =
Execution_context.create_like
~monitor
(Scheduler.current_execution_context (Scheduler.t ()))
in
let ivar = Ivar.create () in
t.compute exec_ctx semaphore (fun r -> Deferred.upon r (Ivar.fill ivar));
Ivar.read ivar
;;
let of_thunk thunk =
{ compute =
(fun exec_ctx semaphore k ->
let t = thunk () in
t.compute exec_ctx semaphore k)
}
;;
let ( *> ) t1 t2 =
{ compute =
(fun exec_ctx semaphore k ->
t1.compute exec_ctx semaphore (fun d1 ->
t2.compute exec_ctx semaphore (fun d2 ->
k
(let%bind () = d1 in
d2))))
}
;;
let both_unit = ( *> )
|
|
e1db7f317e41f846f5254f4a3ac45992071d9743308599d6ffebcc688f6aae9c | kdltr/chicken-core | callback-tests.scm | ;;;; callback-tests.scm
(import (only (chicken process-context) command-line-arguments))
(define k1)
(define-external (foo) void
(call/cc
(lambda (k) (set! k1 k)))
(print "hi!"))
#>
extern void foo();
static void bar() { foo(); }
<#
(print "callbacks ...")
((foreign-safe-lambda void "bar"))
(when (member "twice" (command-line-arguments))
(k1 #f))
| null | https://raw.githubusercontent.com/kdltr/chicken-core/b2e6c5243dd469064bec947cb3b49dafaa1514e5/tests/callback-tests.scm | scheme | callback-tests.scm
} |
(import (only (chicken process-context) command-line-arguments))
(define k1)
(define-external (foo) void
(call/cc
(lambda (k) (set! k1 k)))
(print "hi!"))
#>
<#
(print "callbacks ...")
((foreign-safe-lambda void "bar"))
(when (member "twice" (command-line-arguments))
(k1 #f))
|
6e4afab1626e142cde50d92a724fe244e2ff5946dd6bb03be78539a44bfa899e | denisidoro/rosebud | resolvers.clj | (ns rosebud.resolvers
(:require [rosebud.components.bucket.protocols.provider :as p.bucket]
[rosebud.logic.investment.core :as l.investment]))
(defn yielding-cdi-investments
[{{:keys [bucket]} :components}]
(->> (p.bucket/get-investments bucket)
l.investment/fixed-income-yielding
(map l.investment/as-tabular)))
(def queries
{:cdi/yielding yielding-cdi-investments})
(def tags
{:buckets [:a :b :c]})
| null | https://raw.githubusercontent.com/denisidoro/rosebud/90385528d9a75a0e17803df487a4f6cfb87e981c/server/src/rosebud/resolvers.clj | clojure | (ns rosebud.resolvers
(:require [rosebud.components.bucket.protocols.provider :as p.bucket]
[rosebud.logic.investment.core :as l.investment]))
(defn yielding-cdi-investments
[{{:keys [bucket]} :components}]
(->> (p.bucket/get-investments bucket)
l.investment/fixed-income-yielding
(map l.investment/as-tabular)))
(def queries
{:cdi/yielding yielding-cdi-investments})
(def tags
{:buckets [:a :b :c]})
|
|
b8da4ffcf56ad7f86f4377f7496eea2386856d66b446455be4ea00415af522ce | erlangbureau/jamdb_oracle | jamdb_oracle_crypt.erl | -module(jamdb_oracle_crypt).
%% API
-export([generate/1]).
-export([validate/1]).
-include("jamdb_oracle.hrl").
%% API
o3logon(#logon{auth = Sess , key = KeySess , password = Pass } ) - >
% IVec = <<0:64>>,
block_decrypt(des_cbc , binary : part(KeySess,0,8 ) , IVec , ) ,
N = ( 8 - ( length(Pass ) rem 8 ) ) rem 8 ,
= < < ( list_to_binary(Pass))/binary , ( binary : copy(<<0 > > , N))/binary > > ,
block_encrypt(des_cbc , binary : part(SrvSess,0,8 ) , IVec , CliPass ) ,
# logon{password = hexify(AuthPass)++[N ] } .
o5logon(#logon{auth=Sess, user=User, password=Pass, bits=128} = Logon) ->
IVec = <<0:64>>,
CliPass = norm(User++Pass),
B1 = block_encrypt(des_cbc, unhex("0123456789ABCDEF"), IVec, CliPass),
B2 = block_encrypt(des_cbc, binary:part(B1,byte_size(B1),-8), IVec, CliPass),
KeySess = <<(binary:part(B2,byte_size(B2),-8))/binary,0:64>>,
generate(Logon#logon{auth=unhex(Sess), key=KeySess});
o5logon(#logon{auth=Sess, salt=Salt, password=Pass, bits=192} = Logon) ->
Data = crypto:hash(sha,<<(list_to_binary(Pass))/binary,(unhex(Salt))/binary>>),
KeySess = <<Data/binary,0:32>>,
generate(Logon#logon{auth=unhex(Sess), key=KeySess});
o5logon(#logon{auth=Sess, salt=Salt, password=Pass, bits=256} = Logon) ->
Data = pbkdf2(sha512, 64, 4096, 64, Pass, <<(unhex(Salt))/binary,"AUTH_PBKDF2_SPEEDY_KEY">>),
KeySess = binary:part(crypto:hash(sha512, <<Data/binary, (unhex(Salt))/binary>>),0,32),
generate(Logon#logon{auth=unhex(Sess), key=KeySess,
der_key = <<(crypto:strong_rand_bytes(16))/binary, Data/binary>>}).
generate(#logon{type=Type,bits=undefined} = Logon) ->
Bits =
case Type of
2361 -> 128;
6949 -> 192;
18453 -> 256
end,
o5logon(Logon#logon{bits=Bits});
generate(#logon{auth=Sess, key=KeySess, der_salt=DerivedSalt, der_key=DerivedKey,
password=Pass, newpassword=NewPass, bits=Bits} = Logon) ->
IVec = <<0:128>>,
Cipher = cipher(Bits),
SrvSess = block_decrypt(Cipher, KeySess, IVec, Sess),
CliSess =
case binary:match(SrvSess,pad(8, <<>>)) of
{40,8} -> pad(8, crypto:strong_rand_bytes(40));
_ -> crypto:strong_rand_bytes(byte_size(SrvSess))
end,
AuthSess = block_encrypt(Cipher, KeySess, IVec, CliSess),
CatKey = cat_key(SrvSess, CliSess, DerivedSalt, Bits),
KeyConn = conn_key(CatKey, DerivedSalt, Bits),
AuthPass = block_encrypt(Cipher, KeyConn, IVec, pad(Pass)),
AuthNewPass =
case NewPass of
[] -> <<>>;
_ -> block_encrypt(Cipher, KeyConn, IVec, pad(NewPass))
end,
SpeedyKey =
case DerivedKey of
undefined -> <<>>;
_ -> block_encrypt(Cipher, KeyConn, IVec, DerivedKey)
end,
Logon#logon{auth=list_to_binary(hexify(AuthSess)), key=KeyConn, speedy_key=hexify(SpeedyKey),
password=hexify(AuthPass), newpassword=hexify(AuthNewPass)}.
validate(#logon{auth=Resp, key=KeyConn}) ->
IVec = <<0:128>>,
Cipher = cipher(byte_size(KeyConn) * 8),
Data = block_decrypt(Cipher, KeyConn, IVec, unhex(Resp)),
case binary:match(Data,<<"SERVER_TO_CLIENT">>) of
nomatch -> error;
_ -> ok
end.
%% internal
cipher(128) -> aes_128_cbc;
cipher(192) -> aes_192_cbc;
cipher(256) -> aes_256_cbc.
conn_key(Key, undefined, 128) ->
<<(erlang:md5(Key))/binary>>;
conn_key(Key, undefined, 192) ->
<<(erlang:md5(binary:part(Key,0,16)))/binary,
(binary:part(erlang:md5(binary:part(Key,16,8)),0,8))/binary>>;
conn_key(Key, DerivedSalt, Bits) ->
pbkdf2(sha512, 64, 3, Bits div 8, hexify(Key), unhex(DerivedSalt)).
cat_key(Key, Key2, undefined, Bits) ->
cat_key(binary:part(Key, 16, Bits div 8),binary:part(Key2, 16, Bits div 8),[]);
cat_key(Key, Key2, _DerivedSalt, Bits) ->
<<(binary:part(Key2, 0, Bits div 8))/binary,(binary:part(Key, 0, Bits div 8))/binary>>.
cat_key(<<>>,<<>>,S) ->
list_to_binary(S);
cat_key(<<A, Rest/bits>>,<<B, Rest2/bits>>,S) ->
cat_key(Rest,Rest2,S++[A bxor B]).
norm(Data) ->
Bin = norm(list_to_binary(Data),[]),
N = (8 - (byte_size(Bin) rem 8 )) rem 8,
<<Bin/binary, (binary:copy(<<0>>, N))/binary>>.
norm(<<>>,S) ->
list_to_binary(S);
norm(<<A/utf8, Rest/bits>>,S) ->
B = case A of
N when N > 255 -> 63;
N when N >= 97, N =< 122 -> N-32;
N -> N
end,
norm(Rest,S++[0,B]).
pad(S) ->
P = 16 - (length(S) rem 16),
<<(pad(16,<<>>))/binary, (pad(P,list_to_binary(S)))/binary>>.
pad(P, Bin) -> <<Bin/binary, (binary:copy(<<P>>, P))/binary>>.
unhex(S) ->
list_to_binary(unhex(S, [])).
unhex([], Acc) ->
lists:reverse(Acc);
unhex([A, B | S], Acc) ->
unhex(S, [list_to_integer([A, B], 16) | Acc]).
hexify(Bin) ->
[hex_byte(B) || B <- binary_to_list(Bin)].
hex_byte(B) when B < 16 -> "0"++integer_to_list(B, 16);
hex_byte(B) -> integer_to_list(B, 16).
block_encrypt(Cipher, Key, Ivec, Data) ->
crypto:crypto_one_time(Cipher, Key, Ivec, Data, true).
block_decrypt(Cipher, Key, Ivec, Data) ->
crypto:crypto_one_time(Cipher, Key, Ivec, Data, false).
pbkdf2(Type, MacLength, Count, Length, Pass, Salt) ->
pubkey_pbe:pbdkdf2(Pass, Salt, Count, Length, fun pbdkdf2_hmac/4, Type, MacLength).
pbdkdf2_hmac(Type, Key, Data, MacLength) ->
crypto:macN(hmac, Type, Key, Data, MacLength).
| null | https://raw.githubusercontent.com/erlangbureau/jamdb_oracle/bc9b7c8701b3ef517a23a93e34a028139da0c67b/src/jamdb_oracle_crypt.erl | erlang | API
API
IVec = <<0:64>>,
internal | -module(jamdb_oracle_crypt).
-export([generate/1]).
-export([validate/1]).
-include("jamdb_oracle.hrl").
o3logon(#logon{auth = Sess , key = KeySess , password = Pass } ) - >
block_decrypt(des_cbc , binary : part(KeySess,0,8 ) , IVec , ) ,
N = ( 8 - ( length(Pass ) rem 8 ) ) rem 8 ,
= < < ( list_to_binary(Pass))/binary , ( binary : copy(<<0 > > , N))/binary > > ,
block_encrypt(des_cbc , binary : part(SrvSess,0,8 ) , IVec , CliPass ) ,
# logon{password = hexify(AuthPass)++[N ] } .
o5logon(#logon{auth=Sess, user=User, password=Pass, bits=128} = Logon) ->
IVec = <<0:64>>,
CliPass = norm(User++Pass),
B1 = block_encrypt(des_cbc, unhex("0123456789ABCDEF"), IVec, CliPass),
B2 = block_encrypt(des_cbc, binary:part(B1,byte_size(B1),-8), IVec, CliPass),
KeySess = <<(binary:part(B2,byte_size(B2),-8))/binary,0:64>>,
generate(Logon#logon{auth=unhex(Sess), key=KeySess});
o5logon(#logon{auth=Sess, salt=Salt, password=Pass, bits=192} = Logon) ->
Data = crypto:hash(sha,<<(list_to_binary(Pass))/binary,(unhex(Salt))/binary>>),
KeySess = <<Data/binary,0:32>>,
generate(Logon#logon{auth=unhex(Sess), key=KeySess});
o5logon(#logon{auth=Sess, salt=Salt, password=Pass, bits=256} = Logon) ->
Data = pbkdf2(sha512, 64, 4096, 64, Pass, <<(unhex(Salt))/binary,"AUTH_PBKDF2_SPEEDY_KEY">>),
KeySess = binary:part(crypto:hash(sha512, <<Data/binary, (unhex(Salt))/binary>>),0,32),
generate(Logon#logon{auth=unhex(Sess), key=KeySess,
der_key = <<(crypto:strong_rand_bytes(16))/binary, Data/binary>>}).
generate(#logon{type=Type,bits=undefined} = Logon) ->
Bits =
case Type of
2361 -> 128;
6949 -> 192;
18453 -> 256
end,
o5logon(Logon#logon{bits=Bits});
generate(#logon{auth=Sess, key=KeySess, der_salt=DerivedSalt, der_key=DerivedKey,
password=Pass, newpassword=NewPass, bits=Bits} = Logon) ->
IVec = <<0:128>>,
Cipher = cipher(Bits),
SrvSess = block_decrypt(Cipher, KeySess, IVec, Sess),
CliSess =
case binary:match(SrvSess,pad(8, <<>>)) of
{40,8} -> pad(8, crypto:strong_rand_bytes(40));
_ -> crypto:strong_rand_bytes(byte_size(SrvSess))
end,
AuthSess = block_encrypt(Cipher, KeySess, IVec, CliSess),
CatKey = cat_key(SrvSess, CliSess, DerivedSalt, Bits),
KeyConn = conn_key(CatKey, DerivedSalt, Bits),
AuthPass = block_encrypt(Cipher, KeyConn, IVec, pad(Pass)),
AuthNewPass =
case NewPass of
[] -> <<>>;
_ -> block_encrypt(Cipher, KeyConn, IVec, pad(NewPass))
end,
SpeedyKey =
case DerivedKey of
undefined -> <<>>;
_ -> block_encrypt(Cipher, KeyConn, IVec, DerivedKey)
end,
Logon#logon{auth=list_to_binary(hexify(AuthSess)), key=KeyConn, speedy_key=hexify(SpeedyKey),
password=hexify(AuthPass), newpassword=hexify(AuthNewPass)}.
validate(#logon{auth=Resp, key=KeyConn}) ->
IVec = <<0:128>>,
Cipher = cipher(byte_size(KeyConn) * 8),
Data = block_decrypt(Cipher, KeyConn, IVec, unhex(Resp)),
case binary:match(Data,<<"SERVER_TO_CLIENT">>) of
nomatch -> error;
_ -> ok
end.
cipher(128) -> aes_128_cbc;
cipher(192) -> aes_192_cbc;
cipher(256) -> aes_256_cbc.
conn_key(Key, undefined, 128) ->
<<(erlang:md5(Key))/binary>>;
conn_key(Key, undefined, 192) ->
<<(erlang:md5(binary:part(Key,0,16)))/binary,
(binary:part(erlang:md5(binary:part(Key,16,8)),0,8))/binary>>;
conn_key(Key, DerivedSalt, Bits) ->
pbkdf2(sha512, 64, 3, Bits div 8, hexify(Key), unhex(DerivedSalt)).
cat_key(Key, Key2, undefined, Bits) ->
cat_key(binary:part(Key, 16, Bits div 8),binary:part(Key2, 16, Bits div 8),[]);
cat_key(Key, Key2, _DerivedSalt, Bits) ->
<<(binary:part(Key2, 0, Bits div 8))/binary,(binary:part(Key, 0, Bits div 8))/binary>>.
cat_key(<<>>,<<>>,S) ->
list_to_binary(S);
cat_key(<<A, Rest/bits>>,<<B, Rest2/bits>>,S) ->
cat_key(Rest,Rest2,S++[A bxor B]).
norm(Data) ->
Bin = norm(list_to_binary(Data),[]),
N = (8 - (byte_size(Bin) rem 8 )) rem 8,
<<Bin/binary, (binary:copy(<<0>>, N))/binary>>.
norm(<<>>,S) ->
list_to_binary(S);
norm(<<A/utf8, Rest/bits>>,S) ->
B = case A of
N when N > 255 -> 63;
N when N >= 97, N =< 122 -> N-32;
N -> N
end,
norm(Rest,S++[0,B]).
pad(S) ->
P = 16 - (length(S) rem 16),
<<(pad(16,<<>>))/binary, (pad(P,list_to_binary(S)))/binary>>.
pad(P, Bin) -> <<Bin/binary, (binary:copy(<<P>>, P))/binary>>.
unhex(S) ->
list_to_binary(unhex(S, [])).
unhex([], Acc) ->
lists:reverse(Acc);
unhex([A, B | S], Acc) ->
unhex(S, [list_to_integer([A, B], 16) | Acc]).
hexify(Bin) ->
[hex_byte(B) || B <- binary_to_list(Bin)].
hex_byte(B) when B < 16 -> "0"++integer_to_list(B, 16);
hex_byte(B) -> integer_to_list(B, 16).
block_encrypt(Cipher, Key, Ivec, Data) ->
crypto:crypto_one_time(Cipher, Key, Ivec, Data, true).
block_decrypt(Cipher, Key, Ivec, Data) ->
crypto:crypto_one_time(Cipher, Key, Ivec, Data, false).
pbkdf2(Type, MacLength, Count, Length, Pass, Salt) ->
pubkey_pbe:pbdkdf2(Pass, Salt, Count, Length, fun pbdkdf2_hmac/4, Type, MacLength).
pbdkdf2_hmac(Type, Key, Data, MacLength) ->
crypto:macN(hmac, Type, Key, Data, MacLength).
|
87372fc01a61421426cebcddc337f95c0986d12eaf9fa781b4e36c91fba1590e | inaka/sumo_db | blog.erl | %%% @doc Main module for the blog example.
%%%
Copyright 2012 Inaka & lt;> ;
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%% @end
< >
%%%
-module(blog).
-author("Marcelo Gornstein <>").
-github("").
-license("Apache License 2.0").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Exports.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Posts API.
-export([
total_posts/0, new_post/3, save_post/1, del_post/0, del_post/1, find_post/1
]).
%%% Author API.
-export([
new_author/2, save_author/1, del_author/0, del_author/1, del_author_by_name/1,
find_author/1, find_all_authors/2, find_authors_by_name/3
]).
%%% Reader API.
-export([new_reader/2, save_reader/1, del_reader/0, find_reader/1]).
%%% Vote API.
-export([new_vote/2]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Code starts here.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Paginates all authors, sorts by name.
-spec find_all_authors(non_neg_integer(), non_neg_integer()) -> [blog_author:author()].
find_all_authors(Limit, Offset) ->
sumo:find_all(author, [], Limit, Offset).
-spec find_authors_by_name(string(), non_neg_integer(), non_neg_integer()) -> [blog_author:author()].
find_authors_by_name(Name, Limit, Offset) ->
sumo:find_by(author, [{name, Name}], Limit, Offset).
%% @doc Finds a post given the id.
-spec find_post(blog_post:id()) -> blog_post:post()|notfound.
find_post(Id) ->
sumo:find(post, Id).
%% @doc Finds an author, given the id.
-spec find_author(blog_author:id()) -> blog_author:author()|notfound.
find_author(Id) ->
sumo:find(author, Id).
%% @doc Find a reader, given the id.
-spec find_reader(blog_reader:id()) -> blog_reader:reader()|notfound.
find_reader(Id) ->
sumo:find(reader, Id).
%% @doc Returns all available posts.
-spec total_posts() -> non_neg_integer().
total_posts() ->
sumo:call(post, total_posts).
%% @doc Deletes all authors.
-spec del_author() -> non_neg_integer().
del_author() ->
sumo:delete_all(author).
%% @doc Deletes all posts.
-spec del_post() -> non_neg_integer().
del_post() ->
sumo:delete_all(post).
%% @doc Deletes all readers.
-spec del_reader() -> non_neg_integer().
del_reader() ->
sumo:delete_all(reader).
%% @doc Deletes the given author.
-spec del_author_by_name(binary()) -> non_neg_integer().
del_author_by_name(Name) ->
sumo:delete_by(author, [{name, Name}]).
%% @doc Deletes the given author.
-spec del_author(blog_author:author()) -> boolean().
del_author(Author) ->
sumo:delete(author, blog_author:id(Author)).
%% @doc Deletes the given post.
-spec del_post(blog_post:post()) -> boolean().
del_post(Post) ->
sumo:delete(post, blog_post:id(Post)).
%% @doc Updates an author.
-spec save_author(blog_author:author()) -> ok.
save_author(Author) ->
sumo:persist(author, Author).
%% @doc Updates a post.
-spec save_post(blog_post:post()) -> ok.
save_post(Post) ->
sumo:persist(post, Post).
%% @doc Updates a reader.
-spec save_reader(blog_reader:reader()) -> ok.
save_reader(Reader) ->
sumo:persist(reader, Reader).
%% @doc Creates a new author.
-spec new_author(binary(), binary()) -> blog_author:author().
new_author(Name, Photo) ->
sumo:persist(author, blog_author:new(Name, Photo)).
%% @doc Creates a new post.
-spec new_post(string(), string(), string()) -> blog_post:post().
new_post(Title, Content, Author) ->
sumo:persist(
post, blog_post:new(Title, Content, blog_author:id(Author))
).
%% @doc Creates a new blog reader.
-spec new_reader(string(), string()) -> blog_reader:reader().
new_reader(Name, Email) ->
sumo:persist(reader, blog_reader:new(Name, Email)).
%% @doc Creates a new vote.
-spec new_vote(blog_reader:id(), blog_post:id()) -> blog_vote:vote().
new_vote(ReaderId, PostId) ->
sumo:persist(vote, blog_vote:new(ReaderId, PostId)).
| null | https://raw.githubusercontent.com/inaka/sumo_db/331ea718c13a01748a7739ad4078b0032f4d32e5/examples/blog/src/blog.erl | erlang | @doc Main module for the blog example.
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@end
Exports.
Posts API.
Author API.
Reader API.
Vote API.
Code starts here.
@doc Paginates all authors, sorts by name.
@doc Finds a post given the id.
@doc Finds an author, given the id.
@doc Find a reader, given the id.
@doc Returns all available posts.
@doc Deletes all authors.
@doc Deletes all posts.
@doc Deletes all readers.
@doc Deletes the given author.
@doc Deletes the given author.
@doc Deletes the given post.
@doc Updates an author.
@doc Updates a post.
@doc Updates a reader.
@doc Creates a new author.
@doc Creates a new post.
@doc Creates a new blog reader.
@doc Creates a new vote. | Copyright 2012 Inaka & lt;> ;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
< >
-module(blog).
-author("Marcelo Gornstein <>").
-github("").
-license("Apache License 2.0").
-export([
total_posts/0, new_post/3, save_post/1, del_post/0, del_post/1, find_post/1
]).
-export([
new_author/2, save_author/1, del_author/0, del_author/1, del_author_by_name/1,
find_author/1, find_all_authors/2, find_authors_by_name/3
]).
-export([new_reader/2, save_reader/1, del_reader/0, find_reader/1]).
-export([new_vote/2]).
-spec find_all_authors(non_neg_integer(), non_neg_integer()) -> [blog_author:author()].
find_all_authors(Limit, Offset) ->
sumo:find_all(author, [], Limit, Offset).
-spec find_authors_by_name(string(), non_neg_integer(), non_neg_integer()) -> [blog_author:author()].
find_authors_by_name(Name, Limit, Offset) ->
sumo:find_by(author, [{name, Name}], Limit, Offset).
-spec find_post(blog_post:id()) -> blog_post:post()|notfound.
find_post(Id) ->
sumo:find(post, Id).
-spec find_author(blog_author:id()) -> blog_author:author()|notfound.
find_author(Id) ->
sumo:find(author, Id).
-spec find_reader(blog_reader:id()) -> blog_reader:reader()|notfound.
find_reader(Id) ->
sumo:find(reader, Id).
-spec total_posts() -> non_neg_integer().
total_posts() ->
sumo:call(post, total_posts).
-spec del_author() -> non_neg_integer().
del_author() ->
sumo:delete_all(author).
-spec del_post() -> non_neg_integer().
del_post() ->
sumo:delete_all(post).
-spec del_reader() -> non_neg_integer().
del_reader() ->
sumo:delete_all(reader).
-spec del_author_by_name(binary()) -> non_neg_integer().
del_author_by_name(Name) ->
sumo:delete_by(author, [{name, Name}]).
-spec del_author(blog_author:author()) -> boolean().
del_author(Author) ->
sumo:delete(author, blog_author:id(Author)).
-spec del_post(blog_post:post()) -> boolean().
del_post(Post) ->
sumo:delete(post, blog_post:id(Post)).
-spec save_author(blog_author:author()) -> ok.
save_author(Author) ->
sumo:persist(author, Author).
-spec save_post(blog_post:post()) -> ok.
save_post(Post) ->
sumo:persist(post, Post).
-spec save_reader(blog_reader:reader()) -> ok.
save_reader(Reader) ->
sumo:persist(reader, Reader).
-spec new_author(binary(), binary()) -> blog_author:author().
new_author(Name, Photo) ->
sumo:persist(author, blog_author:new(Name, Photo)).
-spec new_post(string(), string(), string()) -> blog_post:post().
new_post(Title, Content, Author) ->
sumo:persist(
post, blog_post:new(Title, Content, blog_author:id(Author))
).
-spec new_reader(string(), string()) -> blog_reader:reader().
new_reader(Name, Email) ->
sumo:persist(reader, blog_reader:new(Name, Email)).
-spec new_vote(blog_reader:id(), blog_post:id()) -> blog_vote:vote().
new_vote(ReaderId, PostId) ->
sumo:persist(vote, blog_vote:new(ReaderId, PostId)).
|
572aeac98e9585b7ea015270c8d48e319d14783162ab7079b73ede593b8c5e86 | Chris00/ocaml-cairo | image_create.ml | open Printf
open Cairo
open Bigarray
let create() =
let data = Array1.create int8_unsigned c_layout 360_000 in
Gc.finalise (fun _ -> eprintf "DESTROY bigarray 'data'\n%!") data;
let surf = Image.create_for_data8 data Image.RGB24 ~w:300 ~h:300 in
Cairo.create surf
let () =
let cr = create() in
printf "With Cairo handle:\n%!";
set_source_rgb cr 1. 1. 1.;
rectangle cr 0. 0. ~w:300. ~h:300.;
fill cr;
Gc.compact(); Gc.compact();
set_source_rgb cr 1. 0. 0.;
move_to cr 10. 150.;
set_font_size cr 100.;
show_text cr "Hello";
Gc.compact(); Gc.compact();
eprintf "- Write image\n%!";
PNG.write (get_target cr) "test_image.png";
eprintf "- Finish surface\n%!";
Surface.finish (get_target cr);
Gc.compact()
(* Test for stride < 0 (not handled for now) and for incoherent width
/ stride *)
let () =
let mat = Array1.create int8_unsigned c_layout 80_000 in
let test_stride stride =
try
let surf = Image.create_for_data8 mat Image.A8 ~w:100 ~h:100 ~stride in
assert(Image.get_stride surf = stride)
with Error INVALID_STRIDE ->
assert(stride < 100)
in
test_stride 108;
test_stride 99;
test_stride 0;
test_stride (-108);
| null | https://raw.githubusercontent.com/Chris00/ocaml-cairo/202674a8d0c533b689ceacdb523ca167611e1b4c/tests/image_create.ml | ocaml | Test for stride < 0 (not handled for now) and for incoherent width
/ stride | open Printf
open Cairo
open Bigarray
let create() =
let data = Array1.create int8_unsigned c_layout 360_000 in
Gc.finalise (fun _ -> eprintf "DESTROY bigarray 'data'\n%!") data;
let surf = Image.create_for_data8 data Image.RGB24 ~w:300 ~h:300 in
Cairo.create surf
let () =
let cr = create() in
printf "With Cairo handle:\n%!";
set_source_rgb cr 1. 1. 1.;
rectangle cr 0. 0. ~w:300. ~h:300.;
fill cr;
Gc.compact(); Gc.compact();
set_source_rgb cr 1. 0. 0.;
move_to cr 10. 150.;
set_font_size cr 100.;
show_text cr "Hello";
Gc.compact(); Gc.compact();
eprintf "- Write image\n%!";
PNG.write (get_target cr) "test_image.png";
eprintf "- Finish surface\n%!";
Surface.finish (get_target cr);
Gc.compact()
let () =
let mat = Array1.create int8_unsigned c_layout 80_000 in
let test_stride stride =
try
let surf = Image.create_for_data8 mat Image.A8 ~w:100 ~h:100 ~stride in
assert(Image.get_stride surf = stride)
with Error INVALID_STRIDE ->
assert(stride < 100)
in
test_stride 108;
test_stride 99;
test_stride 0;
test_stride (-108);
|
025b6e1759ce1c84fe215d872413d5be5bcc3e79896525c14f5e7d5acda8386a | potapenko/playphraseme-site | permission.clj | (ns playphraseme.api.queries.user.permission
(:require [playphraseme.db.users-db :refer :all]))
(def coll "permissions")
(defn insert-permission!
"Inserts a single permission into the permission table"
[permission]
(when-not (get-doc coll {:permission permission})
(add-doc coll {:permission permission})))
(comment
(insert-permission! "admin")
(insert-permission! "basic"))
| null | https://raw.githubusercontent.com/potapenko/playphraseme-site/d50a62a6bc8f463e08365dca96b3a6e5dde4fb12/src/clj/playphraseme/api/queries/user/permission.clj | clojure | (ns playphraseme.api.queries.user.permission
(:require [playphraseme.db.users-db :refer :all]))
(def coll "permissions")
(defn insert-permission!
"Inserts a single permission into the permission table"
[permission]
(when-not (get-doc coll {:permission permission})
(add-doc coll {:permission permission})))
(comment
(insert-permission! "admin")
(insert-permission! "basic"))
|
|
7e17414c5679e30d3f14617d6e436a97326f67142a46eb930adb573b3ec0cebf | brawnski/git-annex | Bup.hs | Using bup as a remote .
-
- Copyright 2011 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2011 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Remote.Bup (remote) where
import qualified Data.ByteString.Lazy.Char8 as L
import System.IO
import System.IO.Error
import Control.Exception.Extensible (IOException)
import qualified Data.Map as M
import Control.Monad (when)
import Control.Monad.State (liftIO)
import System.Process
import System.Exit
import System.FilePath
import Data.Maybe
import Data.List.Utils
import System.Cmd.Utils
import Types
import Types.Remote
import qualified Git
import qualified Annex
import UUID
import Locations
import Config
import Utility
import Messages
import Remote.Ssh
import Remote.Special
import Remote.Encryptable
import Crypto
type BupRepo = String
remote :: RemoteType Annex
remote = RemoteType {
typename = "bup",
enumerate = findSpecialRemotes "buprepo",
generate = gen,
setup = bupSetup
}
gen :: Git.Repo -> UUID -> Maybe RemoteConfig -> Annex (Remote Annex)
gen r u c = do
buprepo <- getConfig r "buprepo" (error "missing buprepo")
cst <- remoteCost r (if bupLocal buprepo then semiCheapRemoteCost else expensiveRemoteCost)
bupr <- liftIO $ bup2GitRemote buprepo
(u', bupr') <- getBupUUID bupr u
return $ encryptableRemote c
(storeEncrypted r buprepo)
(retrieveEncrypted buprepo)
Remote {
uuid = u',
cost = cst,
name = Git.repoDescribe r,
storeKey = store r buprepo,
retrieveKeyFile = retrieve buprepo,
removeKey = remove,
hasKey = checkPresent r bupr',
hasKeyCheap = bupLocal buprepo,
config = c
}
bupSetup :: UUID -> RemoteConfig -> Annex RemoteConfig
bupSetup u c = do
-- verify configuration is sane
let buprepo = fromMaybe (error "Specify buprepo=") $
M.lookup "buprepo" c
c' <- encryptionSetup c
-- bup init will create the repository.
-- (If the repository already exists, bup init again appears safe.)
showAction "bup init"
bup "init" buprepo [] >>! error "bup init failed"
storeBupUUID u buprepo
-- The buprepo is stored in git config, as well as this repo's
-- persistant state, so it can vary between hosts.
gitConfigSpecialRemote u c' "buprepo" buprepo
return c'
bupParams :: String -> BupRepo -> [CommandParam] -> [CommandParam]
bupParams command buprepo params =
Param command : [Param "-r", Param buprepo] ++ params
bup :: String -> BupRepo -> [CommandParam] -> Annex Bool
bup command buprepo params = do
showOutput -- make way for bup output
liftIO $ boolSystem "bup" $ bupParams command buprepo params
pipeBup :: [CommandParam] -> Maybe Handle -> Maybe Handle -> IO Bool
pipeBup params inh outh = do
p <- runProcess "bup" (toCommand params)
Nothing Nothing inh outh Nothing
ok <- waitForProcess p
case ok of
ExitSuccess -> return True
_ -> return False
bupSplitParams :: Git.Repo -> BupRepo -> Key -> CommandParam -> Annex [CommandParam]
bupSplitParams r buprepo k src = do
o <- getConfig r "bup-split-options" ""
let os = map Param $ words o
showOutput -- make way for bup output
return $ bupParams "split" buprepo
(os ++ [Param "-n", Param (show k), src])
store :: Git.Repo -> BupRepo -> Key -> Annex Bool
store r buprepo k = do
g <- Annex.gitRepo
let src = gitAnnexLocation g k
params <- bupSplitParams r buprepo k (File src)
liftIO $ boolSystem "bup" params
storeEncrypted :: Git.Repo -> BupRepo -> (Cipher, Key) -> Key -> Annex Bool
storeEncrypted r buprepo (cipher, enck) k = do
g <- Annex.gitRepo
let src = gitAnnexLocation g k
params <- bupSplitParams r buprepo enck (Param "-")
liftIO $ catchBool $
withEncryptedHandle cipher (L.readFile src) $ \h ->
pipeBup params (Just h) Nothing
retrieve :: BupRepo -> Key -> FilePath -> Annex Bool
retrieve buprepo k f = do
let params = bupParams "join" buprepo [Param $ show k]
liftIO $ catchBool $ do
tofile <- openFile f WriteMode
pipeBup params Nothing (Just tofile)
retrieveEncrypted :: BupRepo -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted buprepo (cipher, enck) f = do
let params = bupParams "join" buprepo [Param $ show enck]
liftIO $ catchBool $ do
(pid, h) <- hPipeFrom "bup" $ toCommand params
withDecryptedContent cipher (L.hGetContents h) $ L.writeFile f
forceSuccess pid
return True
remove :: Key -> Annex Bool
remove _ = do
warning "content cannot be removed from bup remote"
return False
Bup does not provide a way to tell if a given dataset is present
- in a bup repository . One way it to check if the git repository has
- a branch matching the name ( as created by bup split -n ) .
- in a bup repository. One way it to check if the git repository has
- a branch matching the name (as created by bup split -n).
-}
checkPresent :: Git.Repo -> Git.Repo -> Key -> Annex (Either IOException Bool)
checkPresent r bupr k
| Git.repoIsUrl bupr = do
showAction $ "checking " ++ Git.repoDescribe r
ok <- onBupRemote bupr boolSystem "git" params
return $ Right ok
| otherwise = liftIO $ try $ boolSystem "git" $ Git.gitCommandLine bupr params
where
params =
[ Params "show-ref --quiet --verify"
, Param $ "refs/heads/" ++ show k]
{- Store UUID in the annex.uuid setting of the bup repository. -}
storeBupUUID :: UUID -> BupRepo -> Annex ()
storeBupUUID u buprepo = do
r <- liftIO $ bup2GitRemote buprepo
if Git.repoIsUrl r
then do
showAction "storing uuid"
onBupRemote r boolSystem "git"
[Params $ "config annex.uuid " ++ u]
>>! error "ssh failed"
else liftIO $ do
r' <- Git.configRead r
let olduuid = Git.configGet r' "annex.uuid" ""
when (olduuid == "") $
Git.run r' "config" [Param "annex.uuid", Param u]
onBupRemote :: Git.Repo -> (FilePath -> [CommandParam] -> IO a) -> FilePath -> [CommandParam] -> Annex a
onBupRemote r a command params = do
let dir = shellEscape (Git.workTree r)
sshparams <- sshToRepo r [Param $
"cd " ++ dir ++ " && " ++ unwords (command : toCommand params)]
liftIO $ a "ssh" sshparams
{- Allow for bup repositories on removable media by checking
- local bup repositories to see if they are available, and getting their
- uuid (which may be different from the stored uuid for the bup remote).
-
- If a bup repository is not available, returns a dummy uuid of "".
- This will cause checkPresent to indicate nothing from the bup remote
- is known to be present.
-
- Also, returns a version of the repo with config read, if it is local.
-}
getBupUUID :: Git.Repo -> UUID -> Annex (UUID, Git.Repo)
getBupUUID r u
| Git.repoIsUrl r = return (u, r)
| otherwise = liftIO $ do
ret <- try $ Git.configRead r
case ret of
Right r' -> return (Git.configGet r' "annex.uuid" "", r')
Left _ -> return ("", r)
{- Converts a bup remote path spec into a Git.Repo. There are some
- differences in path representation between git and bup. -}
bup2GitRemote :: BupRepo -> IO Git.Repo
bup2GitRemote "" = do
-- bup -r "" operates on ~/.bup
h <- myHomeDir
Git.repoFromAbsPath $ h </> ".bup"
bup2GitRemote r
| bupLocal r =
if head r == '/'
then Git.repoFromAbsPath r
else error "please specify an absolute path"
| otherwise = Git.repoFromUrl $ "ssh://" ++ host ++ slash dir
where
bits = split ":" r
host = head bits
dir = join ":" $ drop 1 bits
-- "host:~user/dir" is not supported specially by bup;
-- "host:dir" is relative to the home directory;
-- "host:" goes in ~/.bup
slash d
| d == "" = "/~/.bup"
| head d == '/' = d
| otherwise = "/~/" ++ d
bupLocal :: BupRepo -> Bool
bupLocal = notElem ':'
| null | https://raw.githubusercontent.com/brawnski/git-annex/8b847517a810d384a79178124b9766141b89bc17/Remote/Bup.hs | haskell | verify configuration is sane
bup init will create the repository.
(If the repository already exists, bup init again appears safe.)
The buprepo is stored in git config, as well as this repo's
persistant state, so it can vary between hosts.
make way for bup output
make way for bup output
Store UUID in the annex.uuid setting of the bup repository.
Allow for bup repositories on removable media by checking
- local bup repositories to see if they are available, and getting their
- uuid (which may be different from the stored uuid for the bup remote).
-
- If a bup repository is not available, returns a dummy uuid of "".
- This will cause checkPresent to indicate nothing from the bup remote
- is known to be present.
-
- Also, returns a version of the repo with config read, if it is local.
Converts a bup remote path spec into a Git.Repo. There are some
- differences in path representation between git and bup.
bup -r "" operates on ~/.bup
"host:~user/dir" is not supported specially by bup;
"host:dir" is relative to the home directory;
"host:" goes in ~/.bup | Using bup as a remote .
-
- Copyright 2011 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2011 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Remote.Bup (remote) where
import qualified Data.ByteString.Lazy.Char8 as L
import System.IO
import System.IO.Error
import Control.Exception.Extensible (IOException)
import qualified Data.Map as M
import Control.Monad (when)
import Control.Monad.State (liftIO)
import System.Process
import System.Exit
import System.FilePath
import Data.Maybe
import Data.List.Utils
import System.Cmd.Utils
import Types
import Types.Remote
import qualified Git
import qualified Annex
import UUID
import Locations
import Config
import Utility
import Messages
import Remote.Ssh
import Remote.Special
import Remote.Encryptable
import Crypto
type BupRepo = String
remote :: RemoteType Annex
remote = RemoteType {
typename = "bup",
enumerate = findSpecialRemotes "buprepo",
generate = gen,
setup = bupSetup
}
gen :: Git.Repo -> UUID -> Maybe RemoteConfig -> Annex (Remote Annex)
gen r u c = do
buprepo <- getConfig r "buprepo" (error "missing buprepo")
cst <- remoteCost r (if bupLocal buprepo then semiCheapRemoteCost else expensiveRemoteCost)
bupr <- liftIO $ bup2GitRemote buprepo
(u', bupr') <- getBupUUID bupr u
return $ encryptableRemote c
(storeEncrypted r buprepo)
(retrieveEncrypted buprepo)
Remote {
uuid = u',
cost = cst,
name = Git.repoDescribe r,
storeKey = store r buprepo,
retrieveKeyFile = retrieve buprepo,
removeKey = remove,
hasKey = checkPresent r bupr',
hasKeyCheap = bupLocal buprepo,
config = c
}
bupSetup :: UUID -> RemoteConfig -> Annex RemoteConfig
bupSetup u c = do
let buprepo = fromMaybe (error "Specify buprepo=") $
M.lookup "buprepo" c
c' <- encryptionSetup c
showAction "bup init"
bup "init" buprepo [] >>! error "bup init failed"
storeBupUUID u buprepo
gitConfigSpecialRemote u c' "buprepo" buprepo
return c'
bupParams :: String -> BupRepo -> [CommandParam] -> [CommandParam]
bupParams command buprepo params =
Param command : [Param "-r", Param buprepo] ++ params
bup :: String -> BupRepo -> [CommandParam] -> Annex Bool
bup command buprepo params = do
liftIO $ boolSystem "bup" $ bupParams command buprepo params
pipeBup :: [CommandParam] -> Maybe Handle -> Maybe Handle -> IO Bool
pipeBup params inh outh = do
p <- runProcess "bup" (toCommand params)
Nothing Nothing inh outh Nothing
ok <- waitForProcess p
case ok of
ExitSuccess -> return True
_ -> return False
bupSplitParams :: Git.Repo -> BupRepo -> Key -> CommandParam -> Annex [CommandParam]
bupSplitParams r buprepo k src = do
o <- getConfig r "bup-split-options" ""
let os = map Param $ words o
return $ bupParams "split" buprepo
(os ++ [Param "-n", Param (show k), src])
store :: Git.Repo -> BupRepo -> Key -> Annex Bool
store r buprepo k = do
g <- Annex.gitRepo
let src = gitAnnexLocation g k
params <- bupSplitParams r buprepo k (File src)
liftIO $ boolSystem "bup" params
storeEncrypted :: Git.Repo -> BupRepo -> (Cipher, Key) -> Key -> Annex Bool
storeEncrypted r buprepo (cipher, enck) k = do
g <- Annex.gitRepo
let src = gitAnnexLocation g k
params <- bupSplitParams r buprepo enck (Param "-")
liftIO $ catchBool $
withEncryptedHandle cipher (L.readFile src) $ \h ->
pipeBup params (Just h) Nothing
retrieve :: BupRepo -> Key -> FilePath -> Annex Bool
retrieve buprepo k f = do
let params = bupParams "join" buprepo [Param $ show k]
liftIO $ catchBool $ do
tofile <- openFile f WriteMode
pipeBup params Nothing (Just tofile)
retrieveEncrypted :: BupRepo -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted buprepo (cipher, enck) f = do
let params = bupParams "join" buprepo [Param $ show enck]
liftIO $ catchBool $ do
(pid, h) <- hPipeFrom "bup" $ toCommand params
withDecryptedContent cipher (L.hGetContents h) $ L.writeFile f
forceSuccess pid
return True
remove :: Key -> Annex Bool
remove _ = do
warning "content cannot be removed from bup remote"
return False
Bup does not provide a way to tell if a given dataset is present
- in a bup repository . One way it to check if the git repository has
- a branch matching the name ( as created by bup split -n ) .
- in a bup repository. One way it to check if the git repository has
- a branch matching the name (as created by bup split -n).
-}
checkPresent :: Git.Repo -> Git.Repo -> Key -> Annex (Either IOException Bool)
checkPresent r bupr k
| Git.repoIsUrl bupr = do
showAction $ "checking " ++ Git.repoDescribe r
ok <- onBupRemote bupr boolSystem "git" params
return $ Right ok
| otherwise = liftIO $ try $ boolSystem "git" $ Git.gitCommandLine bupr params
where
params =
[ Params "show-ref --quiet --verify"
, Param $ "refs/heads/" ++ show k]
storeBupUUID :: UUID -> BupRepo -> Annex ()
storeBupUUID u buprepo = do
r <- liftIO $ bup2GitRemote buprepo
if Git.repoIsUrl r
then do
showAction "storing uuid"
onBupRemote r boolSystem "git"
[Params $ "config annex.uuid " ++ u]
>>! error "ssh failed"
else liftIO $ do
r' <- Git.configRead r
let olduuid = Git.configGet r' "annex.uuid" ""
when (olduuid == "") $
Git.run r' "config" [Param "annex.uuid", Param u]
onBupRemote :: Git.Repo -> (FilePath -> [CommandParam] -> IO a) -> FilePath -> [CommandParam] -> Annex a
onBupRemote r a command params = do
let dir = shellEscape (Git.workTree r)
sshparams <- sshToRepo r [Param $
"cd " ++ dir ++ " && " ++ unwords (command : toCommand params)]
liftIO $ a "ssh" sshparams
getBupUUID :: Git.Repo -> UUID -> Annex (UUID, Git.Repo)
getBupUUID r u
| Git.repoIsUrl r = return (u, r)
| otherwise = liftIO $ do
ret <- try $ Git.configRead r
case ret of
Right r' -> return (Git.configGet r' "annex.uuid" "", r')
Left _ -> return ("", r)
bup2GitRemote :: BupRepo -> IO Git.Repo
bup2GitRemote "" = do
h <- myHomeDir
Git.repoFromAbsPath $ h </> ".bup"
bup2GitRemote r
| bupLocal r =
if head r == '/'
then Git.repoFromAbsPath r
else error "please specify an absolute path"
| otherwise = Git.repoFromUrl $ "ssh://" ++ host ++ slash dir
where
bits = split ":" r
host = head bits
dir = join ":" $ drop 1 bits
slash d
| d == "" = "/~/.bup"
| head d == '/' = d
| otherwise = "/~/" ++ d
bupLocal :: BupRepo -> Bool
bupLocal = notElem ':'
|
bbd82923aecab96794d6503fcb0372ae35f00277fd4a10f3ebbecda57eda2342 | Sword-Smith/Sword | TypeChecker.hs | MIT License
--
Copyright ( c ) 2019 and
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
--
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
module TypeChecker where
import SwordLanguageDefinition
import Data.List (sort)
import Control.Monad (unless)
data ExpType = BoolType
| IntType deriving (Show, Eq)
typeChecker :: Contract -> Either String Contract
typeChecker c = do
unless (hasSequentialPartyIDs c) (Left "Parties must be named sequentially from 1 to N.")
typeCheckerContract c
hasSequentialPartyIDs :: Contract -> Bool
hasSequentialPartyIDs c = verifySequence 1 $ sort $ getAllParties c
where
getAllParties :: Contract -> [PartyTokenID]
getAllParties (Transfer _ to) = [to]
getAllParties (Both contractA contractB) = getAllParties contractA ++ getAllParties contractB
getAllParties (Translate _ contract) = getAllParties contract
getAllParties (IfWithin _ contractA contractB) = getAllParties contractA ++ getAllParties contractB
getAllParties (Scale _ _ contract) = getAllParties contract
getAllParties Zero = []
verifySequence :: Integer -> [PartyTokenID] -> Bool
verifySequence _ [] = True
verifySequence n (x:xs) = n == getPartyTokenID x && verifySequence (n + 1) xs
typeCheckerContract :: Contract -> Either String Contract
typeCheckerContract (Transfer tokenAddress to) =
return $ Transfer tokenAddress to
typeCheckerContract (Both contractA contractB) = do
cA <- typeCheckerContract contractA
cB <- typeCheckerContract contractB
return $ Both cA cB
typeCheckerContract (Translate delay contract) = do
c <- typeCheckerContract contract
return $ Translate delay c
typeCheckerContract (IfWithin (MemExp time e) contractA contractB) = do
t0 <- getType e
if t0 == BoolType then do
cA <- typeCheckerContract contractA
cB <- typeCheckerContract contractB
return $ IfWithin (MemExp time e) cA cB
else
Left $ "First argument in If-Within must be of type Boolean, got " ++ show t0
typeCheckerContract (Scale maxFac scaleFac contract) = do
t0 <- getType scaleFac
if t0 /= BoolType then do
c <- typeCheckerContract contract
return $ Scale maxFac scaleFac c
else
Left $ "2nd argument to scale must be of type int, got: " ++ show t0
typeCheckerContract Zero = Right Zero
getType :: Expr -> Either String ExpType
getType (Lit literal) =
getLiteral literal
getType (MultExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in multiplication expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (SubtExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in subtraction expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (AddiExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in addition expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (DiviExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in division expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (LtExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in LtExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (GtExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in GtExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (EqExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in EqExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (GtOrEqExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in GtOrEqExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (LtOrEqExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in LtOrEqExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (OrExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == BoolType && t1 == BoolType then
return BoolType
else
Left $ "Error in OrExp expression! Expected bool, bool; got " ++ show t0 ++ ", " ++ show t1
getType (AndExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == BoolType && t1 == BoolType then
return BoolType
else
Left $ "Error in AndExp expression! Expected bool, bool; got " ++ show t0 ++ ", " ++ show t1
getType (MinExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in MinExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (MaxExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in MaxExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (NotExp e0) = do
t0 <- getType e0
if t0 == BoolType then
return BoolType
else
Left $ "Error in NotExp expression! Expected bool; got " ++ show t0
getType (IfExp e0 e1 e2) = do
t0 <- getType e0
t1 <- getType e1
t2 <- getType e2
if t0 == BoolType && ((t1 == BoolType && t2 == BoolType) || (t1 == IntType && t2 == IntType)) then
return $ if t1 == BoolType then BoolType else IntType
else
if t0 /= BoolType
then
Left $ "Error in IfExp expression! First exp must be of type bool; got " ++ show t0
else
Left $ "Error in IfExp expression! Types in both branches must match; got " ++ show t1 ++ ", " ++ show t2
getLiteral :: Literal -> Either String ExpType
getLiteral (IntVal _) = Right IntType
getLiteral (BoolVal _) = Right BoolType
getLiteral (Observable OBool _ _ ) = Right BoolType
getLiteral (Observable OInteger _ _ ) = Right IntType
| null | https://raw.githubusercontent.com/Sword-Smith/Sword/214da8011eec75fb949bdc52418b269ab329b2c6/src/TypeChecker.hs | haskell |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. | MIT License
Copyright ( c ) 2019 and
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
module TypeChecker where
import SwordLanguageDefinition
import Data.List (sort)
import Control.Monad (unless)
data ExpType = BoolType
| IntType deriving (Show, Eq)
typeChecker :: Contract -> Either String Contract
typeChecker c = do
unless (hasSequentialPartyIDs c) (Left "Parties must be named sequentially from 1 to N.")
typeCheckerContract c
hasSequentialPartyIDs :: Contract -> Bool
hasSequentialPartyIDs c = verifySequence 1 $ sort $ getAllParties c
where
getAllParties :: Contract -> [PartyTokenID]
getAllParties (Transfer _ to) = [to]
getAllParties (Both contractA contractB) = getAllParties contractA ++ getAllParties contractB
getAllParties (Translate _ contract) = getAllParties contract
getAllParties (IfWithin _ contractA contractB) = getAllParties contractA ++ getAllParties contractB
getAllParties (Scale _ _ contract) = getAllParties contract
getAllParties Zero = []
verifySequence :: Integer -> [PartyTokenID] -> Bool
verifySequence _ [] = True
verifySequence n (x:xs) = n == getPartyTokenID x && verifySequence (n + 1) xs
typeCheckerContract :: Contract -> Either String Contract
typeCheckerContract (Transfer tokenAddress to) =
return $ Transfer tokenAddress to
typeCheckerContract (Both contractA contractB) = do
cA <- typeCheckerContract contractA
cB <- typeCheckerContract contractB
return $ Both cA cB
typeCheckerContract (Translate delay contract) = do
c <- typeCheckerContract contract
return $ Translate delay c
typeCheckerContract (IfWithin (MemExp time e) contractA contractB) = do
t0 <- getType e
if t0 == BoolType then do
cA <- typeCheckerContract contractA
cB <- typeCheckerContract contractB
return $ IfWithin (MemExp time e) cA cB
else
Left $ "First argument in If-Within must be of type Boolean, got " ++ show t0
typeCheckerContract (Scale maxFac scaleFac contract) = do
t0 <- getType scaleFac
if t0 /= BoolType then do
c <- typeCheckerContract contract
return $ Scale maxFac scaleFac c
else
Left $ "2nd argument to scale must be of type int, got: " ++ show t0
typeCheckerContract Zero = Right Zero
getType :: Expr -> Either String ExpType
getType (Lit literal) =
getLiteral literal
getType (MultExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in multiplication expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (SubtExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in subtraction expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (AddiExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in addition expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (DiviExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in division expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (LtExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in LtExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (GtExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in GtExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (EqExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in EqExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (GtOrEqExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in GtOrEqExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (LtOrEqExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return BoolType
else
Left $ "Error in LtOrEqExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (OrExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == BoolType && t1 == BoolType then
return BoolType
else
Left $ "Error in OrExp expression! Expected bool, bool; got " ++ show t0 ++ ", " ++ show t1
getType (AndExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == BoolType && t1 == BoolType then
return BoolType
else
Left $ "Error in AndExp expression! Expected bool, bool; got " ++ show t0 ++ ", " ++ show t1
getType (MinExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in MinExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (MaxExp e0 e1) = do
t0 <- getType e0
t1 <- getType e1
if t0 == IntType && t1 == IntType then
return IntType
else
Left $ "Error in MaxExp expression! Expected int, int; got " ++ show t0 ++ ", " ++ show t1
getType (NotExp e0) = do
t0 <- getType e0
if t0 == BoolType then
return BoolType
else
Left $ "Error in NotExp expression! Expected bool; got " ++ show t0
getType (IfExp e0 e1 e2) = do
t0 <- getType e0
t1 <- getType e1
t2 <- getType e2
if t0 == BoolType && ((t1 == BoolType && t2 == BoolType) || (t1 == IntType && t2 == IntType)) then
return $ if t1 == BoolType then BoolType else IntType
else
if t0 /= BoolType
then
Left $ "Error in IfExp expression! First exp must be of type bool; got " ++ show t0
else
Left $ "Error in IfExp expression! Types in both branches must match; got " ++ show t1 ++ ", " ++ show t2
getLiteral :: Literal -> Either String ExpType
getLiteral (IntVal _) = Right IntType
getLiteral (BoolVal _) = Right BoolType
getLiteral (Observable OBool _ _ ) = Right BoolType
getLiteral (Observable OInteger _ _ ) = Right IntType
|
2cab8604d56a326edbfae7007451c066267fc95f7fbc6fbddb3143fff27d93b1 | fyquah/hardcaml_zprize | test_load_store_sm.ml | open! Core
open Hardcaml
open Hardcaml_waveterm
module Store_sm = Zprize_ntt.Store_sm.Make (struct
let logn = 4
let support_4step_twiddle = false
let logcores = 0
let logblocks = 0
let memory_layout = Zprize_ntt.Memory_layout.Optimised_layout_single_port
end)
module Sim = Cyclesim.With_interface (Store_sm.I) (Store_sm.O)
let test_store_sm () =
let sim = Sim.create (Store_sm.create (Scope.create ())) in
let i = Cyclesim.inputs sim in
let waves, sim = Waveform.create sim in
let cycle () = Cyclesim.cycle sim in
i.clear := Bits.vdd;
cycle ();
i.clear := Bits.gnd;
i.start := Bits.vdd;
cycle ();
i.start := Bits.gnd;
i.tready := Bits.vdd;
cycle ();
i.tready := Bits.gnd;
cycle ();
cycle ();
i.tready := Bits.vdd;
cycle ();
cycle ();
i.tready := Bits.gnd;
cycle ();
i.tready := Bits.vdd;
cycle ();
i.tready := Bits.gnd;
cycle ();
cycle ();
for _ = 0 to 10 do
cycle ()
done;
waves
;;
let%expect_test "store sm" =
let waves = test_store_sm () in
Waveform.print ~display_width:90 ~display_height:25 ~wave_width:1 waves;
[%expect {|
┌Signals───────────┐┌Waves───────────────────────────────────────────────────────────────┐
│clock ││┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ │
│ ││ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─│
│clear ││────┐ │
│ ││ └───────────────────────────────────────────────────────────────│
│first_4step_pass ││ │
│ ││────────────────────────────────────────────────────────────────────│
│start ││ ┌───┐ │
│ ││────┘ └───────────────────────────────────────────────────────────│
│tready ││ ┌───┐ ┌───────┐ ┌───┐ │
│ ││────────┘ └───────┘ └───┘ └───────────────────────────────│
│block ││ │
│ ││────────────────────────────────────────────────────────────────────│
│done_ ││────────┐ │
│ ││ └───────────────────────────────────────────────────────────│
│ ││────────────┬───┬───┬───┬───┬───┬───┬───┬───┬───────────────────────│
│rd_addr ││ 0 │1 │2 │3 │4 │5 │6 │7 │8 │9 │
│ ││────────────┴───┴───┴───┴───┴───┴───┴───┴───┴───────────────────────│
│rd_any ││ ┌───────────────────────────────────┐ │
│ ││────────┘ └───────────────────────│
│rd_en ││ ┌───────────────────────────────────┐ │
│ ││────────┘ └───────────────────────│
│tvalid ││ ┌───────────────────────│
│ ││────────────────────────────────────────────┘ │
└──────────────────┘└────────────────────────────────────────────────────────────────────┘ |}]
;;
| null | https://raw.githubusercontent.com/fyquah/hardcaml_zprize/553b1be10ae9b977decbca850df6ee2d0595e7ff/zprize/ntt/hardcaml/test/test_load_store_sm.ml | ocaml | open! Core
open Hardcaml
open Hardcaml_waveterm
module Store_sm = Zprize_ntt.Store_sm.Make (struct
let logn = 4
let support_4step_twiddle = false
let logcores = 0
let logblocks = 0
let memory_layout = Zprize_ntt.Memory_layout.Optimised_layout_single_port
end)
module Sim = Cyclesim.With_interface (Store_sm.I) (Store_sm.O)
let test_store_sm () =
let sim = Sim.create (Store_sm.create (Scope.create ())) in
let i = Cyclesim.inputs sim in
let waves, sim = Waveform.create sim in
let cycle () = Cyclesim.cycle sim in
i.clear := Bits.vdd;
cycle ();
i.clear := Bits.gnd;
i.start := Bits.vdd;
cycle ();
i.start := Bits.gnd;
i.tready := Bits.vdd;
cycle ();
i.tready := Bits.gnd;
cycle ();
cycle ();
i.tready := Bits.vdd;
cycle ();
cycle ();
i.tready := Bits.gnd;
cycle ();
i.tready := Bits.vdd;
cycle ();
i.tready := Bits.gnd;
cycle ();
cycle ();
for _ = 0 to 10 do
cycle ()
done;
waves
;;
let%expect_test "store sm" =
let waves = test_store_sm () in
Waveform.print ~display_width:90 ~display_height:25 ~wave_width:1 waves;
[%expect {|
┌Signals───────────┐┌Waves───────────────────────────────────────────────────────────────┐
│clock ││┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ ┌─┐ │
│ ││ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─┘ └─│
│clear ││────┐ │
│ ││ └───────────────────────────────────────────────────────────────│
│first_4step_pass ││ │
│ ││────────────────────────────────────────────────────────────────────│
│start ││ ┌───┐ │
│ ││────┘ └───────────────────────────────────────────────────────────│
│tready ││ ┌───┐ ┌───────┐ ┌───┐ │
│ ││────────┘ └───────┘ └───┘ └───────────────────────────────│
│block ││ │
│ ││────────────────────────────────────────────────────────────────────│
│done_ ││────────┐ │
│ ││ └───────────────────────────────────────────────────────────│
│ ││────────────┬───┬───┬───┬───┬───┬───┬───┬───┬───────────────────────│
│rd_addr ││ 0 │1 │2 │3 │4 │5 │6 │7 │8 │9 │
│ ││────────────┴───┴───┴───┴───┴───┴───┴───┴───┴───────────────────────│
│rd_any ││ ┌───────────────────────────────────┐ │
│ ││────────┘ └───────────────────────│
│rd_en ││ ┌───────────────────────────────────┐ │
│ ││────────┘ └───────────────────────│
│tvalid ││ ┌───────────────────────│
│ ││────────────────────────────────────────────┘ │
└──────────────────┘└────────────────────────────────────────────────────────────────────┘ |}]
;;
|
|
ffc76da61bf5671ddd38ead266ad617cba9e1d8c5fc3b96a875caa32c9cd433f | sbcl/sbcl | methods.lisp | This software is part of the SBCL system . See the README file for
;;;; more information.
This software is derived from software originally released by Xerox
;;;; Corporation. Copyright and release statements follow. Later modifications
;;;; to the software are in the public domain and are provided with
;;;; absolutely no warranty. See the COPYING and CREDITS files for more
;;;; information.
copyright information from original PCL sources :
;;;;
Copyright ( c ) 1985 , 1986 , 1987 , 1988 , 1989 , 1990 Xerox Corporation .
;;;; All rights reserved.
;;;;
;;;; Use and copying of this software and preparation of derivative works based
;;;; upon this software are permitted. Any distribution of this software or
derivative works must comply with all applicable United States export
;;;; control laws.
;;;;
This software is made available AS IS , and Xerox Corporation makes no
;;;; warranty about the software, its performance or its conformity to any
;;;; specification.
(in-package "SB-PCL")
;;; methods
;;;
;;; Methods themselves are simple inanimate objects. Most properties of
;;; methods are immutable, methods cannot be reinitialized. The following
;;; properties of methods can be changed:
;;; METHOD-GENERIC-FUNCTION
;;; initialization
;;;
;;; Error checking is done in before methods. Because of the simplicity of
;;; standard method objects the standard primary method can fill the slots.
;;;
;;; Methods are not reinitializable.
(define-condition metaobject-initialization-violation
(reference-condition simple-error)
())
(defun change-class-to-metaobject-violation (to-name
&optional from-name references)
(error 'metaobject-initialization-violation
:format-control "~@<Cannot ~S~@[ ~S~] objects into ~S metaobjects.~@:>"
:format-arguments (list 'change-class from-name to-name)
:references references))
(macrolet ((def (name args control)
`(defmethod ,name ,args
(declare (ignore initargs))
(error 'metaobject-initialization-violation
:format-control ,(format nil "~~@<~A~~@:>" control)
:format-arguments (list ',name)
:references '((:amop :initialization method))))))
(def reinitialize-instance ((method method) &rest initargs)
"Method objects cannot be redefined by ~S.")
(def change-class ((method method) new &rest initargs)
"Method objects cannot be redefined by ~S.")
;; NEW being a subclass of method is dealt with in the general
;; method of CHANGE-CLASS
(def update-instance-for-redefined-class ((method method) added discarded
plist &rest initargs)
"No behaviour specified for ~S on method objects.")
(def update-instance-for-different-class (old (new method) &rest initargs)
"No behaviour specified for ~S on method objects.")
(def update-instance-for-different-class ((old method) new &rest initargs)
"No behaviour specified for ~S on method objects."))
(define-condition invalid-method-initarg (simple-program-error)
((method :initarg :method :reader invalid-method-initarg-method))
(:report
(lambda (c s)
(format s "~@<In initialization of ~S:~2I~_~?~@:>"
(invalid-method-initarg-method c)
(simple-condition-format-control c)
(simple-condition-format-arguments c)))))
(defun invalid-method-initarg (method format-control &rest args)
(error 'invalid-method-initarg :method method
:format-control format-control :format-arguments args))
(defun check-documentation (method doc)
(unless (or (null doc) (stringp doc))
(invalid-method-initarg method "~@<~S of ~S is neither ~S nor a ~S.~@:>"
:documentation doc 'null 'string)))
(defun check-lambda-list (method ll)
(declare (ignore method ll))
nil)
(defun check-method-function (method fun)
(unless (functionp fun)
(invalid-method-initarg method "~@<~S of ~S is not a ~S.~@:>"
:function fun 'function)))
(macrolet ((dolist-carefully ((var list improper-list-handler) &body body)
`(let ((,var nil)
(.dolist-carefully. ,list))
(loop (when (null .dolist-carefully.) (return nil))
(if (consp .dolist-carefully.)
(progn
(setq ,var (pop .dolist-carefully.))
,@body)
(,improper-list-handler))))))
(defun check-qualifiers (method qualifiers)
(flet ((improper-list ()
(invalid-method-initarg method
"~@<~S of ~S is an improper list.~@:>"
:qualifiers qualifiers)))
(dolist-carefully (q qualifiers improper-list)
(unless (and q (atom q))
(invalid-method-initarg method
"~@<~S, in ~S ~S, is not a non-~S atom.~@:>"
q :qualifiers qualifiers 'null)))))
(defun check-slot-name (method name)
(declare (ignore method))
(unless (symbolp name)
(invalid-method-initarg "~@<~S of ~S is not a ~S.~@:>"
:slot-name name 'symbol)))
(defun check-specializers (method specializers)
(flet ((improper-list ()
(invalid-method-initarg method
"~@<~S of ~S is an improper list.~@:>"
:specializers specializers)))
(dolist-carefully (s specializers improper-list)
(unless (specializerp s)
(invalid-method-initarg method
"~@<~S, in ~S ~S, is not a ~S.~@:>"
s :specializers specializers 'specializer)))
: ANSI says that it 's not valid to have methods
;; specializing on classes which are "not defined", leaving
;; unclear what the definedness of a class is; AMOP suggests that
;; forward-referenced-classes, since they have proper names and
;; all, are at least worthy of some level of definition. We allow
;; methods specialized on forward-referenced-classes, but it's
;; non-portable and potentially dubious, so
(let ((frcs (remove-if-not #'forward-referenced-class-p specializers)))
(unless (null frcs)
(style-warn "~@<Defining a method using ~
~1{~S~}~;~1{~S and ~S~}~:;~{~#[~;and ~]~S~^ , ~}~ ] ~
a specializer~:;specializers~].~@ :> "
(length frcs) frcs)))))
end MACROLET
(defmethod shared-initialize :before
((method standard-method) slot-names &key
qualifiers lambda-list specializers function documentation)
(declare (ignore slot-names))
FIXME : it 's not clear to me ( CSR , 2006 - 08 - 09 ) why methods get
;; this extra paranoia and nothing else does; either everything
;; should be aggressively checking initargs, or nothing much should.
;; In either case, it would probably be better to have :type
;; declarations in slots, which would then give a suitable type
;; error (if we implement type-checking for slots...) rather than
;; this hand-crafted thing.
(check-qualifiers method qualifiers)
(check-lambda-list method lambda-list)
(check-specializers method specializers)
(check-method-function method function)
(check-documentation method documentation))
(defmethod shared-initialize :before
((method standard-accessor-method) slot-names &key
slot-name slot-definition)
(declare (ignore slot-names))
(unless slot-definition
(check-slot-name method slot-name)))
(defmethod shared-initialize :after ((method standard-method) slot-names
&rest initargs &key ((method-cell method-cell)))
(declare (ignore slot-names method-cell))
(initialize-method-function initargs method))
(define-load-time-global *the-class-standard-generic-function*
(find-class 'standard-generic-function))
;;; Validate the :METHOD-CLASS and :METHOD-COMBINATION initargs of a
;;; STANDARD-GENERIC-FUNCTION before its slots are set, signaling a
;;; descriptive error for bad or missing values.  A symbolic
;;; :METHOD-CLASS is resolved with FIND-CLASS and stored directly.
(defmethod shared-initialize :before
    ((generic-function standard-generic-function)
     slot-names
     &key (lambda-list () lambda-list-p)
     argument-precedence-order
     declarations
     documentation
     (method-class nil method-class-supplied-p)
     (method-combination nil method-combination-supplied-p))
  (declare (ignore slot-names
                   declarations argument-precedence-order documentation
                   lambda-list lambda-list-p))
  (flet ((initarg-error (initarg value string)
           (error "when initializing the generic function ~S:~%~
                   The ~S initialization argument was: ~A.~%~
                   It must be ~A."
                  generic-function initarg value string)))
    (cond (method-class-supplied-p
           (when (symbolp method-class)
             (setq method-class (find-class method-class)))
           (unless (and (classp method-class)
                        (*subtypep (class-eq-specializer method-class)
                                   *the-class-method*))
             (initarg-error :method-class
                            method-class
                            "a subclass of the class METHOD"))
           (setf (slot-value generic-function 'method-class) method-class))
          ;; Slot already bound (e.g. reinitialization): nothing to do.
          ((slot-boundp generic-function 'method-class))
          (t
           (initarg-error :method-class
                          "not supplied"
                          "a subclass of the class METHOD")))
    (cond (method-combination-supplied-p
           (unless (method-combination-p method-combination)
             (initarg-error :method-combination
                            method-combination
                            "a method combination object")))
          ((slot-boundp generic-function '%method-combination))
          (t
           (initarg-error :method-combination
                          "not supplied"
                          "a method combination object")))))
;;; Return the generic function fbound to NAME, or NIL when NAME is
;;; unbound or names an ordinary function -- unless ERRORP is true
;;; (the default), in which case signal an error instead.
(defun find-generic-function (name &optional (errorp t))
  (let ((candidate (when (fboundp name) (fdefinition name))))
    (if (typep candidate 'generic-function)
        candidate
        (and errorp
             (error "No generic function named ~S." name)))))
;;; Create a method of the appropriate prototype class and add it to the
;;; (possibly freshly ensured) generic function named
;;; GENERIC-FUNCTION-NAME.  Returns the new method object.
(defun real-add-named-method (generic-function-name qualifiers
                              specializers lambda-list &rest other-initargs)
  (let* ((existing-gf (find-generic-function generic-function-name nil))
         (generic-function
          (if existing-gf
              (ensure-generic-function
               generic-function-name
               :generic-function-class (class-of existing-gf))
              (ensure-generic-function generic-function-name)))
         (proto (method-prototype-for-gf generic-function-name)))
    ;; FIXME: Destructive modification of &REST list.
    (setf (getf (getf other-initargs 'plist) :name)
          (make-method-spec generic-function qualifiers specializers))
    (let ((new (apply #'make-instance (class-of proto)
                      :qualifiers qualifiers :specializers specializers
                      :lambda-list lambda-list other-initargs)))
      (add-method generic-function new)
      new)))
;;; Signaled when FIND-METHOD (via REAL-GET-METHOD) is given a
;;; specializer list whose length does not match the generic function's
;;; required-argument count.
(define-condition find-method-length-mismatch
    (reference-condition simple-error)
  ()
  (:default-initargs :references '((:ansi-cl :function find-method))))
;;; Find the method of GENERIC-FUNCTION with the given QUALIFIERS and
;;; SPECIALIZERS, holding the gf's lock.  Signals an error when no such
;;; method exists unless ERRORP is NIL.
(defun real-get-method (generic-function qualifiers specializers
                        &optional (errorp t)
                        always-check-specializers)
  (sb-thread::with-recursive-system-lock ((gf-lock generic-function))
    (let ((specializer-count (length specializers))
          (methods (generic-function-methods generic-function)))
      (when (or methods always-check-specializers)
        (let ((required-parameter-count
               (length (arg-info-metatypes (gf-arg-info generic-function)))))
          ;; Since we internally bypass FIND-METHOD by using GET-METHOD
          ;; instead we need to do this here or users may get hit by a
          ;; failed AVER instead of a sensible error message.
          (unless (= specializer-count required-parameter-count)
            (error
             'find-method-length-mismatch
             :format-control "~@<The generic function ~S takes ~D ~
                              required argument~:P; was asked to ~
                              find a method with specializers ~:S~@:>"
             :format-arguments (list generic-function required-parameter-count
                                     (unparse-specializers generic-function specializers))))))
      (flet ((congruentp (other-method)
               (let ((other-specializers (method-specializers other-method)))
                 (aver (= specializer-count (length other-specializers)))
                 (and (equal qualifiers (safe-method-qualifiers other-method))
                      (every #'same-specializer-p specializers other-specializers)))))
        (declare (dynamic-extent #'congruentp))
        (cond ((find-if #'congruentp methods))
              ((null errorp) nil)
              (t
               (error "~@<There is no method on ~S with ~:[no ~
                       qualifiers~;~:*qualifiers ~:S~] and specializers ~
                       ~:S.~@:>"
                      generic-function qualifiers specializers)))))))
(defmethod find-method ((generic-function standard-generic-function)
                        qualifiers specializers &optional (errorp t))
  ;; ANSI about FIND-METHOD: "The specializers argument contains the
  ;; parameter specializers for the method. It must correspond in
  ;; length to the number of required arguments of the generic
  ;; function, or an error is signaled."
  ;;
  ;; This error checking is done by REAL-GET-METHOD.
  (real-get-method
   generic-function qualifiers
   ;; ANSI for FIND-METHOD seems to imply that in fact specializers
   ;; should always be passed in parsed form instead of being parsed
   ;; at this point.  Since there's no ANSI-blessed way of getting an
   ;; EQL specializer, that seems unnecessarily painful, so we are
   ;; nice to our users. -- CSR, 2007-06-01
   ;; Note that INTERN-EQL-SPECIALIZER is exported from SB-MOP, but MOP isn't
   ;; part of the ANSI standard. Parsing introduces a tiny semantic problem in
   ;; the edge case of an EQL specializer whose object is literally (EQL :X).
   ;; That one must be supplied as a pre-parsed #<EQL-SPECIALIZER> because if
   ;; not, we'd parse it into a specializer whose object is :X.
   (parse-specializers generic-function specializers) errorp t))
;;; Compute various information about a generic-function's arglist by looking
;;; at the argument lists of the methods. The hair for trying not to use
;;; &REST arguments lives here.
;;; The values returned are:
;;; number-of-required-arguments
;;; the number of required arguments to this generic-function's
;;; discriminating function
;;; &rest-argument-p
;;; whether or not this generic-function's discriminating
;;; function takes an &rest argument.
;;; specialized-argument-positions
;;; a list of the positions of the arguments this generic-function
;;; specializes (e.g. for a classical generic-function this is the
;;;     list: (1)).
;;; Fold COMPUTE-DISCRIMINATING-FUNCTION-ARGLIST-INFO-INTERNAL over all
;;; of the gf's methods, returning the merged required-argument count,
;;; &REST flag, and sorted list of specialized argument positions.
(defmethod compute-discriminating-function-arglist-info
    ((generic-function standard-generic-function))
  ;;(declare (values number-of-required-arguments &rest-argument-p
  ;;          specialized-argument-postions))
  (let ((number-required nil)
        (restp nil)
        (specialized-positions ())
        (methods (generic-function-methods generic-function)))
    (dolist (method methods)
      (multiple-value-setq (number-required restp specialized-positions)
        (compute-discriminating-function-arglist-info-internal
         generic-function method number-required restp specialized-positions)))
    (values number-required restp (sort specialized-positions #'<))))
;;; Merge one METHOD's lambda-list shape into the running
;;; (NUMBER-OF-REQUIREDS RESTP SPECIALIZED-ARGUMENT-POSITIONS) values.
(defun compute-discriminating-function-arglist-info-internal
    (generic-function method number-of-requireds restp
     specialized-argument-positions)
  (declare (ignore generic-function)
           (type (or null fixnum) number-of-requireds))
  (let ((requireds 0))
    (declare (fixnum requireds))
    ;; Go through this methods arguments seeing how many are required,
    ;; and whether there is an &rest argument.
    (dolist (arg (method-lambda-list method))
      (cond ((eq arg '&aux) (return))
            ((memq arg '(&optional &rest &key))
             (return (setq restp t)))
            ((memq arg lambda-list-keywords))
            (t (incf requireds))))
    ;; Now go through this method's type specifiers to see which
    ;; argument positions are type specified. Treat T specially
    ;; in the usual sort of way. For efficiency don't bother to
    ;; keep specialized-argument-positions sorted, rather depend
    ;; on our caller to do that.
    (let ((pos 0))
      (dolist (type-spec (method-specializers method))
        (unless (eq type-spec *the-class-t*)
          (pushnew pos specialized-argument-positions :test #'eq))
        (incf pos)))
    ;; Finally merge the values for this method into the values
    ;; for the exisiting methods and return them. Note that if
    ;; number-of-requireds is NIL it means this is the first method
    ;; and we depend on that.
    (values (min (or number-of-requireds requireds) requireds)
            (or restp
                (and number-of-requireds (/= number-of-requireds requireds)))
            specialized-argument-positions)))
;;; Translate the precedence indices cached in the gf's arg-info back
;;; into the corresponding parameter names of its lambda list.
(defmethod generic-function-argument-precedence-order
    ((gf standard-generic-function))
  (aver (eq **boot-state** 'complete))
  (let* ((arg-info (gf-arg-info gf))
         (lambda-list (arg-info-lambda-list arg-info)))
    (mapcar (lambda (position) (nth position lambda-list))
            (arg-info-precedence arg-info))))
;;; MOP reader: the gf's lambda list, delegated to GF-LAMBDA-LIST.
(defmethod generic-function-lambda-list ((gf generic-function))
  (gf-lambda-list gf))
;;; Read the fast-method-function flag out of the gf's arg-info.
(defmethod gf-fast-method-function-p ((gf standard-generic-function))
  (gf-info-fast-mf-p (slot-value gf 'arg-info)))
;;; Insert KEY into SET while holding the hashset's mutex.
(defun add-to-weak-hashset (key set)
  (with-system-mutex ((hashset-mutex set))
    (hashset-insert set key)))
;;; Remove KEY from SET while holding the hashset's mutex.
(defun remove-from-weak-hashset (key set)
  (with-system-mutex ((hashset-mutex set))
    (hashset-remove set key)))
;;; Membership test for KEY in SET under the hashset's mutex.
(defun weak-hashset-memberp (key set)
  (with-system-mutex ((hashset-mutex set))
    (hashset-find set key)))
;;; Finish setting up a new standard gf: record its arg-info, register
;;; it with its method combination's weak set of dependents, and build
;;; the discriminating function if the arg-info is already valid.
(defmethod initialize-instance :after ((gf standard-generic-function)
                                       &key (lambda-list nil lambda-list-p)
                                       argument-precedence-order)
  ;; FIXME: Because ARG-INFO is a STRUCTURE-OBJECT, it does not get
  ;; a permutation vector, and therefore the code that SLOT-VALUE transforms
  ;; to winds up punting to #'(SLOT-ACCESSOR :GLOBAL ARG-INFO READER).
  ;; Using SLOT-VALUE the "slow" way sidesteps some bootstrap issues.
  (declare (notinline slot-value))
  (progn ; WAS: with-slots (arg-info) gf
    (if lambda-list-p
        (set-arg-info gf
                      :lambda-list lambda-list
                      :argument-precedence-order argument-precedence-order)
        (set-arg-info gf))
    (let ((mc (generic-function-method-combination gf)))
      (add-to-weak-hashset gf (method-combination-%generic-functions mc)))
    (when (arg-info-valid-p (slot-value gf 'arg-info))
      (update-dfun gf))))
;;; Around-method keeping the gf consistent across reinitialization:
;;; move the gf between the old and new method combinations' weak sets
;;; (flushing the effective-method cache when the combination changes),
;;; refresh arg-info and the dfun under the gf lock, and notify
;;; dependents last.
(defmethod reinitialize-instance :around
    ((gf standard-generic-function) &rest args &key
     (lambda-list nil lambda-list-p) (argument-precedence-order nil apo-p))
  (let* ((old-mc (generic-function-method-combination gf))
         (mc (getf args :method-combination old-mc)))
    (unless (eq mc old-mc)
      ;; Sanity-check the bookkeeping before mutating it.
      (aver (weak-hashset-memberp gf (method-combination-%generic-functions old-mc)))
      (aver (not (weak-hashset-memberp gf (method-combination-%generic-functions mc)))))
    (prog1 (call-next-method)
      (unless (eq mc old-mc)
        (remove-from-weak-hashset gf (method-combination-%generic-functions old-mc))
        (add-to-weak-hashset gf (method-combination-%generic-functions mc))
        (flush-effective-method-cache gf))
      (sb-thread::with-recursive-system-lock ((gf-lock gf))
        (cond
          ((and lambda-list-p apo-p)
           (set-arg-info gf
                         :lambda-list lambda-list
                         :argument-precedence-order argument-precedence-order))
          (lambda-list-p (set-arg-info gf :lambda-list lambda-list))
          (t (set-arg-info gf)))
        (when (arg-info-valid-p (gf-arg-info gf))
          (update-dfun gf))
        (map-dependents gf (lambda (dependent)
                             (apply #'update-dependent gf dependent args)))))))
;;; Replace GF's method set with METHODS.  The tail of the list is
;;; passed as REAL-ADD-METHOD's SKIP-DFUN-UPDATE-P argument, so the
;;; dfun is only rebuilt when the last method is added.
(defun set-methods (gf methods)
  (setf (generic-function-methods gf) nil)
  (loop (when (null methods) (return gf))
        (real-add-method gf (pop methods) methods)))
;;; Signaled when a method added to (SETF SLOT-VALUE-USING-CLASS)
;;; specializes the NEW-VALUE argument, which PCL disallows.
(define-condition new-value-specialization (reference-condition error)
  ((%method :initarg :method :reader new-value-specialization-method))
  (:report
   (lambda (c s)
     (format s "~@<Cannot add method ~S to ~S, as it specializes the ~
                new-value argument.~@:>"
             (new-value-specialization-method c)
             #'(setf slot-value-using-class))))
  (:default-initargs :references
      (list '(:sbcl :node "Metaobject Protocol")
            '(:amop :generic-function (setf slot-value-using-class)))))
;;; Pull out of GF and METHOD the six values REAL-ADD-METHOD needs:
;;; the gf lock, the method's qualifiers/specializers/lambda-list, the
;;; method's current generic function (if any), and the gf's name.
(defgeneric values-for-add-method (gf method)
  (:method ((gf standard-generic-function) (method standard-method))
    ;; KLUDGE: Just a single generic dispatch, and everything else
    ;; comes from permutation vectors. Would be nicer to define
    ;; REAL-ADD-METHOD with a proper method so that we could efficiently
    ;; use SLOT-VALUE there.
    ;;
    ;; Optimization note: REAL-ADD-METHOD has a lot of O(N) stuff in it (as
    ;; does PCL as a whole). It should not be too hard to internally store
    ;; many of the things we now keep in lists as either purely functional
    ;; O(log N) sets, or --if we don't mind the memory cost-- using
    ;; specialized hash-tables: most things are used to answer questions about
    ;; set-membership, not ordering.
    (values (slot-value gf '%lock)
            (slot-value method 'qualifiers)
            (slot-value method 'specializers)
            (slot-value method 'lambda-list)
            (slot-value method '%generic-function)
            (slot-value gf 'name))))
;;; Warning signaled when a PRINT-OBJECT method specializes its second
;;; (stream) argument; see REAL-ADD-METHOD.
(define-condition print-object-stream-specializer (reference-condition simple-warning)
  ()
  (:default-initargs
   :references '((:ansi-cl :function print-object))
   :format-control "~@<Specializing on the second argument to ~S has ~
                    unportable effects, and also interferes with ~
                    precomputation of print functions for exceptional ~
                    situations.~@:>"
   :format-arguments (list 'print-object)))
;;; Mark GF's name in globaldb as :GENERIC-FUNCTION (deferring the real
;;; ftype computation) -- but only when the name is legal, not
;;; explicitly DECLAIMed, and actually fbound to GF.
(defun defer-ftype-computation (gf)
  ;; Is there any reason not to do this as soon as possible?
  ;; While doing it with every ADD/REMOVE-METHOD call could result in
  ;; wasted work, it seems like unnecessary complexity.
  ;; I think it's just to get through bootstrap, probably,
  ;; but if it's a semantics thing, it deserves some explanation.
  (let ((name (generic-function-name gf)))
    (when (legal-fun-name-p name) ; tautological ?
      (unless (eq (info :function :where-from name) :declared)
        (when (and (fboundp name) (eq (fdefinition name) gf))
          (setf (info :function :type name) :generic-function))))))
;;; Compute and record in globaldb the ftype of the (possible) generic
;;; function named NAME, consulting the methods seen so far in the
;;; current compilation unit when no gf object exists yet.
(defun compute-gf-ftype (name)
  (let ((gf (and (fboundp name) (fdefinition name)))
        (methods-in-compilation-unit (and (boundp 'sb-c::*methods-in-compilation-unit*)
                                          sb-c::*methods-in-compilation-unit*
                                          (gethash name sb-c::*methods-in-compilation-unit*))))
    (cond ((generic-function-p gf)
           (let* ((ll (generic-function-lambda-list gf))
                  ;; If the GF has &REST without &KEY then we don't augment
                  ;; the FTYPE with keywords, so as not to complain about keywords
                  ;; which seem not to be accepted.
                  (type (sb-c::ftype-from-lambda-list
                         (if (and (member '&rest ll) (not (member '&key ll)))
                             ll
                             (generic-function-pretty-arglist gf methods-in-compilation-unit)))))
             ;; It would be nice if globaldb were transactional,
             ;; so that either both updates or neither occur.
             (setf (info :function :where-from name) :defined-method
                   (info :function :type name) type)))
          (methods-in-compilation-unit
           (setf (info :function :where-from name) :defined-method
                 (info :function :type name)
                 (sb-c::ftype-from-lambda-list
                  (gf-merge-arglists methods-in-compilation-unit))))
          (t
           ;; The defaulting expression for (:FUNCTION :TYPE) does not store
           ;; the default. For :GENERIC-FUNCTION that is not FBOUNDP we also
           ;; don't, however this branch should never be reached because the
           ;; info only stores :GENERIC-FUNCTION when methods are loaded.
           ;; Maybe AVER that it does not happen?
           (sb-c::ftype-from-definition name)))))
;;; Add METHOD to GENERIC-FUNCTION under the gf lock, replacing any
;;; existing method with the same qualifiers/specializers and a
;;; congruent lambda list.  When SKIP-DFUN-UPDATE-P is true the
;;; discriminating function is not rebuilt (used for batch additions,
;;; see SET-METHODS).  Returns GENERIC-FUNCTION.
(defun real-add-method (generic-function method &optional skip-dfun-update-p)
  (flet ((similar-lambda-lists-p (old-method new-lambda-list)
           (binding* (((a-llks a-nreq a-nopt)
                       (analyze-lambda-list (method-lambda-list old-method)))
                      ((b-llks b-nreq b-nopt)
                       (analyze-lambda-list new-lambda-list)))
             (and (= a-nreq b-nreq)
                  (= a-nopt b-nopt)
                  (eq (ll-keyp-or-restp a-llks)
                      (ll-keyp-or-restp b-llks))))))
    (multiple-value-bind (lock qualifiers specializers new-lambda-list
                          method-gf name)
        (values-for-add-method generic-function method)
      (when method-gf
        (error "~@<The method ~S is already part of the generic ~
                function ~S; it can't be added to another generic ~
                function until it is removed from the first one.~@:>"
               method method-gf))
      (when (and (eq name 'print-object) (not (eq (second specializers) *the-class-t*)))
        (warn 'print-object-stream-specializer))
      (handler-case
          ;; System lock because interrupts need to be disabled as
          ;; well: it would be bad to unwind and leave the gf in an
          ;; inconsistent state.
          (sb-thread::with-recursive-system-lock (lock)
            (let ((existing (get-method generic-function
                                        qualifiers
                                        specializers
                                        nil)))
              ;; If there is already a method like this one then we must get
              ;; rid of it before proceeding. Note that we call the generic
              ;; function REMOVE-METHOD to remove it rather than doing it in
              ;; some internal way.
              (when (and existing (similar-lambda-lists-p existing new-lambda-list))
                (remove-method generic-function existing))
              ;; KLUDGE: We have a special case here, as we disallow
              ;; specializations of the NEW-VALUE argument to (SETF
              ;; SLOT-VALUE-USING-CLASS). GET-ACCESSOR-METHOD-FUNCTION is
              ;; the optimizing function here: it precomputes the effective
              ;; method, assuming that there is no dispatch to be done on
              ;; the new-value argument.
              (when (and (eq generic-function #'(setf slot-value-using-class))
                         (not (eq *the-class-t* (first specializers))))
                (error 'new-value-specialization :method method))
              (setf (method-generic-function method) generic-function)
              (pushnew method (generic-function-methods generic-function) :test #'eq)
              (dolist (specializer specializers)
                (add-direct-method specializer method))
              ;; KLUDGE: SET-ARG-INFO contains the error-detecting logic for
              ;; detecting attempts to add methods with incongruent lambda
              ;; lists. However, according to Gerd Moellmann on cmucl-imp,
              ;; it also depends on the new method already having been added
              ;; to the generic function. Therefore, we need to remove it
              ;; again on error:
              (let ((remove-again-p t))
                (unwind-protect
                     (progn
                       (set-arg-info generic-function :new-method method)
                       (setq remove-again-p nil))
                  (when remove-again-p
                    (remove-method generic-function method))))
              ;; KLUDGE II: ANSI saith that it is not an error to add a
              ;; method with invalid qualifiers to a generic function of the
              ;; wrong kind; it's only an error at generic function
              ;; invocation time; I dunno what the rationale was, and it
              ;; sucks. Nevertheless, it's probably a programmer error, so
              ;; let's warn anyway. -- CSR, 2003-08-20
              (let* ((mc (generic-function-method-combination generic-function))
                     (type-name (method-combination-type-name mc)))
                (flet ((invalid ()
                         (warn "~@<Invalid qualifiers for ~S method ~
                                combination in method ~S:~2I~_~S.~@:>"
                               type-name method qualifiers)))
                  (cond
                    ((and (eq mc *standard-method-combination*)
                          qualifiers
                          (or (cdr qualifiers)
                              (not (standard-method-combination-qualifier-p
                                    (car qualifiers)))))
                     (invalid))
                    ((and (short-method-combination-p mc)
                          (or (null qualifiers)
                              (cdr qualifiers)
                              (not (short-method-combination-qualifier-p
                                    type-name (car qualifiers)))))
                     (invalid)))))
              (unless skip-dfun-update-p
                (update-ctors 'add-method
                              :generic-function generic-function
                              :method method)
                (update-dfun generic-function))
              (defer-ftype-computation generic-function)
              (map-dependents generic-function
                              (lambda (dep)
                                (update-dependent generic-function
                                                  dep 'add-method method)))))
        (serious-condition (c)
          (error c)))))
  generic-function)
;;; Remove METHOD from GENERIC-FUNCTION (a no-op unless the method
;;; actually belongs to it), flushing caches, updating arg-info, ctors
;;; and the dfun, and notifying dependents.  Returns GENERIC-FUNCTION.
(defun real-remove-method (generic-function method)
  (when (eq generic-function (method-generic-function method))
    (flush-effective-method-cache generic-function)
    (let ((lock (gf-lock generic-function)))
      ;; System lock because interrupts need to be disabled as well:
      ;; it would be bad to unwind and leave the gf in an inconsistent
      ;; state.
      (sb-thread::with-recursive-system-lock (lock)
        (let* ((specializers (method-specializers method))
               (methods (generic-function-methods generic-function))
               (new-methods (remove method methods)))
          (setf (method-generic-function method) nil
                (generic-function-methods generic-function) new-methods)
          (dolist (specializer specializers)
            (remove-direct-method specializer method))
          (set-arg-info generic-function)
          (update-ctors 'remove-method
                        :generic-function generic-function
                        :method method)
          (update-dfun generic-function)
          (defer-ftype-computation generic-function)
          (map-dependents generic-function
                          (lambda (dep)
                            (update-dependent generic-function
                                              dep 'remove-method method)))))))
  generic-function)
;;; Functional (non-generic) version of COMPUTE-APPLICABLE-METHODS,
;;; returning only the primary value.
(defun compute-applicable-methods-function (generic-function arguments)
  (values (compute-applicable-methods-using-types
           generic-function
           (types-from-args generic-function arguments 'eql))))
;;; MOP entry point: applicable methods for concrete ARGUMENTS, via
;;; EQL types derived from each required argument.
(defmethod compute-applicable-methods
    ((generic-function generic-function) arguments)
  (values (compute-applicable-methods-using-types
           generic-function
           (types-from-args generic-function arguments 'eql))))
;;; MOP entry point: applicable methods given argument CLASSES rather
;;; than concrete objects, via CLASS-EQ types.
(defmethod compute-applicable-methods-using-classes
    ((generic-function generic-function) classes)
  (compute-applicable-methods-using-types
   generic-function
   (types-from-args generic-function classes 'class-eq)))
;;; Record every class in CLASSES (symbols are resolved via FIND-CLASS)
;;; as pairwise incompatible superclasses of each other.
(defun !proclaim-incompatible-superclasses (classes)
  (let ((resolved (mapcar (lambda (class)
                            (if (symbolp class)
                                (find-class class)
                                class))
                          classes)))
    (dolist (class resolved)
      (dolist (other resolved)
        (unless (eq class other)
          (pushnew other (class-incompatible-superclass-list class)
                   :test #'eq))))))
;;; True unless some class in CLASS1's precedence list has a recorded
;;; incompatible superclass appearing in CLASS2's precedence list.
(defun superclasses-compatible-p (class1 class2)
  (let ((cpl2 (cpl-or-nil class2)))
    (dolist (sc1 (cpl-or-nil class1) t)
      (dolist (incompatible (class-incompatible-superclass-list sc1))
        (when (memq incompatible cpl2)
          (return-from superclasses-compatible-p nil))))))
;;; Declare which sets of classes can never legally share a subclass.
(mapc
 #'!proclaim-incompatible-superclasses
 '(;; superclass class
   ;; direct subclasses of pcl-class
   (standard-class funcallable-standard-class)
   ;; superclass metaobject
   (class eql-specializer class-eq-specializer method method-combination
    generic-function slot-definition)
   ;; metaclass built-in-class
   (number sequence character ; direct subclasses of t, but not array
    standard-object structure-object) ; or symbol
   (number array character symbol ; direct subclasses of t, but not
    standard-object structure-object) ; sequence
   (complex float rational) ; direct subclasses of number
   (integer ratio) ; direct subclasses of rational
   (list vector) ; direct subclasses of sequence
   (cons null) ; direct subclasses of list
   (string bit-vector) ; direct subclasses of vector
   ))
;;; Default specializer comparison: object identity (EQL).
(defmethod same-specializer-p ((specl1 specializer) (specl2 specializer))
  (eql specl1 specl2))
;;; Class specializers compare by identity.
(defmethod same-specializer-p ((specl1 class) (specl2 class))
  (eq specl1 specl2))
;;; A class used as a specializer is its own specializer-class.
(defmethod specializer-class ((specializer class))
  specializer)
;;; CLASS-EQ specializers match when they denote the same class.
(defmethod same-specializer-p ((specl1 class-eq-specializer)
                               (specl2 class-eq-specializer))
  (eq (specializer-class specl1) (specializer-class specl2)))
;; FIXME: This method is wacky, and indicative of a coding style in which
;; metaphorically the left hand does not know what the right is doing.
;; If you want this to be the abstract comparator, and you "don't know"
;; that EQL-specializers are interned, then the comparator should be EQL.
;; But if you *do* know that they're interned, then why does this method
;; exist at all? The method on SPECIALIZER works fine.
(defmethod same-specializer-p ((specl1 eql-specializer)
                               (specl2 eql-specializer))
  ;; A bit of deception to confuse the enemy?
  ;; Compares the wrapped objects by EQ rather than the specializer
  ;; metaobjects themselves.
  (eq (specializer-object specl1) (specializer-object specl2)))
;;; The class an EQL specializer dispatches under: the class of its
;;; wrapped object.
(defmethod specializer-class ((specializer eql-specializer))
  (class-of (slot-value specializer 'object)))
;;; SPECIALIZER-CLASS for standard specializers, NIL for anything else.
(defun specializer-class-or-nil (specializer)
  (and (standard-specializer-p specializer)
       (specializer-class specializer)))
;;; Signal a program-error reporting that FUNCTION needs at least N
;;; arguments.
(defun error-need-at-least-n-args (function n)
  (%program-error "~@<The function ~2I~_~S ~I~_requires at least ~W ~
                   argument~:P.~:>"
                  function n))
;;; Build a type specifier for each required argument of the gf from
;;; ARGUMENTS, wrapping each in (TYPE-MODIFIER arg) when a modifier
;;; (e.g. EQL or CLASS-EQ) is given.  Errors if too few arguments are
;;; supplied.  Second value is the gf's arg-info.
(defun types-from-args (generic-function arguments &optional type-modifier)
  (multiple-value-bind (nreq applyp metatypes nkeys arg-info)
      (get-generic-fun-info generic-function)
    (declare (ignore applyp metatypes nkeys))
    (let ((types-rev nil))
      (dotimes-fixnum (i nreq)
        (unless arguments
          (error-need-at-least-n-args (generic-function-name generic-function)
                                      nreq))
        (let ((arg (pop arguments)))
          (push (if type-modifier `(,type-modifier ,arg) arg) types-rev)))
      (values (nreverse types-rev) arg-info))))
;;; Fill WRAPPERS with the class-wrappers of CLASSES, skipping positions
;;; whose metatype is T.  With NKEYS = 1 a single wrapper is returned
;;; directly instead of a list.  Returns NIL as soon as any needed class
;;; has no wrapper.
(defun get-wrappers-from-classes (nkeys wrappers classes metatypes)
  (let* ((w wrappers) (w-tail w) (mt-tail metatypes))
    (dolist (class (ensure-list classes))
      (unless (eq t (car mt-tail))
        (let ((c-w (class-wrapper class)))
          (unless c-w (return-from get-wrappers-from-classes nil))
          (if (eql nkeys 1)
              (setq w c-w)
              ;; Destructively deposit the wrapper into the next cell
              ;; of the caller-supplied WRAPPERS list.
              (setf (car w-tail) c-w
                    w-tail (cdr w-tail)))))
      (setq mt-tail (cdr mt-tail)))
    w))
;;; Build a secondary dispatch function for GF for the exact CLASSES
;;; (via CLASS-EQ types), suitable for caching.
(defun sdfun-for-caching (gf classes)
  (let ((types (mapcar #'class-eq-type classes)))
    (multiple-value-bind (methods all-applicable-and-sorted-p)
        (compute-applicable-methods-using-types gf types)
      (let ((generator (get-secondary-dispatch-function1
                        gf methods types nil t all-applicable-and-sorted-p)))
        (make-callable generator
                       nil (mapcar #'class-wrapper classes))))))
;;; The :CONSTANT-VALUE plist entry of the most specific method of GF
;;; applicable to instances of CLASSES.
(defun value-for-caching (gf classes)
  (let ((methods (compute-applicable-methods-using-types
                  gf (mapcar #'class-eq-type classes))))
    (method-plist-value (car methods) :constant-value)))
;;; Fallback dispatch: compute applicable methods on every call and
;;; invoke the effective method, or NO-APPLICABLE-METHOD when none.
(defun default-secondary-dispatch-function (generic-function)
  (lambda (&rest args)
    (let ((methods (compute-applicable-methods generic-function args)))
      (if methods
          (let ((emf (get-effective-method-function generic-function
                                                    methods)))
            (invoke-emf emf args))
          (call-no-applicable-method generic-function args)))))
(define-load-time-global *std-cam-methods* nil)
;;; Return (1) an effective-method function for calling
;;; COMPUTE-APPLICABLE-METHODS on GENERIC-FUNCTION and (2) whether only
;;; the standard methods apply.  Before boot completes, fall back to the
;;; non-generic function with the std-p flag true.
(defun compute-applicable-methods-emf (generic-function)
  (if (eq **boot-state** 'complete)
      (let* ((cam (gdefinition 'compute-applicable-methods))
             (cam-methods (compute-applicable-methods-using-types
                           cam (list `(eql ,generic-function) t))))
        (values (get-effective-method-function cam cam-methods)
                (list-elts-eq cam-methods
                              (or *std-cam-methods*
                                  (setq *std-cam-methods*
                                        (compute-applicable-methods-using-types
                                         cam (list `(eql ,cam) t)))))))
      (values #'compute-applicable-methods-function t)))
;;; Read the cached "standard c-a-m emf" flag from GF's arg-info.
(defun compute-applicable-methods-emf-std-p (gf)
  (gf-info-c-a-m-emf-std-p (gf-arg-info gf)))
(defvar *old-c-a-m-gf-methods* nil)
;;; Refresh cached COMPUTE-APPLICABLE-METHODS info.  When only new
;;; methods were added since last time, update just the gfs (or gfs of
;;; affected classes) those new methods specialize on; otherwise update
;;; every generic function.
(defun update-all-c-a-m-gf-info (c-a-m-gf)
  (let ((methods (generic-function-methods c-a-m-gf)))
    (if (and *old-c-a-m-gf-methods*
             (every (lambda (old-method)
                      (member old-method methods :test #'eq))
                    *old-c-a-m-gf-methods*))
        (let ((gfs-to-do nil)
              (gf-classes-to-do nil))
          (dolist (method methods)
            (unless (member method *old-c-a-m-gf-methods* :test #'eq)
              (let ((specl (car (method-specializers method))))
                (if (eql-specializer-p specl)
                    (pushnew (specializer-object specl) gfs-to-do :test #'eq)
                    (pushnew (specializer-class specl) gf-classes-to-do :test #'eq)))))
          (map-all-generic-functions
           (lambda (gf)
             (when (or (member gf gfs-to-do :test #'eq)
                       (dolist (class gf-classes-to-do nil)
                         (member class
                                 (class-precedence-list (class-of gf))
                                 :test #'eq)))
               (update-c-a-m-gf-info gf)))))
        (map-all-generic-functions #'update-c-a-m-gf-info))
    (setq *old-c-a-m-gf-methods* methods)))
;;; Refresh both cached c-a-m info and the simple-accessor type of GF.
(defun update-gf-info (gf)
  (update-c-a-m-gf-info gf)
  (update-gf-simple-accessor-type gf))
;;; Store GF's COMPUTE-APPLICABLE-METHODS emf and std-p flag into its
;;; arg-info (skipped for early, pre-boot gfs).
(defun update-c-a-m-gf-info (gf)
  (unless (early-gf-p gf)
    (multiple-value-bind (c-a-m-emf std-p)
        (compute-applicable-methods-emf gf)
      (let ((arg-info (gf-arg-info gf)))
        (setf (gf-info-static-c-a-m-emf arg-info) c-a-m-emf)
        (setf (gf-info-c-a-m-emf-std-p arg-info) std-p)))))
;;; Classify GF as a simple READER/WRITER/BOUNDP/MAKUNBOUND accessor in
;;; its arg-info, but only when all of its methods are of the same
;;; accessor method class, its c-a-m emf is standard, and it uses
;;; standard method combination; otherwise store NIL.
(defun update-gf-simple-accessor-type (gf)
  (let ((arg-info (gf-arg-info gf)))
    (setf (gf-info-simple-accessor-type arg-info)
          (let* ((methods (generic-function-methods gf))
                 (class (and methods (class-of (car methods))))
                 (type
                  (and class
                       (cond ((or (eq class *the-class-standard-reader-method*)
                                  (eq class *the-class-global-reader-method*))
                              'reader)
                             ((or (eq class *the-class-standard-writer-method*)
                                  (eq class *the-class-global-writer-method*))
                              'writer)
                             ((eq class *the-class-global-boundp-method*)
                              'boundp)
                             ((eq class *the-class-global-makunbound-method*)
                              'makunbound)))))
            (when (and (gf-info-c-a-m-emf-std-p arg-info)
                       type
                       (dolist (method (cdr methods) t)
                         (unless (eq class (class-of method)) (return nil)))
                       (eq (generic-function-method-combination gf)
                           *standard-method-combination*))
              type)))))
;;; CMUCL (Gerd's PCL, 2002-04-25) comment:
;;;
;;; Return two values.  First value is a function to be stored in
;;; effective slot definition SLOTD for reading it with
;;; SLOT-VALUE-USING-CLASS, setting it with (SETF
;;; SLOT-VALUE-USING-CLASS), testing it with SLOT-BOUNDP-USING-CLASS,
;;; or making it unbound with SLOT-MAKUNBOUND-USING-CLASS.  GF is one
;;; of these generic functions, TYPE is one of the symbols READER,
;;; WRITER, BOUNDP, MAKUNBOUND.  CLASS is SLOTD's class.
;;;
;;; Second value is true if the function returned is one of the
;;; optimized standard functions for the purpose, which are used
;;; when only standard methods are applicable.
;;;
;;; FIXME: Change all these wacky function names to something sane.
;;; Return an accessor function for SLOTD plus a flag saying whether it
;;; is the optimized standard one (true when only one method applies).
;;; TYPE is READER/WRITER/BOUNDP/MAKUNBOUND; for WRITER an extra T
;;; (new-value) position is prepended to the dispatch types/wrappers.
(defun get-accessor-method-function (gf type class slotd)
  (let* ((std-method (standard-svuc-method type))
         (str-method (structure-svuc-method type))
         (types1 `((eql ,class) (class-eq ,class) (eql ,slotd)))
         (types (if (eq type 'writer) `(t ,@types1) types1))
         (methods (compute-applicable-methods-using-types gf types))
         (std-p (null (cdr methods))))
    (values
     (if std-p
         (get-optimized-std-accessor-method-function class slotd type)
         (let* ((optimized-std-fun
                 (get-optimized-std-slot-value-using-class-method-function
                  class slotd type))
                ;; Substitute the optimized function for whichever of
                ;; the standard/structure methods is applicable.
                (method-alist
                 `((,(car (or (member std-method methods :test #'eq)
                              (member str-method methods :test #'eq)
                              (bug "error in ~S"
                                   'get-accessor-method-function)))
                    ,optimized-std-fun)))
                (wrappers
                 (let ((wrappers (list (wrapper-of class)
                                       (class-wrapper class)
                                       (wrapper-of slotd))))
                   (if (eq type 'writer)
                       (cons (class-wrapper *the-class-t*) wrappers)
                       wrappers)))
                (sdfun (get-secondary-dispatch-function
                        gf methods types method-alist wrappers)))
           (get-accessor-from-svuc-method-function class slotd sdfun type)))
     std-p)))
;;; used by OPTIMIZE-SLOT-VALUE-BY-CLASS-P (vector.lisp)
(defun update-slot-value-gf-info (gf type)
(unless *new-class*
(update-std-or-str-methods gf type))
(when (and (standard-svuc-method type) (structure-svuc-method type))
(flet ((update-accessor-info (class)
(when (class-finalized-p class)
(dolist (slotd (class-slots class))
(compute-slot-accessor-info slotd type gf)))))
(if *new-class*
(update-accessor-info *new-class*)
(map-all-classes #'update-accessor-info 'slot-object)))))
(define-load-time-global *standard-slot-value-using-class-method* nil)
(define-load-time-global *standard-setf-slot-value-using-class-method* nil)
(define-load-time-global *standard-slot-boundp-using-class-method* nil)
(define-load-time-global *standard-slot-makunbound-using-class-method* nil)
(define-load-time-global *condition-slot-value-using-class-method* nil)
(define-load-time-global *condition-setf-slot-value-using-class-method* nil)
(define-load-time-global *condition-slot-boundp-using-class-method* nil)
(define-load-time-global *condition-slot-makunbound-using-class-method* nil)
(define-load-time-global *structure-slot-value-using-class-method* nil)
(define-load-time-global *structure-setf-slot-value-using-class-method* nil)
(define-load-time-global *structure-slot-boundp-using-class-method* nil)
(define-load-time-global *structure-slot-makunbound-using-class-method* nil)
(defun standard-svuc-method (type)
(case type
(reader *standard-slot-value-using-class-method*)
(writer *standard-setf-slot-value-using-class-method*)
(boundp *standard-slot-boundp-using-class-method*)
(makunbound *standard-slot-makunbound-using-class-method*)))
(defun set-standard-svuc-method (type method)
(case type
(reader (setq *standard-slot-value-using-class-method* method))
(writer (setq *standard-setf-slot-value-using-class-method* method))
(boundp (setq *standard-slot-boundp-using-class-method* method))
(makunbound (setq *standard-slot-makunbound-using-class-method* method))))
(defun condition-svuc-method (type)
(case type
(reader *condition-slot-value-using-class-method*)
(writer *condition-setf-slot-value-using-class-method*)
(boundp *condition-slot-boundp-using-class-method*)
(makunbound *condition-slot-makunbound-using-class-method*)))
(defun set-condition-svuc-method (type method)
(case type
(reader (setq *condition-slot-value-using-class-method* method))
(writer (setq *condition-setf-slot-value-using-class-method* method))
(boundp (setq *condition-slot-boundp-using-class-method* method))
(makunbound (setq *condition-slot-makunbound-using-class-method* method))))
(defun structure-svuc-method (type)
(case type
(reader *structure-slot-value-using-class-method*)
(writer *structure-setf-slot-value-using-class-method*)
(boundp *structure-slot-boundp-using-class-method*)
(makunbound *standard-slot-makunbound-using-class-method*)))
(defun set-structure-svuc-method (type method)
(case type
(reader (setq *structure-slot-value-using-class-method* method))
(writer (setq *structure-setf-slot-value-using-class-method* method))
(boundp (setq *structure-slot-boundp-using-class-method* method))
(makunbound (setq *structure-slot-makunbound-using-class-method* method))))
(defun update-std-or-str-methods (gf type)
(dolist (method (generic-function-methods gf))
(let ((specls (method-specializers method)))
(when (and (or (not (eq type 'writer))
(eq (pop specls) *the-class-t*))
(every #'classp specls))
(cond ((and (eq (class-name (car specls)) 'std-class)
(eq (class-name (cadr specls)) 'standard-object)
(eq (class-name (caddr specls))
'standard-effective-slot-definition))
(set-standard-svuc-method type method))
((and (eq (class-name (car specls)) 'condition-class)
(eq (class-name (cadr specls)) 'condition)
(eq (class-name (caddr specls))
'condition-effective-slot-definition))
(set-condition-svuc-method type method))
((and (eq (class-name (car specls)) 'structure-class)
(eq (class-name (cadr specls)) 'structure-object)
(eq (class-name (caddr specls))
'structure-effective-slot-definition))
(set-structure-svuc-method type method)))))))
(defun mec-all-classes-internal (spec precompute-p)
(let ((wrapper (class-wrapper (specializer-class spec))))
(unless (or (not wrapper) (invalid-wrapper-p wrapper))
(cons (specializer-class spec)
(and (classp spec)
precompute-p
(not (or (eq spec *the-class-t*)
(eq spec *the-class-slot-object*)
(eq spec *the-class-standard-object*)
(eq spec *the-class-structure-object*)))
(let ((sc (class-direct-subclasses spec)))
(when sc
(mapcan (lambda (class)
(mec-all-classes-internal class precompute-p))
sc))))))))
(defun mec-all-classes (spec precompute-p)
(let ((classes (mec-all-classes-internal spec precompute-p)))
(if (null (cdr classes))
classes
(let* ((a-classes (cons nil classes))
(tail classes))
(loop (when (null (cdr tail))
(return (cdr a-classes)))
(let ((class (cadr tail))
(ttail (cddr tail)))
(if (dolist (c ttail nil)
(when (eq class c) (return t)))
(setf (cdr tail) (cddr tail))
(setf tail (cdr tail)))))))))
(defun mec-all-class-lists (spec-list precompute-p)
(if (null spec-list)
(list nil)
(let* ((car-all-classes (mec-all-classes (car spec-list)
precompute-p))
(all-class-lists (mec-all-class-lists (cdr spec-list)
precompute-p)))
(mapcan (lambda (list)
(mapcar (lambda (c) (cons c list)) car-all-classes))
all-class-lists))))
(defun make-emf-cache (generic-function valuep cache classes-list new-class)
(let* ((arg-info (gf-arg-info generic-function))
(nkeys (arg-info-nkeys arg-info))
(metatypes (arg-info-metatypes arg-info))
(wrappers (unless (eq nkeys 1) (make-list nkeys)))
(precompute-p (gf-precompute-dfun-and-emf-p arg-info)))
(flet ((add-class-list (classes)
(when (or (null new-class) (memq new-class classes))
(let ((%wrappers (get-wrappers-from-classes
nkeys wrappers classes metatypes)))
(when (and %wrappers (not (probe-cache cache %wrappers)))
(let ((value (cond ((eq valuep t)
(sdfun-for-caching generic-function
classes))
((eq valuep :constant-value)
(value-for-caching generic-function
classes)))))
;; need to get them again, as finalization might
;; have happened in between, which would
;; invalidate wrappers.
(let ((wrappers (get-wrappers-from-classes
nkeys wrappers classes metatypes)))
(when (if (atom wrappers)
(not (invalid-wrapper-p wrappers))
(every (complement #'invalid-wrapper-p)
wrappers))
(setq cache (fill-cache cache wrappers value))))))))))
(if classes-list
(mapc #'add-class-list classes-list)
(dolist (method (generic-function-methods generic-function))
(mapc #'add-class-list
(mec-all-class-lists (method-specializers method)
precompute-p))))
cache)))
(defmacro class-test (arg class)
(cond
((eq class *the-class-t*) t)
((eq class *the-class-standard-object*)
`(or (std-instance-p ,arg) (fsc-instance-p ,arg)))
((eq class *the-class-funcallable-standard-object*)
`(fsc-instance-p ,arg))
;; This is going to be cached (in *fgens*),
;; and structure type tests do not check for invalid layout.
;; Cache the wrapper itself, which is going to be different after
;; redifinition.
((structure-class-p class)
`(sb-c::%instance-typep ,arg ,(class-wrapper class)))
(t
`(typep ,arg ',(class-name class)))))
(defmacro class-eq-test (arg class)
`(eq (class-of ,arg) ',class))
(defun dnet-methods-p (form)
(and (consp form)
(or (eq (car form) 'methods)
(eq (car form) 'unordered-methods))))
This is CASE , but without .
(defmacro scase (arg &rest clauses)
`(let ((.case-arg. ,arg))
(cond ,@(mapcar (lambda (clause)
(list* (cond ((null (car clause))
nil)
((consp (car clause))
(if (null (cdar clause))
`(eql .case-arg.
',(caar clause))
`(member .case-arg.
',(car clause))))
((member (car clause) '(t otherwise))
`t)
(t
`(eql .case-arg. ',(car clause))))
nil
(cdr clause)))
clauses))))
(defmacro mcase (arg &rest clauses) `(scase ,arg ,@clauses))
(defun generate-discrimination-net (generic-function methods types sorted-p)
(let* ((arg-info (gf-arg-info generic-function))
(c-a-m-emf-std-p (gf-info-c-a-m-emf-std-p arg-info))
(precedence (arg-info-precedence arg-info)))
(generate-discrimination-net-internal
generic-function methods types
(lambda (methods known-types)
(if (or sorted-p
(and c-a-m-emf-std-p
(block one-order-p
(let ((sorted-methods nil))
(map-all-orders
(copy-list methods) precedence
(lambda (methods)
(when sorted-methods (return-from one-order-p nil))
(setq sorted-methods methods)))
(setq methods sorted-methods))
t)))
`(methods ,methods ,known-types)
`(unordered-methods ,methods ,known-types)))
(lambda (position type true-value false-value)
(let ((arg (dfun-arg-symbol position)))
(if (eq (car type) 'eql)
(let* ((false-case-p (and (consp false-value)
(or (eq (car false-value) 'scase)
(eq (car false-value) 'mcase))
(eq arg (cadr false-value))))
(false-clauses (if false-case-p
(cddr false-value)
`((t ,false-value))))
(case-sym (if (and (dnet-methods-p true-value)
(if false-case-p
(eq (car false-value) 'mcase)
(dnet-methods-p false-value)))
'mcase
'scase))
(type-sym `(,(cadr type))))
`(,case-sym ,arg
(,type-sym ,true-value)
,@false-clauses))
`(if ,(let ((arg (dfun-arg-symbol position)))
(case (car type)
(class `(class-test ,arg ,(cadr type)))
(class-eq `(class-eq-test ,arg ,(cadr type)))))
,true-value
,false-value))))
#'identity)))
(defun class-from-type (type)
(if (or (atom type) (eq (car type) t))
*the-class-t*
(case (car type)
(and (dolist (type (cdr type) *the-class-t*)
(when (and (consp type) (not (eq (car type) 'not)))
(return (class-from-type type)))))
(not *the-class-t*)
(eql (class-of (cadr type)))
(class-eq (cadr type))
(class (cadr type)))))
;;; We know that known-type implies neither new-type nor `(not ,new-type).
(defun augment-type (new-type known-type)
(if (or (eq known-type t)
(eq (car new-type) 'eql))
new-type
(let ((so-far (if (and (consp known-type) (eq (car known-type) 'and))
(cdr known-type)
(list known-type))))
(unless (eq (car new-type) 'not)
(setq so-far
(mapcan (lambda (type)
(unless (*subtypep new-type type)
(list type)))
so-far)))
(if (null so-far)
new-type
`(and ,new-type ,@so-far)))))
(defun generate-discrimination-net-internal
(gf methods types methods-function test-fun type-function)
(let* ((arg-info (gf-arg-info gf))
(precedence (arg-info-precedence arg-info))
(nreq (arg-info-number-required arg-info))
(metatypes (arg-info-metatypes arg-info)))
(labels ((do-column (p-tail contenders known-types)
(if p-tail
(let* ((position (car p-tail))
(known-type (or (nth position types) t)))
(if (eq (nth position metatypes) t)
(do-column (cdr p-tail) contenders
(cons (cons position known-type)
known-types))
(do-methods p-tail contenders
known-type () known-types)))
(funcall methods-function contenders
(let ((k-t (make-list nreq)))
(dolist (index+type known-types)
(setf (nth (car index+type) k-t)
(cdr index+type)))
k-t))))
(do-methods (p-tail contenders known-type winners known-types)
;; CONTENDERS
;; is a (sorted) list of methods that must be discriminated.
;; KNOWN-TYPE
;; is the type of this argument, constructed from tests
;; already made.
;; WINNERS
;; is a (sorted) list of methods that are potentially
;; applicable after the discrimination has been made.
(if (null contenders)
(do-column (cdr p-tail)
winners
(cons (cons (car p-tail) known-type)
known-types))
(let* ((position (car p-tail))
(method (car contenders))
(specl (nth position (method-specializers method)))
(type (funcall type-function
(type-from-specializer specl))))
(multiple-value-bind (app-p maybe-app-p)
(specializer-applicable-using-type-p type known-type)
(flet ((determined-to-be (truth-value)
(if truth-value app-p (not maybe-app-p)))
(do-if (truth &optional implied)
(let ((ntype (if truth type `(not ,type))))
(do-methods p-tail
(cdr contenders)
(if implied
known-type
(augment-type ntype known-type))
(if truth
(append winners `(,method))
winners)
known-types))))
(cond ((determined-to-be nil) (do-if nil t))
((determined-to-be t) (do-if t t))
(t (funcall test-fun position type
(do-if t) (do-if nil))))))))))
(do-column precedence methods ()))))
(defun compute-secondary-dispatch-function (generic-function net &optional
method-alist wrappers)
(funcall (the function (compute-secondary-dispatch-function1 generic-function net))
method-alist wrappers))
(defvar *eq-case-table-limit* 15)
(defvar *case-table-limit* 10)
(defun compute-mcase-parameters (case-list)
(unless (eq t (caar (last case-list)))
(error "The key for the last case arg to mcase was not T"))
(let* ((eq-p (dolist (case case-list t)
(unless (or (eq (car case) t)
(symbolp (caar case)))
(return nil))))
(len (1- (length case-list)))
(type (cond ((= len 1)
:simple)
((<= len
(if eq-p
*eq-case-table-limit*
*case-table-limit*))
:assoc)
(t
:hash-table))))
(list eq-p type)))
(defmacro mlookup (key info default &optional eq-p type)
(unless (or (eq eq-p t) (null eq-p))
(bug "Invalid eq-p argument: ~S" eq-p))
(ecase type
(:simple
`(if (locally
(declare (optimize (inhibit-warnings 3)))
(,(if eq-p 'eq 'eql) ,key (car ,info)))
(cdr ,info)
,default))
(:assoc
`(dolist (e ,info ,default)
(when (locally
(declare (optimize (inhibit-warnings 3)))
(,(if eq-p 'eq 'eql) (car e) ,key))
(return (cdr e)))))
(:hash-table
`(gethash ,key ,info ,default))))
(defun net-test-converter (form)
(if (atom form)
(default-test-converter form)
(case (car form)
((invoke-effective-method-function invoke-fast-method-call
invoke-effective-narrow-method-function)
'.call.)
(methods
'.methods.)
(unordered-methods
'.umethods.)
(mcase
`(mlookup ,(cadr form)
nil
nil
,@(compute-mcase-parameters (cddr form))))
(t (default-test-converter form)))))
(defun net-code-converter (form)
(if (atom form)
(default-code-converter form)
(case (car form)
((methods unordered-methods)
(let ((gensym (gensym)))
(values gensym
(list gensym))))
(mcase
(let ((mp (compute-mcase-parameters (cddr form)))
(gensym (gensym)) (default (gensym)))
(values `(mlookup ,(cadr form) ,gensym ,default ,@mp)
(list gensym default))))
(t
(default-code-converter form)))))
(defun net-constant-converter (form generic-function)
(or (let ((c (methods-converter form generic-function)))
(when c (list c)))
(if (atom form)
(default-constant-converter form)
(case (car form)
(mcase
(let* ((mp (compute-mcase-parameters (cddr form)))
(list (mapcar (lambda (clause)
(let ((key (car clause))
(meth (cadr clause)))
(cons (if (consp key) (car key) key)
(methods-converter
meth generic-function))))
(cddr form)))
(default (car (last list))))
(list (list* :mcase mp (nbutlast list))
(cdr default))))
(t
(default-constant-converter form))))))
(defun methods-converter (form generic-function)
(cond ((and (consp form) (eq (car form) 'methods))
(cons '.methods.
(get-effective-method-function1 generic-function (cadr form))))
((and (consp form) (eq (car form) 'unordered-methods))
(default-secondary-dispatch-function generic-function))))
(defun convert-methods (constant method-alist wrappers)
(if (and (consp constant)
(eq (car constant) '.methods.))
(funcall (cdr constant) method-alist wrappers)
constant))
(defun convert-table (constant method-alist wrappers)
(cond ((and (consp constant)
(eq (car constant) :mcase))
(let ((alist (mapcar (lambda (k+m)
(cons (car k+m)
(convert-methods (cdr k+m)
method-alist
wrappers)))
(cddr constant)))
(mp (cadr constant)))
(ecase (cadr mp)
(:simple
(car alist))
(:assoc
alist)
(:hash-table
(let ((table (make-hash-table :test (if (car mp) 'eq 'eql))))
(dolist (k+m alist)
(setf (gethash (car k+m) table) (cdr k+m)))
table)))))))
(defun compute-secondary-dispatch-function1 (generic-function net
&optional function-p)
(cond
((and (eq (car net) 'methods) (not function-p))
(get-effective-method-function1 generic-function (cadr net)))
(t
(let* ((name (generic-function-name generic-function))
(arg-info (gf-arg-info generic-function))
(metatypes (arg-info-metatypes arg-info))
(nargs (length metatypes))
(applyp (arg-info-applyp arg-info))
(fmc-arg-info (cons nargs applyp))
(arglist (if function-p
(make-dfun-lambda-list nargs applyp)
(make-fast-method-call-lambda-list nargs applyp))))
(multiple-value-bind (cfunction constants)
We do n't want NAMED - LAMBDA for any expressions handed to FNGEN ,
;; because name mismatches will render the hashing ineffective.
(get-fun `(lambda ,arglist
(declare (optimize (sb-c::store-closure-debug-pointer 3)))
,@(unless function-p
`((declare (ignore .pv. .next-method-call.))))
(locally (declare #.*optimize-speed*)
(let ((emf ,net))
,(make-emf-call nargs applyp 'emf))))
#'net-test-converter
#'net-code-converter
(lambda (form)
(net-constant-converter form generic-function)))
(lambda (method-alist wrappers)
(let* ((alist (list nil))
(alist-tail alist))
(dolist (constant constants)
(let* ((a (or (dolist (a alist nil)
(when (eq (car a) constant)
(return a)))
(cons constant
(or (convert-table
constant method-alist wrappers)
(convert-methods
constant method-alist wrappers)))))
(new (list a)))
(setf (cdr alist-tail) new)
(setf alist-tail new)))
(let ((function (apply cfunction (mapcar #'cdr (cdr alist)))))
(if function-p
(set-fun-name function `(gf-dispatch ,name))
(make-fast-method-call
:function (set-fun-name function `(sdfun-method ,name))
:arg-info fmc-arg-info))))))))))
(defvar *show-make-unordered-methods-emf-calls* nil)
(defun make-unordered-methods-emf (generic-function methods)
(when *show-make-unordered-methods-emf-calls*
(format t "~&make-unordered-methods-emf ~S~%"
(generic-function-name generic-function)))
(lambda (&rest args)
(let* ((types (types-from-args generic-function args 'eql))
(smethods (sort-applicable-methods generic-function
methods
types))
(emf (get-effective-method-function generic-function smethods)))
(invoke-emf emf args))))
;;; The value returned by compute-discriminating-function is a function
;;; object. It is called a discriminating function because it is called
;;; when the generic function is called and its role is to discriminate
;;; on the arguments to the generic function and then call appropriate
;;; method functions.
;;;
;;; A discriminating function can only be called when it is installed as
;;; the funcallable instance function of the generic function for which
;;; it was computed.
;;;
;;; More precisely, if compute-discriminating-function is called with
;;; an argument <gf1>, and returns a result <df1>, that result must
;;; not be passed to apply or funcall directly. Rather, <df1> must be
;;; stored as the funcallable instance function of the same generic
;;; function <gf1> (using SET-FUNCALLABLE-INSTANCE-FUNCTION).  Then the
;;; generic function can be passed to funcall or apply.
;;;
;;; An important exception is that methods on this generic function are
;;; permitted to return a function which itself ends up calling the value
;;; returned by a more specific method. This kind of `encapsulation' of
;;; discriminating function is critical to many uses of the MOP.
;;;
;;; As an example, the following canonical case is legal:
;;;
;;; (defmethod compute-discriminating-function ((gf my-generic-function))
;;; (let ((std (call-next-method)))
;;; (lambda (arg)
;;; (print (list 'call-to-gf gf arg))
;;; (funcall std arg))))
;;;
;;; Because many discriminating functions would like to use a dynamic
;;; strategy in which the precise discriminating function changes with
;;; time it is important to specify how a discriminating function is
;;; permitted itself to change the funcallable instance function of the
;;; generic function.
;;;
;;; Discriminating functions may set the funcallable instance function
;;; of the generic function, but the new value must be generated by making
;;; a call to COMPUTE-DISCRIMINATING-FUNCTION. This is to ensure that any
;;; more specific methods which may have encapsulated the discriminating
;;; function will get a chance to encapsulate the new, inner discriminating
;;; function.
;;;
;;; This implies that if a discriminating function wants to modify itself
;;; it should first store some information in the generic function proper,
;;; and then call compute-discriminating-function. The appropriate method
;;; on compute-discriminating-function will see the information stored in
;;; the generic function and generate a discriminating function accordingly.
;;;
;;; The following is an example of a discriminating function which modifies
;;; itself in accordance with this protocol:
;;;
;;; (defmethod compute-discriminating-function ((gf my-generic-function))
;;; (lambda (arg)
;;; (cond (<some condition>
;;; <store some info in the generic function>
;;; (set-funcallable-instance-function
;;; gf
;;; (compute-discriminating-function gf))
;;; (funcall gf arg))
;;; (t
;;; <call-a-method-of-gf>))))
;;;
;;; Whereas this code would not be legal:
;;;
;;; (defmethod compute-discriminating-function ((gf my-generic-function))
;;; (lambda (arg)
;;; (cond (<some condition>
;;; (set-funcallable-instance-function
;;; gf
;;; (lambda (a) ..))
;;; (funcall gf arg))
;;; (t
;;; <call-a-method-of-gf>))))
;;;
;;; NOTE: All the examples above assume that all instances of the class
;;; my-generic-function accept only one argument.
(defun slot-value-using-class-dfun (class object slotd)
(declare (ignore class))
(funcall (slot-info-reader (slot-definition-info slotd)) object))
(defun setf-slot-value-using-class-dfun (new-value class object slotd)
(declare (ignore class))
(funcall (slot-info-writer (slot-definition-info slotd)) new-value object))
(defun slot-boundp-using-class-dfun (class object slotd)
(declare (ignore class))
(funcall (slot-info-boundp (slot-definition-info slotd)) object))
(defun slot-makunbound-using-class-dfun (class object slotd)
(declare (ignore class))
(funcall (slot-info-makunbound (slot-definition-info slotd)) object))
(defun special-case-for-compute-discriminating-function-p (gf)
(or (eq gf #'slot-value-using-class)
(eq gf #'(setf slot-value-using-class))
(eq gf #'slot-boundp-using-class)
(eq gf #'slot-makunbound-using-class)))
;;; this is the normal function for computing the discriminating
;;; function of a standard-generic-function
(let (initial-print-object-cache)
(defun standard-compute-discriminating-function (gf)
(declare (notinline slot-value))
(let ((dfun-state (slot-value gf 'dfun-state)))
(when (special-case-for-compute-discriminating-function-p gf)
;; if we have a special case for
COMPUTE - DISCRIMINATING - FUNCTION , then ( at least for the
special cases implemented as of 2006 - 05 - 09 ) any information
;; in the cache is misplaced.
(aver (null dfun-state)))
(typecase dfun-state
(null
(when (eq gf (load-time-value #'compute-applicable-methods t))
(update-all-c-a-m-gf-info gf))
(cond ((eq gf (load-time-value #'slot-value-using-class t))
(update-slot-value-gf-info gf 'reader)
#'slot-value-using-class-dfun)
((eq gf (load-time-value #'(setf slot-value-using-class) t))
(update-slot-value-gf-info gf 'writer)
#'setf-slot-value-using-class-dfun)
((eq gf (load-time-value #'slot-boundp-using-class t))
(update-slot-value-gf-info gf 'boundp)
#'slot-boundp-using-class-dfun)
((eq gf (load-time-value #'slot-makunbound-using-class t))
(update-slot-value-gf-info gf 'makunbound)
#'slot-makunbound-using-class-dfun)
: PRINT - OBJECT is not a special - case in the sense
;; of having a desperately special discriminating function.
;; However, it is important that the machinery for printing
;; conditions for stack and heap exhaustion, and the
;; restarts offered by the debugger, work without consuming
many extra resources . -- CSR , 2008 - 06 - 09
((eq gf (locally (declare (optimize (safety 0))) #'print-object))
(let ((nkeys (nth-value 3 (get-generic-fun-info gf))))
(cond ((/= nkeys 1)
: someone has defined a method
specialized on the second argument : punt .
(setf initial-print-object-cache nil)
(make-initial-dfun gf))
(initial-print-object-cache
(multiple-value-bind (dfun cache info)
(make-caching-dfun gf (copy-cache initial-print-object-cache))
(set-dfun gf dfun cache info)))
;; the relevant PRINT-OBJECT methods get defined
late , by delayed DEFMETHOD . We must n't cache
;; the effective method for our classes earlier
;; than the relevant PRINT-OBJECT methods are
;; defined...
((boundp '*!delayed-defmethod-args*)
(make-initial-dfun gf))
(t (multiple-value-bind (dfun cache info)
(make-final-dfun-internal
gf
(mapcar (lambda (x) (list (find-class x)))
'(sb-kernel::control-stack-exhausted
sb-kernel::binding-stack-exhausted
sb-kernel::alien-stack-exhausted
sb-kernel::heap-exhausted-error
restart)))
(setq initial-print-object-cache cache)
(set-dfun gf dfun (copy-cache cache) info))))))
((gf-precompute-dfun-and-emf-p (slot-value gf 'arg-info))
(make-final-dfun gf))
(t
(make-initial-dfun gf))))
(function dfun-state)
(cons (car dfun-state))))))
;;; in general we need to support SBCL's encapsulation for generic
;;; functions: the default implementation of encapsulation changes the
;;; identity of the function bound to a name, which breaks anything
;;; class-based, so we implement the encapsulation ourselves in the
;;; discriminating function.
(defun sb-impl::encapsulate-generic-function (gf type function)
(push (cons type function) (generic-function-encapsulations gf))
(reinitialize-instance gf))
(defun sb-impl::unencapsulate-generic-function (gf type)
(setf (generic-function-encapsulations gf)
(remove type (generic-function-encapsulations gf)
:key #'car :count 1))
(reinitialize-instance gf))
(defun sb-impl::encapsulated-generic-function-p (gf type)
(position type (generic-function-encapsulations gf) :key #'car))
(defun maybe-encapsulate-discriminating-function (gf encs std)
(if (null encs)
std
(let ((inner (maybe-encapsulate-discriminating-function
gf (cdr encs) std))
(function (cdar encs)))
(lambda (&rest args)
(apply function inner args)))))
(defmethod compute-discriminating-function ((gf standard-generic-function))
(standard-compute-discriminating-function gf))
(defmethod compute-discriminating-function :around ((gf standard-generic-function))
(maybe-encapsulate-discriminating-function
gf (generic-function-encapsulations gf) (call-next-method)))
(defmethod (setf class-name) (new-value class)
(let ((classoid (wrapper-classoid (class-wrapper class))))
(if (and new-value (symbolp new-value))
(setf (classoid-name classoid) new-value)
(setf (classoid-name classoid) nil)))
(reinitialize-instance class :name new-value)
new-value)
(defmethod (setf generic-function-name) (new-value generic-function)
(reinitialize-instance generic-function :name new-value)
new-value)
(defmethod function-keywords ((method standard-method))
(multiple-value-bind (llks nreq nopt keywords)
(analyze-lambda-list (if (consp method)
(early-method-lambda-list method)
(method-lambda-list method)))
(declare (ignore nreq nopt))
(values keywords (ll-kwds-allowp llks))))
;;; This is based on the rules of method lambda list congruency
;;; defined in the spec. The lambda list it constructs is the pretty
;;; union of the lambda lists of the generic function and of all its
;;; methods. It doesn't take method applicability into account; we
;;; also ignore non-public parts of the interface (e.g. &AUX, default
;;; and supplied-p parameters)
;;; The compiler uses this for type-checking that callers pass acceptable
;;; keywords, so don't make this do anything fancy like looking at effective
;;; methods without also fixing the compiler.
(defmethod generic-function-pretty-arglist ((gf standard-generic-function) &optional methods-in-compilation-unit)
(let ((gf-lambda-list (generic-function-lambda-list gf))
(methods (generic-function-methods gf)))
(flet ((lambda-list (m)
(or (and methods-in-compilation-unit
(gethash (cons (method-qualifiers m)
(unparse-specializers gf (method-specializers m)))
methods-in-compilation-unit))
(method-lambda-list m)))
(canonize (k)
(multiple-value-bind (kw var)
(parse-key-arg-spec k)
(if (and (eql (symbol-package kw) *keyword-package*)
(string= kw var))
var
(list (list kw var))))))
(multiple-value-bind (llks required optional rest keys)
(parse-lambda-list gf-lambda-list :silent t)
(if (or (ll-kwds-keyp llks)
(ll-kwds-restp llks))
(collect ((keys (mapcar #'canonize keys)))
;; Possibly extend the keyword parameters of the gf by
;; additional key parameters of its methods:
(flet ((process (lambda-list)
(binding* (((m.llks nil nil nil m.keys)
(parse-lambda-list lambda-list :silent t)))
(setq llks (logior llks m.llks))
(dolist (k m.keys)
(unless (member (parse-key-arg-spec k) (keys)
:key #'parse-key-arg-spec :test #'eq)
(keys (canonize k)))))))
(dolist (m methods)
(process (lambda-list m))))
(make-lambda-list llks nil required optional rest (keys)))
(make-lambda-list llks nil required optional))))))
(defun gf-merge-arglists (methods-in-compilation-unit)
(flet ((canonize (k)
(multiple-value-bind (kw var)
(parse-key-arg-spec k)
(if (and (eql (symbol-package kw) *keyword-package*)
(string= kw var))
var
(list (list kw var))))))
(with-hash-table-iterator (iterator methods-in-compilation-unit)
(multiple-value-bind (llks required optional rest keys)
(parse-lambda-list (nth-value 2 (iterator)) :silent t)
(if (or (ll-kwds-keyp llks)
(ll-kwds-restp llks))
(collect ((keys (mapcar #'canonize keys)))
;; Possibly extend the keyword parameters of the gf by
;; additional key parameters of its methods:
(flet ((process (lambda-list)
(binding* (((m.llks nil nil nil m.keys)
(parse-lambda-list lambda-list :silent t)))
(setq llks (logior llks m.llks))
(dolist (k m.keys)
(unless (member (parse-key-arg-spec k) (keys)
:key #'parse-key-arg-spec :test #'eq)
(keys (canonize k)))))))
(loop
(multiple-value-bind (more key value) (iterator)
(declare (ignore key))
(unless more
(return))
(process value)))
(make-lambda-list llks nil required optional rest (keys))))
(make-lambda-list llks nil required optional))))))
| null | https://raw.githubusercontent.com/sbcl/sbcl/63b95f9e7d9c7fbb02da834dc16edfe8eae24e6a/src/pcl/methods.lisp | lisp | more information.
Corporation. Copyright and release statements follow. Later modifications
to the software are in the public domain and are provided with
absolutely no warranty. See the COPYING and CREDITS files for more
information.
All rights reserved.
Use and copying of this software and preparation of derivative works based
upon this software are permitted. Any distribution of this software or
control laws.
warranty about the software, its performance or its conformity to any
specification.
methods
Methods themselves are simple inanimate objects. Most properties of
methods are immutable, methods cannot be reinitialized. The following
properties of methods can be changed:
METHOD-GENERIC-FUNCTION
initialization
Error checking is done in before methods. Because of the simplicity of
standard method objects the standard primary method can fill the slots.
Methods are not reinitializable.
NEW being a subclass of method is dealt with in the general
method of CHANGE-CLASS
specializing on classes which are "not defined", leaving
unclear what the definedness of a class is; AMOP suggests that
forward-referenced-classes, since they have proper names and
all, are at least worthy of some level of definition. We allow
methods specialized on forward-referenced-classes, but it's
non-portable and potentially dubious, so
~1{~S and ~S~}~:;~{~#[~;and ~]~S~^ , ~}~ ] ~
specializers~].~@ :> "
this extra paranoia and nothing else does; either everything
should be aggressively checking initargs, or nothing much should.
In either case, it would probably be better to have :type
declarations in slots, which would then give a suitable type
error (if we implement type-checking for slots...) rather than
this hand-crafted thing.
FIXME: Destructive modification of &REST list.
Since we internally bypass FIND-METHOD by using GET-METHOD
instead we need to do this here or users may get hit by a
was asked to ~
~:*qualifiers ~:S~] and specializers ~
ANSI about FIND-METHOD: "The specializers argument contains the
parameter specializers for the method. It must correspond in
length to the number of required arguments of the generic
function, or an error is signaled."
This error checking is done by REAL-GET-METHOD.
ANSI for FIND-METHOD seems to imply that in fact specializers
should always be passed in parsed form instead of being parsed
at this point. Since there's no ANSI-blessed way of getting an
part of the ANSI standard. Parsing introduces a tiny semantic problem in
That one must be supplied as a pre-parsed #<EQL-SPECIALIZER> because if
not, we'd parse it into a specializer whose object is :X.
Compute various information about a generic-function's arglist by looking
at the argument lists of the methods. The hair for trying not to use
&REST arguments lives here.
The values returned are:
number-of-required-arguments
the number of required arguments to this generic-function's
discriminating function
&rest-argument-p
whether or not this generic-function's discriminating
function takes an &rest argument.
specialized-argument-positions
a list of the positions of the arguments this generic-function
specializes (e.g. for a classical generic-function this is the
(declare (values number-of-required-arguments &rest-argument-p
specialized-argument-positions))
Go through this methods arguments seeing how many are required,
and whether there is an &rest argument.
Now go through this method's type specifiers to see which
argument positions are type specified. Treat T specially
in the usual sort of way. For efficiency don't bother to
keep specialized-argument-positions sorted, rather depend
on our caller to do that.
Finally merge the values for this method into the values
for the existing methods and return them. Note that if
and we depend on that.
FIXME: Because ARG-INFO is a STRUCTURE-OBJECT, it does not get
WAS: with-slots (arg-info) gf
comes from permutation vectors. Would be nicer to define
REAL-ADD-METHOD with a proper method so that we could efficiently
Optimization note: REAL-ADD-METHOD has a lot of O(N) stuff in it (as
many of the things we now keep in lists as either purely functional
O(log N) sets, or --if we don't mind the memory cost-- using
specialized hash-tables: most things are used to answer questions about
set-membership, not ordering.
Is there any reason not to do this as soon as possible?
While doing it with every ADD/REMOVE-METHOD call could result in
wasted work, it seems like unnecessary complexity.
I think it's just to get through bootstrap, probably,
but if it's a semantics thing, it deserves some explanation.
tautological ?
If the GF has &REST without &KEY then we don't augment
which seem not to be accepted.
It would be nice if globaldb were transactional,
so that either both updates or neither occur.
The defaulting expression for (:FUNCTION :TYPE) does not store
the default. For :GENERIC-FUNCTION that is not FBOUNDP we also
don't, however this branch should never be reached because the
info only stores :GENERIC-FUNCTION when methods are loaded.
it can't be added to another generic ~
System lock because interrupts need to be disabled as
well: it would be bad to unwind and leave the gf in an
inconsistent state.
If there is already a method like this one then we must get
rid of it before proceeding. Note that we call the generic
function REMOVE-METHOD to remove it rather than doing it in
some internal way.
the optimizing function here: it precomputes the effective
method, assuming that there is no dispatch to be done on
the new-value argument.
detecting attempts to add methods with incongruent lambda
it also depends on the new method already having been added
to the generic function. Therefore, we need to remove it
again on error:
KLUDGE II: ANSI saith that it is not an error to add a
method with invalid qualifiers to a generic function of the
wrong kind; it's only an error at generic function
invocation time; I dunno what the rationale was, and it
sucks. Nevertheless, it's probably a programmer error, so
System lock because interrupts need to be disabled as well:
it would be bad to unwind and leave the gf in an inconsistent
state.
superclass class
metaclass built-in-class
direct subclasses of t, but not array
or symbol
direct subclasses of t, but not
sequence
direct subclasses of number
direct subclasses of rational
direct subclasses of sequence
direct subclasses of list
direct subclasses of vector
FIXME: This method is wacky, and indicative of a coding style in which
metaphorically the left hand does not know what the right is doing.
If you want this to be the abstract comparator, and you "don't know"
But if you *do* know that they're interned, then why does this method
exist at all? The method on SPECIALIZER works fine.
A bit of deception to confuse the enemy?
of these generic functions, TYPE is one of the symbols READER,
optimized standard functions for the purpose, which are used
when only standard methods are applicable.
FIXME: Change all these wacky function names to something sane.
used by OPTIMIZE-SLOT-VALUE-BY-CLASS-P (vector.lisp)
need to get them again, as finalization might
have happened in between, which would
invalidate wrappers.
This is going to be cached (in *fgens*),
and structure type tests do not check for invalid layout.
Cache the wrapper itself, which is going to be different after
redefinition.
We know that known-type implies neither new-type nor `(not ,new-type).
CONTENDERS
is a (sorted) list of methods that must be discriminated.
KNOWN-TYPE
is the type of this argument, constructed from tests
already made.
WINNERS
is a (sorted) list of methods that are potentially
applicable after the discrimination has been made.
because name mismatches will render the hashing ineffective.
The value returned by compute-discriminating-function is a function
object. It is called a discriminating function because it is called
when the generic function is called and its role is to discriminate
on the arguments to the generic function and then call appropriate
method functions.
A discriminating function can only be called when it is installed as
the funcallable instance function of the generic function for which
it was computed.
More precisely, if compute-discriminating-function is called with
an argument <gf1>, and returns a result <df1>, that result must
not be passed to apply or funcall directly. Rather, <df1> must be
stored as the funcallable instance function of the same generic
generic function can be passed to funcall or apply.
An important exception is that methods on this generic function are
permitted to return a function which itself ends up calling the value
returned by a more specific method. This kind of `encapsulation' of
As an example, the following canonical case is legal:
(defmethod compute-discriminating-function ((gf my-generic-function))
(let ((std (call-next-method)))
(lambda (arg)
(print (list 'call-to-gf gf arg))
(funcall std arg))))
Because many discriminating functions would like to use a dynamic
strategy in which the precise discriminating function changes with
time it is important to specify how a discriminating function is
permitted itself to change the funcallable instance function of the
generic function.
Discriminating functions may set the funcallable instance function
of the generic function, but the new value must be generated by making
a call to COMPUTE-DISCRIMINATING-FUNCTION. This is to ensure that any
more specific methods which may have encapsulated the discriminating
function will get a chance to encapsulate the new, inner discriminating
function.
This implies that if a discriminating function wants to modify itself
and then call compute-discriminating-function. The appropriate method
on compute-discriminating-function will see the information stored in
the generic function and generate a discriminating function accordingly.
The following is an example of a discriminating function which modifies
itself in accordance with this protocol:
(defmethod compute-discriminating-function ((gf my-generic-function))
(lambda (arg)
(cond (<some condition>
<store some info in the generic function>
(set-funcallable-instance-function
gf
(compute-discriminating-function gf))
(funcall gf arg))
(t
<call-a-method-of-gf>))))
Whereas this code would not be legal:
(defmethod compute-discriminating-function ((gf my-generic-function))
(lambda (arg)
(cond (<some condition>
(set-funcallable-instance-function
gf
(lambda (a) ..))
(funcall gf arg))
(t
<call-a-method-of-gf>))))
NOTE: All the examples above assume that all instances of the class
this is the normal function for computing the discriminating
function of a standard-generic-function
if we have a special case for
in the cache is misplaced.
of having a desperately special discriminating function.
However, it is important that the machinery for printing
conditions for stack and heap exhaustion, and the
restarts offered by the debugger, work without consuming
the relevant PRINT-OBJECT methods get defined
the effective method for our classes earlier
than the relevant PRINT-OBJECT methods are
defined...
functions: the default implementation of encapsulation changes the
identity of the function bound to a name, which breaks anything
class-based, so we implement the encapsulation ourselves in the
discriminating function.
This is based on the rules of method lambda list congruency
defined in the spec. The lambda list it constructs is the pretty
union of the lambda lists of the generic function and of all its
methods. It doesn't take method applicability into account; we
also ignore non-public parts of the interface (e.g. &AUX, default
and supplied-p parameters)
The compiler uses this for type-checking that callers pass acceptable
keywords, so don't make this do anything fancy like looking at effective
methods without also fixing the compiler.
Possibly extend the keyword parameters of the gf by
additional key parameters of its methods:
Possibly extend the keyword parameters of the gf by
additional key parameters of its methods: | This software is part of the SBCL system . See the README file for
This software is derived from software originally released by Xerox
copyright information from original PCL sources :
Copyright ( c ) 1985 , 1986 , 1987 , 1988 , 1989 , 1990 Xerox Corporation .
derivative works must comply with all applicable United States export
This software is made available AS IS , and Xerox Corporation makes no
(in-package "SB-PCL")
;;; Condition signaled when user code tries to (re)initialize or
;;; CHANGE-CLASS metaobjects in ways AMOP forbids.
(define-condition metaobject-initialization-violation
    (reference-condition simple-error)
  ())

;;; Signal a METAOBJECT-INITIALIZATION-VIOLATION for an attempt to
;;; CHANGE-CLASS an object into a metaobject of class TO-NAME.
;;; FROM-NAME, if supplied, names the class being converted from;
;;; REFERENCES lists documentation references for the error report.
(defun change-class-to-metaobject-violation (to-name
                                             &optional from-name references)
  (error 'metaobject-initialization-violation
         :format-control "~@<Cannot ~S~@[ ~S~] objects into ~S metaobjects.~@:>"
         :format-arguments (list 'change-class from-name to-name)
         :references references))
;;; Per AMOP, these operations are not meaningful for method objects;
;;; define methods that signal METAOBJECT-INITIALIZATION-VIOLATION.
;;; CONTROL is the human-readable part of the error message.
(macrolet ((def (name args control)
             `(defmethod ,name ,args
                (declare (ignore initargs))
                (error 'metaobject-initialization-violation
                       :format-control ,(format nil "~~@<~A~~@:>" control)
                       :format-arguments (list ',name)
                       :references '((:amop :initialization method))))))
  (def reinitialize-instance ((method method) &rest initargs)
    "Method objects cannot be redefined by ~S.")
  (def change-class ((method method) new &rest initargs)
    "Method objects cannot be redefined by ~S.")
  (def update-instance-for-redefined-class ((method method) added discarded
                                            plist &rest initargs)
    "No behaviour specified for ~S on method objects.")
  (def update-instance-for-different-class (old (new method) &rest initargs)
    "No behaviour specified for ~S on method objects.")
  (def update-instance-for-different-class ((old method) new &rest initargs)
    "No behaviour specified for ~S on method objects."))
;;; Error signaled when a method object is initialized with a bad
;;; initarg value; the report names the offending method and then
;;; formats the nested format-control/arguments.
(define-condition invalid-method-initarg (simple-program-error)
  ((method :initarg :method :reader invalid-method-initarg-method))
  (:report
   (lambda (c s)
     (format s "~@<In initialization of ~S:~2I~_~?~@:>"
             (invalid-method-initarg-method c)
             (simple-condition-format-control c)
             (simple-condition-format-arguments c)))))

;;; Convenience wrapper for signaling INVALID-METHOD-INITARG.
(defun invalid-method-initarg (method format-control &rest args)
  (error 'invalid-method-initarg :method method
         :format-control format-control :format-arguments args))
;;; Reject any :DOCUMENTATION initarg that is neither NIL nor a string.
(defun check-documentation (method doc)
  (when (and doc (not (stringp doc)))
    (invalid-method-initarg method "~@<~S of ~S is neither ~S nor a ~S.~@:>"
                            :documentation doc 'null 'string)))
;;; Deliberately a no-op: lambda-list validation for methods happens
;;; elsewhere (at method-creation time), not via this initarg check.
(defun check-lambda-list (method ll)
  (declare (ignore method ll))
  nil)
;;; Signal an INVALID-METHOD-INITARG error unless FUN is a function.
(defun check-method-function (method fun)
  (when (not (functionp fun))
    (invalid-method-initarg method "~@<~S of ~S is not a ~S.~@:>"
                            :function fun 'function)))
;;; Initarg checkers for method objects.  DOLIST-CAREFULLY iterates
;;; like DOLIST but calls IMPROPER-LIST-HANDLER instead of crashing
;;; when the list turns out to be improper (dotted).
(macrolet ((dolist-carefully ((var list improper-list-handler) &body body)
             `(let ((,var nil)
                    (.dolist-carefully. ,list))
                (loop (when (null .dolist-carefully.) (return nil))
                      (if (consp .dolist-carefully.)
                          (progn
                            (setq ,var (pop .dolist-carefully.))
                            ,@body)
                          (,improper-list-handler))))))

  ;; Each qualifier must be a non-NIL atom, and the qualifier list
  ;; itself must be a proper list.
  (defun check-qualifiers (method qualifiers)
    (flet ((improper-list ()
             (invalid-method-initarg method
                                     "~@<~S of ~S is an improper list.~@:>"
                                     :qualifiers qualifiers)))
      (dolist-carefully (q qualifiers improper-list)
        (unless (and q (atom q))
          (invalid-method-initarg method
                                  "~@<~S, in ~S ~S, is not a non-~S atom.~@:>"
                                  q :qualifiers qualifiers 'null)))))

  ;; A :SLOT-NAME initarg must be a symbol.
  ;; BUG FIX: previously METHOD was declared ignored and omitted from
  ;; the INVALID-METHOD-INITARG call, so the format string was passed
  ;; where the method object was expected, garbling the error report.
  (defun check-slot-name (method name)
    (unless (symbolp name)
      (invalid-method-initarg method "~@<~S of ~S is not a ~S.~@:>"
                              :slot-name name 'symbol)))

  ;; Each specializer must satisfy SPECIALIZERP, and the list must be
  ;; proper.
  (defun check-specializers (method specializers)
    (flet ((improper-list ()
             (invalid-method-initarg method
                                     "~@<~S of ~S is an improper list.~@:>"
                                     :specializers specializers)))
      (dolist-carefully (s specializers improper-list)
        (unless (specializerp s)
          (invalid-method-initarg method
                                  "~@<~S, in ~S ~S, is not a ~S.~@:>"
                                  s :specializers specializers 'specializer)))
      ;; KLUDGE: ANSI says that it's not valid to have methods
      ;; specializing on classes which are "not defined", leaving
      ;; unclear what the definedness of a class is; AMOP suggests that
      ;; forward-referenced-classes, since they have proper names and
      ;; all, are at least worthy of some level of definition.  We allow
      ;; methods specialized on forward-referenced-classes, but it's
      ;; non-portable and potentially dubious, so we style-warn.
      ;; NOTE(review): the format string below was reconstructed from
      ;; fragments after the source text was mangled; verify the
      ;; rendered message against upstream.
      (let ((frcs (remove-if-not #'forward-referenced-class-p specializers)))
        (unless (null frcs)
          (style-warn "~@<Defining a method using ~
                       ~V[~;~1{~S~}~;~1{~S and ~S~}~:;~{~#[~;and ~]~S~^, ~}~] ~
                       as ~2:*~V[~;a specializer~:;specializers~].~@:>"
                      (length frcs) frcs)))))
  ) ; end MACROLET
;;; Validate a standard method's initargs before its slots are filled.
(defmethod shared-initialize :before
    ((method standard-method) slot-names &key
     qualifiers lambda-list specializers function documentation)
  (declare (ignore slot-names))
  ;; FIXME: it's not clear to me (CSR, 2006-08-09) why methods get
  ;; this extra paranoia and nothing else does; either everything
  ;; should be aggressively checking initargs, or nothing much should.
  ;; In either case, it would probably be better to have :type
  ;; declarations in slots, which would then give a suitable type
  ;; error (if we implement type-checking for slots...) rather than
  ;; this hand-crafted thing.
  (check-qualifiers method qualifiers)
  (check-lambda-list method lambda-list)
  (check-specializers method specializers)
  (check-method-function method function)
  (check-documentation method documentation))
;;; Accessor methods get their slot name checked unless an explicit
;;; SLOT-DEFINITION was provided (which already carries the name).
(defmethod shared-initialize :before
    ((method standard-accessor-method) slot-names &key
     slot-name slot-definition)
  (declare (ignore slot-names))
  (unless slot-definition
    (check-slot-name method slot-name)))
;;; After normal initialization, install the method's function,
;;; passing the full initarg list so extra info can be extracted.
(defmethod shared-initialize :after ((method standard-method) slot-names
                                     &rest initargs &key ((method-cell method-cell)))
  (declare (ignore slot-names method-cell))
  (initialize-method-function initargs method))
;;; Cached class metaobject for STANDARD-GENERIC-FUNCTION.
(define-load-time-global *the-class-standard-generic-function*
  (find-class 'standard-generic-function))
;;; Validate the :METHOD-CLASS and :METHOD-COMBINATION initargs of a
;;; standard generic function before the slots are filled in.
(defmethod shared-initialize :before
    ((generic-function standard-generic-function)
     slot-names
     &key (lambda-list () lambda-list-p)
          argument-precedence-order
          declarations
          documentation
          (method-class nil method-class-supplied-p)
          (method-combination nil method-combination-supplied-p))
  (declare (ignore slot-names
                   declarations argument-precedence-order documentation
                   lambda-list lambda-list-p))
  (flet ((initarg-error (initarg value string)
           (error "when initializing the generic function ~S:~%~
                   The ~S initialization argument was: ~A.~%~
                   It must be ~A."
                  generic-function initarg value string)))
    ;; :METHOD-CLASS may be given as a symbol naming a class; either
    ;; way it must be a subclass of METHOD.  If it is not supplied,
    ;; the slot must already be bound (e.g. from an earlier
    ;; initialization).
    (cond (method-class-supplied-p
           (when (symbolp method-class)
             (setq method-class (find-class method-class)))
           (unless (and (classp method-class)
                        (*subtypep (class-eq-specializer method-class)
                                   *the-class-method*))
             (initarg-error :method-class
                            method-class
                            "a subclass of the class METHOD"))
           (setf (slot-value generic-function 'method-class) method-class))
          ((slot-boundp generic-function 'method-class))
          (t
           (initarg-error :method-class
                          "not supplied"
                          "a subclass of the class METHOD")))
    ;; :METHOD-COMBINATION must be an actual method-combination
    ;; object (not a name), or the slot must already be bound.
    (cond (method-combination-supplied-p
           (unless (method-combination-p method-combination)
             (initarg-error :method-combination
                            method-combination
                            "a method combination object")))
          ((slot-boundp generic-function '%method-combination))
          (t
           (initarg-error :method-combination
                          "not supplied"
                          "a method combination object")))))
;;; Return the generic function named NAME.  When NAME is not fbound
;;; or is fbound to a non-generic function, signal an error if ERRORP
;;; is true, otherwise return NIL.
(defun find-generic-function (name &optional (errorp t))
  (let ((fun (and (fboundp name) (fdefinition name))))
    (if (typep fun 'generic-function) ; (TYPEP NIL ...) is NIL, so FUN=NIL falls through
        fun
        (if errorp
            (error "No generic function named ~S." name)
            nil))))
;;; Create a method from the given description and add it to the
;;; (possibly newly created) generic function named
;;; GENERIC-FUNCTION-NAME.  Returns the new method object.
(defun real-add-named-method (generic-function-name qualifiers
                              specializers lambda-list &rest other-initargs)
  (let* ((existing-gf (find-generic-function generic-function-name nil))
         (generic-function
          (if existing-gf
              (ensure-generic-function
               generic-function-name
               :generic-function-class (class-of existing-gf))
              (ensure-generic-function generic-function-name)))
         (proto (method-prototype-for-gf generic-function-name)))
    ;; Record the method spec under the 'PLIST initarg so the method
    ;; carries a debugging-friendly :NAME.
    (setf (getf (getf other-initargs 'plist) :name)
          (make-method-spec generic-function qualifiers specializers))
    (let ((new (apply #'make-instance (class-of proto)
                      :qualifiers qualifiers :specializers specializers
                      :lambda-list lambda-list other-initargs)))
      (add-method generic-function new)
      new)))
;;; Signaled by REAL-GET-METHOD when the number of supplied
;;; specializers does not match the gf's required-argument count.
(define-condition find-method-length-mismatch
    (reference-condition simple-error)
  ()
  (:default-initargs :references '((:ansi-cl :function find-method))))
;;; Internal workhorse for FIND-METHOD: return the method of
;;; GENERIC-FUNCTION matching QUALIFIERS and the (parsed)
;;; SPECIALIZERS, or NIL (ERRORP false) / an error (ERRORP true).
;;; Per ANSI FIND-METHOD, the specializers "must correspond in length
;;; to the number of required arguments of the generic function, or
;;; an error is signaled"; that check happens here, always when
;;; ALWAYS-CHECK-SPECIALIZERS, otherwise only when methods exist.
;;; NOTE(review): the two format strings below were reconstructed
;;; from fragments after the source text was mangled; verify the
;;; rendered messages against upstream.
(defun real-get-method (generic-function qualifiers specializers
                        &optional (errorp t)
                                  always-check-specializers)
  (sb-thread::with-recursive-system-lock ((gf-lock generic-function))
    (let ((specializer-count (length specializers))
          (methods (generic-function-methods generic-function)))
      (when (or methods always-check-specializers)
        (let ((required-parameter-count
               (length (arg-info-metatypes (gf-arg-info generic-function)))))
          ;; Since we internally bypass FIND-METHOD by using GET-METHOD
          ;; instead we need to do this here or users may get hit by a
          ;; failed AVER instead of a sensible error message.
          (unless (= specializer-count required-parameter-count)
            (error
             'find-method-length-mismatch
             :format-control "~@<The generic function ~S takes ~D ~
                              required argument~:P; was asked to ~
                              find a method with specializers ~:S~@:>"
             :format-arguments (list generic-function required-parameter-count
                                     (unparse-specializers generic-function specializers))))))
      (flet ((congruentp (other-method)
               (let ((other-specializers (method-specializers other-method)))
                 (aver (= specializer-count (length other-specializers)))
                 (and (equal qualifiers (safe-method-qualifiers other-method))
                      (every #'same-specializer-p specializers other-specializers)))))
        (declare (dynamic-extent #'congruentp))
        (cond ((find-if #'congruentp methods))
              ((null errorp) nil)
              (t
               (error "~@<There is no method on ~S with ~:[no ~
                       qualifiers~;~:*qualifiers ~:S~] and specializers ~
                       ~:S.~@:>"
                      generic-function qualifiers specializers)))))))
(defmethod find-method ((generic-function standard-generic-function)
                        qualifiers specializers &optional (errorp t))
  ;; ANSI for FIND-METHOD seems to imply that in fact specializers
  ;; should always be passed in parsed form instead of being parsed
  ;; at this point.  Since there's no ANSI-blessed way of getting an
  ;; EQL specializer, that seems unnecessarily painful, so we are
  ;; nice to our users. -- CSR, 2007-06-01
  ;; Note that INTERN-EQL-SPECIALIZER is exported from SB-MOP, but it's not
  ;; part of the ANSI standard.  Parsing introduces a tiny semantic problem in
  ;; the edge case of an EQL specializer whose object is literally (EQL :X).
  ;; That one must be supplied as a pre-parsed #<EQL-SPECIALIZER> because if
  ;; not, we'd parse it into a specializer whose object is :X.
  (real-get-method
   generic-function qualifiers
   (parse-specializers generic-function specializers) errorp t))
list : ( 1 ) ) .
(defmethod compute-discriminating-function-arglist-info
((generic-function standard-generic-function))
(let ((number-required nil)
(restp nil)
(specialized-positions ())
(methods (generic-function-methods generic-function)))
(dolist (method methods)
(multiple-value-setq (number-required restp specialized-positions)
(compute-discriminating-function-arglist-info-internal
generic-function method number-required restp specialized-positions)))
(values number-required restp (sort specialized-positions #'<))))
;;; Merge one METHOD's lambda-list information into the running
;;; values (NUMBER-OF-REQUIREDS RESTP SPECIALIZED-ARGUMENT-POSITIONS).
(defun compute-discriminating-function-arglist-info-internal
    (generic-function method number-of-requireds restp
     specialized-argument-positions)
  (declare (ignore generic-function)
           (type (or null fixnum) number-of-requireds))
  (let ((requireds 0))
    (declare (fixnum requireds))
    ;; Go through this method's arguments seeing how many are
    ;; required, and whether there is an &REST argument.
    (dolist (arg (method-lambda-list method))
      (cond ((eq arg '&aux) (return))
            ((memq arg '(&optional &rest &key))
             (return (setq restp t)))
            ((memq arg lambda-list-keywords))
            (t (incf requireds))))
    ;; Now go through this method's type specifiers to see which
    ;; argument positions are type specified.  Treat T specially;
    ;; don't bother keeping the positions sorted (the caller sorts).
    (let ((pos 0))
      (dolist (type-spec (method-specializers method))
        (unless (eq type-spec *the-class-t*)
          (pushnew pos specialized-argument-positions :test #'eq))
        (incf pos)))
    ;; Finally merge the values for this method into the values for
    ;; the existing methods and return them.  Note that if
    ;; num-of-requireds is NIL it means this is the first method,
    ;; and we depend on that.
    (values (min (or number-of-requireds requireds) requireds)
            (or restp
                (and number-of-requireds (/= number-of-requireds requireds)))
            specialized-argument-positions)))
;;; Map the stored precedence positions back to parameter names from
;;; the gf's lambda list.
(defmethod generic-function-argument-precedence-order
    ((gf standard-generic-function))
  (aver (eq **boot-state** 'complete))
  (let* ((arg-info (gf-arg-info gf))
         (lambda-list (arg-info-lambda-list arg-info)))
    (mapcar (lambda (argument-position)
              (nth argument-position lambda-list))
            (arg-info-precedence arg-info))))
;;; The gf's lambda list, as stored by SET-ARG-INFO.
(defmethod generic-function-lambda-list ((gf generic-function))
  (gf-lambda-list gf))

;;; Whether this gf can use fast method functions.
(defmethod gf-fast-method-function-p ((gf standard-generic-function))
  (gf-info-fast-mf-p (slot-value gf 'arg-info)))

;;; Weak-hashset helpers, each serialized on the set's own mutex.
(defun add-to-weak-hashset (key set)
  (with-system-mutex ((hashset-mutex set))
    (hashset-insert set key)))
(defun remove-from-weak-hashset (key set)
  (with-system-mutex ((hashset-mutex set))
    (hashset-remove set key)))
(defun weak-hashset-memberp (key set)
  (with-system-mutex ((hashset-mutex set))
    (hashset-find set key)))
;;; Set up arg-info, register with the method combination, and build
;;; the initial discriminating function.
;;; NOTE(review): the (PROGN ; WAS: ...) wrapper line was lost when
;;; the source text was mangled (the trailing close-parens require
;;; it); restored here.
(defmethod initialize-instance :after ((gf standard-generic-function)
                                       &key (lambda-list nil lambda-list-p)
                                       argument-precedence-order)
  ;; FIXME: Because ARG-INFO is a STRUCTURE-OBJECT, it does not get
  ;; a permutation vector, and therefore the code that SLOT-VALUE transforms
  ;; to winds up punting to #'(SLOT-ACCESSOR :GLOBAL ARG-INFO READER).
  ;; Using SLOT-VALUE the "slow" way sidesteps some bootstrap issues.
  (declare (notinline slot-value))
  (progn ; WAS: with-slots (arg-info) gf
    (if lambda-list-p
        (set-arg-info gf
                      :lambda-list lambda-list
                      :argument-precedence-order argument-precedence-order)
        (set-arg-info gf))
    ;; Register this gf with its method combination so combination
    ;; redefinitions can find and update it.
    (let ((mc (generic-function-method-combination gf)))
      (add-to-weak-hashset gf (method-combination-%generic-functions mc)))
    (when (arg-info-valid-p (slot-value gf 'arg-info))
      (update-dfun gf))))
;;; On reinitialization, keep the method-combination back-pointers,
;;; arg-info, dfun, and dependents in sync with the new initargs.
(defmethod reinitialize-instance :around
    ((gf standard-generic-function) &rest args &key
     (lambda-list nil lambda-list-p) (argument-precedence-order nil apo-p))
  (let* ((old-mc (generic-function-method-combination gf))
         (mc (getf args :method-combination old-mc)))
    (unless (eq mc old-mc)
      (aver (weak-hashset-memberp gf (method-combination-%generic-functions old-mc)))
      (aver (not (weak-hashset-memberp gf (method-combination-%generic-functions mc)))))
    (prog1 (call-next-method)
      ;; If the combination changed, move the gf from the old
      ;; combination's client set to the new one's and drop any
      ;; cached effective methods.
      (unless (eq mc old-mc)
        (remove-from-weak-hashset gf (method-combination-%generic-functions old-mc))
        (add-to-weak-hashset gf (method-combination-%generic-functions mc))
        (flush-effective-method-cache gf))
      (sb-thread::with-recursive-system-lock ((gf-lock gf))
        (cond
          ((and lambda-list-p apo-p)
           (set-arg-info gf
                         :lambda-list lambda-list
                         :argument-precedence-order argument-precedence-order))
          (lambda-list-p (set-arg-info gf :lambda-list lambda-list))
          (t (set-arg-info gf)))
        (when (arg-info-valid-p (gf-arg-info gf))
          (update-dfun gf))
        (map-dependents gf (lambda (dependent)
                             (apply #'update-dependent gf dependent args)))))))
;;; Replace GF's method set with METHODS, adding them one at a time.
;;; Each REAL-ADD-METHOD call is passed the still-pending methods.
(defun set-methods (gf methods)
  (setf (generic-function-methods gf) nil)
  (do ()
      ((null methods) gf)
    (real-add-method gf (pop methods) methods)))
;;; Signaled when a method added to (SETF SLOT-VALUE-USING-CLASS)
;;; specializes its NEW-VALUE argument, which is disallowed (see
;;; REAL-ADD-METHOD).
(define-condition new-value-specialization (reference-condition error)
  ((%method :initarg :method :reader new-value-specialization-method))
  (:report
   (lambda (c s)
     (format s "~@<Cannot add method ~S to ~S, as it specializes the ~
                new-value argument.~@:>"
             (new-value-specialization-method c)
             #'(setf slot-value-using-class))))
  (:default-initargs :references
   (list '(:sbcl :node "Metaobject Protocol")
         '(:amop :generic-function (setf slot-value-using-class)))))
;;; Fetch, via a single dispatch, everything REAL-ADD-METHOD needs
;;; from the gf and the method.
(defgeneric values-for-add-method (gf method)
  (:method ((gf standard-generic-function) (method standard-method))
    ;; KLUDGE: Just a single generic dispatch, and everything else
    ;; comes from permutation vectors.  Would be nicer to define
    ;; REAL-ADD-METHOD with a proper method so that we could efficiently
    ;; use SLOT-VALUE there.
    ;;
    ;; Optimization note: REAL-ADD-METHOD has a lot of O(N) stuff in it (as
    ;; does PCL as a whole).  It should not be too hard to internally store
    ;; many of the things we now keep in lists as either purely functional
    ;; O(log N) sets, or --if we don't mind the memory cost-- using
    ;; specialized hash-tables: most things are used to answer questions about
    ;; set-membership, not ordering.
    (values (slot-value gf '%lock)
            (slot-value method 'qualifiers)
            (slot-value method 'specializers)
            (slot-value method 'lambda-list)
            (slot-value method '%generic-function)
            (slot-value gf 'name))))
;;; Style warning for PRINT-OBJECT methods that specialize the stream
;;; (second) argument; signaled from REAL-ADD-METHOD.
(define-condition print-object-stream-specializer (reference-condition simple-warning)
  ()
  (:default-initargs
   :references '((:ansi-cl :function print-object))
   :format-control "~@<Specializing on the second argument to ~S has ~
                    unportable effects, and also interferes with ~
                    precomputation of print functions for exceptional ~
                    situations.~@:>"
   :format-arguments (list 'print-object)))
;;; Mark GF's name as denoting a generic function in globaldb (unless
;;; the type was explicitly declared), deferring the real FTYPE
;;; derivation to COMPUTE-GF-FTYPE.
;;; NOTE(review): the trailing close-paren run looks one too long for
;;; the visible forms — the source text may have been mangled here;
;;; verify against upstream before relying on it.
(defun defer-ftype-computation (gf)
  (let ((name (generic-function-name gf)))
    (unless (eq (info :function :where-from name) :declared)
      (when (and (fboundp name) (eq (fdefinition name) gf))
        (setf (info :function :type name) :generic-function))))))
;;; Derive and record an FTYPE for the function named NAME, from the
;;; gf's lambda list and/or the methods seen in the current
;;; compilation unit.
(defun compute-gf-ftype (name)
  (let ((gf (and (fboundp name) (fdefinition name)))
        (methods-in-compilation-unit (and (boundp 'sb-c::*methods-in-compilation-unit*)
                                          sb-c::*methods-in-compilation-unit*
                                          (gethash name sb-c::*methods-in-compilation-unit*))))
    (cond ((generic-function-p gf)
           (let* ((ll (generic-function-lambda-list gf))
                  ;; If the GF has &REST without &KEY then we don't augment
                  ;; the FTYPE with keywords, so as not to complain about keywords
                  ;; which seem not to be accepted.
                  (type (sb-c::ftype-from-lambda-list
                         (if (and (member '&rest ll) (not (member '&key ll)))
                             ll
                             (generic-function-pretty-arglist gf methods-in-compilation-unit)))))
             (setf (info :function :where-from name) :defined-method
                   (info :function :type name) type)))
          (methods-in-compilation-unit
           (setf (info :function :where-from name) :defined-method
                 (info :function :type name)
                 (sb-c::ftype-from-lambda-list
                  (gf-merge-arglists methods-in-compilation-unit))))
          (t
           ;; This branch should never be reached, because the info
           ;; only stores :GENERIC-FUNCTION when methods are loaded.
           ;; Maybe AVER that it does not happen?
           (sb-c::ftype-from-definition name)))))
;;; Add METHOD to GENERIC-FUNCTION: validate ownership and
;;; qualifiers, replace any existing similar method, then update
;;; arg-info, dfuns, ctors, deferred ftype info and dependents.
;;; Returns GENERIC-FUNCTION.
;;; NOTE(review): the "already part of" error string below was
;;; reconstructed from fragments after the source text was mangled;
;;; verify the rendered message against upstream.
(defun real-add-method (generic-function method &optional skip-dfun-update-p)
  (flet ((similar-lambda-lists-p (old-method new-lambda-list)
           (binding* (((a-llks a-nreq a-nopt)
                       (analyze-lambda-list (method-lambda-list old-method)))
                      ((b-llks b-nreq b-nopt)
                       (analyze-lambda-list new-lambda-list)))
             (and (= a-nreq b-nreq)
                  (= a-nopt b-nopt)
                  (eq (ll-keyp-or-restp a-llks)
                      (ll-keyp-or-restp b-llks))))))
    (multiple-value-bind (lock qualifiers specializers new-lambda-list
                          method-gf name)
        (values-for-add-method generic-function method)
      (when method-gf
        (error "~@<The method ~S is already part of the generic ~
                function ~S; it can't be added to another generic ~
                function until it is removed from the first one.~@:>"
               method method-gf))
      (when (and (eq name 'print-object) (not (eq (second specializers) *the-class-t*)))
        (warn 'print-object-stream-specializer))
      (handler-case
          ;; System lock because interrupts need to be disabled as
          ;; well: it would be bad to unwind and leave the gf in an
          ;; inconsistent state.
          (sb-thread::with-recursive-system-lock (lock)
            (let ((existing (get-method generic-function
                                        qualifiers
                                        specializers
                                        nil)))
              ;; If there is already a method like this one then we must get
              ;; rid of it before proceeding.  Note that we call the generic
              ;; function REMOVE-METHOD to remove it rather than doing it in
              ;; some internal way.
              (when (and existing (similar-lambda-lists-p existing new-lambda-list))
                (remove-method generic-function existing))
              ;; KLUDGE: We have a special case here, as we disallow
              ;; specializations of the NEW-VALUE argument to (SETF
              ;; SLOT-VALUE-USING-CLASS).  GET-ACCESSOR-METHOD-FUNCTION is
              ;; the optimizing function here: it precomputes the effective
              ;; method, assuming that there is no dispatch to be done on
              ;; the new-value argument.
              (when (and (eq generic-function #'(setf slot-value-using-class))
                         (not (eq *the-class-t* (first specializers))))
                (error 'new-value-specialization :method method))
              (setf (method-generic-function method) generic-function)
              (pushnew method (generic-function-methods generic-function) :test #'eq)
              (dolist (specializer specializers)
                (add-direct-method specializer method))
              ;; KLUDGE: SET-ARG-INFO contains the error-detecting logic for
              ;; detecting attempts to add methods with incongruent lambda
              ;; lists.  However, it also depends on the new method already
              ;; having been added to the generic function.  Therefore, we
              ;; need to remove it again on error:
              (let ((remove-again-p t))
                (unwind-protect
                     (progn
                       (set-arg-info generic-function :new-method method)
                       (setq remove-again-p nil))
                  (when remove-again-p
                    (remove-method generic-function method))))
              ;; KLUDGE II: ANSI saith that it is not an error to add a
              ;; method with invalid qualifiers to a generic function of the
              ;; wrong kind; it's only an error at generic function
              ;; invocation time; I dunno what the rationale was, and it
              ;; sucks.  Nevertheless, it's probably a programmer error, so
              ;; let's warn anyway. -- CSR, 2003-08-20
              (let* ((mc (generic-function-method-combination generic-function))
                     (type-name (method-combination-type-name mc)))
                (flet ((invalid ()
                         (warn "~@<Invalid qualifiers for ~S method ~
                                combination in method ~S:~2I~_~S.~@:>"
                               type-name method qualifiers)))
                  (cond
                    ((and (eq mc *standard-method-combination*)
                          qualifiers
                          (or (cdr qualifiers)
                              (not (standard-method-combination-qualifier-p
                                    (car qualifiers)))))
                     (invalid))
                    ((and (short-method-combination-p mc)
                          (or (null qualifiers)
                              (cdr qualifiers)
                              (not (short-method-combination-qualifier-p
                                    type-name (car qualifiers)))))
                     (invalid)))))
              (unless skip-dfun-update-p
                (update-ctors 'add-method
                              :generic-function generic-function
                              :method method)
                (update-dfun generic-function))
              (defer-ftype-computation generic-function)
              (map-dependents generic-function
                              (lambda (dep)
                                (update-dependent generic-function
                                                  dep 'add-method method)))))
        (serious-condition (c)
          (error c)))))
  generic-function)
;;; Remove METHOD from GENERIC-FUNCTION if it currently belongs to
;;; it, updating direct-method info, arg-info, dfuns, ctors, deferred
;;; ftype info and dependents.  Returns GENERIC-FUNCTION.
(defun real-remove-method (generic-function method)
  (when (eq generic-function (method-generic-function method))
    (flush-effective-method-cache generic-function)
    (let ((lock (gf-lock generic-function)))
      ;; System lock because interrupts need to be disabled as well:
      ;; it would be bad to unwind and leave the gf in an
      ;; inconsistent state.
      (sb-thread::with-recursive-system-lock (lock)
        (let* ((specializers (method-specializers method))
               (methods (generic-function-methods generic-function))
               (new-methods (remove method methods)))
          (setf (method-generic-function method) nil
                (generic-function-methods generic-function) new-methods)
          (dolist (specializer specializers)
            (remove-direct-method specializer method))
          (set-arg-info generic-function)
          (update-ctors 'remove-method
                        :generic-function generic-function
                        :method method)
          (update-dfun generic-function)
          (defer-ftype-computation generic-function)
          (map-dependents generic-function
                          (lambda (dep)
                            (update-dependent generic-function
                                              dep 'remove-method method)))))))
  generic-function)
;;; Functional twin of the COMPUTE-APPLICABLE-METHODS generic
;;; function, for internal callers that want to avoid generic
;;; dispatch.
(defun compute-applicable-methods-function (generic-function arguments)
  (values (compute-applicable-methods-using-types
           generic-function
           (types-from-args generic-function arguments 'eql))))

;;; AMOP entry point: applicable methods for concrete ARGUMENTS
;;; (each argument contributes an EQL type).
(defmethod compute-applicable-methods
    ((generic-function generic-function) arguments)
  (values (compute-applicable-methods-using-types
           generic-function
           (types-from-args generic-function arguments 'eql))))

;;; AMOP entry point: applicable methods given only argument CLASSES
;;; (each class contributes a CLASS-EQ type).
(defmethod compute-applicable-methods-using-classes
    ((generic-function generic-function) classes)
  (compute-applicable-methods-using-types
   generic-function
   (types-from-args generic-function classes 'class-eq)))
;;; Record that no class may have two distinct members of CLASSES on
;;; its class precedence list.  Symbols are resolved to class
;;; metaobjects first.
(defun !proclaim-incompatible-superclasses (classes)
  (let ((resolved (mapcar (lambda (c)
                            (if (symbolp c)
                                (find-class c)
                                c))
                          classes)))
    (dolist (class resolved)
      (dolist (other resolved)
        (unless (eq class other)
          (pushnew other (class-incompatible-superclass-list class)
                   :test #'eq))))))
;;; True when no class on CLASS1's precedence list has been
;;; proclaimed incompatible with any class on CLASS2's.
(defun superclasses-compatible-p (class1 class2)
  (let ((cpl1 (cpl-or-nil class1))
        (cpl2 (cpl-or-nil class2)))
    (loop for sc1 in cpl1
          never (loop for ic in (class-incompatible-superclass-list sc1)
                      thereis (memq ic cpl2)))))
;;; Proclaim the built-in mutually-exclusive superclass sets.
;;; NOTE(review): most of this quoted list was destroyed when the
;;; source text was mangled (lines carrying trailing comments lost
;;; their code); it has been reconstructed from upstream SBCL and
;;; from the surviving comment fragments — verify against upstream.
(mapc
 #'!proclaim-incompatible-superclasses
 '(;; superclass class
   (built-in-class std-class structure-class) ; direct subclasses of pcl-class
   (standard-class funcallable-standard-class)
   ;; superclass metaobject
   (class eql-specializer class-eq-specializer method method-combination
    generic-function slot-definition)
   ;; metaclass built-in-class
   (number sequence character)     ; direct subclasses of t, but not array
                                   ; or symbol
   (number array character symbol) ; direct subclasses of t, but not
                                   ; sequence
   (complex float rational)        ; direct subclasses of number
   (integer ratio)                 ; direct subclasses of rational
   (list vector)                   ; direct subclasses of sequence
   (cons null)                     ; direct subclasses of list
   (string bit-vector)             ; direct subclasses of vector
   ))
;;; Fallback: two specializers are the same only when EQL.
(defmethod same-specializer-p ((specl1 specializer) (specl2 specializer))
  (eql specl1 specl2))

;;; Class specializers compare by identity.
(defmethod same-specializer-p ((specl1 class) (specl2 class))
  (eq specl1 specl2))

;;; A class used as a specializer denotes itself.
(defmethod specializer-class ((specializer class))
  specializer)

;;; CLASS-EQ specializers match when they wrap the same class.
(defmethod same-specializer-p ((specl1 class-eq-specializer)
                               (specl2 class-eq-specializer))
  (eq (specializer-class specl1) (specializer-class specl2)))
that EQL - specializers are interned , then the comparator should be EQL .
(defmethod same-specializer-p ((specl1 eql-specializer)
(specl2 eql-specializer))
(eq (specializer-object specl1) (specializer-object specl2)))
;;; The class of an EQL specializer's object.
(defmethod specializer-class ((specializer eql-specializer))
  (class-of (slot-value specializer 'object)))

;;; SPECIALIZER-CLASS, or NIL for non-standard specializers.
(defun specializer-class-or-nil (specializer)
  (and (standard-specializer-p specializer)
       (specializer-class specializer)))

;;; Signal a PROGRAM-ERROR for a call with fewer than N arguments.
(defun error-need-at-least-n-args (function n)
  (%program-error "~@<The function ~2I~_~S ~I~_requires at least ~W ~
                   argument~:P.~:>"
                  function n))
(defun types-from-args (generic-function arguments &optional type-modifier)
(multiple-value-bind (nreq applyp metatypes nkeys arg-info)
(get-generic-fun-info generic-function)
(declare (ignore applyp metatypes nkeys))
(let ((types-rev nil))
(dotimes-fixnum (i nreq)
(unless arguments
(error-need-at-least-n-args (generic-function-name generic-function)
nreq))
(let ((arg (pop arguments)))
(push (if type-modifier `(,type-modifier ,arg) arg) types-rev)))
(values (nreverse types-rev) arg-info))))
(defun get-wrappers-from-classes (nkeys wrappers classes metatypes)
(let* ((w wrappers) (w-tail w) (mt-tail metatypes))
(dolist (class (ensure-list classes))
(unless (eq t (car mt-tail))
(let ((c-w (class-wrapper class)))
(unless c-w (return-from get-wrappers-from-classes nil))
(if (eql nkeys 1)
(setq w c-w)
(setf (car w-tail) c-w
w-tail (cdr w-tail)))))
(setq mt-tail (cdr mt-tail)))
w))
(defun sdfun-for-caching (gf classes)
(let ((types (mapcar #'class-eq-type classes)))
(multiple-value-bind (methods all-applicable-and-sorted-p)
(compute-applicable-methods-using-types gf types)
(let ((generator (get-secondary-dispatch-function1
gf methods types nil t all-applicable-and-sorted-p)))
(make-callable generator
nil (mapcar #'class-wrapper classes))))))
(defun value-for-caching (gf classes)
(let ((methods (compute-applicable-methods-using-types
gf (mapcar #'class-eq-type classes))))
(method-plist-value (car methods) :constant-value)))
(defun default-secondary-dispatch-function (generic-function)
(lambda (&rest args)
(let ((methods (compute-applicable-methods generic-function args)))
(if methods
(let ((emf (get-effective-method-function generic-function
methods)))
(invoke-emf emf args))
(call-no-applicable-method generic-function args)))))
(define-load-time-global *std-cam-methods* nil)
(defun compute-applicable-methods-emf (generic-function)
(if (eq **boot-state** 'complete)
(let* ((cam (gdefinition 'compute-applicable-methods))
(cam-methods (compute-applicable-methods-using-types
cam (list `(eql ,generic-function) t))))
(values (get-effective-method-function cam cam-methods)
(list-elts-eq cam-methods
(or *std-cam-methods*
(setq *std-cam-methods*
(compute-applicable-methods-using-types
cam (list `(eql ,cam) t)))))))
(values #'compute-applicable-methods-function t)))
(defun compute-applicable-methods-emf-std-p (gf)
(gf-info-c-a-m-emf-std-p (gf-arg-info gf)))
(defvar *old-c-a-m-gf-methods* nil)
(defun update-all-c-a-m-gf-info (c-a-m-gf)
(let ((methods (generic-function-methods c-a-m-gf)))
(if (and *old-c-a-m-gf-methods*
(every (lambda (old-method)
(member old-method methods :test #'eq))
*old-c-a-m-gf-methods*))
(let ((gfs-to-do nil)
(gf-classes-to-do nil))
(dolist (method methods)
(unless (member method *old-c-a-m-gf-methods* :test #'eq)
(let ((specl (car (method-specializers method))))
(if (eql-specializer-p specl)
(pushnew (specializer-object specl) gfs-to-do :test #'eq)
(pushnew (specializer-class specl) gf-classes-to-do :test #'eq)))))
(map-all-generic-functions
(lambda (gf)
(when (or (member gf gfs-to-do :test #'eq)
(dolist (class gf-classes-to-do nil)
(member class
(class-precedence-list (class-of gf))
:test #'eq)))
(update-c-a-m-gf-info gf)))))
(map-all-generic-functions #'update-c-a-m-gf-info))
(setq *old-c-a-m-gf-methods* methods)))
(defun update-gf-info (gf)
(update-c-a-m-gf-info gf)
(update-gf-simple-accessor-type gf))
(defun update-c-a-m-gf-info (gf)
(unless (early-gf-p gf)
(multiple-value-bind (c-a-m-emf std-p)
(compute-applicable-methods-emf gf)
(let ((arg-info (gf-arg-info gf)))
(setf (gf-info-static-c-a-m-emf arg-info) c-a-m-emf)
(setf (gf-info-c-a-m-emf-std-p arg-info) std-p)))))
(defun update-gf-simple-accessor-type (gf)
(let ((arg-info (gf-arg-info gf)))
(setf (gf-info-simple-accessor-type arg-info)
(let* ((methods (generic-function-methods gf))
(class (and methods (class-of (car methods))))
(type
(and class
(cond ((or (eq class *the-class-standard-reader-method*)
(eq class *the-class-global-reader-method*))
'reader)
((or (eq class *the-class-standard-writer-method*)
(eq class *the-class-global-writer-method*))
'writer)
((eq class *the-class-global-boundp-method*)
'boundp)
((eq class *the-class-global-makunbound-method*)
'makunbound)))))
(when (and (gf-info-c-a-m-emf-std-p arg-info)
type
(dolist (method (cdr methods) t)
(unless (eq class (class-of method)) (return nil)))
(eq (generic-function-method-combination gf)
*standard-method-combination*))
type)))))
CMUCL ( Gerd 's PCL , 2002 - 04 - 25 ) comment :
Return two values . First value is a function to be stored in
effective slot definition SLOTD for reading it with
SLOT - VALUE - USING - CLASS , setting it with ( SETF
SLOT - VALUE - USING - CLASS ) , testing it with SLOT - BOUNDP - USING - CLASS ,
or making it unbound with SLOT - MAKUNBOUND - USING - CLASS . GF is one
WRITER , BOUNDP , MAKUNBOUND . CLASS is SLOTD 's class .
Second value is true if the function returned is one of the
(defun get-accessor-method-function (gf type class slotd)
(let* ((std-method (standard-svuc-method type))
(str-method (structure-svuc-method type))
(types1 `((eql ,class) (class-eq ,class) (eql ,slotd)))
(types (if (eq type 'writer) `(t ,@types1) types1))
(methods (compute-applicable-methods-using-types gf types))
(std-p (null (cdr methods))))
(values
(if std-p
(get-optimized-std-accessor-method-function class slotd type)
(let* ((optimized-std-fun
(get-optimized-std-slot-value-using-class-method-function
class slotd type))
(method-alist
`((,(car (or (member std-method methods :test #'eq)
(member str-method methods :test #'eq)
(bug "error in ~S"
'get-accessor-method-function)))
,optimized-std-fun)))
(wrappers
(let ((wrappers (list (wrapper-of class)
(class-wrapper class)
(wrapper-of slotd))))
(if (eq type 'writer)
(cons (class-wrapper *the-class-t*) wrappers)
wrappers)))
(sdfun (get-secondary-dispatch-function
gf methods types method-alist wrappers)))
(get-accessor-from-svuc-method-function class slotd sdfun type)))
std-p)))
(defun update-slot-value-gf-info (gf type)
(unless *new-class*
(update-std-or-str-methods gf type))
(when (and (standard-svuc-method type) (structure-svuc-method type))
(flet ((update-accessor-info (class)
(when (class-finalized-p class)
(dolist (slotd (class-slots class))
(compute-slot-accessor-info slotd type gf)))))
(if *new-class*
(update-accessor-info *new-class*)
(map-all-classes #'update-accessor-info 'slot-object)))))
(define-load-time-global *standard-slot-value-using-class-method* nil)
(define-load-time-global *standard-setf-slot-value-using-class-method* nil)
(define-load-time-global *standard-slot-boundp-using-class-method* nil)
(define-load-time-global *standard-slot-makunbound-using-class-method* nil)
(define-load-time-global *condition-slot-value-using-class-method* nil)
(define-load-time-global *condition-setf-slot-value-using-class-method* nil)
(define-load-time-global *condition-slot-boundp-using-class-method* nil)
(define-load-time-global *condition-slot-makunbound-using-class-method* nil)
(define-load-time-global *structure-slot-value-using-class-method* nil)
(define-load-time-global *structure-setf-slot-value-using-class-method* nil)
(define-load-time-global *structure-slot-boundp-using-class-method* nil)
(define-load-time-global *structure-slot-makunbound-using-class-method* nil)
(defun standard-svuc-method (type)
(case type
(reader *standard-slot-value-using-class-method*)
(writer *standard-setf-slot-value-using-class-method*)
(boundp *standard-slot-boundp-using-class-method*)
(makunbound *standard-slot-makunbound-using-class-method*)))
(defun set-standard-svuc-method (type method)
(case type
(reader (setq *standard-slot-value-using-class-method* method))
(writer (setq *standard-setf-slot-value-using-class-method* method))
(boundp (setq *standard-slot-boundp-using-class-method* method))
(makunbound (setq *standard-slot-makunbound-using-class-method* method))))
(defun condition-svuc-method (type)
(case type
(reader *condition-slot-value-using-class-method*)
(writer *condition-setf-slot-value-using-class-method*)
(boundp *condition-slot-boundp-using-class-method*)
(makunbound *condition-slot-makunbound-using-class-method*)))
(defun set-condition-svuc-method (type method)
(case type
(reader (setq *condition-slot-value-using-class-method* method))
(writer (setq *condition-setf-slot-value-using-class-method* method))
(boundp (setq *condition-slot-boundp-using-class-method* method))
(makunbound (setq *condition-slot-makunbound-using-class-method* method))))
(defun structure-svuc-method (type)
(case type
(reader *structure-slot-value-using-class-method*)
(writer *structure-setf-slot-value-using-class-method*)
(boundp *structure-slot-boundp-using-class-method*)
(makunbound *standard-slot-makunbound-using-class-method*)))
(defun set-structure-svuc-method (type method)
(case type
(reader (setq *structure-slot-value-using-class-method* method))
(writer (setq *structure-setf-slot-value-using-class-method* method))
(boundp (setq *structure-slot-boundp-using-class-method* method))
(makunbound (setq *structure-slot-makunbound-using-class-method* method))))
(defun update-std-or-str-methods (gf type)
(dolist (method (generic-function-methods gf))
(let ((specls (method-specializers method)))
(when (and (or (not (eq type 'writer))
(eq (pop specls) *the-class-t*))
(every #'classp specls))
(cond ((and (eq (class-name (car specls)) 'std-class)
(eq (class-name (cadr specls)) 'standard-object)
(eq (class-name (caddr specls))
'standard-effective-slot-definition))
(set-standard-svuc-method type method))
((and (eq (class-name (car specls)) 'condition-class)
(eq (class-name (cadr specls)) 'condition)
(eq (class-name (caddr specls))
'condition-effective-slot-definition))
(set-condition-svuc-method type method))
((and (eq (class-name (car specls)) 'structure-class)
(eq (class-name (cadr specls)) 'structure-object)
(eq (class-name (caddr specls))
'structure-effective-slot-definition))
(set-structure-svuc-method type method)))))))
(defun mec-all-classes-internal (spec precompute-p)
(let ((wrapper (class-wrapper (specializer-class spec))))
(unless (or (not wrapper) (invalid-wrapper-p wrapper))
(cons (specializer-class spec)
(and (classp spec)
precompute-p
(not (or (eq spec *the-class-t*)
(eq spec *the-class-slot-object*)
(eq spec *the-class-standard-object*)
(eq spec *the-class-structure-object*)))
(let ((sc (class-direct-subclasses spec)))
(when sc
(mapcan (lambda (class)
(mec-all-classes-internal class precompute-p))
sc))))))))
(defun mec-all-classes (spec precompute-p)
(let ((classes (mec-all-classes-internal spec precompute-p)))
(if (null (cdr classes))
classes
(let* ((a-classes (cons nil classes))
(tail classes))
(loop (when (null (cdr tail))
(return (cdr a-classes)))
(let ((class (cadr tail))
(ttail (cddr tail)))
(if (dolist (c ttail nil)
(when (eq class c) (return t)))
(setf (cdr tail) (cddr tail))
(setf tail (cdr tail)))))))))
(defun mec-all-class-lists (spec-list precompute-p)
(if (null spec-list)
(list nil)
(let* ((car-all-classes (mec-all-classes (car spec-list)
precompute-p))
(all-class-lists (mec-all-class-lists (cdr spec-list)
precompute-p)))
(mapcan (lambda (list)
(mapcar (lambda (c) (cons c list)) car-all-classes))
all-class-lists))))
(defun make-emf-cache (generic-function valuep cache classes-list new-class)
(let* ((arg-info (gf-arg-info generic-function))
(nkeys (arg-info-nkeys arg-info))
(metatypes (arg-info-metatypes arg-info))
(wrappers (unless (eq nkeys 1) (make-list nkeys)))
(precompute-p (gf-precompute-dfun-and-emf-p arg-info)))
(flet ((add-class-list (classes)
(when (or (null new-class) (memq new-class classes))
(let ((%wrappers (get-wrappers-from-classes
nkeys wrappers classes metatypes)))
(when (and %wrappers (not (probe-cache cache %wrappers)))
(let ((value (cond ((eq valuep t)
(sdfun-for-caching generic-function
classes))
((eq valuep :constant-value)
(value-for-caching generic-function
classes)))))
(let ((wrappers (get-wrappers-from-classes
nkeys wrappers classes metatypes)))
(when (if (atom wrappers)
(not (invalid-wrapper-p wrappers))
(every (complement #'invalid-wrapper-p)
wrappers))
(setq cache (fill-cache cache wrappers value))))))))))
(if classes-list
(mapc #'add-class-list classes-list)
(dolist (method (generic-function-methods generic-function))
(mapc #'add-class-list
(mec-all-class-lists (method-specializers method)
precompute-p))))
cache)))
(defmacro class-test (arg class)
(cond
((eq class *the-class-t*) t)
((eq class *the-class-standard-object*)
`(or (std-instance-p ,arg) (fsc-instance-p ,arg)))
((eq class *the-class-funcallable-standard-object*)
`(fsc-instance-p ,arg))
((structure-class-p class)
`(sb-c::%instance-typep ,arg ,(class-wrapper class)))
(t
`(typep ,arg ',(class-name class)))))
(defmacro class-eq-test (arg class)
`(eq (class-of ,arg) ',class))
(defun dnet-methods-p (form)
(and (consp form)
(or (eq (car form) 'methods)
(eq (car form) 'unordered-methods))))
This is CASE , but without .
(defmacro scase (arg &rest clauses)
`(let ((.case-arg. ,arg))
(cond ,@(mapcar (lambda (clause)
(list* (cond ((null (car clause))
nil)
((consp (car clause))
(if (null (cdar clause))
`(eql .case-arg.
',(caar clause))
`(member .case-arg.
',(car clause))))
((member (car clause) '(t otherwise))
`t)
(t
`(eql .case-arg. ',(car clause))))
nil
(cdr clause)))
clauses))))
(defmacro mcase (arg &rest clauses) `(scase ,arg ,@clauses))
(defun generate-discrimination-net (generic-function methods types sorted-p)
(let* ((arg-info (gf-arg-info generic-function))
(c-a-m-emf-std-p (gf-info-c-a-m-emf-std-p arg-info))
(precedence (arg-info-precedence arg-info)))
(generate-discrimination-net-internal
generic-function methods types
(lambda (methods known-types)
(if (or sorted-p
(and c-a-m-emf-std-p
(block one-order-p
(let ((sorted-methods nil))
(map-all-orders
(copy-list methods) precedence
(lambda (methods)
(when sorted-methods (return-from one-order-p nil))
(setq sorted-methods methods)))
(setq methods sorted-methods))
t)))
`(methods ,methods ,known-types)
`(unordered-methods ,methods ,known-types)))
(lambda (position type true-value false-value)
(let ((arg (dfun-arg-symbol position)))
(if (eq (car type) 'eql)
(let* ((false-case-p (and (consp false-value)
(or (eq (car false-value) 'scase)
(eq (car false-value) 'mcase))
(eq arg (cadr false-value))))
(false-clauses (if false-case-p
(cddr false-value)
`((t ,false-value))))
(case-sym (if (and (dnet-methods-p true-value)
(if false-case-p
(eq (car false-value) 'mcase)
(dnet-methods-p false-value)))
'mcase
'scase))
(type-sym `(,(cadr type))))
`(,case-sym ,arg
(,type-sym ,true-value)
,@false-clauses))
`(if ,(let ((arg (dfun-arg-symbol position)))
(case (car type)
(class `(class-test ,arg ,(cadr type)))
(class-eq `(class-eq-test ,arg ,(cadr type)))))
,true-value
,false-value))))
#'identity)))
(defun class-from-type (type)
(if (or (atom type) (eq (car type) t))
*the-class-t*
(case (car type)
(and (dolist (type (cdr type) *the-class-t*)
(when (and (consp type) (not (eq (car type) 'not)))
(return (class-from-type type)))))
(not *the-class-t*)
(eql (class-of (cadr type)))
(class-eq (cadr type))
(class (cadr type)))))
(defun augment-type (new-type known-type)
(if (or (eq known-type t)
(eq (car new-type) 'eql))
new-type
(let ((so-far (if (and (consp known-type) (eq (car known-type) 'and))
(cdr known-type)
(list known-type))))
(unless (eq (car new-type) 'not)
(setq so-far
(mapcan (lambda (type)
(unless (*subtypep new-type type)
(list type)))
so-far)))
(if (null so-far)
new-type
`(and ,new-type ,@so-far)))))
(defun generate-discrimination-net-internal
(gf methods types methods-function test-fun type-function)
(let* ((arg-info (gf-arg-info gf))
(precedence (arg-info-precedence arg-info))
(nreq (arg-info-number-required arg-info))
(metatypes (arg-info-metatypes arg-info)))
(labels ((do-column (p-tail contenders known-types)
(if p-tail
(let* ((position (car p-tail))
(known-type (or (nth position types) t)))
(if (eq (nth position metatypes) t)
(do-column (cdr p-tail) contenders
(cons (cons position known-type)
known-types))
(do-methods p-tail contenders
known-type () known-types)))
(funcall methods-function contenders
(let ((k-t (make-list nreq)))
(dolist (index+type known-types)
(setf (nth (car index+type) k-t)
(cdr index+type)))
k-t))))
(do-methods (p-tail contenders known-type winners known-types)
(if (null contenders)
(do-column (cdr p-tail)
winners
(cons (cons (car p-tail) known-type)
known-types))
(let* ((position (car p-tail))
(method (car contenders))
(specl (nth position (method-specializers method)))
(type (funcall type-function
(type-from-specializer specl))))
(multiple-value-bind (app-p maybe-app-p)
(specializer-applicable-using-type-p type known-type)
(flet ((determined-to-be (truth-value)
(if truth-value app-p (not maybe-app-p)))
(do-if (truth &optional implied)
(let ((ntype (if truth type `(not ,type))))
(do-methods p-tail
(cdr contenders)
(if implied
known-type
(augment-type ntype known-type))
(if truth
(append winners `(,method))
winners)
known-types))))
(cond ((determined-to-be nil) (do-if nil t))
((determined-to-be t) (do-if t t))
(t (funcall test-fun position type
(do-if t) (do-if nil))))))))))
(do-column precedence methods ()))))
(defun compute-secondary-dispatch-function (generic-function net &optional
method-alist wrappers)
(funcall (the function (compute-secondary-dispatch-function1 generic-function net))
method-alist wrappers))
(defvar *eq-case-table-limit* 15)
(defvar *case-table-limit* 10)
(defun compute-mcase-parameters (case-list)
(unless (eq t (caar (last case-list)))
(error "The key for the last case arg to mcase was not T"))
(let* ((eq-p (dolist (case case-list t)
(unless (or (eq (car case) t)
(symbolp (caar case)))
(return nil))))
(len (1- (length case-list)))
(type (cond ((= len 1)
:simple)
((<= len
(if eq-p
*eq-case-table-limit*
*case-table-limit*))
:assoc)
(t
:hash-table))))
(list eq-p type)))
(defmacro mlookup (key info default &optional eq-p type)
(unless (or (eq eq-p t) (null eq-p))
(bug "Invalid eq-p argument: ~S" eq-p))
(ecase type
(:simple
`(if (locally
(declare (optimize (inhibit-warnings 3)))
(,(if eq-p 'eq 'eql) ,key (car ,info)))
(cdr ,info)
,default))
(:assoc
`(dolist (e ,info ,default)
(when (locally
(declare (optimize (inhibit-warnings 3)))
(,(if eq-p 'eq 'eql) (car e) ,key))
(return (cdr e)))))
(:hash-table
`(gethash ,key ,info ,default))))
(defun net-test-converter (form)
(if (atom form)
(default-test-converter form)
(case (car form)
((invoke-effective-method-function invoke-fast-method-call
invoke-effective-narrow-method-function)
'.call.)
(methods
'.methods.)
(unordered-methods
'.umethods.)
(mcase
`(mlookup ,(cadr form)
nil
nil
,@(compute-mcase-parameters (cddr form))))
(t (default-test-converter form)))))
(defun net-code-converter (form)
(if (atom form)
(default-code-converter form)
(case (car form)
((methods unordered-methods)
(let ((gensym (gensym)))
(values gensym
(list gensym))))
(mcase
(let ((mp (compute-mcase-parameters (cddr form)))
(gensym (gensym)) (default (gensym)))
(values `(mlookup ,(cadr form) ,gensym ,default ,@mp)
(list gensym default))))
(t
(default-code-converter form)))))
(defun net-constant-converter (form generic-function)
(or (let ((c (methods-converter form generic-function)))
(when c (list c)))
(if (atom form)
(default-constant-converter form)
(case (car form)
(mcase
(let* ((mp (compute-mcase-parameters (cddr form)))
(list (mapcar (lambda (clause)
(let ((key (car clause))
(meth (cadr clause)))
(cons (if (consp key) (car key) key)
(methods-converter
meth generic-function))))
(cddr form)))
(default (car (last list))))
(list (list* :mcase mp (nbutlast list))
(cdr default))))
(t
(default-constant-converter form))))))
(defun methods-converter (form generic-function)
(cond ((and (consp form) (eq (car form) 'methods))
(cons '.methods.
(get-effective-method-function1 generic-function (cadr form))))
((and (consp form) (eq (car form) 'unordered-methods))
(default-secondary-dispatch-function generic-function))))
(defun convert-methods (constant method-alist wrappers)
(if (and (consp constant)
(eq (car constant) '.methods.))
(funcall (cdr constant) method-alist wrappers)
constant))
(defun convert-table (constant method-alist wrappers)
(cond ((and (consp constant)
(eq (car constant) :mcase))
(let ((alist (mapcar (lambda (k+m)
(cons (car k+m)
(convert-methods (cdr k+m)
method-alist
wrappers)))
(cddr constant)))
(mp (cadr constant)))
(ecase (cadr mp)
(:simple
(car alist))
(:assoc
alist)
(:hash-table
(let ((table (make-hash-table :test (if (car mp) 'eq 'eql))))
(dolist (k+m alist)
(setf (gethash (car k+m) table) (cdr k+m)))
table)))))))
(defun compute-secondary-dispatch-function1 (generic-function net
&optional function-p)
(cond
((and (eq (car net) 'methods) (not function-p))
(get-effective-method-function1 generic-function (cadr net)))
(t
(let* ((name (generic-function-name generic-function))
(arg-info (gf-arg-info generic-function))
(metatypes (arg-info-metatypes arg-info))
(nargs (length metatypes))
(applyp (arg-info-applyp arg-info))
(fmc-arg-info (cons nargs applyp))
(arglist (if function-p
(make-dfun-lambda-list nargs applyp)
(make-fast-method-call-lambda-list nargs applyp))))
(multiple-value-bind (cfunction constants)
We do n't want NAMED - LAMBDA for any expressions handed to FNGEN ,
(get-fun `(lambda ,arglist
(declare (optimize (sb-c::store-closure-debug-pointer 3)))
,@(unless function-p
`((declare (ignore .pv. .next-method-call.))))
(locally (declare #.*optimize-speed*)
(let ((emf ,net))
,(make-emf-call nargs applyp 'emf))))
#'net-test-converter
#'net-code-converter
(lambda (form)
(net-constant-converter form generic-function)))
(lambda (method-alist wrappers)
(let* ((alist (list nil))
(alist-tail alist))
(dolist (constant constants)
(let* ((a (or (dolist (a alist nil)
(when (eq (car a) constant)
(return a)))
(cons constant
(or (convert-table
constant method-alist wrappers)
(convert-methods
constant method-alist wrappers)))))
(new (list a)))
(setf (cdr alist-tail) new)
(setf alist-tail new)))
(let ((function (apply cfunction (mapcar #'cdr (cdr alist)))))
(if function-p
(set-fun-name function `(gf-dispatch ,name))
(make-fast-method-call
:function (set-fun-name function `(sdfun-method ,name))
:arg-info fmc-arg-info))))))))))
(defvar *show-make-unordered-methods-emf-calls* nil)
(defun make-unordered-methods-emf (generic-function methods)
(when *show-make-unordered-methods-emf-calls*
(format t "~&make-unordered-methods-emf ~S~%"
(generic-function-name generic-function)))
(lambda (&rest args)
(let* ((types (types-from-args generic-function args 'eql))
(smethods (sort-applicable-methods generic-function
methods
types))
(emf (get-effective-method-function generic-function smethods)))
(invoke-emf emf args))))
function < gf1 > ( using SET - FUNCALLABLE - INSTANCE - FUNCTION ) . Then the
discriminating function is critical to many uses of the MOP .
it should first store some information in the generic function proper ,
my - generic - function accept only one argument .
(defun slot-value-using-class-dfun (class object slotd)
(declare (ignore class))
(funcall (slot-info-reader (slot-definition-info slotd)) object))
(defun setf-slot-value-using-class-dfun (new-value class object slotd)
(declare (ignore class))
(funcall (slot-info-writer (slot-definition-info slotd)) new-value object))
(defun slot-boundp-using-class-dfun (class object slotd)
(declare (ignore class))
(funcall (slot-info-boundp (slot-definition-info slotd)) object))
(defun slot-makunbound-using-class-dfun (class object slotd)
(declare (ignore class))
(funcall (slot-info-makunbound (slot-definition-info slotd)) object))
(defun special-case-for-compute-discriminating-function-p (gf)
(or (eq gf #'slot-value-using-class)
(eq gf #'(setf slot-value-using-class))
(eq gf #'slot-boundp-using-class)
(eq gf #'slot-makunbound-using-class)))
(let (initial-print-object-cache)
(defun standard-compute-discriminating-function (gf)
(declare (notinline slot-value))
(let ((dfun-state (slot-value gf 'dfun-state)))
(when (special-case-for-compute-discriminating-function-p gf)
COMPUTE - DISCRIMINATING - FUNCTION , then ( at least for the
special cases implemented as of 2006 - 05 - 09 ) any information
(aver (null dfun-state)))
(typecase dfun-state
(null
(when (eq gf (load-time-value #'compute-applicable-methods t))
(update-all-c-a-m-gf-info gf))
(cond ((eq gf (load-time-value #'slot-value-using-class t))
(update-slot-value-gf-info gf 'reader)
#'slot-value-using-class-dfun)
((eq gf (load-time-value #'(setf slot-value-using-class) t))
(update-slot-value-gf-info gf 'writer)
#'setf-slot-value-using-class-dfun)
((eq gf (load-time-value #'slot-boundp-using-class t))
(update-slot-value-gf-info gf 'boundp)
#'slot-boundp-using-class-dfun)
((eq gf (load-time-value #'slot-makunbound-using-class t))
(update-slot-value-gf-info gf 'makunbound)
#'slot-makunbound-using-class-dfun)
: PRINT - OBJECT is not a special - case in the sense
many extra resources . -- CSR , 2008 - 06 - 09
((eq gf (locally (declare (optimize (safety 0))) #'print-object))
(let ((nkeys (nth-value 3 (get-generic-fun-info gf))))
(cond ((/= nkeys 1)
: someone has defined a method
specialized on the second argument : punt .
(setf initial-print-object-cache nil)
(make-initial-dfun gf))
(initial-print-object-cache
(multiple-value-bind (dfun cache info)
(make-caching-dfun gf (copy-cache initial-print-object-cache))
(set-dfun gf dfun cache info)))
late , by delayed DEFMETHOD . We must n't cache
((boundp '*!delayed-defmethod-args*)
(make-initial-dfun gf))
(t (multiple-value-bind (dfun cache info)
(make-final-dfun-internal
gf
(mapcar (lambda (x) (list (find-class x)))
'(sb-kernel::control-stack-exhausted
sb-kernel::binding-stack-exhausted
sb-kernel::alien-stack-exhausted
sb-kernel::heap-exhausted-error
restart)))
(setq initial-print-object-cache cache)
(set-dfun gf dfun (copy-cache cache) info))))))
((gf-precompute-dfun-and-emf-p (slot-value gf 'arg-info))
(make-final-dfun gf))
(t
(make-initial-dfun gf))))
(function dfun-state)
(cons (car dfun-state))))))
in general we need to support SBCL 's encapsulation for generic
(defun sb-impl::encapsulate-generic-function (gf type function)
(push (cons type function) (generic-function-encapsulations gf))
(reinitialize-instance gf))
(defun sb-impl::unencapsulate-generic-function (gf type)
(setf (generic-function-encapsulations gf)
(remove type (generic-function-encapsulations gf)
:key #'car :count 1))
(reinitialize-instance gf))
(defun sb-impl::encapsulated-generic-function-p (gf type)
(position type (generic-function-encapsulations gf) :key #'car))
(defun maybe-encapsulate-discriminating-function (gf encs std)
(if (null encs)
std
(let ((inner (maybe-encapsulate-discriminating-function
gf (cdr encs) std))
(function (cdar encs)))
(lambda (&rest args)
(apply function inner args)))))
(defmethod compute-discriminating-function ((gf standard-generic-function))
(standard-compute-discriminating-function gf))
(defmethod compute-discriminating-function :around ((gf standard-generic-function))
(maybe-encapsulate-discriminating-function
gf (generic-function-encapsulations gf) (call-next-method)))
(defmethod (setf class-name) (new-value class)
(let ((classoid (wrapper-classoid (class-wrapper class))))
(if (and new-value (symbolp new-value))
(setf (classoid-name classoid) new-value)
(setf (classoid-name classoid) nil)))
(reinitialize-instance class :name new-value)
new-value)
(defmethod (setf generic-function-name) (new-value generic-function)
(reinitialize-instance generic-function :name new-value)
new-value)
(defmethod function-keywords ((method standard-method))
(multiple-value-bind (llks nreq nopt keywords)
(analyze-lambda-list (if (consp method)
(early-method-lambda-list method)
(method-lambda-list method)))
(declare (ignore nreq nopt))
(values keywords (ll-kwds-allowp llks))))
(defmethod generic-function-pretty-arglist ((gf standard-generic-function) &optional methods-in-compilation-unit)
(let ((gf-lambda-list (generic-function-lambda-list gf))
(methods (generic-function-methods gf)))
(flet ((lambda-list (m)
(or (and methods-in-compilation-unit
(gethash (cons (method-qualifiers m)
(unparse-specializers gf (method-specializers m)))
methods-in-compilation-unit))
(method-lambda-list m)))
(canonize (k)
(multiple-value-bind (kw var)
(parse-key-arg-spec k)
(if (and (eql (symbol-package kw) *keyword-package*)
(string= kw var))
var
(list (list kw var))))))
(multiple-value-bind (llks required optional rest keys)
(parse-lambda-list gf-lambda-list :silent t)
(if (or (ll-kwds-keyp llks)
(ll-kwds-restp llks))
(collect ((keys (mapcar #'canonize keys)))
(flet ((process (lambda-list)
(binding* (((m.llks nil nil nil m.keys)
(parse-lambda-list lambda-list :silent t)))
(setq llks (logior llks m.llks))
(dolist (k m.keys)
(unless (member (parse-key-arg-spec k) (keys)
:key #'parse-key-arg-spec :test #'eq)
(keys (canonize k)))))))
(dolist (m methods)
(process (lambda-list m))))
(make-lambda-list llks nil required optional rest (keys)))
(make-lambda-list llks nil required optional))))))
(defun gf-merge-arglists (methods-in-compilation-unit)
(flet ((canonize (k)
(multiple-value-bind (kw var)
(parse-key-arg-spec k)
(if (and (eql (symbol-package kw) *keyword-package*)
(string= kw var))
var
(list (list kw var))))))
(with-hash-table-iterator (iterator methods-in-compilation-unit)
(multiple-value-bind (llks required optional rest keys)
(parse-lambda-list (nth-value 2 (iterator)) :silent t)
(if (or (ll-kwds-keyp llks)
(ll-kwds-restp llks))
(collect ((keys (mapcar #'canonize keys)))
(flet ((process (lambda-list)
(binding* (((m.llks nil nil nil m.keys)
(parse-lambda-list lambda-list :silent t)))
(setq llks (logior llks m.llks))
(dolist (k m.keys)
(unless (member (parse-key-arg-spec k) (keys)
:key #'parse-key-arg-spec :test #'eq)
(keys (canonize k)))))))
(loop
(multiple-value-bind (more key value) (iterator)
(declare (ignore key))
(unless more
(return))
(process value)))
(make-lambda-list llks nil required optional rest (keys))))
(make-lambda-list llks nil required optional))))))
|
c7a93981c00f377b1ba0e93590a3d442078fabfa9b07d14c881c2e668fe78d5c | racket/eopl | lang.rkt | #lang eopl
;; grammar for the CLASSES language. Based on IMPLICIT-REFS, plus
;; multiple-argument procedures, multiple-declaration letrecs, and
;; multiple-declaration lets.
(provide (all-defined-out))
;;;;;;;;;;;;;;;; grammatical specification ;;;;;;;;;;;;;;;;
(define the-lexical-spec
'((whitespace (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier
(letter (arbno (or letter digit "_" "-" "?")))
symbol)
(number (digit (arbno digit)) number)
(number ("-" digit (arbno digit)) number)
))
(define the-grammar
'((program ((arbno class-decl) expression) a-program)
(expression (number) const-exp)
(expression
("-" "(" expression "," expression ")")
diff-exp)
(expression
("+" "(" expression "," expression ")")
sum-exp)
(expression
("zero?" "(" expression ")")
zero?-exp)
(expression
("if" expression "then" expression "else" expression)
if-exp)
(expression (identifier) var-exp)
(expression
("let" (arbno identifier "=" expression) "in" expression)
let-exp)
(expression
("proc" "(" (separated-list identifier ",") ")" expression)
proc-exp)
(expression
("(" expression (arbno expression) ")")
call-exp)
(expression
("letrec"
(arbno identifier "(" (separated-list identifier ",") ")"
"=" expression)
"in" expression)
letrec-exp)
(expression
("begin" expression (arbno ";" expression) "end")
begin-exp)
(expression
("set" identifier "=" expression)
assign-exp)
(expression
("list" "(" (separated-list expression ",") ")" )
list-exp)
;; new productions for oop
(class-decl
("class" identifier
"extends" identifier
(arbno "field" identifier)
(arbno method-decl)
)
a-class-decl)
(method-decl
("method" identifier
"(" (separated-list identifier ",") ")" ; method formals
expression
)
a-method-decl)
(expression
("new" identifier "(" (separated-list expression ",") ")")
new-object-exp)
;; this is special-cased to prevent it from mutation
(expression
("self")
self-exp)
(expression
("send" expression identifier
"(" (separated-list expression ",") ")")
method-call-exp)
(expression
("super" identifier "(" (separated-list expression ",") ")")
super-call-exp)
))
;;;;;;;;;;;;;;;; sllgen boilerplate ;;;;;;;;;;;;;;;;
(sllgen:make-define-datatypes the-lexical-spec the-grammar)
(define show-the-datatypes
(lambda () (sllgen:list-define-datatypes the-lexical-spec the-grammar)))
(define scan&parse
(sllgen:make-string-parser the-lexical-spec the-grammar))
(define just-scan
(sllgen:make-string-scanner the-lexical-spec the-grammar))
| null | https://raw.githubusercontent.com/racket/eopl/43575d6e95dc34ca6e49b305180f696565e16e0f/tests/chapter9/classes/lang.rkt | racket | grammar for the CLASSES language. Based on IMPLICIT-REFS, plus
multiple-argument procedures, multiple-declaration letrecs, and
multiple-declaration lets.
grammatical specification ;;;;;;;;;;;;;;;;
new productions for oop
method formals
this is special-cased to prevent it from mutation
sllgen boilerplate ;;;;;;;;;;;;;;;; | #lang eopl
(provide (all-defined-out))
(define the-lexical-spec
'((whitespace (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier
(letter (arbno (or letter digit "_" "-" "?")))
symbol)
(number (digit (arbno digit)) number)
(number ("-" digit (arbno digit)) number)
))
(define the-grammar
'((program ((arbno class-decl) expression) a-program)
(expression (number) const-exp)
(expression
("-" "(" expression "," expression ")")
diff-exp)
(expression
("+" "(" expression "," expression ")")
sum-exp)
(expression
("zero?" "(" expression ")")
zero?-exp)
(expression
("if" expression "then" expression "else" expression)
if-exp)
(expression (identifier) var-exp)
(expression
("let" (arbno identifier "=" expression) "in" expression)
let-exp)
(expression
("proc" "(" (separated-list identifier ",") ")" expression)
proc-exp)
(expression
("(" expression (arbno expression) ")")
call-exp)
(expression
("letrec"
(arbno identifier "(" (separated-list identifier ",") ")"
"=" expression)
"in" expression)
letrec-exp)
(expression
("begin" expression (arbno ";" expression) "end")
begin-exp)
(expression
("set" identifier "=" expression)
assign-exp)
(expression
("list" "(" (separated-list expression ",") ")" )
list-exp)
(class-decl
("class" identifier
"extends" identifier
(arbno "field" identifier)
(arbno method-decl)
)
a-class-decl)
(method-decl
("method" identifier
expression
)
a-method-decl)
(expression
("new" identifier "(" (separated-list expression ",") ")")
new-object-exp)
(expression
("self")
self-exp)
(expression
("send" expression identifier
"(" (separated-list expression ",") ")")
method-call-exp)
(expression
("super" identifier "(" (separated-list expression ",") ")")
super-call-exp)
))
(sllgen:make-define-datatypes the-lexical-spec the-grammar)
(define show-the-datatypes
(lambda () (sllgen:list-define-datatypes the-lexical-spec the-grammar)))
(define scan&parse
(sllgen:make-string-parser the-lexical-spec the-grammar))
(define just-scan
(sllgen:make-string-scanner the-lexical-spec the-grammar))
|
5f1fc2aad94cace7e64bab6609e977f69bf8d9c5ad066d35ce38aa4f21bd9a1f | Apress/practical-webdev-haskell | Main.hs | module Adapter.HTTP.API.Main where
import Domain.Auth
import ClassyPrelude
import Web.Scotty.Trans
import Network.HTTP.Types.Status
import qualified Adapter.HTTP.API.Auth as Auth
import Adapter.HTTP.API.Common
import Katip
import Network.Wai
import Network.Wai.Middleware.Gzip
-- | Build the WAI 'Application' for the JSON API, given a natural
-- transformation that runs the application monad down to 'IO'.
main :: ( MonadIO m, KatipContext m, AuthRepo m
        , EmailVerificationNotif m, SessionRepo m)
     => (m Response -> IO Response) -> IO Application
main runIntoIO = scottyAppT runIntoIO routes
-- | All API routes: gzip compression middleware, the auth endpoints,
-- plus catch-all handlers for unknown paths and uncaught errors.
routes :: ( MonadIO m, KatipContext m, AuthRepo m
          , EmailVerificationNotif m, SessionRepo m)
       => ScottyT LText m ()
routes = do
  -- Compress every response body.
  middleware $ gzip $ def { gzipFiles = GzipCompress }
  Auth.routes
  -- Unknown route: answer 404 with a JSON body.
  notFound $ do
    status status404
    json ("NotFound" :: Text)
  -- Last-resort handler: log the error, hide details from the client.
  defaultHandler $ \err -> do
    lift $ $(logTM) ErrorS $ "Unhandled error: " <> ls (showError err)
    status status500
    json ("InternalServerError" :: Text)
| null | https://raw.githubusercontent.com/Apress/practical-webdev-haskell/17b90c06030def254bb0497b9e357f5d3b96d0cf/08/src/Adapter/HTTP/API/Main.hs | haskell | module Adapter.HTTP.API.Main where
import Domain.Auth
import ClassyPrelude
import Web.Scotty.Trans
import Network.HTTP.Types.Status
import qualified Adapter.HTTP.API.Auth as Auth
import Adapter.HTTP.API.Common
import Katip
import Network.Wai
import Network.Wai.Middleware.Gzip
-- | Turn the route definitions into a runnable WAI 'Application'.
-- The argument lowers the application monad into 'IO' for Scotty.
main :: ( MonadIO m, KatipContext m, AuthRepo m
        , EmailVerificationNotif m, SessionRepo m)
     => (m Response -> IO Response) -> IO Application
main lower = scottyAppT lower routes
-- | Route table for the JSON API.  Order matters: middleware first,
-- then concrete routes, then the 404 and error fallbacks.
routes :: ( MonadIO m, KatipContext m, AuthRepo m
          , EmailVerificationNotif m, SessionRepo m)
       => ScottyT LText m ()
routes = do
  middleware $ gzip $ def { gzipFiles = GzipCompress }  -- gzip all responses
  Auth.routes
  notFound $ do
    -- No route matched: JSON 404.
    status status404
    json ("NotFound" :: Text)
  defaultHandler $ \ex -> do
    -- Log full details server-side; clients only see a generic 500.
    lift $ $(logTM) ErrorS $ "Unhandled error: " <> ls (showError ex)
    status status500
    json ("InternalServerError" :: Text)
|
|
93000d7312dcee8c315751450489d96417a35828df0fd9d4c54c722813c5c09c | erlangonrails/devdb | exmpp_compress.erl | Copyright ProcessOne 2006 - 2010 . All Rights Reserved .
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%% @author
%% @doc
%% The module <strong>{@module}</strong> provides functions to handle
%% stream compression.
-module(exmpp_compress).
-behaviour(gen_server).
%% Initialization.
-export([
start/0,
start_link/0
]).
%% Registry handling.
-export([
register_engine/3,
register_engine/4,
get_compress_methods/0,
get_engine_names/0,
get_engine_names/1,
get_prefered_engine_name/1,
is_engine_available/1,
get_engine_driver/1
]).
%% Compression activation.
-export([
enable_compression/2,
disable_compression/1
]).
%% Common socket API.
-export([
send/2,
recv/1,
recv/2,
getopts/2,
setopts/2,
peername/1,
sockname/1,
controlling_process/2,
close/1,
port_revision/1,
recv_data/2,
send_data/2
]).
%% gen_server(3erl) callbacks.
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-record(state, {engines,
by_compress_method
}).
-record(compress_engine, {name,
driver_path,
driver,
compress_methods = []
}).
-record(compress_socket, {socket,
packet_mode = binary,
port
}).
-define(SERVER, ?MODULE).
-define(DEFAULT_ENGINE, zlib).
-define(COMMAND_SET_COMPRESS_METHOD, 1).
-define(COMMAND_SET_COMPRESS_LEVEL, 2).
-define(COMMAND_PREPARE_COMPRESS, 3).
-define(COMMAND_PREPARE_UNCOMPRESS, 4).
-define(COMMAND_COMPRESS, 5).
-define(COMMAND_UNCOMPRESS, 6).
-define(COMMAND_SVN_REVISION, 7).
%% --------------------------------------------------------------------
%% Initialization.
%% --------------------------------------------------------------------
%% @hidden
%% @doc Start the compression-engine registry (registered as ?SERVER,
%% not linked) and register the engines compiled into this build.
%% Returns the gen_server:start/4 result unchanged.
%% NOTE(review): register_builtin_engines/0 runs even when start
%% returns an error tuple - confirm that is intended.
start() ->
Ret = gen_server:start({local, ?SERVER}, ?MODULE, [], []),
register_builtin_engines(),
Ret.
%% @hidden
%% @doc Same as start/0 but links the registry to the calling process
%% (supervision-friendly variant).
start_link() ->
Ret = gen_server:start_link({local, ?SERVER}, ?MODULE, [], []),
register_builtin_engines(),
Ret.
-ifdef(HAVE_ZLIB).
-define(REGISTER_ZLIB,
register_builtin_engine(zlib, exmpp_compress_zlib,
[{zlib, 10}, {gzip, 10}])).
-else.
-define(REGISTER_ZLIB, ok).
-endif.
%% @doc Register every built-in engine (?REGISTER_ZLIB expands to a
%% zlib registration when HAVE_ZLIB was set at build time, otherwise
%% to ok).  Always returns ok.
register_builtin_engines() ->
?REGISTER_ZLIB,
ok.
%% @doc Register one built-in engine, downgrading a driver-load
%% failure to a logged warning so that a missing optional driver does
%% not abort startup.
register_builtin_engine(Name, Driver, Compress_Methods) ->
try
register_engine(Name, Driver, Compress_Methods)
catch
throw:{port_driver, load, Reason, Driver_Name} ->
error_logger:warning_msg("Failed to load driver \"~s\": ~s~n",
[Driver_Name,
erl_ddll:format_error(Reason)])
end.
%% --------------------------------------------------------------------
%% Registry handling.
%% --------------------------------------------------------------------
%% @spec (Name, Driver, Compress_Methods) -> ok
%% Name = atom()
%% Driver = atom()
%% Compress_Mehods = [{atom(), Priority}]
%% Priority = integer()
%% @doc Add a new compression engine.
register_engine(Name, Driver, Compress_Methods) ->
register_engine(Name, undefined, Driver, Compress_Methods).
%% @spec (Name, Driver_Path, Driver, Compress_Methods) -> ok
%% Name = atom()
%% Driver_Path = string()
%% Driver = atom()
%% Compress_Mehods = [{atom(), Priority}]
%% Priority = integer()
%% @doc Add a new compression engine.
%% @doc Register a compression engine with the registry server: the
%% server loads the port driver (optionally from Driver_Path) and
%% indexes the engine under each supported {Method, Priority} pair.
%% Throws the server-reported exception on failure.
register_engine(Name, Driver_Path, Driver, Compress_Methods)
when is_atom(Name), is_list(Compress_Methods), length(Compress_Methods) > 0 ->
Engine = #compress_engine{name = Name,
driver_path = Driver_Path,
driver = Driver,
compress_methods = Compress_Methods
},
case gen_server:call(?SERVER, {register_engine, Engine}) of
ok -> ok;
{error, Exception} -> throw(Exception)
end.
( ) - > [ Compress_Method ]
%% Compress_Method = atom()
%% @doc Return the list of supported compress methods.
get_compress_methods() ->
gen_server:call(?SERVER, get_compress_methods).
( ) - > [ Engine_Name ]
%% Engine_Name = atom()
%% @doc Return the list of compression engines.
get_engine_names() ->
gen_server:call(?SERVER, get_engine_names).
( Compress_Method ) - > [ Engine_Name ]
%% Compress_Method = atom()
%% Engine_Name = atom()
%% @doc Return the list of compression engines which support the given compress method.
%%
%% The list is sorted from the most to the least prefered engine.
get_engine_names(Compress_Method) ->
Engines = gen_server:call(?SERVER, {get_engines, Compress_Method}),
[E#compress_engine.name || E <- Engines].
( Compress_Method ) - > [ Engine_Name ]
%% Compress_Method = atom()
%% Engine_Name = atom()
%% @doc Return the name of the prefered compression engines which support the
%% given compress method.
get_prefered_engine_name(Compress_Method) ->
case get_prefered_engine(Compress_Method) of
undefined -> undefined;
Engine -> Engine#compress_engine.name
end.
get_prefered_engine(Compress_Method) ->
Engines = gen_server:call(?SERVER, {get_engines, Compress_Method}),
case Engines of
[] -> undefined;
[Engine | _] -> Engine
end.
%% @spec (Engine_Name) -> bool()
%% Engine_Name = atom()
%% @doc Tell if `Engine_Name' is available.
is_engine_available(Engine_Name) ->
case gen_server:call(?SERVER, {get_engine, Engine_Name}) of
undefined -> false;
_ -> true
end.
@spec ( Engine_Name ) - >
%% Engine_Name = atom()
= atom ( )
%% @doc Return the port driver name associated to the given engine.
get_engine_driver(Engine_Name) ->
case gen_server:call(?SERVER, {get_engine, Engine_Name}) of
undefined -> undefined;
#compress_engine{driver = Driver_Name} -> Driver_Name
end.
%% --------------------------------------------------------------------
%% Compression activation.
%% --------------------------------------------------------------------
%% @spec (Socket_Desc, Options) -> Compress_Socket
%% Socket_Desc = {Mod, Socket}
%% Mod = atom()
%% Socket = term()
%% Options = [Option]
%% Option = {compress_method, Method} | {engine, Engine} | {mode, Mode} | {compress_level, Level}
%% Method = atom()
%% Engine = atom()
%% Mode = binary | list
%% Level = integer()
%% Compress_Socket = compress_socket()
%% @doc Enable compression over the given socket.
%% @doc Wrap Socket_Desc in a compressing socket.  Options may select
%% the engine, compress_method, compress_level and the packet mode
%% (binary, the default, or list) used when uncompressed data is
%% handed back by recv/recv_data.  The freshly opened port is closed
%% again if any initialization step throws.
enable_compression(Socket_Desc, Options) ->
%% Start a port driver instance.
Driver_Name = get_engine_from_options(Options),
Port = exmpp_internals:open_port(Driver_Name),
%% Initialize the port.
try
%% Set compression method.
case proplists:get_value(compress_method, Options) of
undefined -> ok;
CM -> engine_set_compress_method(Port, CM)
end,
%% Set compression level.
case proplists:get_value(compress_level, Options) of
undefined -> ok;
Level -> engine_set_compress_level(Port, Level)
end,
%% Packet mode.
Packet_Mode = proplists:get_value(mode, Options, binary),
%% Enable compression.
engine_prepare_compress(Port),
engine_prepare_uncompress(Port),
#compress_socket{socket = Socket_Desc,
packet_mode = Packet_Mode,
port = Port}
catch
_:Exception ->
exmpp_internals:close_port(Port),
throw(Exception)
end.
( Compress_Socket ) - > Socket_Desc
%% Compress_Socket = compress_socket()
%% Socket_Desc = {Mod, Socket}
%% Mod = atom()
%% Socket = term()
%% @doc Disable compression and return the underlying socket.
disable_compression(#compress_socket{socket = Socket_Desc, port = Port}) ->
exmpp_internals:close_port(Port),
Socket_Desc.
%% --------------------------------------------------------------------
%% Activation helpers.
%% --------------------------------------------------------------------
%% Choose the most appropriate engine.
%% @doc Resolve the port-driver name to use for a new compressed
%% socket.  Selection order:
%%   1. an explicit {engine, Name} option (must be registered),
%%   2. else the prefered engine for the requested compress_method,
%%   3. else ?DEFAULT_ENGINE when registered, else any registered
%%      engine.
%% Throws when no engine can satisfy the request.
get_engine_from_options(Options) ->
Engine_Name =
case proplists:get_value(engine, Options) of
undefined ->
case proplists:get_value(compress_method, Options) of
undefined ->
%% No hint at all: fall back to the default or first engine.
case get_engine_names() of
[] ->
throw({compress, options, no_engine_available,
undefined});
[Name | _] = Names ->
case lists:member(?DEFAULT_ENGINE, Names) of
true -> ?DEFAULT_ENGINE;
false -> Name
end
end;
CM ->
get_prefered_engine_name(CM)
end;
Name ->
case is_engine_available(Name) of
true ->
Name;
false ->
throw({compress, options, engine_unavailable, Name})
end
end,
get_engine_driver(Engine_Name).
%% --------------------------------------------------------------------
%% Common socket API.
%% --------------------------------------------------------------------
( Compress_Socket , Orig_Packet ) - > ok | { error , Reason }
%% Compress_Socket = compress_socket()
%% Orig_Packet = binary() | list()
%% Reason = term()
%% @doc Send `Orig_Packet' over a compressed connection.
send(#compress_socket{socket = Socket_Desc, port = Port}, Packet) ->
try
Compressed = engine_compress(Port, Packet),
exmpp_internals:gen_send(Socket_Desc, Compressed)
catch
Exception ->
{error, Exception}
end.
( Compress_Socket , Orig_Data ) - > { ok , CompressedData } | { error , Reason }
%% Compress_Socket = compress_socket()
%% Orig_Data = binary() | list()
%% Reason = term()
%% @doc Compress `Orig_Data' before sending over compressed connection.
send_data(#compress_socket{port = Port}, Data) ->
try
Compressed = engine_compress(Port, Data),
{ok, Compressed}
catch
Exception ->
{error, Exception}
end.
( Compress_Socket ) - > { ok , Orig_Packet } | { error , Reason }
%% Compress_Socket = compress_socket()
%% Orig_Packet = binary() | list()
%% Reason = term()
%% @doc Receive data over a compressed connection.
recv(Compress_Socket) ->
recv(Compress_Socket, infinity).
( Compress_Socket , Timeout ) - > { ok , Orig_Packet } | { error , Reason }
%% Compress_Socket = compress_socket()
%% Timeout = integer()
%% Orig_Packet = binary() | list()
%% Reason = term()
%% @doc Receive data over a compressed connection.
recv(#compress_socket{socket = Socket_Desc} = Compress_Socket, Timeout) ->
try
case exmpp_internals:gen_recv(Socket_Desc, Timeout) of
{ok, Packet} ->
recv_data(Compress_Socket, Packet);
{error, Reason} ->
{error, Reason}
end
catch
Exception ->
{error, Exception}
end.
( Compress_Socket , Packet ) - > { ok , Orig_Packet } | { error , Reason }
%% Compress_Socket = compress_socket()
%% Packet = binary() | list()
%% Orig_Packet = binary() | list()
%% Reason = term()
@doc Uncompress already received data .
recv_data(#compress_socket{port = Port, packet_mode = Packet_Mode}, Packet) ->
try
Uncompressed = engine_uncompress(Port, Packet),
case Packet_Mode of
binary -> {ok, Uncompressed};
list -> {ok, binary_to_list(Uncompressed)}
end
catch
Exception ->
{error, Exception}
end.
( Compress_Socket , Options ) - > { ok , Option_Values } | { error , posix ( ) }
%% Compress_Socket = tls_socket()
%% Mod = atom()
%% Socket = term()
%% Options = list()
Option_Values = list ( )
@doc Sets one or more options for a socket .
getopts(#compress_socket{socket = Socket_Desc}, Options) ->
exmpp_internals:gen_getopts(Socket_Desc, Options).
( Compress_Socket , Options ) - > ok | { error , posix ( ) }
%% Compress_Socket = tls_socket()
%% Mod = atom()
%% Socket = term()
%% Options = list()
@doc Sets one or more options for a socket .
setopts(#compress_socket{socket = Socket_Desc}, Options) ->
exmpp_internals:gen_setopts(Socket_Desc, Options).
( Compress_Socket ) - > { ok , { Address , Port } } | { error , posix ( ) }
%% Compress_Socket = tls_socket()
%% Mod = atom()
%% Socket = term()
%% Address = ip_address()
%% Port = integer()
%% @doc Returns the address and port for the other end of a connection.
peername(#compress_socket{socket = Socket_Desc}) ->
exmpp_internals:gen_peername(Socket_Desc).
( Compress_Socket ) - > { ok , { Address , Port } } | { error , posix ( ) }
%% Compress_Socket = tls_socket()
%% Mod = atom()
%% Socket = term()
%% Address = ip_address()
%% Port = integer()
%% @doc Returns the local address and port number for a socket.
sockname(#compress_socket{socket = Socket_Desc}) ->
exmpp_internals:gen_sockname(Socket_Desc).
( Compress_Socket , Pid ) - > ok | { error , Reason }
%% Compress_Socket = compress_socket()
Pid = pid ( )
%% Reason = term()
%% @doc Change the controlling socket of the underlying socket.
controlling_process(#compress_socket{socket = Socket_Desc}, Pid) ->
exmpp_internals:gen_controlling_process(Socket_Desc, Pid).
( Compress_Socket ) - > ok | { error , Reason }
%% Compress_Socket = compress_socket()
%% Reason = term()
%% @doc Turn off compression and close the underlying socket.
close(#compress_socket{socket = Socket_Desc} = Compress_Socket) ->
First , turn off compression .
disable_compression(Compress_Socket),
%% Close the underlying socket.
exmpp_internals:gen_close(Socket_Desc).
%% @hidden
port_revision(#compress_socket{port = Port}) ->
engine_svn_revision(Port).
%% --------------------------------------------------------------------
%% Engine function wrappers.
%% --------------------------------------------------------------------
%% @doc Run a synchronous control command against the driver port.
%% The driver's reply starts with a status byte: 0 means success and
%% the rest is the raw result; 1 means failure and the rest is a
%% term_to_binary-encoded reason, returned as {error, Reason}.
control(Port, Command, Data) ->
case port_control(Port, Command, Data) of
<<0, Result/binary>> -> Result;
<<1, Error/binary>> -> {error, binary_to_term(Error)}
end.
engine_set_compress_method(Port, Method) ->
case control(Port, ?COMMAND_SET_COMPRESS_METHOD,
term_to_binary(Method)) of
{error, Reason} ->
throw({compress, compress, set_compress_method, Reason});
_ ->
ok
end.
engine_set_compress_level(Port, Level) ->
case control(Port, ?COMMAND_SET_COMPRESS_LEVEL,
term_to_binary(Level)) of
{error, Reason} ->
throw({compress, compress, set_compress_level, Reason});
_ ->
ok
end.
engine_prepare_compress(Port) ->
case control(Port, ?COMMAND_PREPARE_COMPRESS, <<>>) of
{error, Reason} ->
throw({compress, compress, prepare_compress, Reason});
_ ->
ok
end.
engine_prepare_uncompress(Port) ->
case control(Port, ?COMMAND_PREPARE_UNCOMPRESS, <<>>) of
{error, Reason} ->
throw({compress, compress, prepare_uncompress, Reason});
_ ->
ok
end.
%% @doc Compress Data through the driver port.  IO-lists are first
%% flattened to a binary, and the empty binary short-circuits to <<>>
%% so the driver only ever sees non-empty input.  Throws on driver
%% error.
engine_compress(Port, Data) when is_list(Data) ->
engine_compress(Port, list_to_binary(Data));
engine_compress(_Port, <<>>) ->
<<>>;
engine_compress(Port, Data) ->
case control(Port, ?COMMAND_COMPRESS, Data) of
{error, Reason} ->
throw({compress, compress, do_compress, Reason});
Result ->
Result
end.
%% @doc Inverse of engine_compress/2: inflate Data through the port,
%% with the same list-conversion and empty-binary conveniences.
engine_uncompress(Port, Data) when is_list(Data) ->
engine_uncompress(Port, list_to_binary(Data));
engine_uncompress(_Port, <<>>) ->
<<>>;
engine_uncompress(Port, Data) ->
case control(Port, ?COMMAND_UNCOMPRESS, Data) of
{error, Reason} ->
throw({compress, uncompress, do_uncompress, Reason});
Result ->
Result
end.
engine_svn_revision(Port) ->
case control(Port, ?COMMAND_SVN_REVISION, <<>>) of
{error, Reason} ->
throw({compress, handshake, svn_revision, Reason});
Revision ->
binary_to_term(Revision)
end.
%% --------------------------------------------------------------------
%% gen_server(3erl) callbacks.
%% --------------------------------------------------------------------
%% @hidden
init([]) ->
Engines = dict:new(),
By_CM = dict:new(),
{ok, #state{engines = Engines, by_compress_method = By_CM}}.
%% @hidden
%% register_engine: load the driver, store the engine under its name,
%% and index it under every supported compress method, keeping each
%% method's engine list keysort-ed ascending on the priority field
%% (get_prefered_engine/1 later takes the head of that list).  Any
%% failure is reported back to the caller as {error, Exception}.
handle_call({register_engine,
#compress_engine{name = Name,
compress_methods = Compress_Methods,
driver_path = Driver_Path,
driver = Driver_Name} = Engine},
_From,
#state{engines = Engines, by_compress_method = By_CM} = State) ->
try
%% Load the driver now.
case Driver_Path of
undefined ->
exmpp_internals:load_driver(Driver_Name);
_ ->
exmpp_internals:load_driver(Driver_Name, [Driver_Path])
end,
%% Add engine to the global list.
New_Engines = dict:store(Name, Engine, Engines),
%% Index engine by its compress methods.
Fun = fun({CM, Prio}, {E, CM_Dict}) ->
New_CM_Dict =
case dict:is_key(CM, CM_Dict) of
true ->
L = [{E, Prio} | dict:fetch(CM, CM_Dict)],
New_L = lists:keysort(2, L),
dict:store(CM, New_L, CM_Dict);
false ->
dict:store(CM, [{E, Prio}], CM_Dict)
end,
{E, New_CM_Dict}
end,
{_, New_By_CM} = lists:foldl(Fun, {Engine, By_CM}, Compress_Methods),
{reply, ok, State#state{engines = New_Engines,
by_compress_method = New_By_CM
}}
catch
_:Exception ->
{reply, {error, Exception}, State}
end;
%% All known compress methods (the index's keys).
handle_call(get_compress_methods, _From,
#state{by_compress_method = By_CM} = State) ->
{reply, dict:fetch_keys(By_CM), State};
%% Names of every registered engine.
handle_call(get_engine_names, _From,
#state{engines = Engines} = State) ->
{reply, dict:fetch_keys(Engines), State};
%% Engines supporting CM, in the priority order fixed at insertion.
handle_call({get_engines, CM}, _From,
#state{by_compress_method = By_CM} = State) ->
case dict:is_key(CM, By_CM) of
true -> {reply, [E || {E, _P} <- dict:fetch(CM, By_CM)], State};
false -> {reply, [], State}
end;
%% Single-engine lookup; undefined when the name is unknown.
handle_call({get_engine, Engine_Name}, _From,
#state{engines = Engines} = State) ->
case dict:is_key(Engine_Name, Engines) of
true -> {reply, dict:fetch(Engine_Name, Engines), State};
false -> {reply, undefined, State}
end;
%% Unknown request: log it and answer ok rather than crash.
handle_call(Request, From, State) ->
error_logger:info_msg("~p:handle_call/3:~n- Request: ~p~n- From: ~p~n"
"- State: ~p~n", [?MODULE, Request, From, State]),
{reply, ok, State}.
%% @hidden
handle_cast(Request, State) ->
error_logger:info_msg("~p:handle_cast/2:~n- Request: ~p~n"
"- State: ~p~n", [?MODULE, Request, State]),
{noreply, State}.
%% @hidden
handle_info(Info, State) ->
error_logger:info_msg("~p:handle_info/2:~n- Info: ~p~n"
"- State: ~p~n", [?MODULE, Info, State]),
{noreply, State}.
%% @hidden
code_change(Old_Vsn, State, Extra) ->
error_logger:info_msg("~p:code_change/3:~n- Old_Vsn: ~p~n- Extra: ~p~n"
"- State: ~p~n", [?MODULE, Old_Vsn, Extra, State]),
{ok, State}.
%% @hidden
terminate(_Reason, _State) ->
ok.
%% --------------------------------------------------------------------
%% Documentation / type definitions.
%% --------------------------------------------------------------------
%% @type compress_socket().
%% Compression socket obtained with {@link compress/2}.
| null | https://raw.githubusercontent.com/erlangonrails/devdb/0e7eaa6bd810ec3892bfc3d933439560620d0941/dev/exmpp-0.9.5/src/core/exmpp_compress.erl | erlang |
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
@doc
The module <strong>{@module}</strong> provides functions to handle
stream compression.
Initialization.
Registry handling.
Compression activation.
Common socket API.
gen_server(3erl) callbacks.
--------------------------------------------------------------------
Initialization.
--------------------------------------------------------------------
@hidden
@hidden
--------------------------------------------------------------------
Registry handling.
--------------------------------------------------------------------
@spec (Name, Driver, Compress_Methods) -> ok
Name = atom()
Driver = atom()
Compress_Mehods = [{atom(), Priority}]
Priority = integer()
@doc Add a new compression engine.
@spec (Name, Driver_Path, Driver, Compress_Methods) -> ok
Name = atom()
Driver_Path = string()
Driver = atom()
Compress_Mehods = [{atom(), Priority}]
Priority = integer()
@doc Add a new compression engine.
Compress_Method = atom()
@doc Return the list of supported compress methods.
Engine_Name = atom()
@doc Return the list of compression engines.
Compress_Method = atom()
Engine_Name = atom()
@doc Return the list of compression engines which support the given compress method.
The list is sorted from the most to the least prefered engine.
Compress_Method = atom()
Engine_Name = atom()
@doc Return the name of the prefered compression engines which support the
given compress method.
@spec (Engine_Name) -> bool()
Engine_Name = atom()
@doc Tell if `Engine_Name' is available.
Engine_Name = atom()
@doc Return the port driver name associated to the given engine.
--------------------------------------------------------------------
Compression activation.
--------------------------------------------------------------------
@spec (Socket_Desc, Options) -> Compress_Socket
Socket_Desc = {Mod, Socket}
Mod = atom()
Socket = term()
Options = [Option]
Option = {compress_method, Method} | {engine, Engine} | {mode, Mode} | {compress_level, Level}
Method = atom()
Engine = atom()
Mode = binary | list
Level = integer()
Compress_Socket = compress_socket()
@doc Enable compression over the given socket.
Start a port driver instance.
Set compression method.
Set compression level.
Packet mode.
Enable compression.
Compress_Socket = compress_socket()
Socket_Desc = {Mod, Socket}
Mod = atom()
Socket = term()
@doc Disable compression and return the underlying socket.
--------------------------------------------------------------------
Activation helpers.
--------------------------------------------------------------------
Choose the most appropriate engine.
--------------------------------------------------------------------
Common socket API.
--------------------------------------------------------------------
Compress_Socket = compress_socket()
Orig_Packet = binary() | list()
Reason = term()
@doc Send `Orig_Packet' over a compressed connection.
Compress_Socket = compress_socket()
Orig_Data = binary() | list()
Reason = term()
@doc Compress `Orig_Data' before sending over compressed connection.
Compress_Socket = compress_socket()
Orig_Packet = binary() | list()
Reason = term()
@doc Receive data over a compressed connection.
Compress_Socket = compress_socket()
Timeout = integer()
Orig_Packet = binary() | list()
Reason = term()
@doc Receive data over a compressed connection.
Compress_Socket = compress_socket()
Packet = binary() | list()
Orig_Packet = binary() | list()
Reason = term()
Compress_Socket = tls_socket()
Mod = atom()
Socket = term()
Options = list()
Compress_Socket = tls_socket()
Mod = atom()
Socket = term()
Options = list()
Compress_Socket = tls_socket()
Mod = atom()
Socket = term()
Address = ip_address()
Port = integer()
@doc Returns the address and port for the other end of a connection.
Compress_Socket = tls_socket()
Mod = atom()
Socket = term()
Address = ip_address()
Port = integer()
@doc Returns the local address and port number for a socket.
Compress_Socket = compress_socket()
Reason = term()
@doc Change the controlling socket of the underlying socket.
Compress_Socket = compress_socket()
Reason = term()
@doc Turn off compression and close the underlying socket.
Close the underlying socket.
@hidden
--------------------------------------------------------------------
Engine function wrappers.
--------------------------------------------------------------------
--------------------------------------------------------------------
gen_server(3erl) callbacks.
--------------------------------------------------------------------
@hidden
@hidden
Load the driver now.
Add engine to the global list.
Index engine by its compress methods.
@hidden
@hidden
@hidden
@hidden
--------------------------------------------------------------------
Documentation / type definitions.
--------------------------------------------------------------------
@type compress_socket().
Compression socket obtained with {@link compress/2}. | Copyright ProcessOne 2006 - 2010 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
@author < >
-module(exmpp_compress).
-behaviour(gen_server).
-export([
start/0,
start_link/0
]).
-export([
register_engine/3,
register_engine/4,
get_compress_methods/0,
get_engine_names/0,
get_engine_names/1,
get_prefered_engine_name/1,
is_engine_available/1,
get_engine_driver/1
]).
-export([
enable_compression/2,
disable_compression/1
]).
-export([
send/2,
recv/1,
recv/2,
getopts/2,
setopts/2,
peername/1,
sockname/1,
controlling_process/2,
close/1,
port_revision/1,
recv_data/2,
send_data/2
]).
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-record(state, {engines,
by_compress_method
}).
-record(compress_engine, {name,
driver_path,
driver,
compress_methods = []
}).
-record(compress_socket, {socket,
packet_mode = binary,
port
}).
-define(SERVER, ?MODULE).
-define(DEFAULT_ENGINE, zlib).
-define(COMMAND_SET_COMPRESS_METHOD, 1).
-define(COMMAND_SET_COMPRESS_LEVEL, 2).
-define(COMMAND_PREPARE_COMPRESS, 3).
-define(COMMAND_PREPARE_UNCOMPRESS, 4).
-define(COMMAND_COMPRESS, 5).
-define(COMMAND_UNCOMPRESS, 6).
-define(COMMAND_SVN_REVISION, 7).
start() ->
Ret = gen_server:start({local, ?SERVER}, ?MODULE, [], []),
register_builtin_engines(),
Ret.
start_link() ->
Ret = gen_server:start_link({local, ?SERVER}, ?MODULE, [], []),
register_builtin_engines(),
Ret.
-ifdef(HAVE_ZLIB).
-define(REGISTER_ZLIB,
register_builtin_engine(zlib, exmpp_compress_zlib,
[{zlib, 10}, {gzip, 10}])).
-else.
-define(REGISTER_ZLIB, ok).
-endif.
register_builtin_engines() ->
?REGISTER_ZLIB,
ok.
register_builtin_engine(Name, Driver, Compress_Methods) ->
try
register_engine(Name, Driver, Compress_Methods)
catch
throw:{port_driver, load, Reason, Driver_Name} ->
error_logger:warning_msg("Failed to load driver \"~s\": ~s~n",
[Driver_Name,
erl_ddll:format_error(Reason)])
end.
register_engine(Name, Driver, Compress_Methods) ->
register_engine(Name, undefined, Driver, Compress_Methods).
register_engine(Name, Driver_Path, Driver, Compress_Methods)
when is_atom(Name), is_list(Compress_Methods), length(Compress_Methods) > 0 ->
Engine = #compress_engine{name = Name,
driver_path = Driver_Path,
driver = Driver,
compress_methods = Compress_Methods
},
case gen_server:call(?SERVER, {register_engine, Engine}) of
ok -> ok;
{error, Exception} -> throw(Exception)
end.
( ) - > [ Compress_Method ]
get_compress_methods() ->
gen_server:call(?SERVER, get_compress_methods).
( ) - > [ Engine_Name ]
get_engine_names() ->
gen_server:call(?SERVER, get_engine_names).
( Compress_Method ) - > [ Engine_Name ]
get_engine_names(Compress_Method) ->
Engines = gen_server:call(?SERVER, {get_engines, Compress_Method}),
[E#compress_engine.name || E <- Engines].
( Compress_Method ) - > [ Engine_Name ]
get_prefered_engine_name(Compress_Method) ->
case get_prefered_engine(Compress_Method) of
undefined -> undefined;
Engine -> Engine#compress_engine.name
end.
get_prefered_engine(Compress_Method) ->
Engines = gen_server:call(?SERVER, {get_engines, Compress_Method}),
case Engines of
[] -> undefined;
[Engine | _] -> Engine
end.
is_engine_available(Engine_Name) ->
case gen_server:call(?SERVER, {get_engine, Engine_Name}) of
undefined -> false;
_ -> true
end.
@spec ( Engine_Name ) - >
= atom ( )
get_engine_driver(Engine_Name) ->
case gen_server:call(?SERVER, {get_engine, Engine_Name}) of
undefined -> undefined;
#compress_engine{driver = Driver_Name} -> Driver_Name
end.
enable_compression(Socket_Desc, Options) ->
Driver_Name = get_engine_from_options(Options),
Port = exmpp_internals:open_port(Driver_Name),
Initialize the port .
try
case proplists:get_value(compress_method, Options) of
undefined -> ok;
CM -> engine_set_compress_method(Port, CM)
end,
case proplists:get_value(compress_level, Options) of
undefined -> ok;
Level -> engine_set_compress_level(Port, Level)
end,
Packet_Mode = proplists:get_value(mode, Options, binary),
engine_prepare_compress(Port),
engine_prepare_uncompress(Port),
#compress_socket{socket = Socket_Desc,
packet_mode = Packet_Mode,
port = Port}
catch
_:Exception ->
exmpp_internals:close_port(Port),
throw(Exception)
end.
( Compress_Socket ) - > Socket_Desc
disable_compression(#compress_socket{socket = Socket_Desc, port = Port}) ->
exmpp_internals:close_port(Port),
Socket_Desc.
get_engine_from_options(Options) ->
Engine_Name =
case proplists:get_value(engine, Options) of
undefined ->
case proplists:get_value(compress_method, Options) of
undefined ->
case get_engine_names() of
[] ->
throw({compress, options, no_engine_available,
undefined});
[Name | _] = Names ->
case lists:member(?DEFAULT_ENGINE, Names) of
true -> ?DEFAULT_ENGINE;
false -> Name
end
end;
CM ->
get_prefered_engine_name(CM)
end;
Name ->
case is_engine_available(Name) of
true ->
Name;
false ->
throw({compress, options, engine_unavailable, Name})
end
end,
get_engine_driver(Engine_Name).
( Compress_Socket , Orig_Packet ) - > ok | { error , Reason }
send(#compress_socket{socket = Socket_Desc, port = Port}, Packet) ->
try
Compressed = engine_compress(Port, Packet),
exmpp_internals:gen_send(Socket_Desc, Compressed)
catch
Exception ->
{error, Exception}
end.
( Compress_Socket , Orig_Data ) - > { ok , CompressedData } | { error , Reason }
send_data(#compress_socket{port = Port}, Data) ->
try
Compressed = engine_compress(Port, Data),
{ok, Compressed}
catch
Exception ->
{error, Exception}
end.
( Compress_Socket ) - > { ok , Orig_Packet } | { error , Reason }
recv(Compress_Socket) ->
recv(Compress_Socket, infinity).
( Compress_Socket , Timeout ) - > { ok , Orig_Packet } | { error , Reason }
recv(#compress_socket{socket = Socket_Desc} = Compress_Socket, Timeout) ->
try
case exmpp_internals:gen_recv(Socket_Desc, Timeout) of
{ok, Packet} ->
recv_data(Compress_Socket, Packet);
{error, Reason} ->
{error, Reason}
end
catch
Exception ->
{error, Exception}
end.
( Compress_Socket , Packet ) - > { ok , Orig_Packet } | { error , Reason }
@doc Uncompress already received data .
recv_data(#compress_socket{port = Port, packet_mode = Packet_Mode}, Packet) ->
try
Uncompressed = engine_uncompress(Port, Packet),
case Packet_Mode of
binary -> {ok, Uncompressed};
list -> {ok, binary_to_list(Uncompressed)}
end
catch
Exception ->
{error, Exception}
end.
( Compress_Socket , Options ) - > { ok , Option_Values } | { error , posix ( ) }
Option_Values = list ( )
@doc Sets one or more options for a socket .
getopts(#compress_socket{socket = Socket_Desc}, Options) ->
exmpp_internals:gen_getopts(Socket_Desc, Options).
( Compress_Socket , Options ) - > ok | { error , posix ( ) }
@doc Sets one or more options for a socket .
%% @doc Set one or more options on the underlying transport socket.
setopts(#compress_socket{socket = Socket_Desc}, Options) ->
    exmpp_internals:gen_setopts(Socket_Desc, Options).
( Compress_Socket ) - > { ok , { Address , Port } } | { error , posix ( ) }
%% @doc Return the address and port of the remote peer.
peername(#compress_socket{socket = Socket_Desc}) ->
    exmpp_internals:gen_peername(Socket_Desc).
( Compress_Socket ) - > { ok , { Address , Port } } | { error , posix ( ) }
%% @doc Return the local address and port of the socket.
sockname(#compress_socket{socket = Socket_Desc}) ->
    exmpp_internals:gen_sockname(Socket_Desc).
( Compress_Socket , Pid ) - > ok | { error , Reason }
Pid = pid ( )
%% @doc Assign a new controlling process (message recipient) for the
%% underlying socket.
controlling_process(#compress_socket{socket = Socket_Desc}, Pid) ->
    exmpp_internals:gen_controlling_process(Socket_Desc, Pid).
( Compress_Socket ) - > ok | { error , Reason }
%% @doc Turn compression off, then close the underlying socket.
close(#compress_socket{socket = Socket_Desc} = Compress_Socket) ->
    %% First, turn off compression.
    disable_compression(Compress_Socket),
    exmpp_internals:gen_close(Socket_Desc).
%% @doc Return the SVN revision reported by the compression port driver.
port_revision(#compress_socket{port = Port}) ->
    engine_svn_revision(Port).
%% @doc Send a control command to the port driver. The driver replies
%% with a status byte: 0 followed by the raw result, or 1 followed by an
%% Erlang-term-encoded error reason.
control(Port, Command, Data) ->
    case port_control(Port, Command, Data) of
        <<0, Result/binary>> -> Result;
        <<1, Error/binary>> -> {error, binary_to_term(Error)}
    end.
%% @doc Tell the port driver which compression method to use.
%% Throws on driver error.
engine_set_compress_method(Port, Method) ->
    case control(Port, ?COMMAND_SET_COMPRESS_METHOD,
                 term_to_binary(Method)) of
        {error, Reason} ->
            throw({compress, compress, set_compress_method, Reason});
        _ ->
            ok
    end.
%% @doc Tell the port driver which compression level to use.
%% Throws on driver error.
engine_set_compress_level(Port, Level) ->
    case control(Port, ?COMMAND_SET_COMPRESS_LEVEL,
                 term_to_binary(Level)) of
        {error, Reason} ->
            throw({compress, compress, set_compress_level, Reason});
        _ ->
            ok
    end.
%% @doc Ask the driver to initialize its compression (deflate) state.
%% Throws on driver error.
engine_prepare_compress(Port) ->
    case control(Port, ?COMMAND_PREPARE_COMPRESS, <<>>) of
        {error, Reason} ->
            throw({compress, compress, prepare_compress, Reason});
        _ ->
            ok
    end.
%% @doc Ask the driver to initialize its decompression (inflate) state.
%% Throws on driver error.
engine_prepare_uncompress(Port) ->
    case control(Port, ?COMMAND_PREPARE_UNCOMPRESS, <<>>) of
        {error, Reason} ->
            throw({compress, compress, prepare_uncompress, Reason});
        _ ->
            ok
    end.
%% @doc Compress `Data' through the port driver. Lists are converted to
%% binaries first; the empty binary is passed through unchanged.
%% Throws on driver error.
engine_compress(Port, Data) when is_list(Data) ->
    engine_compress(Port, list_to_binary(Data));
engine_compress(_Port, <<>>) ->
    <<>>;
engine_compress(Port, Data) ->
    case control(Port, ?COMMAND_COMPRESS, Data) of
        {error, Reason} ->
            throw({compress, compress, do_compress, Reason});
        Result ->
            Result
    end.
%% @doc Uncompress `Data' through the port driver. Lists are converted
%% to binaries first; the empty binary is passed through unchanged.
%% Throws on driver error.
engine_uncompress(Port, Data) when is_list(Data) ->
    engine_uncompress(Port, list_to_binary(Data));
engine_uncompress(_Port, <<>>) ->
    <<>>;
engine_uncompress(Port, Data) ->
    case control(Port, ?COMMAND_UNCOMPRESS, Data) of
        {error, Reason} ->
            throw({compress, uncompress, do_uncompress, Reason});
        Result ->
            Result
    end.
%% @doc Query the port driver for its SVN revision.
%% NOTE(review): the throw tag uses `handshake' while sibling functions
%% use `compress'/`uncompress' — possibly copied from another module;
%% confirm whether callers match on this tag before changing it.
engine_svn_revision(Port) ->
    case control(Port, ?COMMAND_SVN_REVISION, <<>>) of
        {error, Reason} ->
            throw({compress, handshake, svn_revision, Reason});
        Revision ->
            binary_to_term(Revision)
    end.
%% @doc gen_server init: start with empty engine registries, one dict
%% keyed by engine name and one keyed by compress method.
init([]) ->
    Engines = dict:new(),
    By_CM = dict:new(),
    {ok, #state{engines = Engines, by_compress_method = By_CM}}.
%% @doc gen_server call handler.
%% `register_engine': load the engine's driver, store the engine by
%% name, and index it under each compress method it supports, keeping
%% the per-method engine lists sorted by priority.
handle_call({register_engine,
             #compress_engine{name = Name,
                              compress_methods = Compress_Methods,
                              driver_path = Driver_Path,
                              driver = Driver_Name} = Engine},
            _From,
            #state{engines = Engines, by_compress_method = By_CM} = State) ->
    try
        %% Load the driver, optionally from an explicit path.
        case Driver_Path of
            undefined ->
                exmpp_internals:load_driver(Driver_Name);
            _ ->
                exmpp_internals:load_driver(Driver_Name, [Driver_Path])
        end,
        New_Engines = dict:store(Name, Engine, Engines),
        %% Fold over the supported methods, adding {Engine, Prio} to
        %% each method's list and re-sorting by priority (element 2).
        Fun = fun({CM, Prio}, {E, CM_Dict}) ->
                      New_CM_Dict =
                          case dict:is_key(CM, CM_Dict) of
                              true ->
                                  L = [{E, Prio} | dict:fetch(CM, CM_Dict)],
                                  New_L = lists:keysort(2, L),
                                  dict:store(CM, New_L, CM_Dict);
                              false ->
                                  dict:store(CM, [{E, Prio}], CM_Dict)
                          end,
                      {E, New_CM_Dict}
              end,
        {_, New_By_CM} = lists:foldl(Fun, {Engine, By_CM}, Compress_Methods),
        {reply, ok, State#state{engines = New_Engines,
                                by_compress_method = New_By_CM
                               }}
    catch
        _:Exception ->
            {reply, {error, Exception}, State}
    end;
%% List all known compress methods.
handle_call(get_compress_methods, _From,
            #state{by_compress_method = By_CM} = State) ->
    {reply, dict:fetch_keys(By_CM), State};
%% List all registered engine names.
handle_call(get_engine_names, _From,
            #state{engines = Engines} = State) ->
    {reply, dict:fetch_keys(Engines), State};
%% List engines supporting a given compress method (priority order).
handle_call({get_engines, CM}, _From,
            #state{by_compress_method = By_CM} = State) ->
    case dict:is_key(CM, By_CM) of
        true -> {reply, [E || {E, _P} <- dict:fetch(CM, By_CM)], State};
        false -> {reply, [], State}
    end;
%% Look up a single engine by name; `undefined' if unknown.
handle_call({get_engine, Engine_Name}, _From,
            #state{engines = Engines} = State) ->
    case dict:is_key(Engine_Name, Engines) of
        true -> {reply, dict:fetch(Engine_Name, Engines), State};
        false -> {reply, undefined, State}
    end;
%% Catch-all: log unexpected requests and reply `ok'.
handle_call(Request, From, State) ->
    error_logger:info_msg("~p:handle_call/3:~n- Request: ~p~n- From: ~p~n"
                          "- State: ~p~n", [?MODULE, Request, From, State]),
    {reply, ok, State}.
%% @doc gen_server cast handler: no casts are expected; log and ignore.
handle_cast(Request, State) ->
    error_logger:info_msg("~p:handle_cast/2:~n- Request: ~p~n"
                          "- State: ~p~n", [?MODULE, Request, State]),
    {noreply, State}.
%% @doc gen_server info handler: no messages are expected; log and ignore.
handle_info(Info, State) ->
    error_logger:info_msg("~p:handle_info/2:~n- Info: ~p~n"
                          "- State: ~p~n", [?MODULE, Info, State]),
    {noreply, State}.
%% @doc Hot code upgrade callback: log the event and keep the state as-is.
code_change(Old_Vsn, State, Extra) ->
    error_logger:info_msg("~p:code_change/3:~n- Old_Vsn: ~p~n- Extra: ~p~n"
                          "- State: ~p~n", [?MODULE, Old_Vsn, Extra, State]),
    {ok, State}.
%% @doc gen_server termination callback; no cleanup is required.
terminate(_, _) ->
    ok.
|
94865d4cafcc257ee5fd88ed507d217a2710ff3a1660c627ffa1d5488f5fb8af | pkpkpk/fress | bytestream.clj | (ns fress.impl.bytestream
(:import java.nio.ByteBuffer
org.fressian.impl.BytesOutputStream)
(:gen-class
:implements [clojure.lang.IDeref]
:extends org.fressian.impl.BytesOutputStream))
(defn -deref [^BytesOutputStream this]
  ;; IDeref implementation: wrap the stream's internal byte array in a
  ;; ByteBuffer over the written range [0, length) — no copying.
  (ByteBuffer/wrap (.internalBuffer this) 0 (.length this)))
| null | https://raw.githubusercontent.com/pkpkpk/fress/7ed0f063692263f1209ec05ffd740afaf54a0157/src/main/clj/fress/impl/bytestream.clj | clojure | (ns fress.impl.bytestream
(:import java.nio.ByteBuffer
org.fressian.impl.BytesOutputStream)
(:gen-class
:implements [clojure.lang.IDeref]
:extends org.fressian.impl.BytesOutputStream))
(defn -deref [^BytesOutputStream this]
(ByteBuffer/wrap (.internalBuffer this) 0 (.length this)))
|
|
e817642eb32d37c40e0a7a1da61106d9be7ebaa3ba2c362de32a29f33ace0e40 | triffon/fp-2022-23 | 03.count-palindromes.rkt | #lang racket
(require rackunit)
(require rackunit/text-ui)
(require "common.03.rkt")
# # # Зад 3
интервала [ a , b ] .
;; Counts how many integers in the closed interval [a, b] are decimal
;; palindromes (read the same forwards and backwards, e.g. 7, 44, 101).
;; The original was an unimplemented stub returning 'тук ("here");
;; expected results are pinned by the test suite below this definition.
(define (count-palindromes a b)
  ;; A number is a palindrome when its digit string equals its reverse.
  (define (palindrome? n)
    (let ([digits (number->string n)])
      (equal? digits (list->string (reverse (string->list digits))))))
  (for/sum ([n (in-range a (add1 b))]
            #:when (palindrome? n))
    1))
;; Regression tests: expected counts come with the exercise statement.
(run-tests
 (test-suite "count-palindromes tests"
    (check-eq? (count-palindromes 100 200)
               10)
    (check-eq? (count-palindromes 1 200)
               28)
    (check-eq? (count-palindromes 1 10000)
               198))
 'verbose)
| null | https://raw.githubusercontent.com/triffon/fp-2022-23/27f1575529dfd29ac756303e40c95cd3bd9098dc/exercises/cs2/03.scheme.hof-accumulate/03.count-palindromes.rkt | racket | #lang racket
(require rackunit)
(require rackunit/text-ui)
(require "common.03.rkt")
# # # Зад 3
интервала [ a , b ] .
(define (count-palindromes a b)
'тук)
(run-tests
(test-suite "count-palindromes tests"
(check-eq? (count-palindromes 100 200)
10)
(check-eq? (count-palindromes 1 200)
28)
(check-eq? (count-palindromes 1 10000)
198))
'verbose)
|
|
0c9d6d0df800a7b9ad970221c52f47d5c7fad311c25e9774f7c7d9400d03c4e3 | damn/cdq | update_ingame.clj | (ns game.update-ingame
(:require (engine [input :as input]
[statebasedgame :as state]))
(:use [game.settings :only (get-setting debug-mode)]
[game.ingame-gui :only (some-visible-frame? close-all-frames options-hotkey)]
(game.components [core :only (update-removelist player-body)]
[destructible :only (is-dead?)]
[ingame-loop :only (get-ingame-loop-entities)]
update)
(game.maps [data :only (iterating-map-dependent-comps get-current-map-data)]
[mapchange :only (check-change-map)]
[contentfields :only (get-entities-in-active-content-fields)])
(game.item [instance :only (put-item-on-ground)]
[cells :only (is-item-in-hand?)]
[update :only (update-items)])
[game.player.core :only (try-revive-player player-death)]
[game.state.main :only (mainmenu-gamestate)]
game.player.skill.selection-list))
(defn update-game
  "Per-frame game update. Handles global hotkeys (options, minimap,
  debug toggle, pause) and, while the game is running, updates items,
  map-independent components, then map-dependent components, finally
  checking for player death and map change. `delta` is the elapsed
  frame time."
  [delta]
  (when (input/is-key-pressed? options-hotkey)
    (cond
      ; when game is paused and/or the player is dead, let player be able to drop item-in-hand?
      ; or drop it automatically when dead?
      ; need to drop it here else in options menu it is still item-in-hand at cursor!
      (is-item-in-hand?) (put-item-on-ground)
      ; revive first before closing GUI?
      (some-skill-selection-list-visible?) (close-skill-selection-lists)
      (is-dead? player-body) (when-not (try-revive-player)
                               (state/enter-state mainmenu-gamestate))
      :else (state/enter-state game.state.ids/options)))
  (when (input/is-key-pressed? :TAB)
    (state/enter-state game.state.ids/minimap))
  (when (and (get-setting :debug-mode)
             (input/is-key-pressed? :D))
    (swap! debug-mode not))
  (when (and (get-setting :is-pausable)
             (input/is-key-pressed? :P))
    (swap! running not))
  (when @running
    (input/update-mousebutton-state)
    (update-removelist)
    ; drag & drop is updated BEFORE the gui update, since the gui consumes all input
    (update-items) ; items before components, since items may consume the left-mouse click
    ; map-independent components first, because:
    ; - the gui may consume input before the player does (player-body is in map-dependent comps)
    ; - the current map fields are gathered here and used when updating map-dependent comps
    (try
      (update-active-components delta (get-ingame-loop-entities))
      (catch Throwable t
        (println "Catched throwable: " t)
        (reset! running false)))
    (reset! iterating-map-dependent-comps true)
    (try
      (update-active-components delta (get-entities-in-active-content-fields))
      (catch Throwable t
        (println "Catched throwable: " t)
        (reset! running false)))
    (reset! iterating-map-dependent-comps false)
    (when (is-dead? player-body)
      (player-death))
    (check-change-map)))
|
5780789e444946473fcaa8d09a79e3b81a835a19e6ed2844ab8b82b1b4d79619 | Stratus3D/programming_erlang_exercises | nano_client.erl | -module(nano_client).
-export([send/3]).
%% @doc UDP "RPC" client: encode {Mod, Func, Args} with term_to_binary,
%% send it to the server on localhost:2345, and wait up to 2 seconds
%% for a reply. Returns the decoded reply term or {error, timeout}.
%% The socket is always closed before returning.
send(Mod, Func, Args) ->
    {ok, Socket} = gen_udp:open(0, [binary]),
    ok = gen_udp:send(Socket, "localhost", 2345, term_to_binary({Mod, Func, Args})),
    Value = receive
                %% Match only on replies from our own socket.
                {udp, Socket, _, _, Bin} ->
                    io:format("Client received binary = ~p~n", [Bin]),
                    Val = binary_to_term(Bin),
                    io:format("Client result = ~p~n", [Val]),
                    Val
            after 2000 ->
                    {error, timeout}
            end,
    gen_udp:close(Socket),
    Value.
| null | https://raw.githubusercontent.com/Stratus3D/programming_erlang_exercises/e4fd01024812059d338facc20f551e7dff4dac7e/chapter_17/exercise_3/nano_client.erl | erlang | -module(nano_client).
-export([send/3]).
send(Mod, Func, Args) ->
{ok, Socket} = gen_udp:open(0, [binary]),
ok = gen_udp:send(Socket, "localhost", 2345, term_to_binary({Mod, Func, Args})),
Value = receive
{udp, Socket, _, _, Bin} ->
io:format("Client received binary = ~p~n", [Bin]),
Val = binary_to_term(Bin),
io:format("Client result = ~p~n", [Val]),
Val
after 2000 ->
{error, timeout}
end,
gen_udp:close(Socket),
Value.
|
|
54d6f3309df02d076a45db5885a6463a884a30e3c45aa5042b51ad1ac06883fd | samrocketman/home | noclayto-hue-change-animator.scm | ;
; The GIMP -- an image manipulation program
Copyright ( C ) 1995 and
;
Hue Change Animator script for GIMP 2.4
; Original author: noclayto <www.gimptalk.com>
;
; Tags: animation
;
; Author statement:
;
;; Will cycle through the HUE-MODE by a step.
;; This script is mainly for learning. Use at your own risk.
;
; --------------------------------------------------------------------
Distributed by Gimp FX Foundry project
; --------------------------------------------------------------------
; - Changelog -
;
; --------------------------------------------------------------------
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Builds an animation by duplicating the drawable once per hue step:
;; first with hue shifts of step, 2*step, ... up to (but not including)
;; +180 degrees, then wrapping around through the negative range back
;; toward 0. Each copy is added as a new top layer inside one undo group.
(define (script-fu-hue-change-animator img drawable step)
  (let* (
        (counter step)
        (new-layer drawable)
        )
    (gimp-image-undo-group-start img)
    ;;(gimp-image-raise-layer-to-top img drawable) ;;error???
    ;; Positive hue shifts: step, 2*step, ... < 180.
    (while (< counter 180)
           (set! new-layer (car (gimp-layer-copy drawable TRUE)))
           (gimp-image-add-layer img new-layer -1)
           (gimp-hue-saturation new-layer 0 counter 0 0)
           (set! counter (+ counter step))
           )
    ;; Wrap into the negative range and continue up to 0.
    (set! counter (- counter 360))
    (while (< counter 0)
           (set! new-layer (car (gimp-layer-copy drawable TRUE)))
           (gimp-image-add-layer img new-layer -1)
           (gimp-hue-saturation new-layer 0 counter 0 0)
           (set! counter (+ counter step))
           )
    (gimp-image-undo-group-end img)
    (gimp-displays-flush)))
;; Register the script with GIMP under FX-Foundry/Animation.
;; "Color Step" is the per-frame hue increment in degrees (1..360).
(script-fu-register "script-fu-hue-change-animator"
                    _"<Image>/FX-Foundry/Animation/Hue Changer ..."
                    ""
                    "noclayto"
                    "noclayto"
                    "July 2005"
                    ""
                    SF-IMAGE "Image" 0
                    SF-DRAWABLE "Drawable" 0
                    SF-ADJUSTMENT _"Color Step" '(45 1 360 1 10 0 1)
                    )
| null | https://raw.githubusercontent.com/samrocketman/home/63a8668a71dc594ea9ed76ec56bf8ca43b2a86ca/dotfiles/.gimp/scripts/noclayto-hue-change-animator.scm | scheme |
The GIMP -- an image manipulation program
Original author: noclayto <www.gimptalk.com>
Tags: animation
Author statement:
Will cycle through the HUE-MODE by a step.
This script is mainly for learning. Use at your own risk.
--------------------------------------------------------------------
--------------------------------------------------------------------
- Changelog -
--------------------------------------------------------------------
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
(gimp-image-raise-layer-to-top img drawable) ;;error??? | Copyright ( C ) 1995 and
Hue Change Animator script for GIMP 2.4
Distributed by Gimp FX Foundry project
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(define (script-fu-hue-change-animator img drawable step)
(let* (
(counter step)
(new-layer drawable)
)
(gimp-image-undo-group-start img)
(while (< counter 180)
(set! new-layer (car (gimp-layer-copy drawable TRUE)))
(gimp-image-add-layer img new-layer -1)
(gimp-hue-saturation new-layer 0 counter 0 0)
(set! counter (+ counter step))
)
(set! counter (- counter 360))
(while (< counter 0)
(set! new-layer (car (gimp-layer-copy drawable TRUE)))
(gimp-image-add-layer img new-layer -1)
(gimp-hue-saturation new-layer 0 counter 0 0)
(set! counter (+ counter step))
)
(gimp-image-undo-group-end img)
(gimp-displays-flush)))
(script-fu-register "script-fu-hue-change-animator"
_"<Image>/FX-Foundry/Animation/Hue Changer ..."
""
"noclayto"
"noclayto"
"July 2005"
""
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-ADJUSTMENT _"Color Step" '(45 1 360 1 10 0 1)
)
|
e4fd0675110e42c1935b7ce96b1c4a9f5b09bd0d370d070200cdffdf56366839 | basho/riak_core | hashtree_tree.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2013 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc This module implements a specialized hash tree that is used
%% primarily by cluster metadata's anti-entropy exchanges and by
%% metadata clients for determining when groups of metadata keys have
%% changed locally. The tree can be used, generally, for determining
%% the differences in groups of keys, or to find missing groups, between
two stores .
%%
%% Each node of the tree is itself a hash tree, specifically a {@link
%% hashtree}. The tree has a fixed height but each node has a
%% variable amount of children. The height of the tree directly
%% corresponds to the number of prefixes supported by the tree. A list
%% of prefixes, or a "prefix list", represent a group of keys. Each
%% unique prefix list is a node in the tree. The leaves store hashes
%% for the individual keys in the segments of the node's {@link
%% hashtree}. The buckets of the leaves' hashtree provide an efficient
way of determining when keys in the segments differ between two
%% trees. The tails of the prefix list are used to roll up groups
%% into parent groups. For example, the prefixes `[a, b]', `[a, c]',
%% `[d, e]' will be rolled up into parent groups `a', containing `c'
%% and `b', and `d', containing only 'e'. The parent group's node has
%% children corresponding to each child group. The top-hashes of the
%% child nodes are stored in the parent nodes' segments. The parent
%% nodes' buckets are used as an efficient method for determining when
child groups differ between two trees . The root node corresponds to
%% the empty list and it acts like any other node, storing hashes for
the first level of child groups . The top hash of the root node is
%% the top hash of the tree.
%%
%% The tree in the example above might store something like:
%%
%% node parent top-hash segments
%% ---------------------------------------------------
root none 1 [ { a , 2 } , { d , 3 } ]
[ a ] root 2 [ { b , 4 } , { c , 5 } ]
[ d ] root 3 [ { e , 6 } ]
[ a , b ] [ a ] 4 [ { k1 , 0 } , { k2 , 6 } , ... ]
[ a , c ] [ a ] 5 [ { k1 , 1 } , { k2 , 4 } , ... ]
[ d , e ] [ d ] 6 [ { k1 , 2 } , { k2 , 3 } , ... ]
%%
%%
%% When a key is inserted into the tree it is inserted into the leaf
%% corresponding to the given prefix list. The leaf and its parents
%% are not updated at this time. Instead the leaf is added to a dirty
%% set. The nodes are later updated in bulk.
%%
Updating the hashtree is a two step process . First , a snapshot of
%% the tree must be obtained. This prevents new writes from affecting
%% the update. Snapshotting the tree will snapshot each dirty
%% leaf. Since writes to nodes other than leaves only occur during
updates no snapshot is taken for them . Second , the tree is updated
using the snapshot . The update is performed by updating the { @link
%% hashtree} nodes at each level starting with the leaves. The top
%% hash of each node in a level is inserted into its parent node after
%% being updated. The list of dirty parents is then updated, moving up
%% the tree. Once the root is reached and has been updated the process
%% is complete. This process is designed to minimize the traversal of
%% the tree and ensure that each node is only updated once.
%%
%% The typical use for updating a tree is to compare it with another
%% recently updated tree. Comparison is done with the ``compare/4''
%% function. Compare provides a sort of fold over the differences of
%% the tree allowing for callers to determine what to do with those
%% differences. In addition, the caller can accumulate a value, such
%% as the difference list or stats about differencces.
%%
%% The tree implemented in this module assumes that it will be managed
%% by a single process and that all calls will be made to it synchronously, with
%% a couple exceptions:
%%
1 . Updating a tree with a snapshot can be done in another process . The snapshot
%% must be taken by the owning process, synchronously.
2 . Comparing two trees may be done by a seperate process . Compares should should use
%% a snapshot and only be performed after an update.
%%
%% The nodes in this tree are backend by LevelDB, however, this is
most likely temporary and Cluster Metadata 's use of the tree is
%% ephemeral. Trees are only meant to live for the lifetime of a
%% running node and are rebuilt on start. To ensure the tree is fresh
%% each time, when nodes are created the backing LevelDB store is
%% opened, closed, and then re-opened to ensure any lingering files
%% are removed. Additionally, the nodes themselves (references to
{ @link hashtree } , are stored in { @link ets } .
-module(hashtree_tree).
-export([new/2,
destroy/1,
insert/4,
insert/5,
update_snapshot/1,
update_perform/1,
local_compare/2,
compare/4,
top_hash/1,
prefix_hash/2,
get_bucket/4,
key_hashes/3]).
-export_type([tree/0, tree_node/0, handler_fun/1, remote_fun/0]).
-type hashtree_gb_set() :: gb_sets:set().
-record(hashtree_tree, {
%% the identifier for this tree. used as part of the ids
%% passed to hashtree.erl and in keys used to store nodes in
%% the tree's ets tables.
id :: term(),
%% directory where nodes are stored on disk
data_root :: file:name_all(),
%% number of levels in the tree excluding leaves (height - 1)
num_levels :: non_neg_integer(),
%% ets table that holds hashtree nodes in the tree
nodes :: ets:tab(),
%% ets table that holds snapshot nodes
snapshot :: ets:tab(),
%% set of dirty leaves
dirty :: hashtree_gb_set()
}).
-define(ROOT, '$ht_root').
-define(NUM_LEVELS, 2).
-opaque tree() :: #hashtree_tree{}.
-type prefix() :: atom() | binary().
-type prefixes() :: [prefix()].
-opaque tree_node() :: prefixes() | ?ROOT.
-type prefix_diff() :: {missing_prefix, local | remote, prefixes()}.
-type key_diffs() :: {key_diffs, prefixes(),[{missing |
remote_missing |
different, binary()}]}.
-type diff() :: prefix_diff() | key_diffs().
-type handler_fun(X) :: fun((diff(), X) -> X).
-type remote_fun() :: fun((prefixes(),
{get_bucket, {integer(), integer()}} |
{key_hashses, integer()}) -> orddict:orddict()).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Creates a new hashtree.
%%
%% Takes the following options:
%% * num_levels - the height of the tree excluding leaves. corresponds to the
%% length of the prefix list passed to {@link insert/5}.
%% * data_dir - the directory where the LevelDB instances for the nodes will
%% be stored.
-type new_opt_num_levels() :: {num_levels, non_neg_integer()}.
-type new_opt_data_dir() :: {data_dir, file:name_all()}.
-type new_opt() :: new_opt_num_levels() | new_opt_data_dir().
-type new_opts() :: [new_opt()].
-spec new(term(), new_opts()) -> tree().
%% @doc Create a tree: resolve options, allocate the ETS table holding
%% the hashtree nodes, and eagerly create the root node.
new(TreeId, Opts) ->
    NumLevels = proplists:get_value(num_levels, Opts, ?NUM_LEVELS),
    DataRoot = data_root(Opts),
    Tree = #hashtree_tree{id = TreeId,
                          data_root = DataRoot,
                          num_levels = NumLevels,
                          %% table needs to be public to allow async update
                          nodes = ets:new(undefined, [public]),
                          snapshot = undefined,
                          dirty = gb_sets:new()},
    %% Creating the root node up front ensures the tree is never empty.
    get_node(?ROOT, Tree),
    Tree.
%% @doc Destroys the tree cleaning up any used resources.
%% This deletes the LevelDB files for the nodes.
-spec destroy(tree()) -> ok.
%% @doc Close and destroy every hashtree node (removing its on-disk
%% files), then drop the ETS table that held them.
destroy(Tree) ->
    ets:foldl(fun({_, Node}, _) ->
                      Node1 = hashtree:close(Node),
                      hashtree:destroy(Node1)
              end, undefined, Tree#hashtree_tree.nodes),
    %% `catch' tolerates the table already being gone.
    catch ets:delete(Tree#hashtree_tree.nodes),
    ok.
@doc an alias for insert(Prefixes , Key , Hash , [ ] , Tree )
-spec insert(prefixes(), binary(), binary(), tree()) -> tree() | {error, term()}.
%% @doc Insert with no options; see insert/5.
insert(Prefixes, Key, Hash, Tree) ->
    insert(Prefixes, Key, Hash, [], Tree).
@doc Insert a hash into the tree . The length of ` Prefixes ' must
%% correspond to the height of the tree -- the value used for
%% `num_levels' when creating the tree. The hash is inserted into
%% a leaf of the tree and that leaf is marked as dirty. The tree is not
%% updated at this time. Future operations on the tree should used the
%% tree returend by this fucntion.
%%
%% Insert takes the following options:
%% * if_missing - if `true' then the hash is only inserted into the tree
%% if the key is not already present. This is useful for
%% ensuring writes concurrent with building the tree
%% take precedence over older values. `false' is the default
%% value.
-type insert_opt_if_missing() :: {if_missing, boolean()}.
-type insert_opt() :: insert_opt_if_missing().
-type insert_opts() :: [insert_opt()].
-spec insert(prefixes(), binary(), binary(), insert_opts(), tree()) -> tree() | {error, term()}.
%% @doc Insert a key hash into the leaf named by `Prefixes', marking it
%% dirty. Returns the updated tree, or `{error, bad_prefixes}' when the
%% prefix list's length does not match the tree height.
insert(Prefixes, Key, Hash, Opts, Tree) ->
    NodeName = prefixes_to_node_name(Prefixes),
    case valid_prefixes(NodeName, Tree) of
        true ->
            insert_hash(Key, Hash, Opts, NodeName, Tree);
        false ->
            {error, bad_prefixes}
    end.
%% @doc Snapshot the tree for updating. The return tree should be
%% updated using {@link update_perform/1} and to perform future operations
%% on the tree
-spec update_snapshot(tree()) -> tree().
%% @doc Snapshot every dirty leaf: each dirty node is split into a
%% snapshot copy (stored in a fresh snapshot ETS table for the pending
%% update) and a new live node (written back to the nodes table). The
%% dirty set is cleared on the returned tree.
update_snapshot(Tree=#hashtree_tree{dirty=Dirty,nodes=Nodes,snapshot=Snapshot0}) ->
    %% Drop any stale snapshot table from a previous update.
    catch ets:delete(Snapshot0),
    FoldRes = gb_sets:fold(fun(DirtyName, Acc) ->
                                   DirtyKey = node_key(DirtyName, Tree),
                                   Node = lookup_node(DirtyName, Tree),
                                   {DirtyNode, NewNode} = hashtree:update_snapshot(Node),
                                   [{{DirtyKey, DirtyNode}, {DirtyKey, NewNode}} | Acc]
                           end, [], Dirty),
    {Snaps, NewNodes} = lists:unzip(FoldRes),
    Snapshot = ets:new(undefined, []),
    ets:insert(Snapshot, Snaps),
    ets:insert(Nodes, NewNodes),
    Tree#hashtree_tree{dirty=gb_sets:new(),snapshot=Snapshot}.
%% @doc Update the tree with a snapshot obtained by {@link
update_snapshot/1 } . This function may be called by a process other
%% than the one managing the tree.
-spec update_perform(tree()) -> ok.
%% @doc Apply a snapshot taken by update_snapshot/1: update every
%% snapshotted leaf, collecting the dirtied parents, then propagate the
%% updates level by level up to the root. Deletes the snapshot table.
update_perform(Tree=#hashtree_tree{snapshot=Snapshot}) ->
    DirtyParents = ets:foldl(fun(DirtyLeaf, DirtyParentsAcc) ->
                                     update_dirty_leaves(DirtyLeaf, DirtyParentsAcc, Tree)
                             end,
                             gb_sets:new(), Snapshot),
    update_dirty_parents(DirtyParents, Tree),
    catch ets:delete(Snapshot),
    ok.
@doc Compare two local trees . This function is primarily for
%% local debugging and testing.
-spec local_compare(tree(), tree()) -> [diff()].
%% @doc Compare two local trees by treating `T2' as the "remote" side,
%% accumulating every difference into a list. For debugging/testing.
local_compare(T1, T2) ->
    RemoteFun = fun(Prefixes, {get_bucket, {Level, Bucket}}) ->
                        hashtree_tree:get_bucket(Prefixes, Level, Bucket, T2);
                   (Prefixes, {key_hashes, Segment}) ->
                        [{_, Hashes}] = hashtree_tree:key_hashes(Prefixes, Segment, T2),
                        Hashes
                end,
    HandlerFun = fun(Diff, Acc) -> Acc ++ [Diff] end,
    compare(T1, RemoteFun, HandlerFun, []).
@doc Compare a local and remote tree . ` RemoteFun ' is used to
%% access the buckets and segments of nodes in the remote
tree . ` HandlerFun ' will be called for each difference found in the
%% tree. A difference is either a missing local or remote prefix, or a
%% list of key differences, which themselves signify different or
missing keys . ` HandlerAcc ' is passed to the first call of
` HandlerFun ' and each subsequent call is passed the value returned
%% by the previous call. The return value of this function is the
return value from the last call to ` HandlerFun ' .
-spec compare(tree(), remote_fun(), handler_fun(X), X) -> X.
%% @doc Start the comparison at the root node, level 1.
compare(LocalTree, RemoteFun, HandlerFun, HandlerAcc) ->
    compare(?ROOT, 1, LocalTree, RemoteFun, HandlerFun, HandlerAcc).
%% @doc Returns the top-hash of the tree. This is the top-hash of the
%% root node.
-spec top_hash(tree()) -> undefined | binary().
%% @doc The tree's top hash is the hash of the root node (empty prefix list).
top_hash(Tree) ->
    prefix_hash([], Tree).
%% @doc Returns the top-hash of the node corresponding to the given
%% prefix list. The length of the prefix list can be less than or
%% equal to the height of the tree. If the tree has not been updated
%% or if the prefix list is not found or invalid, then `undefined' is
%% returned. Otherwise the hash value from the most recent update is
%% returned.
-spec prefix_hash(prefixes(), tree()) -> undefined | binary().
%% @doc Look up the node for `Prefixes' and return its top hash from the
%% most recent update, or `undefined' when the node does not exist.
prefix_hash(Prefixes, Tree) ->
    NodeName = prefixes_to_node_name(Prefixes),
    case lookup_node(NodeName, Tree) of
        undefined -> undefined;
        Node -> extract_top_hash(hashtree:top_hash(Node))
    end.
%% @doc Returns the {@link hashtree} buckets for a given node in the
%% tree. This is used primarily for accessing buckets of a remote tree
%% during compare.
-spec get_bucket(tree_node(), integer(), integer(), tree()) -> orddict:orddict().
%% @doc Fetch the hashtree bucket for a node; an empty orddict when the
%% node does not exist. Used to serve remote compares.
get_bucket(Prefixes, Level, Bucket, Tree) ->
    case lookup_node(prefixes_to_node_name(Prefixes), Tree) of
        undefined -> orddict:new();
        Node -> hashtree:get_bucket(Level, Bucket, Node)
    end.
%% @doc Returns the {@link hashtree} segment hashes for a given node
%% in the tree. This is used primarily for accessing key hashes of a
%% remote tree during compare.
-spec key_hashes(tree_node(), integer(), tree()) -> [{integer(), orddict:orddict()}].
%% @doc Fetch the segment's key hashes for a node; an empty orddict when
%% the node does not exist. Used to serve remote compares.
key_hashes(Prefixes, Segment, Tree) ->
    case lookup_node(prefixes_to_node_name(Prefixes), Tree) of
        undefined -> [{Segment, orddict:new()}];
        Node -> hashtree:key_hashes(Node, Segment)
    end.
%%%===================================================================
Internal functions
%%%===================================================================
@private
%% @private
%% @doc Resolve (or create) the node for `NodeName', then insert.
insert_hash(Key, Hash, Opts, NodeName, Tree) ->
    Node = get_node(NodeName, Tree),
    insert_hash(Key, Hash, Opts, NodeName, Node, Tree).
@private
%% @private
%% @doc Insert the hash into the node's hashtree, store the updated node
%% back in ETS, and mark the node dirty on the returned tree.
insert_hash(Key, Hash, Opts, NodeName, Node, Tree=#hashtree_tree{dirty=Dirty}) ->
    Node2 = hashtree:insert(Key, Hash, Node, Opts),
    Dirty2 = gb_sets:add_element(NodeName, Dirty),
    _ = set_node(NodeName, Node2, Tree),
    Tree#hashtree_tree{dirty=Dirty2}.
@private
%% @private
%% @doc Update one snapshotted leaf, accumulating its dirtied parent.
update_dirty_leaves({DirtyKey, DirtyNode}, DirtyParents, Tree) ->
    update_dirty(node_key_to_name(DirtyKey), DirtyNode, DirtyParents, Tree).
@private
update_dirty_parents(DirtyParents, Tree) ->
case gb_sets:is_empty(DirtyParents) of
true -> ok;
false ->
NextDirty = gb_sets:fold(
fun(DirtyParent, DirtyAcc) ->
DirtyNode = lookup_node(DirtyParent, Tree),
{DirtySnap, DirtyNode2} = hashtree:update_snapshot(DirtyNode),
NextDirty = update_dirty(DirtyParent, DirtySnap, DirtyAcc, Tree),
_ = set_node(DirtyParent, DirtyNode2, Tree),
NextDirty
end, gb_sets:new(), DirtyParents),
update_dirty_parents(NextDirty, Tree)
end.
@private
update_dirty(DirtyName, DirtyNode, NextDirty, Tree) ->
%% ignore returned tree b/c we are tracking dirty nodes in this fold seperately
_ = hashtree:update_perform(DirtyNode),
case parent_node(DirtyName, Tree) of
undefined ->
NextDirty;
{ParentName, ParentNode} ->
TopHash = extract_top_hash(hashtree:top_hash(DirtyNode)),
ParentKey = to_parent_key(DirtyName),
%% ignore returned tree b/c we are tracking dirty nodes in this fold seperately
_ = insert_hash(ParentKey, TopHash, [], ParentName, ParentNode, Tree),
gb_sets:add_element(ParentName, NextDirty)
end.
@private
compare(NodeName, Level, LocalTree, RemoteFun, HandlerFun, HandlerAcc)
when Level =:= LocalTree#hashtree_tree.num_levels + 1 ->
Prefixes = node_name_to_prefixes(NodeName),
LocalNode = lookup_node(NodeName, LocalTree),
RemoteNode = fun(Action, Info) ->
RemoteFun(Prefixes, {Action, Info})
end,
AccFun = fun(Diffs, CompareAcc) ->
Res = HandlerFun({key_diffs, Prefixes, Diffs},
extract_compare_acc(CompareAcc, HandlerAcc)),
[{acc, Res}]
end,
CompareRes = hashtree:compare(LocalNode, RemoteNode, AccFun, []),
extract_compare_acc(CompareRes, HandlerAcc);
compare(NodeName, Level, LocalTree, RemoteFun, HandlerFun, HandlerAcc) ->
Prefixes = node_name_to_prefixes(NodeName),
LocalNode = lookup_node(NodeName, LocalTree),
RemoteNode = fun(Action, Info) ->
RemoteFun(Prefixes, {Action, Info})
end,
AccFoldFun = fun({missing, NodeKey}, HandlerAcc2) ->
missing_prefix(NodeKey, local, HandlerFun, HandlerAcc2);
({remote_missing, NodeKey}, HandlerAcc2) ->
missing_prefix(NodeKey, remote, HandlerFun, HandlerAcc2);
({different, NodeKey}, HandlerAcc2) ->
compare(from_parent_key(NodeKey), Level+1, LocalTree,
RemoteFun, HandlerFun, HandlerAcc2)
end,
AccFun = fun(Diffs, CompareAcc) ->
Res = lists:foldl(AccFoldFun,
extract_compare_acc(CompareAcc, HandlerAcc), Diffs),
[{acc, Res}]
end,
CompareRes = hashtree:compare(LocalNode, RemoteNode, AccFun, []),
extract_compare_acc(CompareRes, HandlerAcc).
@private
missing_prefix(NodeKey, Type, HandlerFun, HandlerAcc) ->
HandlerFun({missing_prefix, Type, node_name_to_prefixes(from_parent_key(NodeKey))},
HandlerAcc).
@private
extract_compare_acc([], HandlerAcc) ->
HandlerAcc;
extract_compare_acc([{acc, Acc}], _HandlerAcc) ->
Acc.
@private
get_node(NodeName, Tree) ->
Node = lookup_node(NodeName, Tree),
get_node(NodeName, Node, Tree).
@private
get_node(NodeName, undefined, Tree) ->
create_node(NodeName, Tree);
get_node(_NodeName, Node, _Tree) ->
Node.
@private
lookup_node(NodeName, Tree=#hashtree_tree{nodes=Nodes}) ->
NodeKey = node_key(NodeName, Tree),
case ets:lookup(Nodes, NodeKey) of
[] -> undefined;
[{NodeKey, Node}] -> Node
end.
@private
create_node(?ROOT, Tree) ->
NodeId = node_id(?ROOT, Tree),
NodePath = node_path(Tree),
NumSegs = node_num_segs(?ROOT),
Width = node_width(?ROOT),
Opts = [{segment_path, NodePath}, {segments, NumSegs}, {width, Width}],
%% destroy any data that previously existed because its lingering from
%% a tree that was not properly destroyed
ok = hashtree:destroy(NodePath),
Node = hashtree:new(NodeId, Opts),
set_node(?ROOT, Node, Tree);
create_node([], Tree) ->
create_node(?ROOT, Tree);
create_node(NodeName, Tree) ->
NodeId = node_id(NodeName, Tree),
RootNode = get_node(?ROOT, Tree),
NumSegs = node_num_segs(NodeName),
Width = node_width(NodeName),
Opts = [{segments, NumSegs}, {width, Width}],
%% share segment store accross all nodes
Node = hashtree:new(NodeId, RootNode, Opts),
set_node(NodeName, Node, Tree).
@private
set_node(NodeName, Node, Tree) when is_list(NodeName) orelse NodeName =:= ?ROOT ->
set_node(node_key(NodeName, Tree), Node, Tree);
set_node(NodeKey, Node, #hashtree_tree{nodes=Nodes}) when is_tuple(NodeKey) ->
ets:insert(Nodes, [{NodeKey, Node}]),
Node.
@private
parent_node(?ROOT, _Tree) ->
%% root has no parent
undefined;
parent_node([_Single], Tree) ->
parent of first level is the root
{?ROOT, get_node(?ROOT, Tree)};
parent_node([_Prefix | Parent], Tree) ->
%% parent of subsequent level is tail of node name
{Parent, get_node(Parent, Tree)}.
@private
node_width(?ROOT) ->
256;
node_width(NodeName) ->
case length(NodeName) < 2 of
true -> 512;
false -> 1024
end.
@private
node_num_segs(?ROOT) ->
256 * 256;
node_num_segs(NodeName) ->
case length(NodeName) < 2 of
true -> 512 * 512;
false -> 1024 * 1024
end.
@private
node_path(#hashtree_tree{data_root=DataRoot}) ->
DataRoot.
@private
node_key(NodeName, #hashtree_tree{id=TreeId}) ->
{TreeId, NodeName}.
@private
node_key_to_name({_TreeId, NodeName}) ->
NodeName.
@private
node_id(?ROOT, #hashtree_tree{id=TreeId}) ->
{TreeId, <<0:176/integer>>};
node_id(NodeName, #hashtree_tree{id=TreeId}) ->
<<NodeMD5:128/integer>> = riak_core_util:md5(term_to_binary(NodeName)),
{TreeId, <<NodeMD5:176/integer>>}.
@private
to_parent_key(NodeName) ->
term_to_binary(NodeName).
@private
from_parent_key(NodeKey) ->
binary_to_term(NodeKey).
@private
valid_prefixes(NodeName, #hashtree_tree{num_levels=NumLevels}) ->
length(NodeName) =:= NumLevels.
@private
prefixes_to_node_name([]) ->
?ROOT;
prefixes_to_node_name(Prefixes) ->
lists:reverse(Prefixes).
@private
node_name_to_prefixes(?ROOT) ->
[];
node_name_to_prefixes(NodeName) ->
lists:reverse(NodeName).
@private
extract_top_hash([]) ->
undefined;
extract_top_hash([{0, Hash}]) ->
Hash.
@private
data_root(Opts) ->
case proplists:get_value(data_dir, Opts) of
undefined ->
Base = "/tmp/hashtree_tree",
<<P:128/integer>> = riak_core_util:md5(term_to_binary(os:timestamp())),
filename:join(Base, riak_core_util:integer_to_list(P, 16));
Root -> Root
end.
| null | https://raw.githubusercontent.com/basho/riak_core/abbcca3cfb7da10798d8fc169043955638d4d9db/src/hashtree_tree.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc This module implements a specialized hash tree that is used
primarily by cluster metadata's anti-entropy exchanges and by
metadata clients for determining when groups of metadata keys have
changed locally. The tree can be used, generally, for determining
the differences in groups of keys, or to find missing groups, between
Each node of the tree is itself a hash tree, specifically a {@link
hashtree}. The tree has a fixed height but each node has a
variable amount of children. The height of the tree directly
corresponds to the number of prefixes supported by the tree. A list
of prefixes, or a "prefix list", represent a group of keys. Each
unique prefix list is a node in the tree. The leaves store hashes
for the individual keys in the segments of the node's {@link
hashtree}. The buckets of the leaves' hashtree provide an efficient
trees. The tails of the prefix list are used to roll up groups
into parent groups. For example, the prefixes `[a, b]', `[a, c]',
`[d, e]' will be rolled up into parent groups `a', containing `c'
and `b', and `d', containing only 'e'. The parent group's node has
children corresponding to each child group. The top-hashes of the
child nodes are stored in the parent nodes' segments. The parent
nodes' buckets are used as an efficient method for determining when
the empty list and it acts like any other node, storing hashes for
the top hash of the tree.
The tree in the example above might store something like:
node parent top-hash segments
---------------------------------------------------
When a key is inserted into the tree it is inserted into the leaf
corresponding to the given prefix list. The leaf and its parents
are not updated at this time. Instead the leaf is added to a dirty
set. The nodes are later updated in bulk.
the tree must be obtained. This prevents new writes from affecting
the update. Snapshotting the tree will snapshot each dirty
leaf. Since writes to nodes other than leaves only occur during
hashtree} nodes at each level starting with the leaves. The top
hash of each node in a level is inserted into its parent node after
being updated. The list of dirty parents is then updated, moving up
the tree. Once the root is reached and has been updated the process
is complete. This process is designed to minimize the traversal of
the tree and ensure that each node is only updated once.
The typical use for updating a tree is to compare it with another
recently updated tree. Comparison is done with the ``compare/4''
function. Compare provides a sort of fold over the differences of
the tree allowing for callers to determine what to do with those
differences. In addition, the caller can accumulate a value, such
as the difference list or stats about differencces.
The tree implemented in this module assumes that it will be managed
by a single process and that all calls will be made to it synchronously, with
a couple exceptions:
must be taken by the owning process, synchronously.
a snapshot and only be performed after an update.
The nodes in this tree are backend by LevelDB, however, this is
ephemeral. Trees are only meant to live for the lifetime of a
running node and are rebuilt on start. To ensure the tree is fresh
each time, when nodes are created the backing LevelDB store is
opened, closed, and then re-opened to ensure any lingering files
are removed. Additionally, the nodes themselves (references to
the identifier for this tree. used as part of the ids
passed to hashtree.erl and in keys used to store nodes in
the tree's ets tables.
directory where nodes are stored on disk
number of levels in the tree excluding leaves (height - 1)
ets table that holds hashtree nodes in the tree
ets table that holds snapshot nodes
set of dirty leaves
===================================================================
API
===================================================================
@doc Creates a new hashtree.
Takes the following options:
* num_levels - the height of the tree excluding leaves. corresponds to the
length of the prefix list passed to {@link insert/5}.
* data_dir - the directory where the LevelDB instances for the nodes will
be stored.
table needs to be public to allow async update
@doc Destroys the tree cleaning up any used resources.
This deletes the LevelDB files for the nodes.
correspond to the height of the tree -- the value used for
`num_levels' when creating the tree. The hash is inserted into
a leaf of the tree and that leaf is marked as dirty. The tree is not
updated at this time. Future operations on the tree should used the
tree returend by this fucntion.
Insert takes the following options:
* if_missing - if `true' then the hash is only inserted into the tree
if the key is not already present. This is useful for
ensuring writes concurrent with building the tree
take precedence over older values. `false' is the default
value.
@doc Snapshot the tree for updating. The return tree should be
updated using {@link update_perform/1} and to perform future operations
on the tree
@doc Update the tree with a snapshot obtained by {@link
than the one managing the tree.
local debugging and testing.
access the buckets and segments of nodes in the remote
tree. A difference is either a missing local or remote prefix, or a
list of key differences, which themselves signify different or
by the previous call. The return value of this function is the
@doc Returns the top-hash of the tree. This is the top-hash of the
root node.
@doc Returns the top-hash of the node corresponding to the given
prefix list. The length of the prefix list can be less than or
equal to the height of the tree. If the tree has not been updated
or if the prefix list is not found or invalid, then `undefined' is
returned. Otherwise the hash value from the most recent update is
returned.
@doc Returns the {@link hashtree} buckets for a given node in the
tree. This is used primarily for accessing buckets of a remote tree
during compare.
@doc Returns the {@link hashtree} segment hashes for a given node
in the tree. This is used primarily for accessing key hashes of a
remote tree during compare.
===================================================================
===================================================================
ignore returned tree b/c we are tracking dirty nodes in this fold seperately
ignore returned tree b/c we are tracking dirty nodes in this fold seperately
destroy any data that previously existed because its lingering from
a tree that was not properly destroyed
share segment store accross all nodes
root has no parent
parent of subsequent level is tail of node name | Copyright ( c ) 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
two stores .
way of determining when keys in the segments differ between two
child groups differ between two trees . The root node corresponds to
the first level of child groups . The top hash of the root node is
root none 1 [ { a , 2 } , { d , 3 } ]
[ a ] root 2 [ { b , 4 } , { c , 5 } ]
[ d ] root 3 [ { e , 6 } ]
[ a , b ] [ a ] 4 [ { k1 , 0 } , { k2 , 6 } , ... ]
[ a , c ] [ a ] 5 [ { k1 , 1 } , { k2 , 4 } , ... ]
[ d , e ] [ d ] 6 [ { k1 , 2 } , { k2 , 3 } , ... ]
Updating the hashtree is a two step process . First , a snapshot of
updates no snapshot is taken for them . Second , the tree is updated
using the snapshot . The update is performed by updating the { @link
1 . Updating a tree with a snapshot can be done in another process . The snapshot
2 . Comparing two trees may be done by a seperate process . Compares should should use
most likely temporary and Cluster Metadata 's use of the tree is
{ @link hashtree } , are stored in { @link ets } .
-module(hashtree_tree).
-export([new/2,
destroy/1,
insert/4,
insert/5,
update_snapshot/1,
update_perform/1,
local_compare/2,
compare/4,
top_hash/1,
prefix_hash/2,
get_bucket/4,
key_hashes/3]).
-export_type([tree/0, tree_node/0, handler_fun/1, remote_fun/0]).
-type hashtree_gb_set() :: gb_sets:set().
-record(hashtree_tree, {
id :: term(),
data_root :: file:name_all(),
num_levels :: non_neg_integer(),
nodes :: ets:tab(),
snapshot :: ets:tab(),
dirty :: hashtree_gb_set()
}).
-define(ROOT, '$ht_root').
-define(NUM_LEVELS, 2).
-opaque tree() :: #hashtree_tree{}.
-type prefix() :: atom() | binary().
-type prefixes() :: [prefix()].
-opaque tree_node() :: prefixes() | ?ROOT.
-type prefix_diff() :: {missing_prefix, local | remote, prefixes()}.
-type key_diffs() :: {key_diffs, prefixes(),[{missing |
remote_missing |
different, binary()}]}.
-type diff() :: prefix_diff() | key_diffs().
-type handler_fun(X) :: fun((diff(), X) -> X).
-type remote_fun() :: fun((prefixes(),
{get_bucket, {integer(), integer()}} |
{key_hashses, integer()}) -> orddict:orddict()).
-type new_opt_num_levels() :: {num_levels, non_neg_integer()}.
-type new_opt_data_dir() :: {data_dir, file:name_all()}.
-type new_opt() :: new_opt_num_levels() | new_opt_data_dir().
-type new_opts() :: [new_opt()].
-spec new(term(), new_opts()) -> tree().
new(TreeId, Opts) ->
NumLevels = proplists:get_value(num_levels, Opts, ?NUM_LEVELS),
DataRoot = data_root(Opts),
Tree = #hashtree_tree{id = TreeId,
data_root = DataRoot,
num_levels = NumLevels,
nodes = ets:new(undefined, [public]),
snapshot = undefined,
dirty = gb_sets:new()},
get_node(?ROOT, Tree),
Tree.
-spec destroy(tree()) -> ok.
destroy(Tree) ->
ets:foldl(fun({_, Node}, _) ->
Node1 = hashtree:close(Node),
hashtree:destroy(Node1)
end, undefined, Tree#hashtree_tree.nodes),
catch ets:delete(Tree#hashtree_tree.nodes),
ok.
@doc an alias for insert(Prefixes , Key , Hash , [ ] , Tree )
-spec insert(prefixes(), binary(), binary(), tree()) -> tree() | {error, term()}.
insert(Prefixes, Key, Hash, Tree) ->
insert(Prefixes, Key, Hash, [], Tree).
@doc Insert a hash into the tree . The length of ` Prefixes ' must
-type insert_opt_if_missing() :: {if_missing, boolean()}.
-type insert_opt() :: insert_opt_if_missing().
-type insert_opts() :: [insert_opt()].
-spec insert(prefixes(), binary(), binary(), insert_opts(), tree()) -> tree() | {error, term()}.
insert(Prefixes, Key, Hash, Opts, Tree) ->
NodeName = prefixes_to_node_name(Prefixes),
case valid_prefixes(NodeName, Tree) of
true ->
insert_hash(Key, Hash, Opts, NodeName, Tree);
false ->
{error, bad_prefixes}
end.
-spec update_snapshot(tree()) -> tree().
update_snapshot(Tree=#hashtree_tree{dirty=Dirty,nodes=Nodes,snapshot=Snapshot0}) ->
catch ets:delete(Snapshot0),
FoldRes = gb_sets:fold(fun(DirtyName, Acc) ->
DirtyKey = node_key(DirtyName, Tree),
Node = lookup_node(DirtyName, Tree),
{DirtyNode, NewNode} = hashtree:update_snapshot(Node),
[{{DirtyKey, DirtyNode}, {DirtyKey, NewNode}} | Acc]
end, [], Dirty),
{Snaps, NewNodes} = lists:unzip(FoldRes),
Snapshot = ets:new(undefined, []),
ets:insert(Snapshot, Snaps),
ets:insert(Nodes, NewNodes),
Tree#hashtree_tree{dirty=gb_sets:new(),snapshot=Snapshot}.
update_snapshot/1 } . This function may be called by a process other
-spec update_perform(tree()) -> ok.
update_perform(Tree=#hashtree_tree{snapshot=Snapshot}) ->
DirtyParents = ets:foldl(fun(DirtyLeaf, DirtyParentsAcc) ->
update_dirty_leaves(DirtyLeaf, DirtyParentsAcc, Tree)
end,
gb_sets:new(), Snapshot),
update_dirty_parents(DirtyParents, Tree),
catch ets:delete(Snapshot),
ok.
@doc Compare two local trees . This function is primarily for
-spec local_compare(tree(), tree()) -> [diff()].
local_compare(T1, T2) ->
RemoteFun = fun(Prefixes, {get_bucket, {Level, Bucket}}) ->
hashtree_tree:get_bucket(Prefixes, Level, Bucket, T2);
(Prefixes, {key_hashes, Segment}) ->
[{_, Hashes}] = hashtree_tree:key_hashes(Prefixes, Segment, T2),
Hashes
end,
HandlerFun = fun(Diff, Acc) -> Acc ++ [Diff] end,
compare(T1, RemoteFun, HandlerFun, []).
@doc Compare a local and remote tree . ` RemoteFun ' is used to
tree . ` HandlerFun ' will be called for each difference found in the
missing keys . ` HandlerAcc ' is passed to the first call of
` HandlerFun ' and each subsequent call is passed the value returned
return value from the last call to ` HandlerFun ' .
-spec compare(tree(), remote_fun(), handler_fun(X), X) -> X.
compare(LocalTree, RemoteFun, HandlerFun, HandlerAcc) ->
compare(?ROOT, 1, LocalTree, RemoteFun, HandlerFun, HandlerAcc).
-spec top_hash(tree()) -> undefined | binary().
top_hash(Tree) ->
prefix_hash([], Tree).
-spec prefix_hash(prefixes(), tree()) -> undefined | binary().
prefix_hash(Prefixes, Tree) ->
NodeName = prefixes_to_node_name(Prefixes),
case lookup_node(NodeName, Tree) of
undefined -> undefined;
Node -> extract_top_hash(hashtree:top_hash(Node))
end.
-spec get_bucket(tree_node(), integer(), integer(), tree()) -> orddict:orddict().
get_bucket(Prefixes, Level, Bucket, Tree) ->
case lookup_node(prefixes_to_node_name(Prefixes), Tree) of
undefined -> orddict:new();
Node -> hashtree:get_bucket(Level, Bucket, Node)
end.
-spec key_hashes(tree_node(), integer(), tree()) -> [{integer(), orddict:orddict()}].
key_hashes(Prefixes, Segment, Tree) ->
case lookup_node(prefixes_to_node_name(Prefixes), Tree) of
undefined -> [{Segment, orddict:new()}];
Node -> hashtree:key_hashes(Node, Segment)
end.
Internal functions
@private
insert_hash(Key, Hash, Opts, NodeName, Tree) ->
Node = get_node(NodeName, Tree),
insert_hash(Key, Hash, Opts, NodeName, Node, Tree).
@private
insert_hash(Key, Hash, Opts, NodeName, Node, Tree=#hashtree_tree{dirty=Dirty}) ->
Node2 = hashtree:insert(Key, Hash, Node, Opts),
Dirty2 = gb_sets:add_element(NodeName, Dirty),
_ = set_node(NodeName, Node2, Tree),
Tree#hashtree_tree{dirty=Dirty2}.
@private
update_dirty_leaves({DirtyKey, DirtyNode}, DirtyParents, Tree) ->
update_dirty(node_key_to_name(DirtyKey), DirtyNode, DirtyParents, Tree).
@private
update_dirty_parents(DirtyParents, Tree) ->
case gb_sets:is_empty(DirtyParents) of
true -> ok;
false ->
NextDirty = gb_sets:fold(
fun(DirtyParent, DirtyAcc) ->
DirtyNode = lookup_node(DirtyParent, Tree),
{DirtySnap, DirtyNode2} = hashtree:update_snapshot(DirtyNode),
NextDirty = update_dirty(DirtyParent, DirtySnap, DirtyAcc, Tree),
_ = set_node(DirtyParent, DirtyNode2, Tree),
NextDirty
end, gb_sets:new(), DirtyParents),
update_dirty_parents(NextDirty, Tree)
end.
@private
update_dirty(DirtyName, DirtyNode, NextDirty, Tree) ->
_ = hashtree:update_perform(DirtyNode),
case parent_node(DirtyName, Tree) of
undefined ->
NextDirty;
{ParentName, ParentNode} ->
TopHash = extract_top_hash(hashtree:top_hash(DirtyNode)),
ParentKey = to_parent_key(DirtyName),
_ = insert_hash(ParentKey, TopHash, [], ParentName, ParentNode, Tree),
gb_sets:add_element(ParentName, NextDirty)
end.
@private
compare(NodeName, Level, LocalTree, RemoteFun, HandlerFun, HandlerAcc)
when Level =:= LocalTree#hashtree_tree.num_levels + 1 ->
Prefixes = node_name_to_prefixes(NodeName),
LocalNode = lookup_node(NodeName, LocalTree),
RemoteNode = fun(Action, Info) ->
RemoteFun(Prefixes, {Action, Info})
end,
AccFun = fun(Diffs, CompareAcc) ->
Res = HandlerFun({key_diffs, Prefixes, Diffs},
extract_compare_acc(CompareAcc, HandlerAcc)),
[{acc, Res}]
end,
CompareRes = hashtree:compare(LocalNode, RemoteNode, AccFun, []),
extract_compare_acc(CompareRes, HandlerAcc);
compare(NodeName, Level, LocalTree, RemoteFun, HandlerFun, HandlerAcc) ->
Prefixes = node_name_to_prefixes(NodeName),
LocalNode = lookup_node(NodeName, LocalTree),
RemoteNode = fun(Action, Info) ->
RemoteFun(Prefixes, {Action, Info})
end,
AccFoldFun = fun({missing, NodeKey}, HandlerAcc2) ->
missing_prefix(NodeKey, local, HandlerFun, HandlerAcc2);
({remote_missing, NodeKey}, HandlerAcc2) ->
missing_prefix(NodeKey, remote, HandlerFun, HandlerAcc2);
({different, NodeKey}, HandlerAcc2) ->
compare(from_parent_key(NodeKey), Level+1, LocalTree,
RemoteFun, HandlerFun, HandlerAcc2)
end,
AccFun = fun(Diffs, CompareAcc) ->
Res = lists:foldl(AccFoldFun,
extract_compare_acc(CompareAcc, HandlerAcc), Diffs),
[{acc, Res}]
end,
CompareRes = hashtree:compare(LocalNode, RemoteNode, AccFun, []),
extract_compare_acc(CompareRes, HandlerAcc).
@private
missing_prefix(NodeKey, Type, HandlerFun, HandlerAcc) ->
HandlerFun({missing_prefix, Type, node_name_to_prefixes(from_parent_key(NodeKey))},
HandlerAcc).
@private
extract_compare_acc([], HandlerAcc) ->
HandlerAcc;
extract_compare_acc([{acc, Acc}], _HandlerAcc) ->
Acc.
@private
get_node(NodeName, Tree) ->
Node = lookup_node(NodeName, Tree),
get_node(NodeName, Node, Tree).
@private
get_node(NodeName, undefined, Tree) ->
create_node(NodeName, Tree);
get_node(_NodeName, Node, _Tree) ->
Node.
@private
lookup_node(NodeName, Tree=#hashtree_tree{nodes=Nodes}) ->
NodeKey = node_key(NodeName, Tree),
case ets:lookup(Nodes, NodeKey) of
[] -> undefined;
[{NodeKey, Node}] -> Node
end.
@private
create_node(?ROOT, Tree) ->
NodeId = node_id(?ROOT, Tree),
NodePath = node_path(Tree),
NumSegs = node_num_segs(?ROOT),
Width = node_width(?ROOT),
Opts = [{segment_path, NodePath}, {segments, NumSegs}, {width, Width}],
ok = hashtree:destroy(NodePath),
Node = hashtree:new(NodeId, Opts),
set_node(?ROOT, Node, Tree);
create_node([], Tree) ->
create_node(?ROOT, Tree);
create_node(NodeName, Tree) ->
NodeId = node_id(NodeName, Tree),
RootNode = get_node(?ROOT, Tree),
NumSegs = node_num_segs(NodeName),
Width = node_width(NodeName),
Opts = [{segments, NumSegs}, {width, Width}],
Node = hashtree:new(NodeId, RootNode, Opts),
set_node(NodeName, Node, Tree).
@private
set_node(NodeName, Node, Tree) when is_list(NodeName) orelse NodeName =:= ?ROOT ->
set_node(node_key(NodeName, Tree), Node, Tree);
set_node(NodeKey, Node, #hashtree_tree{nodes=Nodes}) when is_tuple(NodeKey) ->
ets:insert(Nodes, [{NodeKey, Node}]),
Node.
@private
parent_node(?ROOT, _Tree) ->
undefined;
parent_node([_Single], Tree) ->
parent of first level is the root
{?ROOT, get_node(?ROOT, Tree)};
parent_node([_Prefix | Parent], Tree) ->
{Parent, get_node(Parent, Tree)}.
@private
node_width(?ROOT) ->
256;
node_width(NodeName) ->
case length(NodeName) < 2 of
true -> 512;
false -> 1024
end.
@private
node_num_segs(?ROOT) ->
256 * 256;
node_num_segs(NodeName) ->
case length(NodeName) < 2 of
true -> 512 * 512;
false -> 1024 * 1024
end.
@private
node_path(#hashtree_tree{data_root=DataRoot}) ->
DataRoot.
@private
node_key(NodeName, #hashtree_tree{id=TreeId}) ->
{TreeId, NodeName}.
@private
node_key_to_name({_TreeId, NodeName}) ->
NodeName.
@private
node_id(?ROOT, #hashtree_tree{id=TreeId}) ->
{TreeId, <<0:176/integer>>};
node_id(NodeName, #hashtree_tree{id=TreeId}) ->
<<NodeMD5:128/integer>> = riak_core_util:md5(term_to_binary(NodeName)),
{TreeId, <<NodeMD5:176/integer>>}.
@private
to_parent_key(NodeName) ->
term_to_binary(NodeName).
@private
from_parent_key(NodeKey) ->
binary_to_term(NodeKey).
@private
valid_prefixes(NodeName, #hashtree_tree{num_levels=NumLevels}) ->
length(NodeName) =:= NumLevels.
@private
prefixes_to_node_name([]) ->
?ROOT;
prefixes_to_node_name(Prefixes) ->
lists:reverse(Prefixes).
@private
node_name_to_prefixes(?ROOT) ->
[];
node_name_to_prefixes(NodeName) ->
lists:reverse(NodeName).
@private
extract_top_hash([]) ->
undefined;
extract_top_hash([{0, Hash}]) ->
Hash.
@private
data_root(Opts) ->
case proplists:get_value(data_dir, Opts) of
undefined ->
Base = "/tmp/hashtree_tree",
<<P:128/integer>> = riak_core_util:md5(term_to_binary(os:timestamp())),
filename:join(Base, riak_core_util:integer_to_list(P, 16));
Root -> Root
end.
|
56d36c18ff3aed70f0f7e4445f1ca8ad3faf7f6e97c12ef095d67f5ea0363cd6 | 2600hz-archive/whistle | rebar_asn1_compiler.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% ex: ts=4 sw=4 et
%% -------------------------------------------------------------------
%%
rebar : Erlang Build Tools
%%
Copyright ( c ) 2009 , 2010 ( )
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
-module(rebar_asn1_compiler).
-author('').
-export([compile/2,
clean/2]).
-include("rebar.hrl").
%% ===================================================================
%% Public API
%% ===================================================================
-spec compile(Config::rebar_config:config(), AppFile::file:filename()) -> 'ok'.
compile(Config, _AppFile) ->
rebar_base_compiler:run(Config, filelib:wildcard("asn1/*.asn1"),
"asn1", ".asn1", "src", ".erl",
fun compile_asn1/3).
-spec clean(Config::rebar_config:config(), AppFile::file:filename()) -> 'ok'.
clean(_Config, _AppFile) ->
GeneratedFiles = asn_generated_files("asn1", "src", "include"),
ok = rebar_file_utils:delete_each(GeneratedFiles),
ok.
-spec compile_asn1(file:filename(), file:filename(),
rebar_config:config()) -> ok.
compile_asn1(Source, Target, Config) ->
ok = filelib:ensure_dir(Target),
ok = filelib:ensure_dir(filename:join("include", "dummy.hrl")),
Opts = [{outdir, "src"}, noobj] ++ rebar_config:get(Config, asn1_opts, []),
case asn1ct:compile(Source, Opts) of
ok ->
Asn1 = filename:basename(Source, ".asn1"),
HrlFile = filename:join("src", Asn1 ++ ".hrl"),
ok = rebar_file_utils:mv(HrlFile, "include"),
ok;
{error, _Reason} ->
?FAIL
end.
asn_generated_files(AsnDir, SrcDir, IncDir) ->
lists:foldl(
fun(AsnFile, Acc) ->
Base = filename:rootname(filename:basename(AsnFile)),
[filename:join([IncDir, Base ++ ".hrl"])|
filelib:wildcard(filename:join([SrcDir, Base ++ ".*"]))] ++ Acc
end,
[],
filelib:wildcard(filename:join([AsnDir, "*.asn1"]))
).
| null | https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/utils/rebar/src/rebar_asn1_compiler.erl | erlang | ex: ts=4 sw=4 et
-------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------
===================================================================
Public API
=================================================================== | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
rebar : Erlang Build Tools
Copyright ( c ) 2009 , 2010 ( )
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-module(rebar_asn1_compiler).
-author('').
-export([compile/2,
clean/2]).
-include("rebar.hrl").
-spec compile(Config::rebar_config:config(), AppFile::file:filename()) -> 'ok'.
compile(Config, _AppFile) ->
rebar_base_compiler:run(Config, filelib:wildcard("asn1/*.asn1"),
"asn1", ".asn1", "src", ".erl",
fun compile_asn1/3).
-spec clean(Config::rebar_config:config(), AppFile::file:filename()) -> 'ok'.
clean(_Config, _AppFile) ->
GeneratedFiles = asn_generated_files("asn1", "src", "include"),
ok = rebar_file_utils:delete_each(GeneratedFiles),
ok.
-spec compile_asn1(file:filename(), file:filename(),
rebar_config:config()) -> ok.
compile_asn1(Source, Target, Config) ->
ok = filelib:ensure_dir(Target),
ok = filelib:ensure_dir(filename:join("include", "dummy.hrl")),
Opts = [{outdir, "src"}, noobj] ++ rebar_config:get(Config, asn1_opts, []),
case asn1ct:compile(Source, Opts) of
ok ->
Asn1 = filename:basename(Source, ".asn1"),
HrlFile = filename:join("src", Asn1 ++ ".hrl"),
ok = rebar_file_utils:mv(HrlFile, "include"),
ok;
{error, _Reason} ->
?FAIL
end.
asn_generated_files(AsnDir, SrcDir, IncDir) ->
lists:foldl(
fun(AsnFile, Acc) ->
Base = filename:rootname(filename:basename(AsnFile)),
[filename:join([IncDir, Base ++ ".hrl"])|
filelib:wildcard(filename:join([SrcDir, Base ++ ".*"]))] ++ Acc
end,
[],
filelib:wildcard(filename:join([AsnDir, "*.asn1"]))
).
|
b9714c0025a38566d21d317d9f368c684cb919c964c8327f2b503af3e12a66e9 | pdarragh/parsing-with-zippers-paper-artifact | pwz_abstract_types.ml | type pos = int ref (* Using ref makes it easy to create values that are not pointer equal *)
let p_bottom = ref (-1)
type sym = string
let s_bottom = "<s_bottom>"
type tok = string
let t_eof = "<t_eof>"
| null | https://raw.githubusercontent.com/pdarragh/parsing-with-zippers-paper-artifact/c5c08306cfe4eec588237c7fa45b794649ccb68a/appendix/pwz_abstract_types.ml | ocaml | Using ref makes it easy to create values that are not pointer equal | let p_bottom = ref (-1)
type sym = string
let s_bottom = "<s_bottom>"
type tok = string
let t_eof = "<t_eof>"
|
83972ac76c4b900fbb003cfe9f0c3e272fc8c6d3778131b004cdf9ec31988919 | tidalcycles/tidal-midi | Stream.hs | |
Entry functions for interacting with MIDI devices through Tidal .
Entry functions for interacting with MIDI devices through Tidal.
-}
module Sound.Tidal.MIDI.Stream (midiStream, midiBackend, midiState, midiSetters, midiDevices, displayOutputDevices) where
-- generics
import Control.Concurrent
import Control.Concurrent.MVar ()
import qualified Data.Map as Map
-- Tidal specific
import Sound.Tidal.Stream as S
import Sound.Tidal.Time
import Sound.Tidal.Transition (transition)
-- MIDI specific
import Sound.Tidal.MIDI.Control
import Sound.Tidal.MIDI.Output
|
Create a handle for all currently used ' Output 's indexed by their device name .
We use this to cache once opened devices .
This will be passed to _ every _ initialization of a virtual stream to a MIDI device
and is necessary since , ' PortMidi ' only allows a single connection to a device .
Create a handle for all currently used 'Output's indexed by their device name.
We use this to cache once opened devices.
This will be passed to _every_ initialization of a virtual stream to a MIDI device
and is necessary since, 'PortMidi' only allows a single connection to a device.
-}
midiDevices :: IO (MVar MidiDeviceMap)
midiDevices = newMVar $ Map.fromList []
|
Connect to a MIDI device with a given name and channel ,
using a ' ControllerShape ' to allow customized interaction
with specific MIDI synths .
Needs a ' ' to operate , create on using ' midiDevices ' !
Usage :
@
( m1 , mt1 ) < - midiSetters devices " My Synth Controller Device name " 1 synthController getNow
@
To find the correct name for your device see ' displayOutputDevices '
Connect to a MIDI device with a given name and channel,
using a 'ControllerShape' to allow customized interaction
with specific MIDI synths.
Needs a 'MidiDeviceMap' to operate, create on using 'midiDevices'!
Usage:
@
(m1, mt1) <- midiSetters devices "My Synth Controller Device name" 1 synthController getNow
@
To find the correct name for your device see 'displayOutputDevices'
-}
midiSetters :: MVar MidiDeviceMap -- ^ A list of MIDI output devices
-> String -- ^ The name of the output device to connect
-> Int -- ^ The MIDI Channel to use
-> ControllerShape -- ^ The definition of params to be usable
-> IO Time -- ^ a method to get the current time
-> IO (ParamPattern -> IO (), (Time -> [ParamPattern] -> ParamPattern) -> ParamPattern -> IO ())
midiSetters d n c s getNow = do
ds <- midiState d n c s
return (setter ds, transition getNow ds)
|
Creates a single virtual stream to a MIDI device using a specific ' ControllerShape '
Needs a ' ' to operate , create one using ' midiDevices ' !
Creates a single virtual stream to a MIDI device using a specific 'ControllerShape'
Needs a 'MidiDeviceMap' to operate, create one using 'midiDevices'!
-}
midiStream :: MVar MidiDeviceMap -> String -> Int -> ControllerShape -> IO (ParamPattern -> IO ())
midiStream d n c s = do
backend <- midiBackend d n c s
stream backend (toShape s)
|
Creates a single virtual state for a MIDI device using a specific ' ControllerShape '
This state can be used to either create a ' Sound.Tidal.Stream.setter ' or a ' Sound.Tidal.Transition.transition ' from it .
Needs a ' ' to operate , create one using ' midiDevices ' !
Creates a single virtual state for a MIDI device using a specific 'ControllerShape'
This state can be used to either create a 'Sound.Tidal.Stream.setter' or a 'Sound.Tidal.Transition.transition' from it.
Needs a 'MidiDeviceMap' to operate, create one using 'midiDevices'!
-}
midiState :: MVar MidiDeviceMap -> String -> Int -> ControllerShape -> IO (MVar (ParamPattern, [ParamPattern]))
midiState d n c s = do
backend <- midiBackend d n c s
S.state backend (toShape s)
|
Opens a connection to a MIDI device and wraps it in a ' Sound . Tidal . Stream . Backend ' implementation .
Needs a ' ' to operate , create one using ' midiDevices ' !
Opens a connection to a MIDI device and wraps it in a 'Sound.Tidal.Stream.Backend' implementation.
Needs a 'MidiDeviceMap' to operate, create one using 'midiDevices'!
-}
midiBackend :: MVar MidiDeviceMap -> String -> Int -> ControllerShape -> IO (Backend a)
midiBackend d n c cs = do
(s, o) <- makeConnection d n c cs
return $ Backend s (flushBackend o)
| null | https://raw.githubusercontent.com/tidalcycles/tidal-midi/0f806c31daee46bb54053dc3407349001a0e00b8/Sound/Tidal/MIDI/Stream.hs | haskell | generics
Tidal specific
MIDI specific
^ A list of MIDI output devices
^ The name of the output device to connect
^ The MIDI Channel to use
^ The definition of params to be usable
^ a method to get the current time | |
Entry functions for interacting with MIDI devices through Tidal .
Entry functions for interacting with MIDI devices through Tidal.
-}
module Sound.Tidal.MIDI.Stream (midiStream, midiBackend, midiState, midiSetters, midiDevices, displayOutputDevices) where
import Control.Concurrent
import Control.Concurrent.MVar ()
import qualified Data.Map as Map
import Sound.Tidal.Stream as S
import Sound.Tidal.Time
import Sound.Tidal.Transition (transition)
import Sound.Tidal.MIDI.Control
import Sound.Tidal.MIDI.Output
|
Create a handle for all currently used ' Output 's indexed by their device name .
We use this to cache once opened devices .
This will be passed to _ every _ initialization of a virtual stream to a MIDI device
and is necessary since , ' PortMidi ' only allows a single connection to a device .
Create a handle for all currently used 'Output's indexed by their device name.
We use this to cache once opened devices.
This will be passed to _every_ initialization of a virtual stream to a MIDI device
and is necessary since, 'PortMidi' only allows a single connection to a device.
-}
midiDevices :: IO (MVar MidiDeviceMap)
midiDevices = newMVar $ Map.fromList []
|
Connect to a MIDI device with a given name and channel ,
using a ' ControllerShape ' to allow customized interaction
with specific MIDI synths .
Needs a ' ' to operate , create on using ' midiDevices ' !
Usage :
@
( m1 , mt1 ) < - midiSetters devices " My Synth Controller Device name " 1 synthController getNow
@
To find the correct name for your device see ' displayOutputDevices '
Connect to a MIDI device with a given name and channel,
using a 'ControllerShape' to allow customized interaction
with specific MIDI synths.
Needs a 'MidiDeviceMap' to operate, create on using 'midiDevices'!
Usage:
@
(m1, mt1) <- midiSetters devices "My Synth Controller Device name" 1 synthController getNow
@
To find the correct name for your device see 'displayOutputDevices'
-}
-> IO (ParamPattern -> IO (), (Time -> [ParamPattern] -> ParamPattern) -> ParamPattern -> IO ())
midiSetters d n c s getNow = do
ds <- midiState d n c s
return (setter ds, transition getNow ds)
|
Creates a single virtual stream to a MIDI device using a specific ' ControllerShape '
Needs a ' ' to operate , create one using ' midiDevices ' !
Creates a single virtual stream to a MIDI device using a specific 'ControllerShape'
Needs a 'MidiDeviceMap' to operate, create one using 'midiDevices'!
-}
midiStream :: MVar MidiDeviceMap -> String -> Int -> ControllerShape -> IO (ParamPattern -> IO ())
midiStream d n c s = do
backend <- midiBackend d n c s
stream backend (toShape s)
|
Creates a single virtual state for a MIDI device using a specific ' ControllerShape '
This state can be used to either create a ' Sound.Tidal.Stream.setter ' or a ' Sound.Tidal.Transition.transition ' from it .
Needs a ' ' to operate , create one using ' midiDevices ' !
Creates a single virtual state for a MIDI device using a specific 'ControllerShape'
This state can be used to either create a 'Sound.Tidal.Stream.setter' or a 'Sound.Tidal.Transition.transition' from it.
Needs a 'MidiDeviceMap' to operate, create one using 'midiDevices'!
-}
midiState :: MVar MidiDeviceMap -> String -> Int -> ControllerShape -> IO (MVar (ParamPattern, [ParamPattern]))
midiState d n c s = do
backend <- midiBackend d n c s
S.state backend (toShape s)
|
Opens a connection to a MIDI device and wraps it in a ' Sound . Tidal . Stream . Backend ' implementation .
Needs a ' ' to operate , create one using ' midiDevices ' !
Opens a connection to a MIDI device and wraps it in a 'Sound.Tidal.Stream.Backend' implementation.
Needs a 'MidiDeviceMap' to operate, create one using 'midiDevices'!
-}
midiBackend :: MVar MidiDeviceMap -> String -> Int -> ControllerShape -> IO (Backend a)
midiBackend d n c cs = do
(s, o) <- makeConnection d n c cs
return $ Backend s (flushBackend o)
|
5d470dde777c6d50a6c35a5c80ac1f2f43e7b648b3ee56c4c2b551e25d154367 | robert-strandh/SICL | package-name-defun.lisp | (cl:in-package #:sicl-package)
(defun package-name (package-designator)
(let ((package (package-designator-to-package package-designator)))
(name package)))
| null | https://raw.githubusercontent.com/robert-strandh/SICL/65d7009247b856b2c0f3d9bb41ca7febd3cd641b/Code/Package/package-name-defun.lisp | lisp | (cl:in-package #:sicl-package)
(defun package-name (package-designator)
(let ((package (package-designator-to-package package-designator)))
(name package)))
|
|
10bb2aa260c3d7d5e990d41a35afa899e216429ad72e894c12fe64033fd6602a | Plutonomicon/plutarch-plutus | MonadicSpec.hs | # LANGUAGE OverloadedRecordDot #
# LANGUAGE QualifiedDo #
module Plutarch.MonadicSpec (spec) where
import Control.Monad.Trans.Cont (cont, runCont)
import Plutarch.Api.V1 (
PAddress (PAddress),
PCredential,
PMaybeData,
PPubKeyHash,
PScriptContext,
PScriptPurpose (PSpending),
PStakingCredential,
)
import Plutarch.ApiSpec qualified as ApiSpec
import Plutarch.List (pconvertLists)
import Plutarch.Monadic qualified as P
import Plutarch.Prelude
import Plutarch.Test
import PlutusLedgerApi.V1
import Test.Hspec
spec :: Spec
spec = do
describe "monadic" $ do
describe "pmatch-twice" . pgoldenSpec $ do
-- We expect all these benchmarks to produce equivalent numbers
let integerList :: [Integer] -> Term s (PList PInteger)
integerList xs = pconvertLists #$ pconstant @(PBuiltinList PInteger) xs
xs = integerList [1 .. 10]
"normal"
@| pmatch xs
$ \case
PSCons _x xs' -> do
pmatch xs' $ \case
PSCons _ xs'' ->
xs''
PSNil -> perror
PSNil -> perror
"do"
@| P.do
PSCons _ xs' <- pmatch xs
PSCons _ xs'' <- pmatch xs'
xs''
"cont"
@| flip runCont id
$ do
ls <- cont $ pmatch xs
case ls of
PSCons _ xs' -> do
ls' <- cont $ pmatch xs'
case ls' of
PSCons _ xs'' -> pure xs''
PSNil -> pure perror
PSNil -> pure perror
"termcont"
@| unTermCont
$ do
PSCons _ xs' <- TermCont $ pmatch xs
PSCons _ xs'' <- TermCont $ pmatch xs'
pure xs''
describe "api.example" $ do
-- The checkSignatory family of functions implicitly use tracing due to
monadic syntax , and as such we need two sets of tests here .
describe "signatory" . pgoldenSpec $ do
let aSig :: PubKeyHash = "ab01fe235c"
"do" @\ do
"succeeds" @| checkSignatory # pconstant aSig # ApiSpec.ctx @-> psucceeds
"fails" @| checkSignatory # pconstant "41" # ApiSpec.ctx @-> pfails
describe "getFields" . pgoldenSpec $ do
"0" @| getFields
checkSignatory :: Term s (PPubKeyHash :--> PScriptContext :--> PUnit)
checkSignatory = plam $ \ph ctx' ->
pletFields @["txInfo", "purpose"] ctx' $ \ctx -> P.do
PSpending _ <- pmatch $ ctx.purpose
let signatories = pfield @"signatories" # ctx.txInfo
pif
(pelem # pdata ph # pfromData signatories)
Success !
(pconstant ())
-- Signature not present.
perror
getFields :: Term s (PAddress :--> PDataRecord '["credential" ':= PCredential, "stakingCredential" ':= PMaybeData PStakingCredential])
getFields = phoistAcyclic $
plam $ \addr -> P.do
PAddress addrFields <- pmatch addr
addrFields
| null | https://raw.githubusercontent.com/Plutonomicon/plutarch-plutus/9b83892057f2aaaed76e3af6193ad1ae242244cc/plutarch-test/tests/Plutarch/MonadicSpec.hs | haskell | We expect all these benchmarks to produce equivalent numbers
The checkSignatory family of functions implicitly use tracing due to
> PScriptContext :--> PUnit)
Signature not present.
> PDataRecord '["credential" ':= PCredential, "stakingCredential" ':= PMaybeData PStakingCredential]) | # LANGUAGE OverloadedRecordDot #
# LANGUAGE QualifiedDo #
module Plutarch.MonadicSpec (spec) where
import Control.Monad.Trans.Cont (cont, runCont)
import Plutarch.Api.V1 (
PAddress (PAddress),
PCredential,
PMaybeData,
PPubKeyHash,
PScriptContext,
PScriptPurpose (PSpending),
PStakingCredential,
)
import Plutarch.ApiSpec qualified as ApiSpec
import Plutarch.List (pconvertLists)
import Plutarch.Monadic qualified as P
import Plutarch.Prelude
import Plutarch.Test
import PlutusLedgerApi.V1
import Test.Hspec
spec :: Spec
spec = do
describe "monadic" $ do
describe "pmatch-twice" . pgoldenSpec $ do
let integerList :: [Integer] -> Term s (PList PInteger)
integerList xs = pconvertLists #$ pconstant @(PBuiltinList PInteger) xs
xs = integerList [1 .. 10]
"normal"
@| pmatch xs
$ \case
PSCons _x xs' -> do
pmatch xs' $ \case
PSCons _ xs'' ->
xs''
PSNil -> perror
PSNil -> perror
"do"
@| P.do
PSCons _ xs' <- pmatch xs
PSCons _ xs'' <- pmatch xs'
xs''
"cont"
@| flip runCont id
$ do
ls <- cont $ pmatch xs
case ls of
PSCons _ xs' -> do
ls' <- cont $ pmatch xs'
case ls' of
PSCons _ xs'' -> pure xs''
PSNil -> pure perror
PSNil -> pure perror
"termcont"
@| unTermCont
$ do
PSCons _ xs' <- TermCont $ pmatch xs
PSCons _ xs'' <- TermCont $ pmatch xs'
pure xs''
describe "api.example" $ do
monadic syntax , and as such we need two sets of tests here .
describe "signatory" . pgoldenSpec $ do
let aSig :: PubKeyHash = "ab01fe235c"
"do" @\ do
"succeeds" @| checkSignatory # pconstant aSig # ApiSpec.ctx @-> psucceeds
"fails" @| checkSignatory # pconstant "41" # ApiSpec.ctx @-> pfails
describe "getFields" . pgoldenSpec $ do
"0" @| getFields
checkSignatory = plam $ \ph ctx' ->
pletFields @["txInfo", "purpose"] ctx' $ \ctx -> P.do
PSpending _ <- pmatch $ ctx.purpose
let signatories = pfield @"signatories" # ctx.txInfo
pif
(pelem # pdata ph # pfromData signatories)
Success !
(pconstant ())
perror
getFields = phoistAcyclic $
plam $ \addr -> P.do
PAddress addrFields <- pmatch addr
addrFields
|
5adc13adc5886e7dfb8799c7daa0a57126a5b8c8532298cbdfbaf252da1134dc | graninas/Functional-Design-and-Architecture | Hdl.hs | module Andromeda.Hardware.Language.Hdl where
import Andromeda.Hardware.Common
import Andromeda.Hardware.Domain
import Control.Monad.Free (Free (..), liftF)
data HdlMethod next
= SetupController DeviceName ControllerName ComponentPassport (Controller -> next)
| RegisterComponent Controller ComponentIndex ComponentPassport (() -> next)
instance Functor HdlMethod where
fmap f (SetupController deviceName ctrlName passp next) = SetupController deviceName ctrlName passp (f . next)
fmap f (RegisterComponent controller idx passp next) = RegisterComponent controller idx passp (f . next)
type Hdl a = Free HdlMethod a
setupController :: DeviceName -> ControllerName -> ComponentPassport -> Hdl Controller
setupController deviceName ctrlName passp = liftF $ SetupController deviceName ctrlName passp id
registerComponent :: Controller -> ComponentIndex -> ComponentPassport -> Hdl ()
registerComponent controller idx passp = liftF $ RegisterComponent controller idx passp id
| null | https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/b6a78f80a2a2e0b913bcab1d2279fc137a90db4c/Second-Edition-Manning-Publications/BookSamples/CH08/Section8p1/src/Andromeda/Hardware/Language/Hdl.hs | haskell | module Andromeda.Hardware.Language.Hdl where
import Andromeda.Hardware.Common
import Andromeda.Hardware.Domain
import Control.Monad.Free (Free (..), liftF)
data HdlMethod next
= SetupController DeviceName ControllerName ComponentPassport (Controller -> next)
| RegisterComponent Controller ComponentIndex ComponentPassport (() -> next)
instance Functor HdlMethod where
fmap f (SetupController deviceName ctrlName passp next) = SetupController deviceName ctrlName passp (f . next)
fmap f (RegisterComponent controller idx passp next) = RegisterComponent controller idx passp (f . next)
type Hdl a = Free HdlMethod a
setupController :: DeviceName -> ControllerName -> ComponentPassport -> Hdl Controller
setupController deviceName ctrlName passp = liftF $ SetupController deviceName ctrlName passp id
registerComponent :: Controller -> ComponentIndex -> ComponentPassport -> Hdl ()
registerComponent controller idx passp = liftF $ RegisterComponent controller idx passp id
|
|
681e4b28998ec2fe069e8a15cf878d2eba171c99b63bb3444b87fb5764fa7ba1 | lkuper/lvar-examples | map-lvar-waitsize.hs | # LANGUAGE TypeFamilies #
import Control.LVish
import Data.LVar.PureMap
import qualified Data.Map as M
data Item = Book | Shoes
deriving (Show, Ord, Eq)
Bug in ? : this program occasionally raises put - after - freeze
errors , even though I think the ` waitSize 2 ` should be enough
-- synchronization to prevent that.
-- Returns an ordinary Data.Map, because `freezeMap` turns a
` Data . LVar . PureMap ` into one .
p :: (HasPut e, HasGet e, HasFreeze e) => Par e s (M.Map Item Int)
p = do
cart <- newEmptyMap
fork $ insert Book 1 cart
fork $ insert Shoes 1 cart
waitSize 2 cart
freezeMap cart
main = do
v <- runParQuasiDet p
print $ M.toList v
| null | https://raw.githubusercontent.com/lkuper/lvar-examples/681c0ed83366dbb8f7d4a1f1285c0ad3e42a436b/2.0/map-lvar-waitsize.hs | haskell | synchronization to prevent that.
Returns an ordinary Data.Map, because `freezeMap` turns a | # LANGUAGE TypeFamilies #
import Control.LVish
import Data.LVar.PureMap
import qualified Data.Map as M
data Item = Book | Shoes
deriving (Show, Ord, Eq)
Bug in ? : this program occasionally raises put - after - freeze
errors , even though I think the ` waitSize 2 ` should be enough
` Data . LVar . PureMap ` into one .
p :: (HasPut e, HasGet e, HasFreeze e) => Par e s (M.Map Item Int)
p = do
cart <- newEmptyMap
fork $ insert Book 1 cart
fork $ insert Shoes 1 cart
waitSize 2 cart
freezeMap cart
main = do
v <- runParQuasiDet p
print $ M.toList v
|
a8af544e698daa7ba5bd216c4b386c2085e01d06be259bf913b44f7795403499 | BranchTaken/Hemlock | test_pp.ml | open! Basis.Rudiments
open! Basis
open Unit
let test () =
File.Fmt.stdout
|> Fmt.fmt "pp "
|> Fmt.fmt (to_string ())
|> Fmt.fmt " -> "
|> pp ()
|> Fmt.fmt "\n"
|> ignore
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/f3604ceda4f75cf18b6ee2b1c2f3c5759ad495a5/bootstrap/test/basis/unit/test_pp.ml | ocaml | open! Basis.Rudiments
open! Basis
open Unit
let test () =
File.Fmt.stdout
|> Fmt.fmt "pp "
|> Fmt.fmt (to_string ())
|> Fmt.fmt " -> "
|> pp ()
|> Fmt.fmt "\n"
|> ignore
let _ = test ()
|
|
a32cc730e99fce94b0d6d10cda919f76d2b15606903a2b627a45bda9b2818816 | oliyh/re-graph | core_test.cljc | (ns re-graph.core-test
(:require [re-graph.core :as re-graph]
[re-graph.internals :as internals :refer [default-instance-id]]
[re-frame.core :as re-frame]
[re-frame.db :refer [app-db]]
[day8.re-frame.test :refer [run-test-sync run-test-async wait-for]
:refer-macros [run-test-sync run-test-async wait-for]]
[clojure.test :refer [deftest is testing]
:refer-macros [deftest is testing]]
[clojure.spec.alpha :as s]
[clojure.spec.test.alpha :as stest]
#?@(:clj [[cheshire.core :as json]
[hato.client :as hato]
[clj-http.client :as clj-http]])))
(stest/instrument)
(s/check-asserts true)
(def on-ws-message @#'internals/on-ws-message)
(def on-open @#'internals/on-open)
(def on-close @#'internals/on-close)
(def insert-http-status @#'internals/insert-http-status)
(defn- data->message [d]
#?(:cljs (clj->js {:data (js/JSON.stringify (clj->js d))})
:clj (json/encode d)))
(defn- install-websocket-stub! []
(re-frame/reg-fx
::internals/connect-ws
(fn [[instance-id _options]]
((on-open instance-id ::websocket-connection)))))
(defn- dispatch-to-instance [instance-id [event opts]]
(re-frame/dispatch [event (if (nil? instance-id)
opts
(assoc opts :instance-id instance-id))]))
(defn- init [instance-id opts]
(dispatch-to-instance instance-id [::re-graph/init opts]))
(defn- run-subscription-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-ws-message (on-ws-message (or instance-id default-instance-id))]
(run-test-sync
(install-websocket-stub!)
(init instance-id {:ws {:url "ws"
:connection-init-payload nil}})
(let [expected-subscription-payload {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}
expected-unsubscription-payload {:id "my-sub"
:type "stop"}]
(testing "Subscriptions can be registered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-subscription-payload
payload))))
(dispatch [::re-graph/subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(is (= [::on-thing]
(get-in (db-instance) [:subscriptions "my-sub" :callback])))
(testing "and deduplicated"
(re-frame/reg-fx
::internals/send-ws
(fn [_]
(is false "Should not have sent a websocket message for an existing subscription")))
(dispatch [::re-graph/subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}]))
(testing "messages from the WS are sent to the callback"
(let [expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(on-ws-message (data->message {:type "data"
:id "my-sub"
:payload expected-response-payload}))
(is (= expected-response-payload
(::thing @app-db)))))
(testing "errors from the WS are sent to the callback"
(let [expected-response-payload {:errors {:message "Something went wrong"}}]
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(on-ws-message (data->message {:type "error"
:id "my-sub"
:payload (:errors expected-response-payload)}))
(is (= expected-response-payload
(::thing @app-db)))))
(testing "and unregistered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-unsubscription-payload
payload))))
(dispatch [::re-graph/unsubscribe {:id :my-sub}])
(is (nil? (get-in (db-instance) [:subscriptions "my-sub"])))))))))
(deftest subscription-test
(run-subscription-test nil))
(deftest named-subscription-test
(run-subscription-test :service-a))
(defn- run-websocket-lifecycle-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-open (partial on-open (or instance-id default-instance-id))]
(run-test-sync
(re-frame/reg-fx
::internals/connect-ws
(constantly nil))
(let [init-payload {:token "abc"}
expected-subscription-payload {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}]
(init instance-id {:ws {:url "ws"
:connection-init-payload init-payload}})
(testing "messages are queued when websocket isn't ready"
(dispatch [::re-graph/subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(dispatch [::re-graph/query {:query "{ more_things { id } }"
:variables {:some "other-variable"}
:callback [::on-thing]}])
(is (= 2 (count (get-in (db-instance) [:ws :queue]))))
(testing "and sent when websocket opens"
(let [ws-messages (atom [])]
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(swap! ws-messages conj [ws payload])))
((on-open ::websocket-connection))
(testing "the connection init payload is sent first"
(is (= [::websocket-connection
{:type "connection_init"
:payload init-payload}]
(first @ws-messages))))
(is (= [::websocket-connection expected-subscription-payload]
(second @ws-messages)))
(is (= [::websocket-connection {:type "start",
:payload
{:query "query { more_things { id } }",
:variables {:some "other-variable"}}}]
((juxt first (comp #(dissoc % :id) second)) (last @ws-messages)))))
(is (empty? (get-in (db-instance) [:ws :queue]))))))
(testing "when re-graph is destroyed"
(testing "the subscriptions are cancelled"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (or (= {:id "my-sub" :type "stop"}
payload)
(= {:type "stop"}
(dissoc payload :id)))))))
(testing "the websocket is closed"
(re-frame/reg-fx
::internals/disconnect-ws
(fn [[ws]]
(is (= ::websocket-connection ws)))))
(dispatch [::re-graph/destroy {}])
(testing "the re-graph state is set to destroyed"
(is (:destroyed? (db-instance))))))))
(deftest websocket-lifecycle-test
(run-websocket-lifecycle-test nil))
(deftest named-websocket-lifecycle-test
(run-websocket-lifecycle-test :service-a))
(defn- run-websocket-reconnection-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-close (on-close (or instance-id default-instance-id))
sent-msgs (atom [])]
(run-test-async
(install-websocket-stub!)
(re-frame/reg-fx
:dispatch-later
(fn [[{:keys [dispatch]}]]
(re-frame/dispatch dispatch)))
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (or
(= "connection_init" (:type payload))
(= {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}
payload)))
(swap! sent-msgs conj payload)))
(testing "websocket reconnects when disconnected"
(init instance-id {:ws {:url "ws"
:connection-init-payload {:token "abc"}
:reconnect-timeout 0}})
(let [subscription-params {:instance-id (or instance-id default-instance-id)
:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}]
(wait-for
[::internals/on-ws-open]
(is (get-in (db-instance) [:ws :ready?]))
;; create a subscription and wait for it to be sent
(dispatch [::re-graph/subscribe subscription-params])
(wait-for [::re-graph/subscribe]
(on-close)
(wait-for
[::internals/on-ws-close]
(is (false? (get-in (db-instance) [:ws :ready?])))
(testing "websocket is reconnected"
(wait-for [::internals/on-ws-open]
(is (get-in (db-instance) [:ws :ready?]))
(testing "subscriptions are resumed"
(wait-for
[(fn [event]
(= [::re-graph/subscribe subscription-params] event))]
(is (= 4 (count @sent-msgs)))))))))))))))
(deftest websocket-reconnection-test
(run-websocket-reconnection-test nil))
(deftest named-websocket-reconnection-test
(run-websocket-reconnection-test :service-a))
(defn- run-websocket-query-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-ws-message (on-ws-message (or instance-id default-instance-id))]
(with-redefs [internals/generate-id (constantly "random-id")]
(run-test-sync
(install-websocket-stub!)
(init instance-id {:ws {:url "ws"
:connection-init-payload nil}})
(let [expected-query-payload {:id "random-id"
:type "start"
:payload {:query "query { things { id } }"
:variables {:some "variable"}}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(testing "Queries can be made"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-query-payload
payload))
(on-ws-message (data->message {:type "data"
:id (:id payload)
:payload expected-response-payload}))))
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(testing "responses are sent to the callback"
(is (= expected-response-payload
(::thing @app-db))))
(on-ws-message (data->message {:type "complete"
:id "random-id"}))
(testing "the callback is removed afterwards"
(is (nil? (get-in (db-instance) [:subscriptions "random-id"]))))))))))
(deftest websocket-query-test
(run-websocket-query-test nil))
(deftest named-websocket-query-test
(run-websocket-query-test :service-a))
(deftest prefer-http-query-test
(run-test-sync
(install-websocket-stub!)
(re-frame/dispatch [::re-graph/init {:ws {:url "ws"
:connection-init-payload nil
:supported-operations #{:subscribe}}
:http {:url "-ql"}}])
(testing "Queries are sent via http because the websocket doesn't support them"
(let [http-called? (atom false)]
(re-frame/reg-fx
::internals/send-http
(fn [_]
(reset! http-called? true)))
(re-frame/dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(is @http-called?)))))
(defn- dispatch-response [event payload]
(re-frame/dispatch [::internals/http-complete (assoc event :response payload)]))
(defn- run-http-query-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)]
(run-test-sync
(let [expected-http-url "-ql"]
(init instance-id {:http {:url expected-http-url}
:ws nil})
(let [expected-query-payload {:query "query { things { id } }"
:variables {:some "variable"}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(testing "Queries can be made"
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [url payload event]}]
(is (= expected-query-payload
payload))
(is (= expected-http-url url))
(dispatch-response event expected-response-payload)))
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(testing "responses are sent to the callback"
(is (= expected-response-payload
(::thing @app-db)))))
(testing "In flight queries are deduplicated"
(let [id :abc-123]
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [event]}]
(is (= id (:id event)))))
(dispatch [::re-graph/query {:id id
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(re-frame/reg-fx
::internals/send-http
(fn [_]
(is false "Should not have sent an http request for a duplicate in-flight query id")))
(dispatch [::re-graph/query {:id id
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}]))))))))
(deftest http-query-test
(run-http-query-test nil))
(deftest named-http-query-test
(run-http-query-test :service-a))
;; Exercises HTTP error handling for queries on the given re-graph instance.
;; A stubbed ::internals/send-http effect replies with whatever is in
;; mock-response; when :error-code is not :no-error the HTTP status is folded
;; into each GraphQL error's :extensions via insert-http-status.
;; NOTE: effect/event registration must precede the dispatches below — the
;; ordering inside run-test-sync is load-bearing.
(defn- run-http-query-error-test [instance-id]
  (let [dispatch (partial dispatch-to-instance instance-id)]
    (run-test-sync
     (let [mock-response (atom {})
           query "{ things { id } }"
           variables {:some "variable"}]
       (init instance-id {:http {:url "-ql"}
                          :ws nil})
       ;; Stub the HTTP effect: route the mocked body (optionally augmented
       ;; with the HTTP status) straight back through the completion event.
       (re-frame/reg-fx
        ::internals/send-http
        (fn [fx-args]
          (let [response @mock-response
                {:keys [status error-code]} response]
            (dispatch-response (:event fx-args) (if (= :no-error error-code)
                                                  (:body response)
                                                  (insert-http-status (:body response) status))))))
       (re-frame/reg-event-db
        ::on-thing
        [re-frame/unwrap]
        (fn [db {:keys [response]}]
          (assoc db ::thing response)))
       ;; A non-map body cannot carry GraphQL errors, so a synthetic error
       ;; with the HTTP status is expected.
       (testing "Query error with invalid graphql response (string body)"
         (reset! mock-response {:status 403
                                :body "Access Token is invalid"
                                :error-code :http-error})
         (let [expected-response-payload {:errors [{:message "The HTTP call failed.",
                                                    :extensions {:status 403}}]}]
           (dispatch [::re-graph/query {:query query
                                        :variables variables
                                        :callback [::on-thing]}])
           (is (= expected-response-payload
                  (::thing @app-db)))))
       ;; A map body with nil :errors is likewise treated as invalid.
       (testing "Query error with invalid graphql response (map body)"
         (reset! mock-response {:status 403
                                :body {:data nil
                                       :errors nil}
                                :error-code :http-error})
         (let [expected-response-payload {:data nil
                                          :errors [{:message "The HTTP call failed.",
                                                    :extensions {:status 403}}]}]
           (dispatch [::re-graph/query {:query query
                                        :variables variables
                                        :callback [::on-thing]}])
           (is (= expected-response-payload
                  (::thing @app-db)))))
       ;; Valid GraphQL errors keep their payload; the HTTP status is added
       ;; to each error's :extensions.
       (testing "Query error with valid graphql error response"
         (reset! mock-response {:status 400
                                :body {:errors [{:message "Bad field \"bad1\".",
                                                 :locations [{:line 2, :column 0}]}
                                                {:message "Unknown argument \"limit\"."
                                                 :locations [{:line 2, :column 0}]
                                                 :extensions {:errcode 999}}]}
                                :error-code :http-error})
         (let [expected-response-payload {:errors [{:message "Bad field \"bad1\"."
                                                    :locations [{:line 2, :column 0}]
                                                    :extensions {:status 400}}
                                                   {:message "Unknown argument \"limit\"."
                                                    :locations [{:line 2, :column 0}]
                                                    :extensions {:errcode 999
                                                                 :status 400}}]}]
           (dispatch [::re-graph/query {:query query
                                        :variables variables
                                        :callback [::on-thing]}])
           (is (= expected-response-payload
                  (::thing @app-db)))))
       ;; An error that already carries :status in :extensions keeps it
       ;; (500 is not overwritten by the transport-level 400).
       (testing "Query error with valid graphql error response, insert status only if not present"
         (reset! mock-response {:status 400
                                :body {:errors [{:message "Bad field \"bad1\".",
                                                 :locations [{:line 2, :column 0}]}
                                                {:message "Unknown argument \"limit\"."
                                                 :locations [{:line 2, :column 0}]
                                                 :extensions {:errcode 999
                                                              :status 500}}]}
                                :error-code :http-error})
         (let [expected-response-payload {:errors [{:message "Bad field \"bad1\"."
                                                    :locations [{:line 2, :column 0}]
                                                    :extensions {:status 400}}
                                                   {:message "Unknown argument \"limit\"."
                                                    :locations [{:line 2, :column 0}]
                                                    :extensions {:errcode 999
                                                                 :status 500}}]}]
           (dispatch [::re-graph/query {:query query
                                        :variables variables
                                        :callback [::on-thing]}])
           (is (= expected-response-payload
                  (::thing @app-db)))))
       ;; Happy path: successful responses pass through unmodified.
       (testing "No query error, body unchanged"
         (let [expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
           (reset! mock-response {:status 200
                                  :body expected-response-payload
                                  :error-code :no-error})
           (dispatch [::re-graph/query {:query query
                                        :variables variables
                                        :callback [::on-thing]}])
           (is (= expected-response-payload
                  (::thing @app-db)))))))))
;; HTTP error-handling suite against the default (nil) instance.
(deftest http-query-error-test
  (run-http-query-error-test nil))
;; Same suite against a named instance.
(deftest named-http-query-error-test
  (run-http-query-error-test :service-a))
;; JVM-only: drives the real clj HTTP send path by redefining hato/post and
;; clj-http/post with a stub that immediately invokes the async `respond`
;; callback with a 400 GraphQL error body, then checks the error payload
;; reaches the callback unchanged.
#?(:clj
   (deftest clj-http-query-error-test
     (let [instance-id nil
           dispatch (partial dispatch-to-instance instance-id)]
       (run-test-sync
        (let [query "{ things { id } }"
              variables {:some "variable"}
              http-url "-ql"
              ;; Matches the (url opts respond raise) async calling
              ;; convention of both hato and clj-http.
              http-server-response (fn [_url & [_opts respond _raise]]
                                     (respond {:status 400, :body {:errors [{:message "OK"
                                                                             :extensions {:status 404}}]}}))]
          (init instance-id {:http {:url http-url}
                             :ws nil})
          (re-frame/reg-event-db
           ::on-thing
           [re-frame/unwrap]
           (fn [db {:keys [response]}]
             (assoc db ::thing response)))
          (testing "http error returns correct response"
            (with-redefs [hato/post http-server-response
                          clj-http/post http-server-response]
              (let [expected-response-payload {:errors [{:message "OK",
                                                         :extensions {:status 404}}]}]
                (dispatch [::re-graph/query {:query query
                                             :variables variables
                                             :callback [::on-thing]}])
                (is (= expected-response-payload
                       (::thing @app-db)))))))))))
;; Exercises HTTP mutations on the given re-graph instance: verifies the
;; outgoing payload is prefixed with "mutation ", that responses reach the
;; callback event, and that a second mutation with an in-flight :id is
;; deduplicated (no second HTTP effect fires).
(defn- run-http-mutation-test [instance-id]
  (let [dispatch (partial dispatch-to-instance instance-id)]
    (run-test-sync
     (let [expected-http-url "-ql"]
       (init instance-id {:http {:url expected-http-url}
                          :ws nil})
       (let [mutation (str "signin($login:String!,$password:String!){"
                           "signin(login:$login,password:$password){id}}")
             variables {:login "alice" :password "secret"}
             expected-query-payload {:query (str "mutation " mutation)
                                     :variables variables}
             expected-response-payload {:data {:id 1}}]
         (testing "Mutations can be made"
           ;; Stub the HTTP effect: assert the request then echo the
           ;; canned response back through the completion event.
           (re-frame/reg-fx
            ::internals/send-http
            (fn [{:keys [event url payload]}]
              (is (= expected-query-payload payload))
              (is (= expected-http-url url))
              (dispatch-response event expected-response-payload)))
           (re-frame/reg-event-db
            ::on-mutate
            [re-frame/unwrap]
            (fn [db {:keys [response]}]
              (assoc db ::mutation response)))
           (dispatch [::re-graph/mutate {:query mutation
                                         :variables variables
                                         :callback [::on-mutate]}])
           (testing "responses are sent to the callback"
             (is (= expected-response-payload
                    (::mutation @app-db)))))
         (testing "In flight mutations are deduplicated"
           (let [id :abc-123]
             ;; First send with an explicit :id goes out...
             (re-frame/reg-fx
              ::internals/send-http
              (fn [{:keys [event]}]
                (is (= id (:id event)))))
             (dispatch [::re-graph/mutate {:id id
                                           :query mutation
                                           :variables variables
                                           :callback [::on-mutate]}])
             ;; ...the HTTP effect was never completed, so a repeat with
             ;; the same :id must not trigger another request.
             (re-frame/reg-fx
              ::internals/send-http
              (fn [_]
                (is false "Should not have sent an http request for a duplicate in-flight mutation id")))
             (dispatch [::re-graph/mutate {:id id
                                           :query mutation
                                           :variables variables
                                           :callback [::on-mutate]}]))))))))
;; HTTP mutation suite against the default (nil) instance.
(deftest http-mutation-test
  (run-http-mutation-test nil))
;; Same suite against a named instance.
(deftest named-http-mutation-test
  (run-http-mutation-test :service-a))
;; Verifies that a custom :impl function supplied at init is invoked to build
;; the HTTP request map, and that the result is forwarded to the send-http
;; effect for both queries and mutations.
(defn- run-http-parameters-test [instance-id]
  (let [dispatch (partial dispatch-to-instance instance-id)]
    (run-test-sync
     (let [expected-http-url "-ql"
           expected-request {:with-credentials? false}]
       (init instance-id {:http {:url expected-http-url
                                 :impl (constantly expected-request)}
                          :ws nil})
       (testing "Request can be specified"
         (re-frame/reg-fx
          ::internals/send-http
          (fn [{:keys [request]}]
            (is (= expected-request
                   request))))
         (dispatch [::re-graph/query {:query "{ things { id } }"
                                      :variables {:some "variable"}
                                      :callback [::on-thing]}])
         (dispatch [::re-graph/mutate {:query "don't care"
                                       :variables {:some "variable"}
                                       :callback [::on-thing]}]))))))
;; HTTP parameters suite against the default (nil) instance.
(deftest http-parameters-test
  (run-http-parameters-test nil))
;; Same suite against a named instance.
(deftest named-http-parameters-test
  (run-http-parameters-test :service-a))
;; Verifies that supplying distinct explicit :id values on a query and a
;; mutation still produces one HTTP request each (two effect invocations
;; in total — no spurious dedup across different ids).
(deftest http-query-with-id-test
  (let [dispatch (partial dispatch-to-instance nil)]
    (run-test-sync
     (let [expected-http-url "-ql"
           call-count (atom 0)]
       (init nil {:http {:url expected-http-url}
                  :ws nil})
       (testing "Request is completed"
         (re-frame/reg-fx
          ::internals/send-http
          (fn [{:keys [request]}]
            (is request)
            (swap! call-count inc)))
         (dispatch [::re-graph/query {:id "query-1"
                                      :query "{ things { id } }"
                                      :variables {:some "variable"}
                                      :callback [::on-thing]}])
         (dispatch [::re-graph/mutate {:id "mutation-1"
                                       :query "don't care"
                                       :variables {:some "variable"}
                                       :callback [::on-thing]}])
         (is (= 2 @call-count)))))))
(defn- call-instance
  "Adapts the single-map-argument function `f` to a fixed re-graph instance.
   Returns a wrapper that, given an opts map, invokes `f` with :instance-id
   merged in when `instance-id` is non-nil, and with the opts untouched
   otherwise."
  [instance-id f]
  (fn [opts]
    (let [opts' (cond-> opts
                  instance-id (assoc :instance-id instance-id))]
      (f opts'))))
;; Exercises the plain-function API (re-graph/init, subscribe, unsubscribe,
;; query, mutate) instead of re-frame event dispatch, for both the websocket
;; and HTTP transports. Callbacks here are functions rather than re-frame
;; event vectors. Effect-handler registration must precede each call —
;; ordering inside run-test-sync is load-bearing.
(defn- run-non-re-frame-test [instance-id]
  (let [db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
        on-ws-message (on-ws-message (or instance-id default-instance-id))
        ;; Wrap each public fn so the instance-id is threaded through opts.
        init (call-instance instance-id re-graph/init)
        subscribe (call-instance instance-id re-graph/subscribe)
        unsubscribe (call-instance instance-id re-graph/unsubscribe)
        query (call-instance instance-id re-graph/query)
        mutate (call-instance instance-id re-graph/mutate)]
    (testing "can call normal functions instead of needing re-frame"
      (testing "using a websocket"
        (run-test-sync
         (install-websocket-stub!)
         (init {:ws {:url "ws"
                     :connection-init-payload nil}})
         (let [expected-subscription-payload {:id "my-sub"
                                              :type "start"
                                              :payload {:query "subscription { things { id } }"
                                                        :variables {:some "variable"}}}
               expected-unsubscription-payload {:id "my-sub"
                                                :type "stop"}
               expected-response-payload {:data {:things [{:id 1} {:id 2}]}}
               callback-called? (atom false)
               callback-fn (fn [payload]
                             (reset! callback-called? true)
                             (is (= expected-response-payload payload)))]
           (re-frame/reg-fx
            ::internals/send-ws
            (fn [[ws payload]]
              (is (= ::websocket-connection ws))
              (is (= expected-subscription-payload
                     payload))))
           (subscribe {:id :my-sub
                       :query "{ things { id } }"
                       :variables {:some "variable"}
                       :callback callback-fn})
           (is (get-in (db-instance) [:subscriptions "my-sub" :callback]))
           (testing "messages from the WS are sent to the callback-fn"
             (on-ws-message (data->message {:type "data"
                                            :id "my-sub"
                                            :payload expected-response-payload}))
             (is @callback-called?))
           (testing "and unregistered"
             ;; Re-register the WS effect to expect the stop message.
             (re-frame/reg-fx
              ::internals/send-ws
              (fn [[ws payload]]
                (is (= ::websocket-connection ws))
                (is (= expected-unsubscription-payload
                       payload))))
             (unsubscribe {:id :my-sub})
             (is (nil? (get-in (db-instance) [:subscriptions "my-sub"])))))))
      (testing "using http"
        (testing "queries"
          (run-test-sync
           (let [expected-http-url "-ql"
                 expected-query-payload {:query "query { things { id } }"
                                         :variables {:some "variable"}}
                 expected-response-payload {:data {:things [{:id 1} {:id 2}]}}
                 callback-called? (atom false)
                 callback-fn (fn [payload]
                               (reset! callback-called? true)
                               (is (= expected-response-payload payload)))]
             (init {:http {:url expected-http-url}
                    :ws nil})
             (re-frame/reg-fx
              ::internals/send-http
              (fn [{:keys [url payload event]}]
                (is (= expected-query-payload
                       payload))
                (is (= expected-http-url url))
                (dispatch-response event expected-response-payload)))
             (query {:query "{ things { id } }"
                     :variables {:some "variable"}
                     :callback callback-fn})
             (testing "responses are sent to the callback"
               (is @callback-called?)))))
        (testing "mutations"
          (run-test-sync
           (let [expected-http-url "-ql"
                 expected-query-payload {:query "mutation { things { id } }"
                                         :variables {:some "variable"}}
                 expected-response-payload {:data {:things [{:id 1} {:id 2}]}}
                 callback-called? (atom false)
                 callback-fn (fn [payload]
                               (reset! callback-called? true)
                               (is (= expected-response-payload payload)))]
             (init {:http {:url expected-http-url}
                    :ws nil})
             (re-frame/reg-fx
              ::internals/send-http
              (fn [{:keys [url payload event]}]
                (is (= expected-query-payload
                       payload))
                (is (= expected-http-url url))
                (dispatch-response event expected-response-payload)))
             (mutate {:query "{ things { id } }"
                      :variables {:some "variable"}
                      :callback callback-fn})
             (testing "responses are sent to the callback"
               (is @callback-called?)))))))))
;; Plain-function API suite against the default (nil) instance.
(deftest non-re-frame-test
  (run-non-re-frame-test nil))
;; Same suite against a named instance.
(deftest named-non-re-frame-test
  (run-non-re-frame-test :service-a))
;; Venia (query-builder) emits strings already prefixed with "query"; this
;; checks re-graph does not double-prefix: the outgoing payload equals the
;; supplied "query { ... }" string verbatim.
(deftest venia-compatibility-test
  (run-test-sync
   (let [expected-http-url "-ql"]
     (re-graph/init {:http {:url expected-http-url}
                     :ws nil})
     (let [expected-query-payload {:query "query { things { id } }"
                                   :variables {:some "variable"}}
           expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
       (testing "Ignores 'query' at the start of the query"
         (re-frame/reg-fx
          ::internals/send-http
          (fn [{:keys [url payload event]}]
            (is (= expected-query-payload
                   payload))
            (is (= expected-http-url url))
            (dispatch-response event expected-response-payload)))
         (re-frame/reg-event-db
          ::on-thing
          [re-frame/unwrap]
          (fn [db {:keys [response]}]
            (assoc db ::thing response)))
         (re-frame/dispatch [::re-graph/query {:query "query { things { id } }"
                                               :variables {:some "variable"}
                                               :callback [::on-thing]}])
         (testing "responses are sent to the callback"
           (is (= expected-response-payload
                  (::thing @app-db)))))))))
;; Verifies two named re-graph instances (:service-a, :service-b) keep
;; independent websocket connections, subscription registries, and message
;; routing: subscriptions, dedup, incoming data dispatch, and unsubscription
;; are all checked per-instance.
(deftest multi-instance-test
  (run-test-sync
   ;; Stub connect-ws so each instance "opens" a connection keyword derived
   ;; from its instance-id (e.g. :service-a-connection).
   (re-frame/reg-fx
    ::internals/connect-ws
    (fn [[instance-id _options]]
      ((on-open instance-id (keyword (str (name instance-id) "-connection"))))))
   (init :service-a {:ws {:url "ws"
                          :connection-init-payload nil}})
   (init :service-b {:ws {:url "ws"
                          :connection-init-payload nil}})
   (let [expected-subscription-payload-a {:id "a-sub"
                                          :type "start"
                                          :payload {:query "subscription { things { a } }"
                                                    :variables {:some "a"}}}
         expected-unsubscription-payload-a {:id "a-sub"
                                            :type "stop"}
         expected-subscription-payload-b {:id "b-sub"
                                          :type "start"
                                          :payload {:query "subscription { things { b } }"
                                                    :variables {:some "b"}}}
         expected-unsubscription-payload-b {:id "b-sub"
                                            :type "stop"}]
     (testing "Subscriptions can be registered"
       ;; The send-ws stub asserts each payload went out on the connection
       ;; belonging to its own instance.
       (re-frame/reg-fx
        ::internals/send-ws
        (fn [[ws payload]]
          (condp = ws
            :service-a-connection
            (is (= expected-subscription-payload-a payload))
            :service-b-connection
            (is (= expected-subscription-payload-b payload)))))
       (re-frame/dispatch [::re-graph/subscribe {:instance-id :service-a
                                                 :id :a-sub
                                                 :query "{ things { a } }"
                                                 :variables {:some "a"}
                                                 :callback [::on-a-thing]}])
       (re-frame/dispatch [::re-graph/subscribe {:instance-id :service-b
                                                 :id :b-sub
                                                 :query "{ things { b } }"
                                                 :variables {:some "b"}
                                                 :callback [::on-b-thing]}])
       (is (= [::on-a-thing]
              (get-in @app-db [:re-graph :service-a :subscriptions "a-sub" :callback])))
       (is (= [::on-b-thing]
              (get-in @app-db [:re-graph :service-b :subscriptions "b-sub" :callback])))
       (testing "and deduplicated"
         (re-frame/reg-fx
          ::internals/send-ws
          (fn [_]
            (is false "Should not have sent a websocket message for an existing subscription")))
         (re-frame/dispatch [::re-graph/subscribe {:instance-id :service-a
                                                   :id :a-sub
                                                   :query "{ things { a } }"
                                                   :variables {:some "a"}
                                                   :callback [::on-a-thing]}])
         (re-frame/dispatch [::re-graph/subscribe {:instance-id :service-b
                                                   :id :b-sub
                                                   :query "{ things { b } }"
                                                   :variables {:some "b"}
                                                   :callback [::on-b-thing]}]))
       (testing "messages from the WS are sent to the callback"
         (let [expected-response-payload-a {:data {:things [{:a 1} {:a 2}]}}
               expected-response-payload-b {:data {:things [{:b 1}]}}]
           (re-frame/reg-event-db
            ::on-a-thing
            [re-frame/unwrap]
            (fn [db {:keys [response]}]
              (assoc db ::a-thing response)))
           (re-frame/reg-event-db
            ::on-b-thing
            [re-frame/unwrap]
            (fn [db {:keys [response]}]
              (assoc db ::b-thing response)))
           ;; Feed a message into each instance's on-ws-message handler and
           ;; verify each lands only in its own callback.
           ((on-ws-message :service-a) (data->message {:type "data"
                                                       :id "a-sub"
                                                       :payload expected-response-payload-a}))
           ((on-ws-message :service-b) (data->message {:type "data"
                                                       :id "b-sub"
                                                       :payload expected-response-payload-b}))
           (is (= expected-response-payload-a
                  (::a-thing @app-db)))
           (is (= expected-response-payload-b
                  (::b-thing @app-db)))))
       (testing "and unregistered"
         (re-frame/reg-fx
          ::internals/send-ws
          (fn [[ws payload]]
            (condp = ws
              :service-a-connection
              (is (= expected-unsubscription-payload-a payload))
              :service-b-connection
              (is (= expected-unsubscription-payload-b payload)))))
         (re-frame/dispatch [::re-graph/unsubscribe {:instance-id :service-a :id :a-sub}])
         (re-frame/dispatch [::re-graph/unsubscribe {:instance-id :service-b :id :b-sub}])
         (is (nil? (get-in @app-db [:re-graph :service-a :subscriptions "a-sub"])))
         (is (nil? (get-in @app-db [:re-graph :service-b :subscriptions "b-sub"]))))))))
;; Verifies ::re-graph/re-init refreshes the websocket connection-init
;; payload: init sends connection_init with the first auth token, re-init
;; causes a fresh connection_init with the updated token.
;; Fix: dropped the stray `[]` after the test name — clojure.test/deftest
;; takes no argument vector, so the literal was a vestigial defn-style
;; arglist that was silently evaluated and discarded.
(deftest reinit-ws-test
  (run-test-sync
   (install-websocket-stub!)
   (testing "websocket connection payload is sent"
     (let [last-ws-message (atom nil)]
       ;; Capture every outgoing WS message so assertions can inspect the
       ;; most recent one.
       (re-frame/reg-fx
        ::internals/send-ws
        (fn [[ws payload]]
          (is (= ::websocket-connection ws))
          (reset! last-ws-message payload)))
       (re-frame/dispatch [::re-graph/init {:ws {:url "ws"
                                                 :connection-init-payload {:auth-token 123}}}])
       (is (= {:type "connection_init"
               :payload {:auth-token 123}}
              @last-ws-message))
       (testing "updated when re-inited"
         (re-frame/dispatch [::re-graph/re-init {:ws {:connection-init-payload {:auth-token 234}}}])
         (is (= {:type "connection_init"
                 :payload {:auth-token 234}}
                @last-ws-message)))))))
;; Verifies ::re-graph/re-init refreshes the HTTP request options: queries
;; after init carry the first Authorization header, and queries after
;; re-init carry the replacement header.
;; Fix: dropped the stray `[]` after the test name — clojure.test/deftest
;; takes no argument vector, so the literal was a vestigial defn-style
;; arglist that was silently evaluated and discarded.
(deftest re-init-http-test
  (run-test-sync
   (testing "http headers are sent"
     (let [last-http-message (atom nil)]
       ;; Record the request map of each outgoing HTTP call and complete it
       ;; immediately with an empty response.
       (re-frame/reg-fx
        ::internals/send-http
        (fn [{:keys [event request]}]
          (reset! last-http-message request)
          (dispatch-response event {})))
       (re-frame/dispatch [::re-graph/init {:http {:url "-ql"
                                                   :impl {:headers {"Authorization" 123}}}
                                            :ws nil}])
       (re-frame/dispatch [::re-graph/query {:query "{ things { id } }"
                                             :variables {:some "variable"}
                                             :callback [::on-thing]}])
       (is (= {:headers {"Authorization" 123}}
              @last-http-message))
       (testing "and can be updated"
         (re-frame/dispatch [::re-graph/re-init {:http {:impl {:headers {"Authorization" 234}}}}])
         (re-frame/dispatch [::re-graph/query {:query "{ things { id } }"
                                               :variables {:some "variable"}
                                               :callback [::on-thing]}])
         (is (= {:headers {"Authorization" 234}}
                @last-http-message)))))))
| null | https://raw.githubusercontent.com/oliyh/re-graph/abffdc040461ffe1cac540d2dfce987127707afe/test/re_graph/core_test.cljc | clojure | create a subscription and wait for it to be sent | (ns re-graph.core-test
(:require [re-graph.core :as re-graph]
[re-graph.internals :as internals :refer [default-instance-id]]
[re-frame.core :as re-frame]
[re-frame.db :refer [app-db]]
[day8.re-frame.test :refer [run-test-sync run-test-async wait-for]
:refer-macros [run-test-sync run-test-async wait-for]]
[clojure.test :refer [deftest is testing]
:refer-macros [deftest is testing]]
[clojure.spec.alpha :as s]
[clojure.spec.test.alpha :as stest]
#?@(:clj [[cheshire.core :as json]
[hato.client :as hato]
[clj-http.client :as clj-http]])))
(stest/instrument)
(s/check-asserts true)
(def on-ws-message @#'internals/on-ws-message)
(def on-open @#'internals/on-open)
(def on-close @#'internals/on-close)
(def insert-http-status @#'internals/insert-http-status)
(defn- data->message [d]
#?(:cljs (clj->js {:data (js/JSON.stringify (clj->js d))})
:clj (json/encode d)))
(defn- install-websocket-stub! []
(re-frame/reg-fx
::internals/connect-ws
(fn [[instance-id _options]]
((on-open instance-id ::websocket-connection)))))
(defn- dispatch-to-instance [instance-id [event opts]]
(re-frame/dispatch [event (if (nil? instance-id)
opts
(assoc opts :instance-id instance-id))]))
(defn- init [instance-id opts]
(dispatch-to-instance instance-id [::re-graph/init opts]))
(defn- run-subscription-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-ws-message (on-ws-message (or instance-id default-instance-id))]
(run-test-sync
(install-websocket-stub!)
(init instance-id {:ws {:url "ws"
:connection-init-payload nil}})
(let [expected-subscription-payload {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}
expected-unsubscription-payload {:id "my-sub"
:type "stop"}]
(testing "Subscriptions can be registered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-subscription-payload
payload))))
(dispatch [::re-graph/subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(is (= [::on-thing]
(get-in (db-instance) [:subscriptions "my-sub" :callback])))
(testing "and deduplicated"
(re-frame/reg-fx
::internals/send-ws
(fn [_]
(is false "Should not have sent a websocket message for an existing subscription")))
(dispatch [::re-graph/subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}]))
(testing "messages from the WS are sent to the callback"
(let [expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(on-ws-message (data->message {:type "data"
:id "my-sub"
:payload expected-response-payload}))
(is (= expected-response-payload
(::thing @app-db)))))
(testing "errors from the WS are sent to the callback"
(let [expected-response-payload {:errors {:message "Something went wrong"}}]
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(on-ws-message (data->message {:type "error"
:id "my-sub"
:payload (:errors expected-response-payload)}))
(is (= expected-response-payload
(::thing @app-db)))))
(testing "and unregistered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-unsubscription-payload
payload))))
(dispatch [::re-graph/unsubscribe {:id :my-sub}])
(is (nil? (get-in (db-instance) [:subscriptions "my-sub"])))))))))
(deftest subscription-test
(run-subscription-test nil))
(deftest named-subscription-test
(run-subscription-test :service-a))
(defn- run-websocket-lifecycle-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-open (partial on-open (or instance-id default-instance-id))]
(run-test-sync
(re-frame/reg-fx
::internals/connect-ws
(constantly nil))
(let [init-payload {:token "abc"}
expected-subscription-payload {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}]
(init instance-id {:ws {:url "ws"
:connection-init-payload init-payload}})
(testing "messages are queued when websocket isn't ready"
(dispatch [::re-graph/subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(dispatch [::re-graph/query {:query "{ more_things { id } }"
:variables {:some "other-variable"}
:callback [::on-thing]}])
(is (= 2 (count (get-in (db-instance) [:ws :queue]))))
(testing "and sent when websocket opens"
(let [ws-messages (atom [])]
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(swap! ws-messages conj [ws payload])))
((on-open ::websocket-connection))
(testing "the connection init payload is sent first"
(is (= [::websocket-connection
{:type "connection_init"
:payload init-payload}]
(first @ws-messages))))
(is (= [::websocket-connection expected-subscription-payload]
(second @ws-messages)))
(is (= [::websocket-connection {:type "start",
:payload
{:query "query { more_things { id } }",
:variables {:some "other-variable"}}}]
((juxt first (comp #(dissoc % :id) second)) (last @ws-messages)))))
(is (empty? (get-in (db-instance) [:ws :queue]))))))
(testing "when re-graph is destroyed"
(testing "the subscriptions are cancelled"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (or (= {:id "my-sub" :type "stop"}
payload)
(= {:type "stop"}
(dissoc payload :id)))))))
(testing "the websocket is closed"
(re-frame/reg-fx
::internals/disconnect-ws
(fn [[ws]]
(is (= ::websocket-connection ws)))))
(dispatch [::re-graph/destroy {}])
(testing "the re-graph state is set to destroyed"
(is (:destroyed? (db-instance))))))))
(deftest websocket-lifecycle-test
(run-websocket-lifecycle-test nil))
(deftest named-websocket-lifecycle-test
(run-websocket-lifecycle-test :service-a))
(defn- run-websocket-reconnection-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-close (on-close (or instance-id default-instance-id))
sent-msgs (atom [])]
(run-test-async
(install-websocket-stub!)
(re-frame/reg-fx
:dispatch-later
(fn [[{:keys [dispatch]}]]
(re-frame/dispatch dispatch)))
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (or
(= "connection_init" (:type payload))
(= {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}
payload)))
(swap! sent-msgs conj payload)))
(testing "websocket reconnects when disconnected"
(init instance-id {:ws {:url "ws"
:connection-init-payload {:token "abc"}
:reconnect-timeout 0}})
(let [subscription-params {:instance-id (or instance-id default-instance-id)
:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}]
(wait-for
[::internals/on-ws-open]
(is (get-in (db-instance) [:ws :ready?]))
(dispatch [::re-graph/subscribe subscription-params])
(wait-for [::re-graph/subscribe]
(on-close)
(wait-for
[::internals/on-ws-close]
(is (false? (get-in (db-instance) [:ws :ready?])))
(testing "websocket is reconnected"
(wait-for [::internals/on-ws-open]
(is (get-in (db-instance) [:ws :ready?]))
(testing "subscriptions are resumed"
(wait-for
[(fn [event]
(= [::re-graph/subscribe subscription-params] event))]
(is (= 4 (count @sent-msgs)))))))))))))))
(deftest websocket-reconnection-test
(run-websocket-reconnection-test nil))
(deftest named-websocket-reconnection-test
(run-websocket-reconnection-test :service-a))
(defn- run-websocket-query-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)
db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-ws-message (on-ws-message (or instance-id default-instance-id))]
(with-redefs [internals/generate-id (constantly "random-id")]
(run-test-sync
(install-websocket-stub!)
(init instance-id {:ws {:url "ws"
:connection-init-payload nil}})
(let [expected-query-payload {:id "random-id"
:type "start"
:payload {:query "query { things { id } }"
:variables {:some "variable"}}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(testing "Queries can be made"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-query-payload
payload))
(on-ws-message (data->message {:type "data"
:id (:id payload)
:payload expected-response-payload}))))
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(testing "responses are sent to the callback"
(is (= expected-response-payload
(::thing @app-db))))
(on-ws-message (data->message {:type "complete"
:id "random-id"}))
(testing "the callback is removed afterwards"
(is (nil? (get-in (db-instance) [:subscriptions "random-id"]))))))))))
(deftest websocket-query-test
(run-websocket-query-test nil))
(deftest named-websocket-query-test
(run-websocket-query-test :service-a))
(deftest prefer-http-query-test
(run-test-sync
(install-websocket-stub!)
(re-frame/dispatch [::re-graph/init {:ws {:url "ws"
:connection-init-payload nil
:supported-operations #{:subscribe}}
:http {:url "-ql"}}])
(testing "Queries are sent via http because the websocket doesn't support them"
(let [http-called? (atom false)]
(re-frame/reg-fx
::internals/send-http
(fn [_]
(reset! http-called? true)))
(re-frame/dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(is @http-called?)))))
(defn- dispatch-response [event payload]
(re-frame/dispatch [::internals/http-complete (assoc event :response payload)]))
(defn- run-http-query-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)]
(run-test-sync
(let [expected-http-url "-ql"]
(init instance-id {:http {:url expected-http-url}
:ws nil})
(let [expected-query-payload {:query "query { things { id } }"
:variables {:some "variable"}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(testing "Queries can be made"
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [url payload event]}]
(is (= expected-query-payload
payload))
(is (= expected-http-url url))
(dispatch-response event expected-response-payload)))
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(testing "responses are sent to the callback"
(is (= expected-response-payload
(::thing @app-db)))))
(testing "In flight queries are deduplicated"
(let [id :abc-123]
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [event]}]
(is (= id (:id event)))))
(dispatch [::re-graph/query {:id id
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(re-frame/reg-fx
::internals/send-http
(fn [_]
(is false "Should not have sent an http request for a duplicate in-flight query id")))
(dispatch [::re-graph/query {:id id
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}]))))))))
(deftest http-query-test
(run-http-query-test nil))
(deftest named-http-query-test
(run-http-query-test :service-a))
(defn- run-http-query-error-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)]
(run-test-sync
(let [mock-response (atom {})
query "{ things { id } }"
variables {:some "variable"}]
(init instance-id {:http {:url "-ql"}
:ws nil})
(re-frame/reg-fx
::internals/send-http
(fn [fx-args]
(let [response @mock-response
{:keys [status error-code]} response]
(dispatch-response (:event fx-args) (if (= :no-error error-code)
(:body response)
(insert-http-status (:body response) status))))))
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(testing "Query error with invalid graphql response (string body)"
(reset! mock-response {:status 403
:body "Access Token is invalid"
:error-code :http-error})
(let [expected-response-payload {:errors [{:message "The HTTP call failed.",
:extensions {:status 403}}]}]
(dispatch [::re-graph/query {:query query
:variables variables
:callback [::on-thing]}])
(is (= expected-response-payload
(::thing @app-db)))))
(testing "Query error with invalid graphql response (map body)"
(reset! mock-response {:status 403
:body {:data nil
:errors nil}
:error-code :http-error})
(let [expected-response-payload {:data nil
:errors [{:message "The HTTP call failed.",
:extensions {:status 403}}]}]
(dispatch [::re-graph/query {:query query
:variables variables
:callback [::on-thing]}])
(is (= expected-response-payload
(::thing @app-db)))))
(testing "Query error with valid graphql error response"
(reset! mock-response {:status 400
:body {:errors [{:message "Bad field \"bad1\".",
:locations [{:line 2, :column 0}]}
{:message "Unknown argument \"limit\"."
:locations [{:line 2, :column 0}]
:extensions {:errcode 999}}]}
:error-code :http-error})
(let [expected-response-payload {:errors [{:message "Bad field \"bad1\"."
:locations [{:line 2, :column 0}]
:extensions {:status 400}}
{:message "Unknown argument \"limit\"."
:locations [{:line 2, :column 0}]
:extensions {:errcode 999
:status 400}}]}]
(dispatch [::re-graph/query {:query query
:variables variables
:callback [::on-thing]}])
(is (= expected-response-payload
(::thing @app-db)))))
(testing "Query error with valid graphql error response, insert status only if not present"
(reset! mock-response {:status 400
:body {:errors [{:message "Bad field \"bad1\".",
:locations [{:line 2, :column 0}]}
{:message "Unknown argument \"limit\"."
:locations [{:line 2, :column 0}]
:extensions {:errcode 999
:status 500}}]}
:error-code :http-error})
(let [expected-response-payload {:errors [{:message "Bad field \"bad1\"."
:locations [{:line 2, :column 0}]
:extensions {:status 400}}
{:message "Unknown argument \"limit\"."
:locations [{:line 2, :column 0}]
:extensions {:errcode 999
:status 500}}]}]
(dispatch [::re-graph/query {:query query
:variables variables
:callback [::on-thing]}])
(is (= expected-response-payload
(::thing @app-db)))))
(testing "No query error, body unchanged"
(let [expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(reset! mock-response {:status 200
:body expected-response-payload
:error-code :no-error})
(dispatch [::re-graph/query {:query query
:variables variables
:callback [::on-thing]}])
(is (= expected-response-payload
(::thing @app-db)))))))))
(deftest http-query-error-test
(run-http-query-error-test nil))
(deftest named-http-query-error-test
(run-http-query-error-test :service-a))
#?(:clj
(deftest clj-http-query-error-test
(let [instance-id nil
dispatch (partial dispatch-to-instance instance-id)]
(run-test-sync
(let [query "{ things { id } }"
variables {:some "variable"}
http-url "-ql"
http-server-response (fn [_url & [_opts respond _raise]]
(respond {:status 400, :body {:errors [{:message "OK"
:extensions {:status 404}}]}}))]
(init instance-id {:http {:url http-url}
:ws nil})
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(testing "http error returns correct response"
(with-redefs [hato/post http-server-response
clj-http/post http-server-response]
(let [expected-response-payload {:errors [{:message "OK",
:extensions {:status 404}}]}]
(dispatch [::re-graph/query {:query query
:variables variables
:callback [::on-thing]}])
(is (= expected-response-payload
(::thing @app-db)))))))))))
(defn- run-http-mutation-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)]
(run-test-sync
(let [expected-http-url "-ql"]
(init instance-id {:http {:url expected-http-url}
:ws nil})
(let [mutation (str "signin($login:String!,$password:String!){"
"signin(login:$login,password:$password){id}}")
variables {:login "alice" :password "secret"}
expected-query-payload {:query (str "mutation " mutation)
:variables variables}
expected-response-payload {:data {:id 1}}]
(testing "Mutations can be made"
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [event url payload]}]
(is (= expected-query-payload payload))
(is (= expected-http-url url))
(dispatch-response event expected-response-payload)))
(re-frame/reg-event-db
::on-mutate
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::mutation response)))
(dispatch [::re-graph/mutate {:query mutation
:variables variables
:callback [::on-mutate]}])
(testing "responses are sent to the callback"
(is (= expected-response-payload
(::mutation @app-db)))))
(testing "In flight mutations are deduplicated"
(let [id :abc-123]
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [event]}]
(is (= id (:id event)))))
(dispatch [::re-graph/mutate {:id id
:query mutation
:variables variables
:callback [::on-mutate]}])
(re-frame/reg-fx
::internals/send-http
(fn [_]
(is false "Should not have sent an http request for a duplicate in-flight mutation id")))
(dispatch [::re-graph/mutate {:id id
:query mutation
:variables variables
:callback [::on-mutate]}]))))))))
(deftest http-mutation-test
(run-http-mutation-test nil))
(deftest named-http-mutation-test
(run-http-mutation-test :service-a))
(defn- run-http-parameters-test [instance-id]
(let [dispatch (partial dispatch-to-instance instance-id)]
(run-test-sync
(let [expected-http-url "-ql"
expected-request {:with-credentials? false}]
(init instance-id {:http {:url expected-http-url
:impl (constantly expected-request)}
:ws nil})
(testing "Request can be specified"
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [request]}]
(is (= expected-request
request))))
(dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(dispatch [::re-graph/mutate {:query "don't care"
:variables {:some "variable"}
:callback [::on-thing]}]))))))
(deftest http-parameters-test
(run-http-parameters-test nil))
(deftest named-http-parameters-test
(run-http-parameters-test :service-a))
(deftest http-query-with-id-test
(let [dispatch (partial dispatch-to-instance nil)]
(run-test-sync
(let [expected-http-url "-ql"
call-count (atom 0)]
(init nil {:http {:url expected-http-url}
:ws nil})
(testing "Request is completed"
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [request]}]
(is request)
(swap! call-count inc)))
(dispatch [::re-graph/query {:id "query-1"
:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(dispatch [::re-graph/mutate {:id "mutation-1"
:query "don't care"
:variables {:some "variable"}
:callback [::on-thing]}])
(is (= 2 @call-count)))))))
(defn- call-instance [instance-id f]
(fn [opts]
(f (if instance-id
(assoc opts :instance-id instance-id)
opts))))
(defn- run-non-re-frame-test [instance-id]
(let [db-instance #(get-in @app-db [:re-graph (or instance-id default-instance-id)])
on-ws-message (on-ws-message (or instance-id default-instance-id))
init (call-instance instance-id re-graph/init)
subscribe (call-instance instance-id re-graph/subscribe)
unsubscribe (call-instance instance-id re-graph/unsubscribe)
query (call-instance instance-id re-graph/query)
mutate (call-instance instance-id re-graph/mutate)]
(testing "can call normal functions instead of needing re-frame"
(testing "using a websocket"
(run-test-sync
(install-websocket-stub!)
(init {:ws {:url "ws"
:connection-init-payload nil}})
(let [expected-subscription-payload {:id "my-sub"
:type "start"
:payload {:query "subscription { things { id } }"
:variables {:some "variable"}}}
expected-unsubscription-payload {:id "my-sub"
:type "stop"}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}
callback-called? (atom false)
callback-fn (fn [payload]
(reset! callback-called? true)
(is (= expected-response-payload payload)))]
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-subscription-payload
payload))))
(subscribe {:id :my-sub
:query "{ things { id } }"
:variables {:some "variable"}
:callback callback-fn})
(is (get-in (db-instance) [:subscriptions "my-sub" :callback]))
(testing "messages from the WS are sent to the callback-fn"
(on-ws-message (data->message {:type "data"
:id "my-sub"
:payload expected-response-payload}))
(is @callback-called?))
(testing "and unregistered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(is (= expected-unsubscription-payload
payload))))
(unsubscribe {:id :my-sub})
(is (nil? (get-in (db-instance) [:subscriptions "my-sub"])))))))
(testing "using http"
(testing "queries"
(run-test-sync
(let [expected-http-url "-ql"
expected-query-payload {:query "query { things { id } }"
:variables {:some "variable"}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}
callback-called? (atom false)
callback-fn (fn [payload]
(reset! callback-called? true)
(is (= expected-response-payload payload)))]
(init {:http {:url expected-http-url}
:ws nil})
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [url payload event]}]
(is (= expected-query-payload
payload))
(is (= expected-http-url url))
(dispatch-response event expected-response-payload)))
(query {:query "{ things { id } }"
:variables {:some "variable"}
:callback callback-fn})
(testing "responses are sent to the callback"
(is @callback-called?)))))
(testing "mutations"
(run-test-sync
(let [expected-http-url "-ql"
expected-query-payload {:query "mutation { things { id } }"
:variables {:some "variable"}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}
callback-called? (atom false)
callback-fn (fn [payload]
(reset! callback-called? true)
(is (= expected-response-payload payload)))]
(init {:http {:url expected-http-url}
:ws nil})
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [url payload event]}]
(is (= expected-query-payload
payload))
(is (= expected-http-url url))
(dispatch-response event expected-response-payload)))
(mutate {:query "{ things { id } }"
:variables {:some "variable"}
:callback callback-fn})
(testing "responses are sent to the callback"
(is @callback-called?)))))))))
(deftest non-re-frame-test
(run-non-re-frame-test nil))
(deftest named-non-re-frame-test
(run-non-re-frame-test :service-a))
(deftest venia-compatibility-test
(run-test-sync
(let [expected-http-url "-ql"]
(re-graph/init {:http {:url expected-http-url}
:ws nil})
(let [expected-query-payload {:query "query { things { id } }"
:variables {:some "variable"}}
expected-response-payload {:data {:things [{:id 1} {:id 2}]}}]
(testing "Ignores 'query' at the start of the query"
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [url payload event]}]
(is (= expected-query-payload
payload))
(is (= expected-http-url url))
(dispatch-response event expected-response-payload)))
(re-frame/reg-event-db
::on-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::thing response)))
(re-frame/dispatch [::re-graph/query {:query "query { things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(testing "responses are sent to the callback"
(is (= expected-response-payload
(::thing @app-db)))))))))
(deftest multi-instance-test
(run-test-sync
(re-frame/reg-fx
::internals/connect-ws
(fn [[instance-id _options]]
((on-open instance-id (keyword (str (name instance-id) "-connection"))))))
(init :service-a {:ws {:url "ws"
:connection-init-payload nil}})
(init :service-b {:ws {:url "ws"
:connection-init-payload nil}})
(let [expected-subscription-payload-a {:id "a-sub"
:type "start"
:payload {:query "subscription { things { a } }"
:variables {:some "a"}}}
expected-unsubscription-payload-a {:id "a-sub"
:type "stop"}
expected-subscription-payload-b {:id "b-sub"
:type "start"
:payload {:query "subscription { things { b } }"
:variables {:some "b"}}}
expected-unsubscription-payload-b {:id "b-sub"
:type "stop"}]
(testing "Subscriptions can be registered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(condp = ws
:service-a-connection
(is (= expected-subscription-payload-a payload))
:service-b-connection
(is (= expected-subscription-payload-b payload)))))
(re-frame/dispatch [::re-graph/subscribe {:instance-id :service-a
:id :a-sub
:query "{ things { a } }"
:variables {:some "a"}
:callback [::on-a-thing]}])
(re-frame/dispatch [::re-graph/subscribe {:instance-id :service-b
:id :b-sub
:query "{ things { b } }"
:variables {:some "b"}
:callback [::on-b-thing]}])
(is (= [::on-a-thing]
(get-in @app-db [:re-graph :service-a :subscriptions "a-sub" :callback])))
(is (= [::on-b-thing]
(get-in @app-db [:re-graph :service-b :subscriptions "b-sub" :callback])))
(testing "and deduplicated"
(re-frame/reg-fx
::internals/send-ws
(fn [_]
(is false "Should not have sent a websocket message for an existing subscription")))
(re-frame/dispatch [::re-graph/subscribe {:instance-id :service-a
:id :a-sub
:query "{ things { a } }"
:variables {:some "a"}
:callback [::on-a-thing]}])
(re-frame/dispatch [::re-graph/subscribe {:instance-id :service-b
:id :b-sub
:query "{ things { b } }"
:variables {:some "b"}
:callback [::on-b-thing]}]))
(testing "messages from the WS are sent to the callback"
(let [expected-response-payload-a {:data {:things [{:a 1} {:a 2}]}}
expected-response-payload-b {:data {:things [{:b 1}]}}]
(re-frame/reg-event-db
::on-a-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::a-thing response)))
(re-frame/reg-event-db
::on-b-thing
[re-frame/unwrap]
(fn [db {:keys [response]}]
(assoc db ::b-thing response)))
((on-ws-message :service-a) (data->message {:type "data"
:id "a-sub"
:payload expected-response-payload-a}))
((on-ws-message :service-b) (data->message {:type "data"
:id "b-sub"
:payload expected-response-payload-b}))
(is (= expected-response-payload-a
(::a-thing @app-db)))
(is (= expected-response-payload-b
(::b-thing @app-db)))))
(testing "and unregistered"
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(condp = ws
:service-a-connection
(is (= expected-unsubscription-payload-a payload))
:service-b-connection
(is (= expected-unsubscription-payload-b payload)))))
(re-frame/dispatch [::re-graph/unsubscribe {:instance-id :service-a :id :a-sub}])
(re-frame/dispatch [::re-graph/unsubscribe {:instance-id :service-b :id :b-sub}])
(is (nil? (get-in @app-db [:re-graph :service-a :subscriptions "a-sub"])))
(is (nil? (get-in @app-db [:re-graph :service-b :subscriptions "b-sub"]))))))))
(deftest reinit-ws-test []
(run-test-sync
(install-websocket-stub!)
(testing "websocket connection payload is sent"
(let [last-ws-message (atom nil)]
(re-frame/reg-fx
::internals/send-ws
(fn [[ws payload]]
(is (= ::websocket-connection ws))
(reset! last-ws-message payload)))
(re-frame/dispatch [::re-graph/init {:ws {:url "ws"
:connection-init-payload {:auth-token 123}}}])
(is (= {:type "connection_init"
:payload {:auth-token 123}}
@last-ws-message))
(testing "updated when re-inited"
(re-frame/dispatch [::re-graph/re-init {:ws {:connection-init-payload {:auth-token 234}}}] )
(is (= {:type "connection_init"
:payload {:auth-token 234}}
@last-ws-message)))))))
(deftest re-init-http-test []
(run-test-sync
(testing "http headers are sent"
(let [last-http-message (atom nil)]
(re-frame/reg-fx
::internals/send-http
(fn [{:keys [event request]}]
(reset! last-http-message request)
(dispatch-response event {})))
(re-frame/dispatch [::re-graph/init {:http {:url "-ql"
:impl {:headers {"Authorization" 123}}}
:ws nil}])
(re-frame/dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(is (= {:headers {"Authorization" 123}}
@last-http-message))
(testing "and can be updated"
(re-frame/dispatch [::re-graph/re-init {:http {:impl {:headers {"Authorization" 234}}}}])
(re-frame/dispatch [::re-graph/query {:query "{ things { id } }"
:variables {:some "variable"}
:callback [::on-thing]}])
(is (= {:headers {"Authorization" 234}}
@last-http-message)))))))
|
dd2e0c1153c56234ee689eef127b89a05985ffd13e5f71b81cc26691d558125c | Feldspar/feldspar-language | CRC.hs | {-# LANGUAGE GADTs #-}
--
Copyright ( c ) 2009 - 2011 , ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
* Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Algorithm.CRC where
import qualified Prelude
import Feldspar
import Feldspar.Vector
tstBit :: Bits a => Data a -> Data Index -> Data Bool
tstBit w b = w .&. (1 .<<. b) /= 0
makeCrcTable :: (Bits a) => Data a -> Pull1 a
makeCrcTable polynomial = indexed1 256 $ \i -> forLoop 8 (i2n i .<<. (sz - 8)) step
where
sz = bitSize polynomial
step _ r = let r' = r .<<. 1
in tstBit r (sz - 1) ? (r' `xor` polynomial) $ r'
| Calculate the normal form CRC using a table
crcNormal :: (Bits a)
=> Pull1 a -> Data a -> Pull1 Word8 -> Data a
crcNormal table initial xs = fromZero $ fold step initial xs
where
sz = bitSize initial
step crc a = (table ! (Z :. i2n ((i2n (crc .>>. (sz - 8)) .&. 0xFF) `xor` a))) `xor` (crc .<<. 8)
| Calculate the reflected form CRC using a table
-- needs reflected tables
crcReflected :: (Bits a)
=> Pull1 a -> Data a -> Pull1 Word8 -> Data a
crcReflected table initial xs = fromZero $ fold step initial xs
where
step crc a = (table ! (Z :. i2n ((crc `xor` i2n a) .&. 0xFF))) `xor` (crc .>>. 8)
| Calculate normal form CRC from a polynominal
crcNaive :: (Bits a) => Data a -> Data a -> Pull1 Word8 -> Data a
crcNaive = crcNormal . makeCrcTable
-- | Reflect the bottom b bits of value t
reflect :: (Bits a) => Data a -> Data Length -> Data a
reflect t b = forLoop b t $ \i v -> let mask = bit ((b-1)-i) in testBit t i ? (v .|. mask) $ v .&. complement mask
-- References
-- The functions in this module are inspired by the follow guide
--
| null | https://raw.githubusercontent.com/Feldspar/feldspar-language/499e4e42d462f436a5267ddf0c2f73d5741a8248/src/Feldspar/Algorithm/CRC.hs | haskell | # LANGUAGE GADTs #
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
needs reflected tables
| Reflect the bottom b bits of value t
References
The functions in this module are inspired by the follow guide
|
Copyright ( c ) 2009 - 2011 , ERICSSON AB
* Neither the name of the ERICSSON AB nor the names of its contributors
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
module Feldspar.Algorithm.CRC where
import qualified Prelude
import Feldspar
import Feldspar.Vector
tstBit :: Bits a => Data a -> Data Index -> Data Bool
tstBit w b = w .&. (1 .<<. b) /= 0
makeCrcTable :: (Bits a) => Data a -> Pull1 a
makeCrcTable polynomial = indexed1 256 $ \i -> forLoop 8 (i2n i .<<. (sz - 8)) step
where
sz = bitSize polynomial
step _ r = let r' = r .<<. 1
in tstBit r (sz - 1) ? (r' `xor` polynomial) $ r'
| Calculate the normal form CRC using a table
crcNormal :: (Bits a)
=> Pull1 a -> Data a -> Pull1 Word8 -> Data a
crcNormal table initial xs = fromZero $ fold step initial xs
where
sz = bitSize initial
step crc a = (table ! (Z :. i2n ((i2n (crc .>>. (sz - 8)) .&. 0xFF) `xor` a))) `xor` (crc .<<. 8)
| Calculate the reflected form CRC using a table
crcReflected :: (Bits a)
=> Pull1 a -> Data a -> Pull1 Word8 -> Data a
crcReflected table initial xs = fromZero $ fold step initial xs
where
step crc a = (table ! (Z :. i2n ((crc `xor` i2n a) .&. 0xFF))) `xor` (crc .>>. 8)
| Calculate normal form CRC from a polynominal
crcNaive :: (Bits a) => Data a -> Data a -> Pull1 Word8 -> Data a
crcNaive = crcNormal . makeCrcTable
reflect :: (Bits a) => Data a -> Data Length -> Data a
reflect t b = forLoop b t $ \i v -> let mask = bit ((b-1)-i) in testBit t i ? (v .|. mask) $ v .&. complement mask
|
54cd00bf963e18d141908e49704e591a164b8343505c9c3a0ed427699554c819 | dharmatech/surfage | test.scm | ;;; array test
2001
(define past
(let ((stones '()))
(lambda stone
(if (null? stone)
(reverse stones)
(set! stones (cons (apply (lambda (stone) stone) stone) stones))))))
(define (tail n)
(if (< n (length (past)))
(list-tail (past) (- (length (past)) n))
(past)))
;;; Simple tests
(or (and (shape)
(shape -1 -1)
(shape -1 0)
(shape -1 1)
(shape 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8))
(error "(shape ...) failed"))
(past "shape")
(or (and (make-array (shape))
(make-array (shape) *)
(make-array (shape -1 -1))
(make-array (shape -1 -1) *)
(make-array (shape -1 1))
(make-array (shape 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8 1 2 3 4) *))
(error "(make-array (shape ...) [o]) failed"))
(past "make-array")
(or (and (array (shape) *)
(array (shape -1 -1))
(array (shape -1 1) * *)
(array (shape 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8) *))
(error "(array (shape ...) ...) failed"))
(past "array")
(or (and (= (array-rank (shape)) 2)
(= (array-rank (shape -1 -1)) 2)
(= (array-rank (shape -1 1)) 2)
(= (array-rank (shape 1 2 3 4 5 6 7 8)) 2))
(error "(array-rank (shape ...)) failed"))
(past "array-rank of shape")
(or (and (= (array-rank (make-array (shape))) 0)
(= (array-rank (make-array (shape -1 -1))) 1)
(= (array-rank (make-array (shape -1 1))) 1)
(= (array-rank (make-array (shape 1 2 3 4 5 6 7 8))) 4))
(error "(array-rank (make-array ...)) failed"))
(past "array-rank of make-array")
(or (and (= (array-rank (array (shape) *)) 0)
(= (array-rank (array (shape -1 -1))) 1)
(= (array-rank (array (shape -1 1) * *)) 1)
(= (array-rank (array (shape 1 2 3 4 5 6 7 8) *)) 4))
(error "(array-rank (array ...)) failed"))
(past "array-rank of array")
(or (and (= (array-start (shape -1 -1) 0) 0)
(= (array-start (shape -1 -1) 1) 0)
(= (array-start (shape -1 1) 0) 0)
(= (array-start (shape -1 1) 1) 0)
(= (array-start (shape 1 2 3 4 5 6 7 8) 0) 0)
(= (array-start (shape 1 2 3 4 5 6 7 8) 1) 0))
(error "(array-start (shape ...)) failed"))
(past "array-start of shape")
(or (and (= (array-end (shape -1 -1) 0) 1)
(= (array-end (shape -1 -1) 1) 2)
(= (array-end (shape -1 1) 0) 1)
(= (array-end (shape -1 1) 1) 2)
(= (array-end (shape 1 2 3 4 5 6 7 8) 0) 4)
(= (array-end (shape 1 2 3 4 5 6 7 8) 1) 2))
(error "(array-end (shape ...)) failed"))
(past "array-end of shape")
(or (and (= (array-start (make-array (shape -1 -1)) 0) -1)
(= (array-start (make-array (shape -1 1)) 0) -1)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 0) 1)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 1) 3)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 2) 5)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 3) 7))
(error "(array-start (make-array ...)) failed"))
(past "array-start of make-array")
(or (and (= (array-end (make-array (shape -1 -1)) 0) -1)
(= (array-end (make-array (shape -1 1)) 0) 1)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 0) 2)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 1) 4)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 2) 6)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 3) 8))
(error "(array-end (make-array ...)) failed"))
(past "array-end of make-array")
(or (and (= (array-start (array (shape -1 -1)) 0) -1)
(= (array-start (array (shape -1 1) * *) 0) -1)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 0) 1)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 1) 3)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 2) 5)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 3) 7))
(error "(array-start (array ...)) failed"))
(past "array-start of array")
(or (and (= (array-end (array (shape -1 -1)) 0) -1)
(= (array-end (array (shape -1 1) * *) 0) 1)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 0) 2)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 1) 4)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 2) 6)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 3) 8))
(error "(array-end (array ...)) failed"))
(past "array-end of array")
(or (and (eq? (array-ref (make-array (shape) 'a)) 'a)
(eq? (array-ref (make-array (shape -1 1) 'b) -1) 'b)
(eq? (array-ref (make-array (shape -1 1) 'c) 0) 'c)
(eq? (array-ref (make-array (shape 1 2 3 4 5 6 7 8) 'd) 1 3 5 7) 'd))
(error "array-ref of make-array with arguments failed"))
(past "array-ref of make-array with arguments")
(or (and (eq? (array-ref (make-array (shape) 'a) '#()) 'a)
(eq? (array-ref (make-array (shape -1 1) 'b) '#(-1)) 'b)
(eq? (array-ref (make-array (shape -1 1) 'c) '#(0)) 'c)
(eq? (array-ref (make-array (shape 1 2 3 4 5 6 7 8) 'd)
'#(1 3 5 7))
'd))
(error "array-ref of make-array with vector failed"))
(past "array-ref of make-array with vector")
(or (and (eq? (array-ref (make-array (shape) 'a)
(array (shape 0 0)))
'a)
(eq? (array-ref (make-array (shape -1 1) 'b)
(array (shape 0 1) -1))
'b)
(eq? (array-ref (make-array (shape -1 1) 'c)
(array (shape 0 1) 0))
'c)
(eq? (array-ref (make-array (shape 1 2 3 4 5 6 7 8) 'd)
(array (shape 0 4) 1 3 5 7))
'd))
(error "(array-ref of make-array with array failed"))
(past "array-ref of make-array with array")
(or (and (let ((arr (make-array (shape) 'o)))
(array-set! arr 'a)
(eq? (array-ref arr) 'a))
(let ((arr (make-array (shape -1 1) 'o)))
(array-set! arr -1 'b)
(array-set! arr 0 'c)
(and (eq? (array-ref arr -1) 'b)
(eq? (array-ref arr 0) 'c)))
(let ((arr (make-array (shape 1 2 3 4 5 6 7 8) 'o)))
(array-set! arr 1 3 5 7 'd)
(eq? (array-ref arr 1 3 5 7) 'd)))
(error "array-set! with arguments failed"))
(past "array-set! of make-array with arguments")
(or (and (let ((arr (make-array (shape) 'o)))
(array-set! arr '#() 'a)
(eq? (array-ref arr) 'a))
(let ((arr (make-array (shape -1 1) 'o)))
(array-set! arr '#(-1) 'b)
(array-set! arr '#(0) 'c)
(and (eq? (array-ref arr -1) 'b)
(eq? (array-ref arr 0) 'c)))
(let ((arr (make-array (shape 1 2 3 4 5 6 7 8) 'o)))
(array-set! arr '#(1 3 5 7) 'd)
(eq? (array-ref arr 1 3 5 7) 'd)))
(error "array-set! with vector failed"))
(past "array-set! of make-array with vector")
(or (and (let ((arr (make-array (shape) 'o)))
(array-set! arr 'a)
(eq? (array-ref arr) 'a))
(let ((arr (make-array (shape -1 1) 'o)))
(array-set! arr (array (shape 0 1) -1) 'b)
(array-set! arr (array (shape 0 1) 0) 'c)
(and (eq? (array-ref arr -1) 'b)
(eq? (array-ref arr 0) 'c)))
(let ((arr (make-array (shape 1 2 3 4 5 6 7 8) 'o)))
(array-set! arr (array (shape 0 4) 1 3 5 7) 'd)
(eq? (array-ref arr 1 3 5 7) 'd)))
(error "array-set! with arguments failed"))
(past "array-set! of make-array with array")
;;; Share and change:
;;;
org brk swp box
;;;
0 1 1 2 5 6
6 a b 2 a b 3 d c 0 2 4 6 8 : e
7 c d 3 e f 4 f e
8 e f
(or (let* ((org (array (shape 6 9 0 2) 'a 'b 'c 'd 'e 'f))
(brk (share-array
org
(shape 2 4 1 3)
(lambda (r k)
(values
(+ 6 (* 2 (- r 2)))
(- k 1)))))
(swp (share-array
org
(shape 3 5 5 7)
(lambda (r k)
(values
(+ 7 (- r 3))
(- 1 (- k 5))))))
(box (share-array
swp
(shape 0 1 2 3 4 5 6 7 8 9)
(lambda _ (values 4 6))))
(org-contents (lambda ()
(list (array-ref org 6 0) (array-ref org 6 1)
(array-ref org 7 0) (array-ref org 7 1)
(array-ref org 8 0) (array-ref org 8 1))))
(brk-contents (lambda ()
(list (array-ref brk 2 1) (array-ref brk 2 2)
(array-ref brk 3 1) (array-ref brk 3 2))))
(swp-contents (lambda ()
(list (array-ref swp 3 5) (array-ref swp 3 6)
(array-ref swp 4 5) (array-ref swp 4 6))))
(box-contents (lambda ()
(list (array-ref box 0 2 4 6 8)))))
(and (equal? (org-contents) '(a b c d e f))
(equal? (brk-contents) '(a b e f))
(equal? (swp-contents) '(d c f e))
(equal? (box-contents) '(e))
(begin (array-set! org 6 0 'x) #t)
(equal? (org-contents) '(x b c d e f))
(equal? (brk-contents) '(x b e f))
(equal? (swp-contents) '(d c f e))
(equal? (box-contents) '(e))
(begin (array-set! brk 3 1 'y) #t)
(equal? (org-contents) '(x b c d y f))
(equal? (brk-contents) '(x b y f))
(equal? (swp-contents) '(d c f y))
(equal? (box-contents) '(y))
(begin (array-set! swp 4 5 'z) #t)
(equal? (org-contents) '(x b c d y z))
(equal? (brk-contents) '(x b y z))
(equal? (swp-contents) '(d c z y))
(equal? (box-contents) '(y))
(begin (array-set! box 0 2 4 6 8 'e) #t)
(equal? (org-contents) '(x b c d e z))
(equal? (brk-contents) '(x b e z))
(equal? (swp-contents) '(d c z e))
(equal? (box-contents) '(e))))
(error "shared change failed"))
(past "shared change")
;;; Check that arrays copy the shape specification
(or (let ((shp (shape 10 12)))
(let ((arr (make-array shp))
(ars (array shp * *))
(art (share-array (make-array shp) shp (lambda (k) k))))
(array-set! shp 0 0 '?)
(array-set! shp 0 1 '!)
(and (= (array-rank shp) 2)
(= (array-start shp 0) 0)
(= (array-end shp 0) 1)
(= (array-start shp 1) 0)
(= (array-end shp 1) 2)
(eq? (array-ref shp 0 0) '?)
(eq? (array-ref shp 0 1) '!)
(= (array-rank arr) 1)
(= (array-start arr 0) 10)
(= (array-end arr 0) 12)
(= (array-rank ars) 1)
(= (array-start ars 0) 10)
(= (array-end ars 0) 12)
(= (array-rank art) 1)
(= (array-start art 0) 10)
(= (array-end art 0) 12))))
(error "array-set! of shape failed"))
(past "array-set! of shape")
;;; Check that index arrays work even when they share
;;;
arr
5 6 0 1
4 nw ne 0 4 6
5 sw se 1 5 4
(or (let ((arr (array (shape 4 6 5 7) 'nw 'ne 'sw 'se))
(ixn (array (shape 0 2 0 2) 4 6 5 4)))
(let ((col0 (share-array
ixn
(shape 0 2)
(lambda (k)
(values k 0))))
(row0 (share-array
ixn
(shape 0 2)
(lambda (k)
(values 0 k))))
(wor1 (share-array
ixn
(shape 0 2)
(lambda (k)
(values 1 (- 1 k)))))
(cod (share-array
ixn
(shape 0 2)
(lambda (k)
(case k
((0) (values 1 0))
((1) (values 0 1))))))
(box (share-array
ixn
(shape 0 2)
(lambda (k)
(values 1 0)))))
(and (eq? (array-ref arr col0) 'nw)
(eq? (array-ref arr row0) 'ne)
(eq? (array-ref arr wor1) 'nw)
(eq? (array-ref arr cod) 'se)
(eq? (array-ref arr box) 'sw)
(begin
(array-set! arr col0 'ul)
(array-set! arr row0 'ur)
(array-set! arr cod 'lr)
(array-set! arr box 'll)
#t)
(eq? (array-ref arr 4 5) 'ul)
(eq? (array-ref arr 4 6) 'ur)
(eq? (array-ref arr 5 5) 'll)
(eq? (array-ref arr 5 6) 'lr)
(begin
(array-set! arr wor1 'xx)
(eq? (array-ref arr 4 5) 'xx)))))
(error "array access with sharing index array failed"))
(past "array access with sharing index array")
;;; Check that shape arrays work even when they share
;;;
arr shp shq shr shs
1 2 3 4 0 1 0 1 0 1 0 1
1 10 12 16 20 0 10 12 0 12 20 0 10 10 0 12 12
;;; 2 10 11 12 13 1 10 11 1 11 13 1 11 12 1 12 12
;;; 2 12 16
;;; 3 13 20
(or (let ((arr (array (shape 1 3 1 5) 10 12 16 20 10 11 12 13)))
(let ((shp (share-array
arr
(shape 0 2 0 2)
(lambda (r k)
(values (+ r 1) (+ k 1)))))
(shq (share-array
arr
(shape 0 2 0 2)
(lambda (r k)
(values (+ r 1) (* 2 (+ 1 k))))))
(shr (share-array
arr
(shape 0 4 0 2)
(lambda (r k)
(values (- 2 k) (+ r 1)))))
(shs (share-array
arr
(shape 0 2 0 2)
(lambda (r k)
(values 2 3)))))
(and (let ((arr-p (make-array shp)))
(and (= (array-rank arr-p) 2)
(= (array-start arr-p 0) 10)
(= (array-end arr-p 0) 12)
(= (array-start arr-p 1) 10)
(= (array-end arr-p 1) 11)))
(let ((arr-q (array shq * * * * * * * * * * * * * * * *)))
(and (= (array-rank arr-q) 2)
(= (array-start arr-q 0) 12)
(= (array-end arr-q 0) 20)
(= (array-start arr-q 1) 11)
(= (array-end arr-q 1) 13)))
(let ((arr-r (share-array
(array (shape) *)
shr
(lambda _ (values)))))
(and (= (array-rank arr-r) 4)
(= (array-start arr-r 0) 10)
(= (array-end arr-r 0) 10)
(= (array-start arr-r 1) 11)
(= (array-end arr-r 1) 12)
(= (array-start arr-r 2) 12)
(= (array-end arr-r 2) 16)
(= (array-start arr-r 3) 13)
(= (array-end arr-r 3) 20)))
(let ((arr-s (make-array shs)))
(and (= (array-rank arr-s) 2)
(= (array-start arr-s 0) 12)
(= (array-end arr-s 0) 12)
(= (array-start arr-s 1) 12)
(= (array-end arr-s 1) 12))))))
(error "sharing shape array failed"))
(past "sharing shape array")
(let ((super (array (shape 4 7 4 7)
1 * *
* 2 *
* * 3))
(subshape (share-array
(array (shape 0 2 0 3)
* 4 *
* 7 *)
(shape 0 1 0 2)
(lambda (r k)
(values k 1)))))
(let ((sub (share-array super subshape (lambda (k) (values k k)))))
( array - equal ? ( shape 4 7 ) )
(or (and (= (array-rank subshape) 2)
(= (array-start subshape 0) 0)
(= (array-end subshape 0) 1)
(= (array-start subshape 1) 0)
(= (array-end subshape 1) 2)
(= (array-ref subshape 0 0) 4)
(= (array-ref subshape 0 1) 7))
(error "sharing subshape failed"))
( array - equal ? sub ( array ( shape 4 7 ) 1 2 3 ) )
(or (and (= (array-rank sub) 1)
(= (array-start sub 0) 4)
(= (array-end sub 0) 7)
(= (array-ref sub 4) 1)
(= (array-ref sub 5) 2)
(= (array-ref sub 6) 3))
(error "sharing with sharing subshape failed"))))
(past "sharing with sharing subshape")
| null | https://raw.githubusercontent.com/dharmatech/surfage/895f16af83d9ce3c190f69626c9baba8c44d76bc/s25/test.scm | scheme | array test
Simple tests
Share and change:
Check that arrays copy the shape specification
Check that index arrays work even when they share
Check that shape arrays work even when they share
2 10 11 12 13 1 10 11 1 11 13 1 11 12 1 12 12
2 12 16
3 13 20 | 2001
(define past
(let ((stones '()))
(lambda stone
(if (null? stone)
(reverse stones)
(set! stones (cons (apply (lambda (stone) stone) stone) stones))))))
(define (tail n)
(if (< n (length (past)))
(list-tail (past) (- (length (past)) n))
(past)))
(or (and (shape)
(shape -1 -1)
(shape -1 0)
(shape -1 1)
(shape 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8))
(error "(shape ...) failed"))
(past "shape")
(or (and (make-array (shape))
(make-array (shape) *)
(make-array (shape -1 -1))
(make-array (shape -1 -1) *)
(make-array (shape -1 1))
(make-array (shape 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8 1 2 3 4) *))
(error "(make-array (shape ...) [o]) failed"))
(past "make-array")
(or (and (array (shape) *)
(array (shape -1 -1))
(array (shape -1 1) * *)
(array (shape 1 2 3 4 5 6 7 8 1 2 3 4 5 6 7 8) *))
(error "(array (shape ...) ...) failed"))
(past "array")
(or (and (= (array-rank (shape)) 2)
(= (array-rank (shape -1 -1)) 2)
(= (array-rank (shape -1 1)) 2)
(= (array-rank (shape 1 2 3 4 5 6 7 8)) 2))
(error "(array-rank (shape ...)) failed"))
(past "array-rank of shape")
(or (and (= (array-rank (make-array (shape))) 0)
(= (array-rank (make-array (shape -1 -1))) 1)
(= (array-rank (make-array (shape -1 1))) 1)
(= (array-rank (make-array (shape 1 2 3 4 5 6 7 8))) 4))
(error "(array-rank (make-array ...)) failed"))
(past "array-rank of make-array")
(or (and (= (array-rank (array (shape) *)) 0)
(= (array-rank (array (shape -1 -1))) 1)
(= (array-rank (array (shape -1 1) * *)) 1)
(= (array-rank (array (shape 1 2 3 4 5 6 7 8) *)) 4))
(error "(array-rank (array ...)) failed"))
(past "array-rank of array")
(or (and (= (array-start (shape -1 -1) 0) 0)
(= (array-start (shape -1 -1) 1) 0)
(= (array-start (shape -1 1) 0) 0)
(= (array-start (shape -1 1) 1) 0)
(= (array-start (shape 1 2 3 4 5 6 7 8) 0) 0)
(= (array-start (shape 1 2 3 4 5 6 7 8) 1) 0))
(error "(array-start (shape ...)) failed"))
(past "array-start of shape")
(or (and (= (array-end (shape -1 -1) 0) 1)
(= (array-end (shape -1 -1) 1) 2)
(= (array-end (shape -1 1) 0) 1)
(= (array-end (shape -1 1) 1) 2)
(= (array-end (shape 1 2 3 4 5 6 7 8) 0) 4)
(= (array-end (shape 1 2 3 4 5 6 7 8) 1) 2))
(error "(array-end (shape ...)) failed"))
(past "array-end of shape")
(or (and (= (array-start (make-array (shape -1 -1)) 0) -1)
(= (array-start (make-array (shape -1 1)) 0) -1)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 0) 1)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 1) 3)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 2) 5)
(= (array-start (make-array (shape 1 2 3 4 5 6 7 8)) 3) 7))
(error "(array-start (make-array ...)) failed"))
(past "array-start of make-array")
(or (and (= (array-end (make-array (shape -1 -1)) 0) -1)
(= (array-end (make-array (shape -1 1)) 0) 1)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 0) 2)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 1) 4)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 2) 6)
(= (array-end (make-array (shape 1 2 3 4 5 6 7 8)) 3) 8))
(error "(array-end (make-array ...)) failed"))
(past "array-end of make-array")
(or (and (= (array-start (array (shape -1 -1)) 0) -1)
(= (array-start (array (shape -1 1) * *) 0) -1)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 0) 1)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 1) 3)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 2) 5)
(= (array-start (array (shape 1 2 3 4 5 6 7 8) *) 3) 7))
(error "(array-start (array ...)) failed"))
(past "array-start of array")
(or (and (= (array-end (array (shape -1 -1)) 0) -1)
(= (array-end (array (shape -1 1) * *) 0) 1)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 0) 2)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 1) 4)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 2) 6)
(= (array-end (array (shape 1 2 3 4 5 6 7 8) *) 3) 8))
(error "(array-end (array ...)) failed"))
(past "array-end of array")
(or (and (eq? (array-ref (make-array (shape) 'a)) 'a)
(eq? (array-ref (make-array (shape -1 1) 'b) -1) 'b)
(eq? (array-ref (make-array (shape -1 1) 'c) 0) 'c)
(eq? (array-ref (make-array (shape 1 2 3 4 5 6 7 8) 'd) 1 3 5 7) 'd))
(error "array-ref of make-array with arguments failed"))
(past "array-ref of make-array with arguments")
(or (and (eq? (array-ref (make-array (shape) 'a) '#()) 'a)
(eq? (array-ref (make-array (shape -1 1) 'b) '#(-1)) 'b)
(eq? (array-ref (make-array (shape -1 1) 'c) '#(0)) 'c)
(eq? (array-ref (make-array (shape 1 2 3 4 5 6 7 8) 'd)
'#(1 3 5 7))
'd))
(error "array-ref of make-array with vector failed"))
(past "array-ref of make-array with vector")
(or (and (eq? (array-ref (make-array (shape) 'a)
(array (shape 0 0)))
'a)
(eq? (array-ref (make-array (shape -1 1) 'b)
(array (shape 0 1) -1))
'b)
(eq? (array-ref (make-array (shape -1 1) 'c)
(array (shape 0 1) 0))
'c)
(eq? (array-ref (make-array (shape 1 2 3 4 5 6 7 8) 'd)
(array (shape 0 4) 1 3 5 7))
'd))
(error "(array-ref of make-array with array failed"))
(past "array-ref of make-array with array")
(or (and (let ((arr (make-array (shape) 'o)))
(array-set! arr 'a)
(eq? (array-ref arr) 'a))
(let ((arr (make-array (shape -1 1) 'o)))
(array-set! arr -1 'b)
(array-set! arr 0 'c)
(and (eq? (array-ref arr -1) 'b)
(eq? (array-ref arr 0) 'c)))
(let ((arr (make-array (shape 1 2 3 4 5 6 7 8) 'o)))
(array-set! arr 1 3 5 7 'd)
(eq? (array-ref arr 1 3 5 7) 'd)))
(error "array-set! with arguments failed"))
(past "array-set! of make-array with arguments")
(or (and (let ((arr (make-array (shape) 'o)))
(array-set! arr '#() 'a)
(eq? (array-ref arr) 'a))
(let ((arr (make-array (shape -1 1) 'o)))
(array-set! arr '#(-1) 'b)
(array-set! arr '#(0) 'c)
(and (eq? (array-ref arr -1) 'b)
(eq? (array-ref arr 0) 'c)))
(let ((arr (make-array (shape 1 2 3 4 5 6 7 8) 'o)))
(array-set! arr '#(1 3 5 7) 'd)
(eq? (array-ref arr 1 3 5 7) 'd)))
(error "array-set! with vector failed"))
(past "array-set! of make-array with vector")
(or (and (let ((arr (make-array (shape) 'o)))
(array-set! arr 'a)
(eq? (array-ref arr) 'a))
(let ((arr (make-array (shape -1 1) 'o)))
(array-set! arr (array (shape 0 1) -1) 'b)
(array-set! arr (array (shape 0 1) 0) 'c)
(and (eq? (array-ref arr -1) 'b)
(eq? (array-ref arr 0) 'c)))
(let ((arr (make-array (shape 1 2 3 4 5 6 7 8) 'o)))
(array-set! arr (array (shape 0 4) 1 3 5 7) 'd)
(eq? (array-ref arr 1 3 5 7) 'd)))
(error "array-set! with arguments failed"))
(past "array-set! of make-array with array")
org brk swp box
0 1 1 2 5 6
6 a b 2 a b 3 d c 0 2 4 6 8 : e
7 c d 3 e f 4 f e
8 e f
(or (let* ((org (array (shape 6 9 0 2) 'a 'b 'c 'd 'e 'f))
(brk (share-array
org
(shape 2 4 1 3)
(lambda (r k)
(values
(+ 6 (* 2 (- r 2)))
(- k 1)))))
(swp (share-array
org
(shape 3 5 5 7)
(lambda (r k)
(values
(+ 7 (- r 3))
(- 1 (- k 5))))))
(box (share-array
swp
(shape 0 1 2 3 4 5 6 7 8 9)
(lambda _ (values 4 6))))
(org-contents (lambda ()
(list (array-ref org 6 0) (array-ref org 6 1)
(array-ref org 7 0) (array-ref org 7 1)
(array-ref org 8 0) (array-ref org 8 1))))
(brk-contents (lambda ()
(list (array-ref brk 2 1) (array-ref brk 2 2)
(array-ref brk 3 1) (array-ref brk 3 2))))
(swp-contents (lambda ()
(list (array-ref swp 3 5) (array-ref swp 3 6)
(array-ref swp 4 5) (array-ref swp 4 6))))
(box-contents (lambda ()
(list (array-ref box 0 2 4 6 8)))))
(and (equal? (org-contents) '(a b c d e f))
(equal? (brk-contents) '(a b e f))
(equal? (swp-contents) '(d c f e))
(equal? (box-contents) '(e))
(begin (array-set! org 6 0 'x) #t)
(equal? (org-contents) '(x b c d e f))
(equal? (brk-contents) '(x b e f))
(equal? (swp-contents) '(d c f e))
(equal? (box-contents) '(e))
(begin (array-set! brk 3 1 'y) #t)
(equal? (org-contents) '(x b c d y f))
(equal? (brk-contents) '(x b y f))
(equal? (swp-contents) '(d c f y))
(equal? (box-contents) '(y))
(begin (array-set! swp 4 5 'z) #t)
(equal? (org-contents) '(x b c d y z))
(equal? (brk-contents) '(x b y z))
(equal? (swp-contents) '(d c z y))
(equal? (box-contents) '(y))
(begin (array-set! box 0 2 4 6 8 'e) #t)
(equal? (org-contents) '(x b c d e z))
(equal? (brk-contents) '(x b e z))
(equal? (swp-contents) '(d c z e))
(equal? (box-contents) '(e))))
(error "shared change failed"))
(past "shared change")
(or (let ((shp (shape 10 12)))
(let ((arr (make-array shp))
(ars (array shp * *))
(art (share-array (make-array shp) shp (lambda (k) k))))
(array-set! shp 0 0 '?)
(array-set! shp 0 1 '!)
(and (= (array-rank shp) 2)
(= (array-start shp 0) 0)
(= (array-end shp 0) 1)
(= (array-start shp 1) 0)
(= (array-end shp 1) 2)
(eq? (array-ref shp 0 0) '?)
(eq? (array-ref shp 0 1) '!)
(= (array-rank arr) 1)
(= (array-start arr 0) 10)
(= (array-end arr 0) 12)
(= (array-rank ars) 1)
(= (array-start ars 0) 10)
(= (array-end ars 0) 12)
(= (array-rank art) 1)
(= (array-start art 0) 10)
(= (array-end art 0) 12))))
(error "array-set! of shape failed"))
(past "array-set! of shape")
arr
5 6 0 1
4 nw ne 0 4 6
5 sw se 1 5 4
(or (let ((arr (array (shape 4 6 5 7) 'nw 'ne 'sw 'se))
(ixn (array (shape 0 2 0 2) 4 6 5 4)))
(let ((col0 (share-array
ixn
(shape 0 2)
(lambda (k)
(values k 0))))
(row0 (share-array
ixn
(shape 0 2)
(lambda (k)
(values 0 k))))
(wor1 (share-array
ixn
(shape 0 2)
(lambda (k)
(values 1 (- 1 k)))))
(cod (share-array
ixn
(shape 0 2)
(lambda (k)
(case k
((0) (values 1 0))
((1) (values 0 1))))))
(box (share-array
ixn
(shape 0 2)
(lambda (k)
(values 1 0)))))
(and (eq? (array-ref arr col0) 'nw)
(eq? (array-ref arr row0) 'ne)
(eq? (array-ref arr wor1) 'nw)
(eq? (array-ref arr cod) 'se)
(eq? (array-ref arr box) 'sw)
(begin
(array-set! arr col0 'ul)
(array-set! arr row0 'ur)
(array-set! arr cod 'lr)
(array-set! arr box 'll)
#t)
(eq? (array-ref arr 4 5) 'ul)
(eq? (array-ref arr 4 6) 'ur)
(eq? (array-ref arr 5 5) 'll)
(eq? (array-ref arr 5 6) 'lr)
(begin
(array-set! arr wor1 'xx)
(eq? (array-ref arr 4 5) 'xx)))))
(error "array access with sharing index array failed"))
(past "array access with sharing index array")
arr shp shq shr shs
1 2 3 4 0 1 0 1 0 1 0 1
1 10 12 16 20 0 10 12 0 12 20 0 10 10 0 12 12
(or (let ((arr (array (shape 1 3 1 5) 10 12 16 20 10 11 12 13)))
(let ((shp (share-array
arr
(shape 0 2 0 2)
(lambda (r k)
(values (+ r 1) (+ k 1)))))
(shq (share-array
arr
(shape 0 2 0 2)
(lambda (r k)
(values (+ r 1) (* 2 (+ 1 k))))))
(shr (share-array
arr
(shape 0 4 0 2)
(lambda (r k)
(values (- 2 k) (+ r 1)))))
(shs (share-array
arr
(shape 0 2 0 2)
(lambda (r k)
(values 2 3)))))
(and (let ((arr-p (make-array shp)))
(and (= (array-rank arr-p) 2)
(= (array-start arr-p 0) 10)
(= (array-end arr-p 0) 12)
(= (array-start arr-p 1) 10)
(= (array-end arr-p 1) 11)))
(let ((arr-q (array shq * * * * * * * * * * * * * * * *)))
(and (= (array-rank arr-q) 2)
(= (array-start arr-q 0) 12)
(= (array-end arr-q 0) 20)
(= (array-start arr-q 1) 11)
(= (array-end arr-q 1) 13)))
(let ((arr-r (share-array
(array (shape) *)
shr
(lambda _ (values)))))
(and (= (array-rank arr-r) 4)
(= (array-start arr-r 0) 10)
(= (array-end arr-r 0) 10)
(= (array-start arr-r 1) 11)
(= (array-end arr-r 1) 12)
(= (array-start arr-r 2) 12)
(= (array-end arr-r 2) 16)
(= (array-start arr-r 3) 13)
(= (array-end arr-r 3) 20)))
(let ((arr-s (make-array shs)))
(and (= (array-rank arr-s) 2)
(= (array-start arr-s 0) 12)
(= (array-end arr-s 0) 12)
(= (array-start arr-s 1) 12)
(= (array-end arr-s 1) 12))))))
(error "sharing shape array failed"))
(past "sharing shape array")
(let ((super (array (shape 4 7 4 7)
1 * *
* 2 *
* * 3))
(subshape (share-array
(array (shape 0 2 0 3)
* 4 *
* 7 *)
(shape 0 1 0 2)
(lambda (r k)
(values k 1)))))
(let ((sub (share-array super subshape (lambda (k) (values k k)))))
( array - equal ? ( shape 4 7 ) )
(or (and (= (array-rank subshape) 2)
(= (array-start subshape 0) 0)
(= (array-end subshape 0) 1)
(= (array-start subshape 1) 0)
(= (array-end subshape 1) 2)
(= (array-ref subshape 0 0) 4)
(= (array-ref subshape 0 1) 7))
(error "sharing subshape failed"))
( array - equal ? sub ( array ( shape 4 7 ) 1 2 3 ) )
(or (and (= (array-rank sub) 1)
(= (array-start sub 0) 4)
(= (array-end sub 0) 7)
(= (array-ref sub 4) 1)
(= (array-ref sub 5) 2)
(= (array-ref sub 6) 3))
(error "sharing with sharing subshape failed"))))
(past "sharing with sharing subshape")
|
f9a7cabd0f5e5638bff0bdd4a583d617b13b163bedb01046a277b7297dc663f1 | juhp/stack-clean-old | Types.hs | module Types (
Deletion (..),
isDelete
)
where
data Deletion = Dryrun | Delete
deriving Eq
isDelete :: Deletion -> Bool
isDelete = (== Delete)
| null | https://raw.githubusercontent.com/juhp/stack-clean-old/810798c26801db367766ed50a15328017a154c0f/src/Types.hs | haskell | module Types (
Deletion (..),
isDelete
)
where
data Deletion = Dryrun | Delete
deriving Eq
isDelete :: Deletion -> Bool
isDelete = (== Delete)
|
|
2f7780944f3c05f12d39b6262a5a62b42a987b536e9beaf9f26de9c65b25cabc | csabahruska/jhc-components | Main.hs | module FrontEnd.Tc.Main (tiExpr, tiProgram, makeProgram, isTypePlaceholder ) where
import Control.Monad.Reader
import Control.Monad.Writer
import Data.Graph(stronglyConnComp, SCC(..))
import System.IO(hPutStr,stderr)
import Text.Printf
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Text.PrettyPrint.HughesPJ as P
import Doc.DocLike
import Doc.PPrint as PPrint
import FrontEnd.Class
import FrontEnd.DeclsDepends(getDeclDeps)
import FrontEnd.Diagnostic
import FrontEnd.HsPretty
import FrontEnd.HsSyn
import FrontEnd.KindInfer
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import FrontEnd.Tc.Class
import FrontEnd.Tc.Kind
import FrontEnd.Tc.Monad hiding(listenPreds)
import FrontEnd.Tc.Type
import FrontEnd.Tc.Unify
import FrontEnd.Warning
import GenUtil
import Name.Names
import Name.VConsts
import Options
import Support.FreeVars
import Util.Progress
import qualified FlagDump as FD
import qualified FlagOpts as FO
listenPreds = listenSolvePreds
type Expl = (Sigma, HsDecl)
TODO : this is different than the " Typing Haskell in Haskell " paper
-- we do not further sub-divide the implicitly typed declarations in
-- a binding group.
type BindGroup = ([Expl], [Either HsDecl [HsDecl]])
tpretty vv = prettyPrintType vv
tppretty vv = parens (tpretty vv)
tcKnownApp e coerce vname as typ = do
sc <- lookupName vname
let (_,_,rt) = fromType sc
-- fall through if the type isn't arrowy enough (will produce type error)
if (length . fst $ fromTArrow rt) < length as then tcApps' e as typ else do
(ts,rt) <- freshInstance Sigma sc
e' <- if coerce then doCoerce (ctAp ts) e else return e
addCoerce nname ( ctAp ts )
let f (TArrow x y) (a:as) = do
a <- tcExprPoly a x
y <- evalType y
(as,fc) <- f y as
return (a:as,fc)
f lt [] = do
fc <- lt `subsumes` typ
return ([],fc)
f _ _ = error "Main.tcKnownApp: bad."
(nas,CTId) <- f rt as
return (e',nas)
tcApps e@(HsVar v) as typ = do
let vname = toName Val v
let = toName n
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "tcApps: " ++ (show vname)
rc <- asks tcRecursiveCalls
-- fall through if this is a recursive call to oneself
if (vname `Set.member` rc) then tcApps' e as typ else do
tcKnownApp e True vname as typ
tcApps e@(HsCon v) as typ = do
(e,nname) <- wrapInAsPat e
let vname = toName DataConstructor v
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "tcApps: " ++ (show nname ++ "@" ++ show vname)
addToCollectedEnv (Map.singleton nname typ)
tcKnownApp e False vname as typ
tcApps e as typ = tcApps' e as typ
-- the fall through case
tcApps' e as typ = do
printRule $ "tcApps': " ++ (show e)
bs <- sequence [ newBox kindArg | _ <- as ]
e' <- tcExpr e (foldr fn typ bs)
as' <- sequence [ tcExprPoly a r | r <- bs | a <- as ]
return (e',as')
tcApp e1 e2 typ = do
(e1,[e2]) <- tcApps e1 [e2] typ
return (e1,e2)
tiExprPoly,tcExprPoly :: HsExp -> Type -> Tc HsExp
tcExprPoly e t = do
t <- evalType t
printRule $ "tiExprPoly " ++ tppretty t <+> show e
tiExprPoly e t
GEN2
tiExprPoly e t = do -- GEN1
(ts,_,t) <- skolomize t
e <- tcExpr e t
doCoerce (ctAbs ts) e
doCoerce :: CoerceTerm -> HsExp -> Tc HsExp
doCoerce CTId e = return e
doCoerce ct e = do
(e',n) <- wrapInAsPat e
addCoerce n ct
return e'
wrapInAsPat :: HsExp -> Tc (HsExp,Name)
wrapInAsPat e = do
n <- newHsVar "As"
return (HsAsPat n e, n)
wrapInAsPatEnv :: HsExp -> Type -> Tc HsExp
wrapInAsPatEnv e typ = do
(ne,ap) <- wrapInAsPat e
addToCollectedEnv (Map.singleton ap typ)
return ne
newHsVar ns = do
nn <- newUniq
return $ toName Val (ns ++ "@","tmp" ++ show nn)
isTypePlaceholder :: HsName -> Bool
isTypePlaceholder (getModule -> Just m) = m `elem` [toModule "Wild@",toModule "As@"]
isTypePlaceholder _ = False
tiExpr,tcExpr :: HsExp -> Type -> Tc HsExp
tcExpr e t = do
t <- evalType t
e <- tiExpr e t
--(_,False,_) <- unbox t
return e
tiExpr (HsVar v) typ = do
sc <- lookupName (toName Val v)
f <- sc `subsumes` typ
rc <- asks tcRecursiveCalls
if (toName Val v `Set.member` rc) then do
(e',n) <- wrapInAsPat (HsVar v)
tell mempty { outKnots = [(n,toName Val v)] }
return e'
else do
doCoerce f (HsVar v)
tiExpr (HsCase e alts) typ = do
dn <- getDeName
withContext (simpleMsg $ "in the case expression\n case " ++ render (ppHsExp $ dn e) ++ " of ...") $ do
scrutinee <- newBox kindFunRet
e' <- tcExpr e scrutinee
alts' <- mapM (tcAlt scrutinee typ) alts
wrapInAsPatEnv (HsCase e' alts') typ
tiExpr (HsCon conName) typ = do
sc <- lookupName (toName DataConstructor conName)
sc `subsumes` typ
wrapInAsPatEnv (HsCon conName) typ
tiExpr (HsLit l@(HsIntPrim _)) typ = do
unBox typ
ty <- evalType typ
case ty of
TCon (Tycon n kh) | kh == kindHash -> return ()
_ -> ty `boxyMatch` (TCon (Tycon tc_Bits32 kindHash))
wrapInAsPatEnv (HsLit l) ty
tiExpr (HsLit l@(HsInt _)) typ = do
t <- tiLit l
t `subsumes` typ
wrapInAsPatEnv (HsLit l) typ
tiExpr err@HsError {} typ = do
unBox typ
wrapInAsPatEnv err typ
tiExpr (HsLit l) typ = do
t <- tiLit l
t `subsumes` typ
return (HsLit l)
tiExpr (HsAsPat n e) typ = do
e <- tcExpr e typ
typ < - flattenType typ
addToCollectedEnv (Map.singleton (toName Val n) typ)
return (HsAsPat n e)
comb LET - S and VAR
tiExpr expr@(HsExpTypeSig sloc e qt) typ =
deNameContext (Just sloc) "in the annotated expression" expr $ do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
s `subsumes` typ
e' <- tcExpr e typ
return (HsExpTypeSig sloc e' qt)
tiExpr (HsLeftSection e1 e2) typ = do
(e1,e2) <- tcApp e1 e2 typ
return (HsLeftSection e1 e2)
-- I know this looks weird but it appears to be correct
-- e1 :: b
-- e2 :: a -> b -> c
-- e1 e2 :: a -> c
(: [ ] ) \x - > x : [ ] ` fn `
tiExpr (HsRightSection e1 e2) typ = do
arg <- newBox kindArg
arg2 <- newBox kindArg
ret <- newBox kindFunRet
e1 <- tcExpr e1 arg2
e2 <- tcExpr e2 (arg `fn` (arg2 `fn` ret))
(arg `fn` ret) `subsumes` typ
return (HsRightSection e1 e2)
tiExpr expr@HsApp {} typ = deNameContext Nothing "in the application" (backToApp h as) $ do
(h,as) <- tcApps h as typ
return $ backToApp h as
where
backToApp h as = foldl HsApp h as
(h,as) = fromHsApp expr
fromHsApp t = f t [] where
f (HsApp a b) rs = f a (b:rs)
f t rs = (t,rs)
tiExpr expr@(HsInfixApp e1 e2 e3) typ = deNameContext Nothing "in the infix application" expr $ do
(e2',[e1',e3']) <- tcApps e2 [e1,e3] typ
return (HsInfixApp e1' e2' e3')
-- we need to fix the type to to be in the class
cNum , just for cases such as :
foo = \x - > -x
tiExpr expr@(HsNegApp e) typ = deNameContext Nothing "in the negative expression" expr $ do
e <- tcExpr e typ
addPreds [IsIn class_Num typ]
return (HsNegApp e)
-- ABS1
tiExpr expr@(HsLambda sloc ps e) typ = do
dn <- getDeName
withContext (locSimple sloc $ "in the lambda expression\n \\" ++ show (pprint (dn ps):: P.Doc) ++ " -> ...") $ do
let lam (p:ps) e (TMetaVar mv) rs = do -- ABS2
withMetaVars mv [kindArg,kindFunRet] (\ [a,b] -> a `fn` b) $ \ [a,b] -> lam (p:ps) e (a `fn` b) rs
lam (p:ps) e (TArrow s1' s2') rs = do -- ABS1
--box <- newBox Star
s1 ' ` boxyMatch ` box
(p',env) <- tcPat p s1'
localEnv env $ do
s2' <- evalType s2'
TODO poly
lam (p:ps) e t@(TAp (TAp (TMetaVar mv) s1') s2') rs = do
boxyMatch (TMetaVar mv) tArrow
(p',env) <- tcPat p s1'
localEnv env $ do
s2' <- evalType s2'
TODO poly
lam [] e typ rs = do
e' <- tcExpr e typ
return (HsLambda sloc (reverse rs) e')
lam _ _ t _ = do
t <- flattenType t
fail $ "expected a -> b, found: " ++ prettyPrintType t
lamPoly ps e s rs = do
(ts,_,s) <- skolomize s
e <- lam ps e s rs
doCoerce (ctAbs ts) e
lam ps e typ []
tiExpr (HsIf e e1 e2) typ = do
dn <- getDeName
withContext (simpleMsg $ "in the if expression\n if " ++ render (ppHsExp (dn e)) ++ "...") $ do
e <- tcExpr e tBool
e1 <- tcExpr e1 typ
e2 <- tcExpr e2 typ
return (HsIf e e1 e2)
tiExpr tuple@(HsTuple exps@(_:_)) typ = deNameContext Nothing "in the tuple" tuple $ do
( _ , exps ' ) < - tcApps ( HsCon ( toTuple ( length exps ) ) )
(_,exps') <- tcApps (HsCon (name_TupleConstructor termLevel (length exps))) exps typ
return (HsTuple exps')
tiExpr t@(HsTuple []) typ = do -- deNameContext Nothing "in the tuple" tuple $ do
tUnit `subsumes` typ
return t
-- return (HsTuple [])
( _ , exps ' ) < - tcApps ( HsCon ( toTuple ( length exps ) ) )
( _ , exps ' ) < - tcApps ( HsCon ( nameTuple TypeConstructor ( length exps ) ) )
--return (HsTuple exps')
tiExpr tuple@(HsUnboxedTuple exps) typ = deNameContext Nothing "in the unboxed tuple" tuple $ do
(_,exps') <- tcApps (HsCon (name_UnboxedTupleConstructor termLevel (length exps))) exps typ
return (HsUnboxedTuple exps')
-- special case for the empty list
tiExpr (HsList []) (TAp c v) | c == tList = do
unBox v
wrapInAsPatEnv (HsList []) (TAp c v)
-- special case for the empty list
tiExpr (HsList []) typ = do
v <- newVar kindStar
let lt = TForAll [v] ([] :=> TAp tList (TVar v))
lt `subsumes` typ
wrapInAsPatEnv (HsList []) typ
-- non empty list
tiExpr expr@(HsList exps@(_:_)) (TAp tList' v) | tList == tList' = deNameContext Nothing "in the list " expr $ do
exps' <- mapM (`tcExpr` v) exps
wrapInAsPatEnv (HsList exps') (TAp tList' v)
-- non empty list
tiExpr expr@(HsList exps@(_:_)) typ = deNameContext Nothing "in the list " expr $ do
v <- newBox kindStar
exps' <- mapM (`tcExpr` v) exps
(TAp tList v) `subsumes` typ
wrapInAsPatEnv (HsList exps') typ
tiExpr (HsParen e) typ = tcExpr e typ
tiExpr expr@(HsLet decls e) typ = deNameContext Nothing "in the let binding" expr $ do
sigEnv <- getSigEnv
let bgs = getFunDeclsBg sigEnv decls
f (bg:bgs) rs = do
(ds,env) <- tcBindGroup bg
localEnv env $ f bgs (ds ++ rs)
f [] rs = do
e' <- tcExpr e typ
return (HsLet rs e')
f bgs []
tiExpr (HsLocatedExp (Located sl e)) typ = tiExpr e typ
tiExpr e typ = fail $ "tiExpr: not implemented for: " ++ show (e,typ)
tcWheres :: [HsDecl] -> Tc ([HsDecl],TypeEnv)
tcWheres decls = do
sigEnv <- getSigEnv
let bgs = getFunDeclsBg sigEnv decls
f (bg:bgs) rs cenv = do
(ds,env) <- tcBindGroup bg
localEnv env $ f bgs (ds ++ rs) (env `mappend` cenv)
f [] rs cenv = return (rs,cenv)
f bgs [] mempty
deNameContext :: Maybe SrcLoc -> String -> HsExp -> Tc a -> Tc a
deNameContext sl desc e action = do
dn <- getDeName
let mm = maybe makeMsg locMsg sl
withContext (mm desc (render $ ppHsExp (dn e))) action
-----------------------------------------------------------------------------
-- type check implicitly typed bindings
tcAlt :: Sigma -> Sigma -> HsAlt -> Tc HsAlt
tcAlt scrutinee typ alt@(HsAlt sloc pat gAlts wheres) = do
dn <- getDeName
withContext (locMsg sloc "in the alternative" $ render $ ppHsAlt (dn alt)) $ do
scrutinee <- evalType scrutinee
(pat',env) <- tcPat pat scrutinee
localEnv env $ do
(wheres', env) <- tcWheres wheres
localEnv env $ case gAlts of
HsUnGuardedRhs e -> do
e' <- tcExpr e typ
return (HsAlt sloc pat' (HsUnGuardedRhs e') wheres')
HsGuardedRhss as -> do
gas <- mapM (tcGuardedAlt typ) as
return (HsAlt sloc pat' (HsGuardedRhss gas) wheres')
tcGuardedAlt typ gAlt@(HsComp sloc ~[HsQualifier eGuard] e) = withContext (locMsg sloc "in the guarded alternative" $ render $ ppGAlt gAlt) $ do
typ <- evalType typ
g' <- tcExpr eGuard tBool
e' <- tcExpr e typ
return (HsComp sloc [HsQualifier g'] e')
tcGuardedRhs = tcGuardedAlt
tcGuardedRhs typ gAlt@(HsGuardedRhs sloc eGuard e ) = withContext ( locMsg sloc " in the guarded alternative " $ render $ ppHsGuardedRhs gAlt ) $ do
typ < - evalType typ
g ' < - tcExpr eGuard tBool
e ' < - tcExpr e typ
return ( HsGuardedRhs sloc g ' e ' )
tcGuardedRhs typ gAlt@(HsGuardedRhs sloc eGuard e) = withContext (locMsg sloc "in the guarded alternative" $ render $ ppHsGuardedRhs gAlt) $ do
typ <- evalType typ
g' <- tcExpr eGuard tBool
e' <- tcExpr e typ
return (HsGuardedRhs sloc g' e')
-}
-- Typing Patterns
tiPat,tcPat :: HsPat -> Type -> Tc (HsPat, Map.Map Name Sigma)
tcPat p typ = withContext (makeMsg "in the pattern: " $ render $ ppHsPat p) $ do
typ <- evalType typ
tiPat p typ
tiPat (HsPVar i) typ = do
v < - newMetaVar Tau Star
--v `boxyMatch` typ
--typ `subsumes` v
typ' <- unBox typ
addToCollectedEnv (Map.singleton (toName Val i) typ')
return (HsPVar i, Map.singleton (toName Val i) typ')
tiPat pl@(HsPLit HsChar {}) typ = boxyMatch tChar typ >> return (pl,mempty)
tiPat pl@(HsPLit HsCharPrim {}) typ = boxyMatch tCharzh typ >> return (pl,mempty)
tiPat pl@(HsPLit HsString {}) typ = boxyMatch tString typ >> return (pl,mempty)
tiPat pl@(HsPLit HsInt {}) typ = do
unBox typ
addPreds [IsIn class_Num typ]
return (pl,mempty)
tiPat pl@(HsPLit HsIntPrim {}) typ = do
unBox typ
ty <- evalType typ
case ty of
TCon (Tycon n kh) | kh == kindHash -> return ()
_ -> ty `boxyMatch` (TCon (Tycon tc_Bits32 kindHash))
return (pl,mempty)
tiPat pl@(HsPLit HsFrac {}) typ = do
unBox typ
addPreds [IsIn class_Fractional typ]
return (pl,mempty)
tiPat ( HsPLit l ) = do
t < - tiLit l
typ ` subsumes ` t -- ` boxyMatch ` typ
return ( HsPLit l , Map.empty )
tiPat (HsPLit l) typ = do
t <- tiLit l
typ `subsumes` t -- `boxyMatch` typ
return (HsPLit l,Map.empty)
-}
-- this is for negative literals only
-- so the pat must be a literal
-- it is safe not to make any predicates about
-- the pat, since the type checking of the literal
-- will do this for us
tiPat (HsPNeg (HsPLit (HsInt i))) typ = tiPat (HsPLit $ HsInt (negate i)) typ
tiPat (HsPNeg (HsPLit (HsFrac i))) typ = tiPat (HsPLit $ HsFrac (negate i)) typ
tiPat (HsPNeg (HsPLit (HsIntPrim i))) typ = tiPat (HsPLit $ HsIntPrim (negate i)) typ
tiPat (HsPNeg (HsPLit (HsFloatPrim i))) typ = tiPat (HsPLit $ HsFloatPrim (negate i)) typ
tiPat (HsPNeg (HsPLit (HsDoublePrim i))) typ = tiPat (HsPLit $ HsDoublePrim (negate i)) typ
tiPat (HsPNeg pat) typ = fail $ "non-literal negative patterns are not allowed"
tiPat ( HsPNeg pat ) typ = tiPat pat
tiPat (HsPIrrPat (Located l p)) typ = do
(p,ns) <- tiPat p typ
return (HsPIrrPat (Located l p),ns)
tiPat (HsPBangPat (Located l p@HsPAsPat {})) typ = do
(p,ns) <- tiPat p typ
return (HsPBangPat (Located l p),ns)
tiPat (HsPBangPat (Located l p)) typ = do
v <- newHsVar "Bang"
tiPat (HsPBangPat (Located l (HsPAsPat v p))) typ
tiPat (HsPParen p) typ = tiPat p typ
TODO check that constructors are saturated
tiPat (HsPApp conName pats) typ = do
s <- lookupName (toName DataConstructor conName)
nn <- deconstructorInstantiate s
let f (p:pats) (a `TArrow` rs) (ps,env) = do
(np,res) <- tiPat p a
f pats rs (np:ps,env `mappend` res)
f (p:pats) rs _ = do
fail $ "constructor applied to too many arguments:" <+> show p <+> prettyPrintType rs
f [] (_ `TArrow` _) _ = do
fail "constructor not applied to enough arguments"
f [] rs (ps,env) = do
rs `subsumes` typ
unBox typ
return (HsPApp conName (reverse ps), env)
f pats nn mempty
--bs <- sequence [ newBox Star | _ <- pats ]
--s `subsumes` (foldr fn typ bs)
--pats' <- sequence [ tcPat a r | r <- bs | a <- pats ]
return ( HsPApp conName ( fsts pats ' ) , mconcat ( snds pats ' ) )
tiPat pl@(HsPList []) (TAp t v) | t == tList = do
unBox v
return (delistPats [],mempty)
tiPat pl@(HsPList []) typ = do
v <- newBox kindStar
typ ` subsumes ` TAp tList v
typ `boxyMatch` TAp tList v
return (delistPats [],mempty)
tiPat (HsPList pats@(_:_)) (TAp t v) | t == tList = do
--v <- newBox kindStar
TAp tList v ` boxyMatch ` typ
typ ` subsumes ` TAp tList v
ps <- mapM (`tcPat` v) pats
return (delistPats (fsts ps), mconcat (snds ps))
tiPat (HsPList pats@(_:_)) typ = do
v <- newBox kindStar
TAp tList v ` boxyMatch ` typ
ps <- mapM (`tcPat` v) pats
typ `boxyMatch` TAp tList v
return (delistPats (fsts ps), mconcat (snds ps))
tiPat HsPWildCard typ = do
n <- newHsVar "Wild"
typ' <- unBox typ
addToCollectedEnv (Map.singleton n typ')
return (HsPVar n, Map.singleton n typ')
tiPat (HsPAsPat i pat) typ = do
(pat',env) <- tcPat pat typ
addToCollectedEnv (Map.singleton (toName Val i) typ)
return (HsPAsPat i pat', Map.insert (toName Val i) typ env)
tiPat (HsPInfixApp pLeft conName pRight) typ = tiPat (HsPApp conName [pLeft,pRight]) typ
tiPat (HsPUnboxedTuple ps) typ = tiPat (HsPApp (name_UnboxedTupleConstructor termLevel (length ps)) ps) typ
tiPat tuple@(HsPTuple pats) typ = tiPat (HsPApp (name_TupleConstructor termLevel (length pats)) pats) typ
tiPat (HsPTypeSig _ pat qt) typ = do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
s `boxyMatch` typ
p <- tcPat pat typ
return p
tiPat p _ = error $ "tiPat: " ++ show p
delistPats ps = pl ps where
pl [] = HsPApp (dc_EmptyList) []
pl (p:xs) = HsPApp (dc_Cons) [p, pl xs]
tcBindGroup :: BindGroup -> Tc ([HsDecl], TypeEnv)
tcBindGroup (es, is) = do
let env1 = Map.fromList [(getDeclName decl, sc) | (sc,decl) <- es ]
localEnv env1 $ do
(impls, implEnv) <- tiImplGroups is
localEnv implEnv $ do
expls <- mapM tiExpl es
return (impls ++ fsts expls, mconcat (implEnv:env1:snds expls))
tiImplGroups :: [Either HsDecl [HsDecl]] -> Tc ([HsDecl], TypeEnv)
tiImplGroups [] = return ([],mempty)
tiImplGroups (Left x:xs) = do
(d,te) <- tiNonRecImpl x
(ds',te') <- localEnv te $ tiImplGroups xs
return (d:ds', te `mappend` te')
tiImplGroups (Right x:xs) = do
(ds,te) <- tiImpls x
(ds',te') <- localEnv te $ tiImplGroups xs
return (ds ++ ds', te `mappend` te')
tiNonRecImpl :: HsDecl -> Tc (HsDecl, TypeEnv)
tiNonRecImpl decl = withContext (locSimple (srcLoc decl) ("in the implicitly typed: " ++ show (getDeclName decl))) $ do
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tiimpls " ++ show (getDeclName decl)
mv <- newMetaVar Sigma kindStar
(res,ps) <- listenPreds $ tcDecl decl mv
ps' <- flattenType ps
mv' <- flattenType mv
fs <- freeMetaVarsEnv
let vss = freeMetaVars mv'
gs = vss Set.\\ fs
(mvs,ds,rs) <- splitReduce fs vss ps'
addPreds ds
mr <- flagOpt FO.MonomorphismRestriction
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tinonrecimpls quantify " ++ show (gs,rs,mv')
sc' <- if restricted mr [decl] then do
let gs' = gs Set.\\ Set.fromList (freeVars rs)
ch <- getClassHierarchy
-- liftIO $ print $ genDefaults ch fs rs
addPreds rs
quantify (Set.toList gs') [] mv'
else quantify (Set.toList gs) rs mv'
let f n s = do
let (TForAll vs _) = toSigma s
addCoerce n (ctAbs vs)
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** " ++ show n ++ " :: " ++ prettyPrintType s
return (n,s)
(n,s) <- f (getDeclName decl) sc'
let nenv = (Map.singleton n s)
addToCollectedEnv nenv
return (fst res, nenv)
tiImpls :: [HsDecl] -> Tc ([HsDecl], TypeEnv)
tiImpls [] = return ([],Map.empty)
tiImpls bs = withContext (locSimple (srcLoc bs) ("in the recursive implicitly typed: " ++ (show (map getDeclName bs)))) $ do
let names = map getDeclName bs
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tiimpls " ++ show names
ts <- sequence [newMetaVar Tau kindStar | _ <- bs]
(res,ps) <- listenPreds $
local (tcRecursiveCalls_u (Set.union $ Set.fromList names)) $
localEnv (Map.fromList [ (d,s) | d <- names | s <- ts]) $
sequence [ tcDecl d s | d <- bs | s <- ts ]
ps' <- flattenType ps
ts' <- flattenType ts
fs <- freeMetaVarsEnv
let vss = map (Set.fromList . freeVars) ts'
gs = (Set.unions vss) Set.\\ fs
(mvs,ds,rs) <- splitReduce fs (foldr1 Set.intersection vss) ps'
addPreds ds
mr <- flagOpt FO.MonomorphismRestriction
scs' <- if restricted mr bs then do
let gs' = gs Set.\\ Set.fromList (freeVars rs)
addPreds rs
quantify_n (Set.toList gs') [] ts'
else do
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tiimpls quantify " ++ show (gs,rs,ts')
quantify_n (Set.toList gs) rs ts'
let f n s = do
let (TForAll vs _) = toSigma s
addCoerce n (ctAbs vs)
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** " ++ show n ++ " :: " ++ prettyPrintType s
return (n,s)
nenv <- sequence [ f (getDeclName d) t | (d,_) <- res | t <- scs' ]
addToCollectedEnv (Map.fromList nenv)
return (fsts res, Map.fromList nenv)
tcRhs :: HsRhs -> Sigma -> Tc HsRhs
tcRhs rhs typ = case rhs of
HsUnGuardedRhs e -> do
e' <- tcExpr e typ
return (HsUnGuardedRhs e')
HsGuardedRhss as -> do
gas <- mapM (tcGuardedRhs typ) as
return (HsGuardedRhss gas)
tcMiscDecl d = withContext (locMsg (srcLoc d) "in the declaration" "") $ f d where
f spec@HsPragmaSpecialize { hsDeclSrcLoc = sloc, hsDeclName = n, hsDeclType = t } = do
withContext (locMsg sloc "in the SPECIALIZE pragma" $ show n) ans where
ans = do
kt <- getKindEnv
t <- hsTypeToType kt t
let nn = toName Val n
sc <- lookupName nn
listenPreds $ sc `subsumes` t
addRule RuleSpec { ruleUniq = hsDeclUniq spec, ruleName = nn, ruleType = t, ruleSuper = hsDeclBool spec }
return [spec]
f HsInstDecl { .. } = do
tcClassHead hsDeclClassHead
ch <- getClassHierarchy
let as = asksClassRecord ch (hsClassHead hsDeclClassHead) classAssumps
forM_ hsDeclDecls $ \d -> do
case maybeGetDeclName d of
Just n -> when (n `notElem` fsts as) $ do
addWarn InvalidDecl $ printf "Cannot declare '%s' in instance because it is not a method of class '%s'" (show n) (show $ hsClassHead hsDeclClassHead)
Nothing -> return ()
return []
f i@HsDeclDeriving {} = tcClassHead (hsDeclClassHead i)
f (HsPragmaRules rs) = do
rs' <- mapM tcRule rs
return [HsPragmaRules rs']
f fd@(HsForeignDecl _ _ n qt) = do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
addToCollectedEnv (Map.singleton (toName Val n) s)
return []
f fd@(HsForeignExport _ e n qt) = do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
addToCollectedEnv (Map.singleton (ffiExportName e) s)
return []
f _ = return []
tcClassHead cHead@HsClassHead { .. } = do
ch <- getClassHierarchy
ke <- getKindEnv
let supers = asksClassRecord ch hsClassHead classSupers
(ctx,(_,[a])) = chToClassHead ke cHead
assertEntailment ctx [ IsIn s a | s <- supers]
return []
tcRule prule@HsRule { hsRuleUniq = uniq, hsRuleFreeVars = vs, hsRuleLeftExpr = e1, hsRuleRightExpr = e2, hsRuleSrcLoc = sloc } =
withContext (locMsg sloc "in the RULES pragma" $ hsRuleString prule) ans where
ans = do
vs' <- mapM dv vs
tr <- newBox kindStar
let (vs,envs) = unzip vs'
ch <- getClassHierarchy
((e1,rs1),(e2,rs2)) <- localEnv (mconcat envs) $ do
(e1,ps1) <- listenPreds (tcExpr e1 tr)
(e2,ps2) <- listenPreds (tcExpr e2 tr)
([],rs1) <- splitPreds ch Set.empty ps1
([],rs2) <- splitPreds ch Set.empty ps2
return ((e1,rs1),(e2,rs2))
mapM_ unBox vs
vs <- flattenType vs
tr <- flattenType tr
let mvs = Set.toList $ Set.unions $ map freeMetaVars (tr:vs)
nvs <- mapM (newVar . metaKind) mvs
sequence_ [ varBind mv (TVar v) | v <- nvs | mv <- mvs ]
(rs1,rs2) <- flattenType (rs1,rs2)
ch <- getClassHierarchy
rs1 <- return $ simplify ch rs1
rs2 <- return $ simplify ch rs2
assertEntailment rs1 rs2
return prule { hsRuleLeftExpr = e1, hsRuleRightExpr = e2 }
dv (n,Nothing) = do
v <- newMetaVar Tau kindStar
let env = (Map.singleton (toName Val n) v)
addToCollectedEnv env
return (v,env)
dv (n,Just t) = do
kt <- getKindEnv
tt <- hsTypeToType kt t
let env = (Map.singleton (toName Val n) tt)
addToCollectedEnv env
return (tt,env)
tcDecl :: HsDecl -> Sigma -> Tc (HsDecl,TypeEnv)
tcDecl decl@(HsActionDecl srcLoc pat@(HsPVar v) exp) typ = withContext (declDiagnostic decl) $ do
typ <- evalType typ
(pat',env) <- tcPat pat typ
let tio = TCon (Tycon tc_IO (Kfun kindStar kindStar))
e' <- tcExpr exp (TAp tio typ)
return (decl { hsDeclPat = pat', hsDeclExp = e' }, Map.singleton (toName Val v) typ)
tcDecl decl@(HsPatBind sloc (HsPVar v) rhs wheres) typ = withContext (declDiagnostic decl) $ do
typ <- evalType typ
mainFunc <- nameOfMainFunc
when ( v == mainFunc ) $ do
tMain <- typeOfMainFunc
typ `subsumes` tMain
return ()
(wheres', env) <- tcWheres wheres
localEnv env $ do
case rhs of
HsUnGuardedRhs e -> do
e' <- tcExpr e typ
return (HsPatBind sloc (HsPVar v) (HsUnGuardedRhs e') wheres', Map.singleton (toName Val v) typ)
HsGuardedRhss as -> do
gas <- mapM (tcGuardedRhs typ) as
return (HsPatBind sloc (HsPVar v) (HsGuardedRhss gas) wheres', Map.singleton (toName Val v) typ)
tcDecl decl@(HsFunBind matches) typ = withContext (declDiagnostic decl) $ do
typ <- evalType typ
matches' <- mapM (`tcMatch` typ) matches
return (HsFunBind matches', Map.singleton (getDeclName decl) typ)
tcDecl _ _ = error "Main.tcDecl: bad."
tcMatch :: HsMatch -> Sigma -> Tc HsMatch
tcMatch (HsMatch sloc funName pats rhs wheres) typ = withContext (locMsg sloc "in" $ show funName) $ do
let lam (p:ps) (TMetaVar mv) rs = do -- ABS2
withMetaVars mv [kindArg,kindFunRet] (\ [a,b] -> a `fn` b) $ \ [a,b] -> lam (p:ps) (a `fn` b) rs
lam (p:ps) ty@(TArrow s1' s2') rs = do -- ABS1
(p',env) <- tcPat p s1'
localEnv env $ do
s2' <- evalType s2'
lamPoly ps s2' (p':rs)
lam [] typ rs = do
(wheres', env) <- tcWheres wheres
rhs <- localEnv env $ tcRhs rhs typ
return (HsMatch sloc funName (reverse rs) rhs wheres')
lam _ t _ = do
t <- flattenType t
fail $ "expected a -> b, found: " ++ prettyPrintType t
lamPoly ps s@TMetaVar {} rs = lam ps s rs
lamPoly ps s rs = do
(_,_,s) <- skolomize s
lam ps s rs
typ <- evalType typ
res <- lam pats typ []
return res
typeOfMainFunc :: Tc Type
typeOfMainFunc = do
a <- newMetaVar Tau kindStar
-- a <- newMetaVar Tau kindStar
-- a <- Tvar `fmap` newVar kindStar
return $ tAp (TCon (Tycon tc_IO (Kfun kindStar kindStar))) a
nameOfMainFunc :: Tc Name
nameOfMainFunc = fmap (parseName Val . maybe "Main.main" snd . optMainFunc) getOptions
declDiagnostic :: (HsDecl) -> Diagnostic
declDiagnostic decl@(HsPatBind sloc (HsPVar {}) _ _) = locMsg sloc "in the declaration" $ render $ ppHsDecl decl
declDiagnostic decl@(HsPatBind sloc pat _ _) = locMsg sloc "in the pattern binding" $ render $ ppHsDecl decl
declDiagnostic decl@(HsFunBind matches) = locMsg (srcLoc decl) "in the function binding" $ render $ ppHsDecl decl
declDiagnostic _ = error "Main.declDiagnostic: bad."
tiExpl :: Expl -> Tc (HsDecl,TypeEnv)
tiExpl (sc, decl@HsForeignDecl {}) = do return (decl,Map.empty)
tiExpl (sc, decl@HsForeignExport {}) = do return (decl,Map.empty)
tiExpl (sc, decl) = withContext (locSimple (srcLoc decl) ("in the explicitly typed " ++ (render $ ppHsDecl decl))) $ do
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "** typing expl: " ++ show (getDeclName decl) ++ " " ++ prettyPrintType sc
sc <- evalFullType sc
(vs,qs,typ) <- skolomize sc
let sc' = (tForAll vs (qs :=> typ))
mp = (Map.singleton (getDeclName decl) sc')
addCoerce (getDeclName decl) (ctAbs vs)
addToCollectedEnv mp
(ret,ps) <- localEnv mp $ listenPreds (tcDecl decl typ)
ps <- flattenType ps
ch <- getClassHierarchy
env <- freeMetaVarsEnv
(_,ds,rs) <- splitReduce env (freeMetaVarsPreds qs) ps
printRule $ "endtiExpl: " <+> show env <+> show ps <+> show qs <+> show ds <+> show rs
addPreds ds
assertEntailment qs rs
return ret
restricted :: Bool -> [HsDecl] -> Bool
restricted monomorphismRestriction bs = any isHsActionDecl bs || (monomorphismRestriction && any isHsPatBind bs)
getBindGroupName ( expl , impls ) = map getDeclName ( snds expl + + concat ( rights impls ) + + lefts impls )
tiProgram :: [BindGroup] -> [HsDecl] -> Tc [HsDecl]
tiProgram bgs es = ans where
ans = do
let (pr,is) = progressStep (progressNew (length bgs + 1) 45) '.'
wdump FD.Progress $ liftIO $ do hPutStr stderr ("(" ++ is)
(r,ps) <- listenPreds $ f pr bgs []
ps <- flattenType ps
-- ch <- getClassHierarchy
( [ ] , rs ) < - splitPreds ch Set.empty ps
-- liftIO $ print ps
(_,[],rs) <- splitReduce Set.empty Set.empty ps
-- liftIO $ print rs
topDefaults rs
return r
f pr (bg:bgs) rs = do
(ds,env) <- (tcBindGroup bg)
let (pr',os) = progressStep pr '.'
wdump FD.Progress $ liftIO $ do hPutStr stderr os
localEnv env $ f pr' bgs (ds ++ rs)
f _ [] rs = do
ch <- getClassHierarchy
pdecls <- mapM tcMiscDecl es
wdump FD.Progress $ liftIO $ do hPutStr stderr ")\n"
return (rs ++ concat pdecls)
-- Typing Literals
tiLit :: HsLiteral -> Tc Tau
tiLit (HsChar _) = return tChar
tiLit (HsCharPrim _) = return tCharzh
tiLit (HsInt _) = do
v <- newVar kindStar
return $ TForAll [v] ([IsIn class_Num (TVar v)] :=> TVar v)
--(v) <- newBox Star
addPreds [ IsIn class_Num v ]
--return v
tiLit (HsFrac _) = do
v <- newVar kindStar
return $ TForAll [v] ([IsIn class_Fractional (TVar v)] :=> TVar v)
-- (v) <- newBox Star
addPreds [ IsIn class_Fractional v ]
-- return v
tiLit (HsStringPrim _) = return (TCon (Tycon tc_BitsPtr kindHash))
tiLit (HsString _) = return tString
tiLit _ = error "Main.tiLit: bad."
------------------------------------------
-- Binding analysis and program generation
------------------------------------------
create a Program structure from a list of decls and
-- type sigs. Type sigs are associated with corresponding
-- decls if they exist
getFunDeclsBg :: TypeEnv -> [HsDecl] -> [BindGroup]
getFunDeclsBg sigEnv decls = makeProgram sigEnv equationGroups where
equationGroups :: [[HsDecl]]
equationGroups = getBindGroups bindDecls getDeclName getDeclDeps
bindDecls = collectBindDecls decls
getBindGroups :: Ord name =>
[node] -> -- List of nodes
(node -> name) -> -- Function to convert nodes to a unique name
(node -> [name]) -> -- Function to return dependencies of this node
[[node]] -- Bindgroups
getBindGroups ns fn fd = map f $ stronglyConnComp [ (n, fn n, fd n) | n <- ns] where
f (AcyclicSCC x) = [x]
f (CyclicSCC xs) = xs
-- | make a program from a set of binding groups
makeProgram :: TypeEnv -> [[HsDecl]] -> [BindGroup]
makeProgram sigEnv groups = map (makeBindGroup sigEnv ) groups
| reunite decls with their signatures , if ever they had one
makeBindGroup :: TypeEnv -> [HsDecl] -> BindGroup
makeBindGroup sigEnv decls = (exps, f impls) where
(exps, impls) = makeBindGroup' sigEnv decls
enames = map (getDeclName . snd) exps
f xs = map g $ stronglyConnComp [ (x, getDeclName x,[ d | d <- getDeclDeps x, d `notElem` enames]) | x <- xs]
g (AcyclicSCC x) = Left x
g (CyclicSCC xs) = Right xs
makeBindGroup' _ [] = ([], [])
makeBindGroup' sigEnv (d:ds) = case Map.lookup funName sigEnv of
Nothing -> (restExpls, d:restImpls)
Just scheme -> ((scheme, d):restExpls, restImpls)
where
funName = getDeclName d
(restExpls, restImpls) = makeBindGroup' sigEnv ds
collectBindDecls :: [HsDecl] -> [HsDecl]
collectBindDecls = filter isBindDecl where
isBindDecl :: HsDecl -> Bool
isBindDecl HsActionDecl {} = True
isBindDecl HsPatBind {} = True
isBindDecl HsFunBind {} = True
isBindDecl _ = False
| null | https://raw.githubusercontent.com/csabahruska/jhc-components/a7dace481d017f5a83fbfc062bdd2d099133adf1/jhc-frontend/src/FrontEnd/Tc/Main.hs | haskell | we do not further sub-divide the implicitly typed declarations in
a binding group.
fall through if the type isn't arrowy enough (will produce type error)
fall through if this is a recursive call to oneself
the fall through case
GEN1
(_,False,_) <- unbox t
I know this looks weird but it appears to be correct
e1 :: b
e2 :: a -> b -> c
e1 e2 :: a -> c
we need to fix the type to to be in the class
ABS1
ABS2
ABS1
box <- newBox Star
deNameContext Nothing "in the tuple" tuple $ do
return (HsTuple [])
return (HsTuple exps')
special case for the empty list
special case for the empty list
non empty list
non empty list
---------------------------------------------------------------------------
type check implicitly typed bindings
Typing Patterns
v `boxyMatch` typ
typ `subsumes` v
` boxyMatch ` typ
`boxyMatch` typ
this is for negative literals only
so the pat must be a literal
it is safe not to make any predicates about
the pat, since the type checking of the literal
will do this for us
bs <- sequence [ newBox Star | _ <- pats ]
s `subsumes` (foldr fn typ bs)
pats' <- sequence [ tcPat a r | r <- bs | a <- pats ]
v <- newBox kindStar
liftIO $ print $ genDefaults ch fs rs
ABS2
ABS1
a <- newMetaVar Tau kindStar
a <- Tvar `fmap` newVar kindStar
ch <- getClassHierarchy
liftIO $ print ps
liftIO $ print rs
Typing Literals
(v) <- newBox Star
return v
(v) <- newBox Star
return v
----------------------------------------
Binding analysis and program generation
----------------------------------------
type sigs. Type sigs are associated with corresponding
decls if they exist
List of nodes
Function to convert nodes to a unique name
Function to return dependencies of this node
Bindgroups
| make a program from a set of binding groups | module FrontEnd.Tc.Main (tiExpr, tiProgram, makeProgram, isTypePlaceholder ) where
import Control.Monad.Reader
import Control.Monad.Writer
import Data.Graph(stronglyConnComp, SCC(..))
import System.IO(hPutStr,stderr)
import Text.Printf
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Text.PrettyPrint.HughesPJ as P
import Doc.DocLike
import Doc.PPrint as PPrint
import FrontEnd.Class
import FrontEnd.DeclsDepends(getDeclDeps)
import FrontEnd.Diagnostic
import FrontEnd.HsPretty
import FrontEnd.HsSyn
import FrontEnd.KindInfer
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import FrontEnd.Tc.Class
import FrontEnd.Tc.Kind
import FrontEnd.Tc.Monad hiding(listenPreds)
import FrontEnd.Tc.Type
import FrontEnd.Tc.Unify
import FrontEnd.Warning
import GenUtil
import Name.Names
import Name.VConsts
import Options
import Support.FreeVars
import Util.Progress
import qualified FlagDump as FD
import qualified FlagOpts as FO
listenPreds = listenSolvePreds
type Expl = (Sigma, HsDecl)
TODO : this is different than the " Typing Haskell in Haskell " paper
type BindGroup = ([Expl], [Either HsDecl [HsDecl]])
tpretty vv = prettyPrintType vv
tppretty vv = parens (tpretty vv)
tcKnownApp e coerce vname as typ = do
sc <- lookupName vname
let (_,_,rt) = fromType sc
if (length . fst $ fromTArrow rt) < length as then tcApps' e as typ else do
(ts,rt) <- freshInstance Sigma sc
e' <- if coerce then doCoerce (ctAp ts) e else return e
addCoerce nname ( ctAp ts )
let f (TArrow x y) (a:as) = do
a <- tcExprPoly a x
y <- evalType y
(as,fc) <- f y as
return (a:as,fc)
f lt [] = do
fc <- lt `subsumes` typ
return ([],fc)
f _ _ = error "Main.tcKnownApp: bad."
(nas,CTId) <- f rt as
return (e',nas)
tcApps e@(HsVar v) as typ = do
let vname = toName Val v
let = toName n
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "tcApps: " ++ (show vname)
rc <- asks tcRecursiveCalls
if (vname `Set.member` rc) then tcApps' e as typ else do
tcKnownApp e True vname as typ
tcApps e@(HsCon v) as typ = do
(e,nname) <- wrapInAsPat e
let vname = toName DataConstructor v
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "tcApps: " ++ (show nname ++ "@" ++ show vname)
addToCollectedEnv (Map.singleton nname typ)
tcKnownApp e False vname as typ
tcApps e as typ = tcApps' e as typ
tcApps' e as typ = do
printRule $ "tcApps': " ++ (show e)
bs <- sequence [ newBox kindArg | _ <- as ]
e' <- tcExpr e (foldr fn typ bs)
as' <- sequence [ tcExprPoly a r | r <- bs | a <- as ]
return (e',as')
tcApp e1 e2 typ = do
(e1,[e2]) <- tcApps e1 [e2] typ
return (e1,e2)
tiExprPoly,tcExprPoly :: HsExp -> Type -> Tc HsExp
tcExprPoly e t = do
t <- evalType t
printRule $ "tiExprPoly " ++ tppretty t <+> show e
tiExprPoly e t
GEN2
(ts,_,t) <- skolomize t
e <- tcExpr e t
doCoerce (ctAbs ts) e
doCoerce :: CoerceTerm -> HsExp -> Tc HsExp
doCoerce CTId e = return e
doCoerce ct e = do
(e',n) <- wrapInAsPat e
addCoerce n ct
return e'
wrapInAsPat :: HsExp -> Tc (HsExp,Name)
wrapInAsPat e = do
n <- newHsVar "As"
return (HsAsPat n e, n)
wrapInAsPatEnv :: HsExp -> Type -> Tc HsExp
wrapInAsPatEnv e typ = do
(ne,ap) <- wrapInAsPat e
addToCollectedEnv (Map.singleton ap typ)
return ne
newHsVar ns = do
nn <- newUniq
return $ toName Val (ns ++ "@","tmp" ++ show nn)
isTypePlaceholder :: HsName -> Bool
isTypePlaceholder (getModule -> Just m) = m `elem` [toModule "Wild@",toModule "As@"]
isTypePlaceholder _ = False
tiExpr,tcExpr :: HsExp -> Type -> Tc HsExp
tcExpr e t = do
t <- evalType t
e <- tiExpr e t
return e
tiExpr (HsVar v) typ = do
sc <- lookupName (toName Val v)
f <- sc `subsumes` typ
rc <- asks tcRecursiveCalls
if (toName Val v `Set.member` rc) then do
(e',n) <- wrapInAsPat (HsVar v)
tell mempty { outKnots = [(n,toName Val v)] }
return e'
else do
doCoerce f (HsVar v)
tiExpr (HsCase e alts) typ = do
dn <- getDeName
withContext (simpleMsg $ "in the case expression\n case " ++ render (ppHsExp $ dn e) ++ " of ...") $ do
scrutinee <- newBox kindFunRet
e' <- tcExpr e scrutinee
alts' <- mapM (tcAlt scrutinee typ) alts
wrapInAsPatEnv (HsCase e' alts') typ
tiExpr (HsCon conName) typ = do
sc <- lookupName (toName DataConstructor conName)
sc `subsumes` typ
wrapInAsPatEnv (HsCon conName) typ
tiExpr (HsLit l@(HsIntPrim _)) typ = do
unBox typ
ty <- evalType typ
case ty of
TCon (Tycon n kh) | kh == kindHash -> return ()
_ -> ty `boxyMatch` (TCon (Tycon tc_Bits32 kindHash))
wrapInAsPatEnv (HsLit l) ty
tiExpr (HsLit l@(HsInt _)) typ = do
t <- tiLit l
t `subsumes` typ
wrapInAsPatEnv (HsLit l) typ
tiExpr err@HsError {} typ = do
unBox typ
wrapInAsPatEnv err typ
tiExpr (HsLit l) typ = do
t <- tiLit l
t `subsumes` typ
return (HsLit l)
tiExpr (HsAsPat n e) typ = do
e <- tcExpr e typ
typ < - flattenType typ
addToCollectedEnv (Map.singleton (toName Val n) typ)
return (HsAsPat n e)
comb LET - S and VAR
tiExpr expr@(HsExpTypeSig sloc e qt) typ =
deNameContext (Just sloc) "in the annotated expression" expr $ do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
s `subsumes` typ
e' <- tcExpr e typ
return (HsExpTypeSig sloc e' qt)
tiExpr (HsLeftSection e1 e2) typ = do
(e1,e2) <- tcApp e1 e2 typ
return (HsLeftSection e1 e2)
(: [ ] ) \x - > x : [ ] ` fn `
tiExpr (HsRightSection e1 e2) typ = do
arg <- newBox kindArg
arg2 <- newBox kindArg
ret <- newBox kindFunRet
e1 <- tcExpr e1 arg2
e2 <- tcExpr e2 (arg `fn` (arg2 `fn` ret))
(arg `fn` ret) `subsumes` typ
return (HsRightSection e1 e2)
tiExpr expr@HsApp {} typ = deNameContext Nothing "in the application" (backToApp h as) $ do
(h,as) <- tcApps h as typ
return $ backToApp h as
where
backToApp h as = foldl HsApp h as
(h,as) = fromHsApp expr
fromHsApp t = f t [] where
f (HsApp a b) rs = f a (b:rs)
f t rs = (t,rs)
tiExpr expr@(HsInfixApp e1 e2 e3) typ = deNameContext Nothing "in the infix application" expr $ do
(e2',[e1',e3']) <- tcApps e2 [e1,e3] typ
return (HsInfixApp e1' e2' e3')
cNum , just for cases such as :
foo = \x - > -x
tiExpr expr@(HsNegApp e) typ = deNameContext Nothing "in the negative expression" expr $ do
e <- tcExpr e typ
addPreds [IsIn class_Num typ]
return (HsNegApp e)
tiExpr expr@(HsLambda sloc ps e) typ = do
dn <- getDeName
withContext (locSimple sloc $ "in the lambda expression\n \\" ++ show (pprint (dn ps):: P.Doc) ++ " -> ...") $ do
withMetaVars mv [kindArg,kindFunRet] (\ [a,b] -> a `fn` b) $ \ [a,b] -> lam (p:ps) e (a `fn` b) rs
s1 ' ` boxyMatch ` box
(p',env) <- tcPat p s1'
localEnv env $ do
s2' <- evalType s2'
TODO poly
lam (p:ps) e t@(TAp (TAp (TMetaVar mv) s1') s2') rs = do
boxyMatch (TMetaVar mv) tArrow
(p',env) <- tcPat p s1'
localEnv env $ do
s2' <- evalType s2'
TODO poly
lam [] e typ rs = do
e' <- tcExpr e typ
return (HsLambda sloc (reverse rs) e')
lam _ _ t _ = do
t <- flattenType t
fail $ "expected a -> b, found: " ++ prettyPrintType t
lamPoly ps e s rs = do
(ts,_,s) <- skolomize s
e <- lam ps e s rs
doCoerce (ctAbs ts) e
lam ps e typ []
tiExpr (HsIf e e1 e2) typ = do
dn <- getDeName
withContext (simpleMsg $ "in the if expression\n if " ++ render (ppHsExp (dn e)) ++ "...") $ do
e <- tcExpr e tBool
e1 <- tcExpr e1 typ
e2 <- tcExpr e2 typ
return (HsIf e e1 e2)
tiExpr tuple@(HsTuple exps@(_:_)) typ = deNameContext Nothing "in the tuple" tuple $ do
( _ , exps ' ) < - tcApps ( HsCon ( toTuple ( length exps ) ) )
(_,exps') <- tcApps (HsCon (name_TupleConstructor termLevel (length exps))) exps typ
return (HsTuple exps')
tUnit `subsumes` typ
return t
( _ , exps ' ) < - tcApps ( HsCon ( toTuple ( length exps ) ) )
( _ , exps ' ) < - tcApps ( HsCon ( nameTuple TypeConstructor ( length exps ) ) )
tiExpr tuple@(HsUnboxedTuple exps) typ = deNameContext Nothing "in the unboxed tuple" tuple $ do
(_,exps') <- tcApps (HsCon (name_UnboxedTupleConstructor termLevel (length exps))) exps typ
return (HsUnboxedTuple exps')
tiExpr (HsList []) (TAp c v) | c == tList = do
unBox v
wrapInAsPatEnv (HsList []) (TAp c v)
tiExpr (HsList []) typ = do
v <- newVar kindStar
let lt = TForAll [v] ([] :=> TAp tList (TVar v))
lt `subsumes` typ
wrapInAsPatEnv (HsList []) typ
tiExpr expr@(HsList exps@(_:_)) (TAp tList' v) | tList == tList' = deNameContext Nothing "in the list " expr $ do
exps' <- mapM (`tcExpr` v) exps
wrapInAsPatEnv (HsList exps') (TAp tList' v)
tiExpr expr@(HsList exps@(_:_)) typ = deNameContext Nothing "in the list " expr $ do
v <- newBox kindStar
exps' <- mapM (`tcExpr` v) exps
(TAp tList v) `subsumes` typ
wrapInAsPatEnv (HsList exps') typ
tiExpr (HsParen e) typ = tcExpr e typ
tiExpr expr@(HsLet decls e) typ = deNameContext Nothing "in the let binding" expr $ do
sigEnv <- getSigEnv
let bgs = getFunDeclsBg sigEnv decls
f (bg:bgs) rs = do
(ds,env) <- tcBindGroup bg
localEnv env $ f bgs (ds ++ rs)
f [] rs = do
e' <- tcExpr e typ
return (HsLet rs e')
f bgs []
tiExpr (HsLocatedExp (Located sl e)) typ = tiExpr e typ
tiExpr e typ = fail $ "tiExpr: not implemented for: " ++ show (e,typ)
tcWheres :: [HsDecl] -> Tc ([HsDecl],TypeEnv)
tcWheres decls = do
sigEnv <- getSigEnv
let bgs = getFunDeclsBg sigEnv decls
f (bg:bgs) rs cenv = do
(ds,env) <- tcBindGroup bg
localEnv env $ f bgs (ds ++ rs) (env `mappend` cenv)
f [] rs cenv = return (rs,cenv)
f bgs [] mempty
deNameContext :: Maybe SrcLoc -> String -> HsExp -> Tc a -> Tc a
deNameContext sl desc e action = do
dn <- getDeName
let mm = maybe makeMsg locMsg sl
withContext (mm desc (render $ ppHsExp (dn e))) action
tcAlt :: Sigma -> Sigma -> HsAlt -> Tc HsAlt
tcAlt scrutinee typ alt@(HsAlt sloc pat gAlts wheres) = do
dn <- getDeName
withContext (locMsg sloc "in the alternative" $ render $ ppHsAlt (dn alt)) $ do
scrutinee <- evalType scrutinee
(pat',env) <- tcPat pat scrutinee
localEnv env $ do
(wheres', env) <- tcWheres wheres
localEnv env $ case gAlts of
HsUnGuardedRhs e -> do
e' <- tcExpr e typ
return (HsAlt sloc pat' (HsUnGuardedRhs e') wheres')
HsGuardedRhss as -> do
gas <- mapM (tcGuardedAlt typ) as
return (HsAlt sloc pat' (HsGuardedRhss gas) wheres')
tcGuardedAlt typ gAlt@(HsComp sloc ~[HsQualifier eGuard] e) = withContext (locMsg sloc "in the guarded alternative" $ render $ ppGAlt gAlt) $ do
typ <- evalType typ
g' <- tcExpr eGuard tBool
e' <- tcExpr e typ
return (HsComp sloc [HsQualifier g'] e')
tcGuardedRhs = tcGuardedAlt
tcGuardedRhs typ gAlt@(HsGuardedRhs sloc eGuard e ) = withContext ( locMsg sloc " in the guarded alternative " $ render $ ppHsGuardedRhs gAlt ) $ do
typ < - evalType typ
g ' < - tcExpr eGuard tBool
e ' < - tcExpr e typ
return ( HsGuardedRhs sloc g ' e ' )
tcGuardedRhs typ gAlt@(HsGuardedRhs sloc eGuard e) = withContext (locMsg sloc "in the guarded alternative" $ render $ ppHsGuardedRhs gAlt) $ do
typ <- evalType typ
g' <- tcExpr eGuard tBool
e' <- tcExpr e typ
return (HsGuardedRhs sloc g' e')
-}
tiPat,tcPat :: HsPat -> Type -> Tc (HsPat, Map.Map Name Sigma)
tcPat p typ = withContext (makeMsg "in the pattern: " $ render $ ppHsPat p) $ do
typ <- evalType typ
tiPat p typ
tiPat (HsPVar i) typ = do
v < - newMetaVar Tau Star
typ' <- unBox typ
addToCollectedEnv (Map.singleton (toName Val i) typ')
return (HsPVar i, Map.singleton (toName Val i) typ')
tiPat pl@(HsPLit HsChar {}) typ = boxyMatch tChar typ >> return (pl,mempty)
tiPat pl@(HsPLit HsCharPrim {}) typ = boxyMatch tCharzh typ >> return (pl,mempty)
tiPat pl@(HsPLit HsString {}) typ = boxyMatch tString typ >> return (pl,mempty)
tiPat pl@(HsPLit HsInt {}) typ = do
unBox typ
addPreds [IsIn class_Num typ]
return (pl,mempty)
tiPat pl@(HsPLit HsIntPrim {}) typ = do
unBox typ
ty <- evalType typ
case ty of
TCon (Tycon n kh) | kh == kindHash -> return ()
_ -> ty `boxyMatch` (TCon (Tycon tc_Bits32 kindHash))
return (pl,mempty)
tiPat pl@(HsPLit HsFrac {}) typ = do
unBox typ
addPreds [IsIn class_Fractional typ]
return (pl,mempty)
tiPat ( HsPLit l ) = do
t < - tiLit l
return ( HsPLit l , Map.empty )
tiPat (HsPLit l) typ = do
t <- tiLit l
return (HsPLit l,Map.empty)
-}
tiPat (HsPNeg (HsPLit (HsInt i))) typ = tiPat (HsPLit $ HsInt (negate i)) typ
tiPat (HsPNeg (HsPLit (HsFrac i))) typ = tiPat (HsPLit $ HsFrac (negate i)) typ
tiPat (HsPNeg (HsPLit (HsIntPrim i))) typ = tiPat (HsPLit $ HsIntPrim (negate i)) typ
tiPat (HsPNeg (HsPLit (HsFloatPrim i))) typ = tiPat (HsPLit $ HsFloatPrim (negate i)) typ
tiPat (HsPNeg (HsPLit (HsDoublePrim i))) typ = tiPat (HsPLit $ HsDoublePrim (negate i)) typ
tiPat (HsPNeg pat) typ = fail $ "non-literal negative patterns are not allowed"
tiPat ( HsPNeg pat ) typ = tiPat pat
tiPat (HsPIrrPat (Located l p)) typ = do
(p,ns) <- tiPat p typ
return (HsPIrrPat (Located l p),ns)
tiPat (HsPBangPat (Located l p@HsPAsPat {})) typ = do
(p,ns) <- tiPat p typ
return (HsPBangPat (Located l p),ns)
tiPat (HsPBangPat (Located l p)) typ = do
v <- newHsVar "Bang"
tiPat (HsPBangPat (Located l (HsPAsPat v p))) typ
tiPat (HsPParen p) typ = tiPat p typ
TODO check that constructors are saturated
tiPat (HsPApp conName pats) typ = do
s <- lookupName (toName DataConstructor conName)
nn <- deconstructorInstantiate s
let f (p:pats) (a `TArrow` rs) (ps,env) = do
(np,res) <- tiPat p a
f pats rs (np:ps,env `mappend` res)
f (p:pats) rs _ = do
fail $ "constructor applied to too many arguments:" <+> show p <+> prettyPrintType rs
f [] (_ `TArrow` _) _ = do
fail "constructor not applied to enough arguments"
f [] rs (ps,env) = do
rs `subsumes` typ
unBox typ
return (HsPApp conName (reverse ps), env)
f pats nn mempty
return ( HsPApp conName ( fsts pats ' ) , mconcat ( snds pats ' ) )
tiPat pl@(HsPList []) (TAp t v) | t == tList = do
unBox v
return (delistPats [],mempty)
tiPat pl@(HsPList []) typ = do
v <- newBox kindStar
typ ` subsumes ` TAp tList v
typ `boxyMatch` TAp tList v
return (delistPats [],mempty)
tiPat (HsPList pats@(_:_)) (TAp t v) | t == tList = do
TAp tList v ` boxyMatch ` typ
typ ` subsumes ` TAp tList v
ps <- mapM (`tcPat` v) pats
return (delistPats (fsts ps), mconcat (snds ps))
tiPat (HsPList pats@(_:_)) typ = do
v <- newBox kindStar
TAp tList v ` boxyMatch ` typ
ps <- mapM (`tcPat` v) pats
typ `boxyMatch` TAp tList v
return (delistPats (fsts ps), mconcat (snds ps))
tiPat HsPWildCard typ = do
n <- newHsVar "Wild"
typ' <- unBox typ
addToCollectedEnv (Map.singleton n typ')
return (HsPVar n, Map.singleton n typ')
tiPat (HsPAsPat i pat) typ = do
(pat',env) <- tcPat pat typ
addToCollectedEnv (Map.singleton (toName Val i) typ)
return (HsPAsPat i pat', Map.insert (toName Val i) typ env)
tiPat (HsPInfixApp pLeft conName pRight) typ = tiPat (HsPApp conName [pLeft,pRight]) typ
tiPat (HsPUnboxedTuple ps) typ = tiPat (HsPApp (name_UnboxedTupleConstructor termLevel (length ps)) ps) typ
tiPat tuple@(HsPTuple pats) typ = tiPat (HsPApp (name_TupleConstructor termLevel (length pats)) pats) typ
tiPat (HsPTypeSig _ pat qt) typ = do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
s `boxyMatch` typ
p <- tcPat pat typ
return p
tiPat p _ = error $ "tiPat: " ++ show p
delistPats ps = pl ps where
pl [] = HsPApp (dc_EmptyList) []
pl (p:xs) = HsPApp (dc_Cons) [p, pl xs]
tcBindGroup :: BindGroup -> Tc ([HsDecl], TypeEnv)
tcBindGroup (es, is) = do
let env1 = Map.fromList [(getDeclName decl, sc) | (sc,decl) <- es ]
localEnv env1 $ do
(impls, implEnv) <- tiImplGroups is
localEnv implEnv $ do
expls <- mapM tiExpl es
return (impls ++ fsts expls, mconcat (implEnv:env1:snds expls))
tiImplGroups :: [Either HsDecl [HsDecl]] -> Tc ([HsDecl], TypeEnv)
tiImplGroups [] = return ([],mempty)
tiImplGroups (Left x:xs) = do
(d,te) <- tiNonRecImpl x
(ds',te') <- localEnv te $ tiImplGroups xs
return (d:ds', te `mappend` te')
tiImplGroups (Right x:xs) = do
(ds,te) <- tiImpls x
(ds',te') <- localEnv te $ tiImplGroups xs
return (ds ++ ds', te `mappend` te')
tiNonRecImpl :: HsDecl -> Tc (HsDecl, TypeEnv)
tiNonRecImpl decl = withContext (locSimple (srcLoc decl) ("in the implicitly typed: " ++ show (getDeclName decl))) $ do
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tiimpls " ++ show (getDeclName decl)
mv <- newMetaVar Sigma kindStar
(res,ps) <- listenPreds $ tcDecl decl mv
ps' <- flattenType ps
mv' <- flattenType mv
fs <- freeMetaVarsEnv
let vss = freeMetaVars mv'
gs = vss Set.\\ fs
(mvs,ds,rs) <- splitReduce fs vss ps'
addPreds ds
mr <- flagOpt FO.MonomorphismRestriction
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tinonrecimpls quantify " ++ show (gs,rs,mv')
sc' <- if restricted mr [decl] then do
let gs' = gs Set.\\ Set.fromList (freeVars rs)
ch <- getClassHierarchy
addPreds rs
quantify (Set.toList gs') [] mv'
else quantify (Set.toList gs) rs mv'
let f n s = do
let (TForAll vs _) = toSigma s
addCoerce n (ctAbs vs)
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** " ++ show n ++ " :: " ++ prettyPrintType s
return (n,s)
(n,s) <- f (getDeclName decl) sc'
let nenv = (Map.singleton n s)
addToCollectedEnv nenv
return (fst res, nenv)
tiImpls :: [HsDecl] -> Tc ([HsDecl], TypeEnv)
tiImpls [] = return ([],Map.empty)
tiImpls bs = withContext (locSimple (srcLoc bs) ("in the recursive implicitly typed: " ++ (show (map getDeclName bs)))) $ do
let names = map getDeclName bs
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tiimpls " ++ show names
ts <- sequence [newMetaVar Tau kindStar | _ <- bs]
(res,ps) <- listenPreds $
local (tcRecursiveCalls_u (Set.union $ Set.fromList names)) $
localEnv (Map.fromList [ (d,s) | d <- names | s <- ts]) $
sequence [ tcDecl d s | d <- bs | s <- ts ]
ps' <- flattenType ps
ts' <- flattenType ts
fs <- freeMetaVarsEnv
let vss = map (Set.fromList . freeVars) ts'
gs = (Set.unions vss) Set.\\ fs
(mvs,ds,rs) <- splitReduce fs (foldr1 Set.intersection vss) ps'
addPreds ds
mr <- flagOpt FO.MonomorphismRestriction
scs' <- if restricted mr bs then do
let gs' = gs Set.\\ Set.fromList (freeVars rs)
addPreds rs
quantify_n (Set.toList gs') [] ts'
else do
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** tiimpls quantify " ++ show (gs,rs,ts')
quantify_n (Set.toList gs) rs ts'
let f n s = do
let (TForAll vs _) = toSigma s
addCoerce n (ctAbs vs)
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "*** " ++ show n ++ " :: " ++ prettyPrintType s
return (n,s)
nenv <- sequence [ f (getDeclName d) t | (d,_) <- res | t <- scs' ]
addToCollectedEnv (Map.fromList nenv)
return (fsts res, Map.fromList nenv)
tcRhs :: HsRhs -> Sigma -> Tc HsRhs
tcRhs rhs typ = case rhs of
HsUnGuardedRhs e -> do
e' <- tcExpr e typ
return (HsUnGuardedRhs e')
HsGuardedRhss as -> do
gas <- mapM (tcGuardedRhs typ) as
return (HsGuardedRhss gas)
tcMiscDecl d = withContext (locMsg (srcLoc d) "in the declaration" "") $ f d where
f spec@HsPragmaSpecialize { hsDeclSrcLoc = sloc, hsDeclName = n, hsDeclType = t } = do
withContext (locMsg sloc "in the SPECIALIZE pragma" $ show n) ans where
ans = do
kt <- getKindEnv
t <- hsTypeToType kt t
let nn = toName Val n
sc <- lookupName nn
listenPreds $ sc `subsumes` t
addRule RuleSpec { ruleUniq = hsDeclUniq spec, ruleName = nn, ruleType = t, ruleSuper = hsDeclBool spec }
return [spec]
f HsInstDecl { .. } = do
tcClassHead hsDeclClassHead
ch <- getClassHierarchy
let as = asksClassRecord ch (hsClassHead hsDeclClassHead) classAssumps
forM_ hsDeclDecls $ \d -> do
case maybeGetDeclName d of
Just n -> when (n `notElem` fsts as) $ do
addWarn InvalidDecl $ printf "Cannot declare '%s' in instance because it is not a method of class '%s'" (show n) (show $ hsClassHead hsDeclClassHead)
Nothing -> return ()
return []
f i@HsDeclDeriving {} = tcClassHead (hsDeclClassHead i)
f (HsPragmaRules rs) = do
rs' <- mapM tcRule rs
return [HsPragmaRules rs']
f fd@(HsForeignDecl _ _ n qt) = do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
addToCollectedEnv (Map.singleton (toName Val n) s)
return []
f fd@(HsForeignExport _ e n qt) = do
kt <- getKindEnv
s <- hsQualTypeToSigma kt qt
addToCollectedEnv (Map.singleton (ffiExportName e) s)
return []
f _ = return []
tcClassHead cHead@HsClassHead { .. } = do
ch <- getClassHierarchy
ke <- getKindEnv
let supers = asksClassRecord ch hsClassHead classSupers
(ctx,(_,[a])) = chToClassHead ke cHead
assertEntailment ctx [ IsIn s a | s <- supers]
return []
tcRule prule@HsRule { hsRuleUniq = uniq, hsRuleFreeVars = vs, hsRuleLeftExpr = e1, hsRuleRightExpr = e2, hsRuleSrcLoc = sloc } =
withContext (locMsg sloc "in the RULES pragma" $ hsRuleString prule) ans where
ans = do
vs' <- mapM dv vs
tr <- newBox kindStar
let (vs,envs) = unzip vs'
ch <- getClassHierarchy
((e1,rs1),(e2,rs2)) <- localEnv (mconcat envs) $ do
(e1,ps1) <- listenPreds (tcExpr e1 tr)
(e2,ps2) <- listenPreds (tcExpr e2 tr)
([],rs1) <- splitPreds ch Set.empty ps1
([],rs2) <- splitPreds ch Set.empty ps2
return ((e1,rs1),(e2,rs2))
mapM_ unBox vs
vs <- flattenType vs
tr <- flattenType tr
let mvs = Set.toList $ Set.unions $ map freeMetaVars (tr:vs)
nvs <- mapM (newVar . metaKind) mvs
sequence_ [ varBind mv (TVar v) | v <- nvs | mv <- mvs ]
(rs1,rs2) <- flattenType (rs1,rs2)
ch <- getClassHierarchy
rs1 <- return $ simplify ch rs1
rs2 <- return $ simplify ch rs2
assertEntailment rs1 rs2
return prule { hsRuleLeftExpr = e1, hsRuleRightExpr = e2 }
dv (n,Nothing) = do
v <- newMetaVar Tau kindStar
let env = (Map.singleton (toName Val n) v)
addToCollectedEnv env
return (v,env)
dv (n,Just t) = do
kt <- getKindEnv
tt <- hsTypeToType kt t
let env = (Map.singleton (toName Val n) tt)
addToCollectedEnv env
return (tt,env)
tcDecl :: HsDecl -> Sigma -> Tc (HsDecl,TypeEnv)
tcDecl decl@(HsActionDecl srcLoc pat@(HsPVar v) exp) typ = withContext (declDiagnostic decl) $ do
typ <- evalType typ
(pat',env) <- tcPat pat typ
let tio = TCon (Tycon tc_IO (Kfun kindStar kindStar))
e' <- tcExpr exp (TAp tio typ)
return (decl { hsDeclPat = pat', hsDeclExp = e' }, Map.singleton (toName Val v) typ)
tcDecl decl@(HsPatBind sloc (HsPVar v) rhs wheres) typ = withContext (declDiagnostic decl) $ do
typ <- evalType typ
mainFunc <- nameOfMainFunc
when ( v == mainFunc ) $ do
tMain <- typeOfMainFunc
typ `subsumes` tMain
return ()
(wheres', env) <- tcWheres wheres
localEnv env $ do
case rhs of
HsUnGuardedRhs e -> do
e' <- tcExpr e typ
return (HsPatBind sloc (HsPVar v) (HsUnGuardedRhs e') wheres', Map.singleton (toName Val v) typ)
HsGuardedRhss as -> do
gas <- mapM (tcGuardedRhs typ) as
return (HsPatBind sloc (HsPVar v) (HsGuardedRhss gas) wheres', Map.singleton (toName Val v) typ)
tcDecl decl@(HsFunBind matches) typ = withContext (declDiagnostic decl) $ do
typ <- evalType typ
matches' <- mapM (`tcMatch` typ) matches
return (HsFunBind matches', Map.singleton (getDeclName decl) typ)
tcDecl _ _ = error "Main.tcDecl: bad."
tcMatch :: HsMatch -> Sigma -> Tc HsMatch
tcMatch (HsMatch sloc funName pats rhs wheres) typ = withContext (locMsg sloc "in" $ show funName) $ do
withMetaVars mv [kindArg,kindFunRet] (\ [a,b] -> a `fn` b) $ \ [a,b] -> lam (p:ps) (a `fn` b) rs
(p',env) <- tcPat p s1'
localEnv env $ do
s2' <- evalType s2'
lamPoly ps s2' (p':rs)
lam [] typ rs = do
(wheres', env) <- tcWheres wheres
rhs <- localEnv env $ tcRhs rhs typ
return (HsMatch sloc funName (reverse rs) rhs wheres')
lam _ t _ = do
t <- flattenType t
fail $ "expected a -> b, found: " ++ prettyPrintType t
lamPoly ps s@TMetaVar {} rs = lam ps s rs
lamPoly ps s rs = do
(_,_,s) <- skolomize s
lam ps s rs
typ <- evalType typ
res <- lam pats typ []
return res
typeOfMainFunc :: Tc Type
typeOfMainFunc = do
a <- newMetaVar Tau kindStar
return $ tAp (TCon (Tycon tc_IO (Kfun kindStar kindStar))) a
nameOfMainFunc :: Tc Name
nameOfMainFunc = fmap (parseName Val . maybe "Main.main" snd . optMainFunc) getOptions
declDiagnostic :: (HsDecl) -> Diagnostic
declDiagnostic decl@(HsPatBind sloc (HsPVar {}) _ _) = locMsg sloc "in the declaration" $ render $ ppHsDecl decl
declDiagnostic decl@(HsPatBind sloc pat _ _) = locMsg sloc "in the pattern binding" $ render $ ppHsDecl decl
declDiagnostic decl@(HsFunBind matches) = locMsg (srcLoc decl) "in the function binding" $ render $ ppHsDecl decl
declDiagnostic _ = error "Main.declDiagnostic: bad."
tiExpl :: Expl -> Tc (HsDecl,TypeEnv)
tiExpl (sc, decl@HsForeignDecl {}) = do return (decl,Map.empty)
tiExpl (sc, decl@HsForeignExport {}) = do return (decl,Map.empty)
tiExpl (sc, decl) = withContext (locSimple (srcLoc decl) ("in the explicitly typed " ++ (render $ ppHsDecl decl))) $ do
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "** typing expl: " ++ show (getDeclName decl) ++ " " ++ prettyPrintType sc
sc <- evalFullType sc
(vs,qs,typ) <- skolomize sc
let sc' = (tForAll vs (qs :=> typ))
mp = (Map.singleton (getDeclName decl) sc')
addCoerce (getDeclName decl) (ctAbs vs)
addToCollectedEnv mp
(ret,ps) <- localEnv mp $ listenPreds (tcDecl decl typ)
ps <- flattenType ps
ch <- getClassHierarchy
env <- freeMetaVarsEnv
(_,ds,rs) <- splitReduce env (freeMetaVarsPreds qs) ps
printRule $ "endtiExpl: " <+> show env <+> show ps <+> show qs <+> show ds <+> show rs
addPreds ds
assertEntailment qs rs
return ret
restricted :: Bool -> [HsDecl] -> Bool
restricted monomorphismRestriction bs = any isHsActionDecl bs || (monomorphismRestriction && any isHsPatBind bs)
getBindGroupName ( expl , impls ) = map getDeclName ( snds expl + + concat ( rights impls ) + + lefts impls )
tiProgram :: [BindGroup] -> [HsDecl] -> Tc [HsDecl]
tiProgram bgs es = ans where
ans = do
let (pr,is) = progressStep (progressNew (length bgs + 1) 45) '.'
wdump FD.Progress $ liftIO $ do hPutStr stderr ("(" ++ is)
(r,ps) <- listenPreds $ f pr bgs []
ps <- flattenType ps
( [ ] , rs ) < - splitPreds ch Set.empty ps
(_,[],rs) <- splitReduce Set.empty Set.empty ps
topDefaults rs
return r
f pr (bg:bgs) rs = do
(ds,env) <- (tcBindGroup bg)
let (pr',os) = progressStep pr '.'
wdump FD.Progress $ liftIO $ do hPutStr stderr os
localEnv env $ f pr' bgs (ds ++ rs)
f _ [] rs = do
ch <- getClassHierarchy
pdecls <- mapM tcMiscDecl es
wdump FD.Progress $ liftIO $ do hPutStr stderr ")\n"
return (rs ++ concat pdecls)
tiLit :: HsLiteral -> Tc Tau
tiLit (HsChar _) = return tChar
tiLit (HsCharPrim _) = return tCharzh
tiLit (HsInt _) = do
v <- newVar kindStar
return $ TForAll [v] ([IsIn class_Num (TVar v)] :=> TVar v)
addPreds [ IsIn class_Num v ]
tiLit (HsFrac _) = do
v <- newVar kindStar
return $ TForAll [v] ([IsIn class_Fractional (TVar v)] :=> TVar v)
addPreds [ IsIn class_Fractional v ]
tiLit (HsStringPrim _) = return (TCon (Tycon tc_BitsPtr kindHash))
tiLit (HsString _) = return tString
tiLit _ = error "Main.tiLit: bad."
create a Program structure from a list of decls and
getFunDeclsBg :: TypeEnv -> [HsDecl] -> [BindGroup]
getFunDeclsBg sigEnv decls = makeProgram sigEnv equationGroups where
equationGroups :: [[HsDecl]]
equationGroups = getBindGroups bindDecls getDeclName getDeclDeps
bindDecls = collectBindDecls decls
getBindGroups :: Ord name =>
getBindGroups ns fn fd = map f $ stronglyConnComp [ (n, fn n, fd n) | n <- ns] where
f (AcyclicSCC x) = [x]
f (CyclicSCC xs) = xs
makeProgram :: TypeEnv -> [[HsDecl]] -> [BindGroup]
makeProgram sigEnv groups = map (makeBindGroup sigEnv ) groups
| reunite decls with their signatures , if ever they had one
makeBindGroup :: TypeEnv -> [HsDecl] -> BindGroup
makeBindGroup sigEnv decls = (exps, f impls) where
(exps, impls) = makeBindGroup' sigEnv decls
enames = map (getDeclName . snd) exps
f xs = map g $ stronglyConnComp [ (x, getDeclName x,[ d | d <- getDeclDeps x, d `notElem` enames]) | x <- xs]
g (AcyclicSCC x) = Left x
g (CyclicSCC xs) = Right xs
makeBindGroup' _ [] = ([], [])
makeBindGroup' sigEnv (d:ds) = case Map.lookup funName sigEnv of
Nothing -> (restExpls, d:restImpls)
Just scheme -> ((scheme, d):restExpls, restImpls)
where
funName = getDeclName d
(restExpls, restImpls) = makeBindGroup' sigEnv ds
collectBindDecls :: [HsDecl] -> [HsDecl]
collectBindDecls = filter isBindDecl where
isBindDecl :: HsDecl -> Bool
isBindDecl HsActionDecl {} = True
isBindDecl HsPatBind {} = True
isBindDecl HsFunBind {} = True
isBindDecl _ = False
|
7d345d622835651605279407c248727e6152e81c27eb0fdfbedc0dda3b9f18a4 | alesaccoia/festival_flinger | darpa_phones.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;
Centre for Speech Technology Research ; ;
University of Edinburgh , UK ; ;
Copyright ( c ) 1999 ; ;
All Rights Reserved . ; ;
;;; ;;
;;; Permission is hereby granted, free of charge, to use and distribute ;;
;;; this software and its documentation without restriction, including ;;
;;; without limitation the rights to use, copy, modify, merge, publish, ;;
;;; distribute, sublicense, and/or sell copies of this work, and to ;;
;;; permit persons to whom this work is furnished to do so, subject to ;;
;;; the following conditions: ;;
;;; 1. The code must retain the above copyright notice, this list of ;;
;;; conditions and the following disclaimer. ;;
;;; 2. Any modifications must be clearly marked as such. ;;
3 . Original authors ' names are not deleted . ; ;
;;; 4. The authors' names are not used to endorse or promote products ;;
;;; derived from this software without specific prior written ;;
;;; permission. ;;
;;; ;;
;;; THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK ;;
;;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;
;;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;
;;; SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE ;;
;;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ;
;;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;
;;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;
;;; THIS SOFTWARE. ;;
;;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Author :
Date : April 1999
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; (yet another) darpa definition
;;;
(require 'phoneset)
(set! darpa_fs (cadr
(defPhoneSet
darpa
(Features
(vowel (syllabic + -)
(length long short diphthong schwa)
(height high mid low)
(front front mid back)
(round + -))
(consonant
(syllabic + -)
(manner stop affricate fricative approximant nasal)
(place alveolar dental labial palatal velar)
(voicing + -))
(silence
(syllabic -)))
(Phones
;; type syl length height front round
(aa vowel + long low back -)
(ae vowel + short low front -)
(ah vowel + short mid mid -)
(ao vowel + long low front +)
(aw vowel + diphthong low mid -)
(ax vowel + schwa mid mid -)
(axr vowel + schwa mid mid -)
(ay vowel + diphthong low mid -)
(eh vowel + short mid front -)
(ey vowel + diphthong mid front -)
(ih vowel + short high front -)
(iy vowel + long high front -)
(ow vowel + diphthong mid back +)
(oy vowel + diphthong mid back +)
(uh vowel + short high back +)
(uw vowel + long high back +)
;; type syl manner place voicing
(b consonant - stop labial +)
(ch consonant - affricate alveolar -)
(d consonant - stop alveolar +)
(dh consonant - fricative dental +)
(dx consonant - stop alveolar +)
(el consonant + approximant alveolar +)
(em consonant + nasal labial +)
(en consonant + stop alveolar +)
(er consonant + approximant alveolar +)
(f consonant - fricative labial -)
(g consonant - stop velar +)
(hh consonant - fricative velar -)
(jh consonant - affricate alveolar +)
(k consonant - stop velar -)
(l consonant - approximant alveolar +)
(m consonant - nasal labial +)
(n consonant - nasal alveolar +)
(nx consonant - nasal alveolar +)
(ng consonant - nasal velar +)
(p consonant - stop labial -)
(r consonant - approximant alveolar +)
(s consonant - fricative alveolar -)
(sh consonant - fricative palatal -)
(t consonant - stop alveolar -)
(th consonant - fricative dental -)
(v consonant - fricative labial +)
(w consonant - approximant velar +)
(y consonant - approximant palatal +)
(z consonant - fricative alveolar +)
(zh consonant - fricative palatal +)
(pau silence -)
; (sil silence -)
))))
(provide 'darpa_phones)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/darpa_phones.scm | scheme |
;;
;
;
;
;
;;
Permission is hereby granted, free of charge, to use and distribute ;;
this software and its documentation without restriction, including ;;
without limitation the rights to use, copy, modify, merge, publish, ;;
distribute, sublicense, and/or sell copies of this work, and to ;;
permit persons to whom this work is furnished to do so, subject to ;;
the following conditions: ;;
1. The code must retain the above copyright notice, this list of ;;
conditions and the following disclaimer. ;;
2. Any modifications must be clearly marked as such. ;;
;
4. The authors' names are not used to endorse or promote products ;;
derived from this software without specific prior written ;;
permission. ;;
;;
THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK ;;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;
SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE ;;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;
;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;
THIS SOFTWARE. ;;
;;
(yet another) darpa definition
type syl length height front round
type syl manner place voicing
(sil silence -) | Author :
Date : April 1999
(require 'phoneset)
(set! darpa_fs (cadr
(defPhoneSet
darpa
(Features
(vowel (syllabic + -)
(length long short diphthong schwa)
(height high mid low)
(front front mid back)
(round + -))
(consonant
(syllabic + -)
(manner stop affricate fricative approximant nasal)
(place alveolar dental labial palatal velar)
(voicing + -))
(silence
(syllabic -)))
(Phones
(aa vowel + long low back -)
(ae vowel + short low front -)
(ah vowel + short mid mid -)
(ao vowel + long low front +)
(aw vowel + diphthong low mid -)
(ax vowel + schwa mid mid -)
(axr vowel + schwa mid mid -)
(ay vowel + diphthong low mid -)
(eh vowel + short mid front -)
(ey vowel + diphthong mid front -)
(ih vowel + short high front -)
(iy vowel + long high front -)
(ow vowel + diphthong mid back +)
(oy vowel + diphthong mid back +)
(uh vowel + short high back +)
(uw vowel + long high back +)
(b consonant - stop labial +)
(ch consonant - affricate alveolar -)
(d consonant - stop alveolar +)
(dh consonant - fricative dental +)
(dx consonant - stop alveolar +)
(el consonant + approximant alveolar +)
(em consonant + nasal labial +)
(en consonant + stop alveolar +)
(er consonant + approximant alveolar +)
(f consonant - fricative labial -)
(g consonant - stop velar +)
(hh consonant - fricative velar -)
(jh consonant - affricate alveolar +)
(k consonant - stop velar -)
(l consonant - approximant alveolar +)
(m consonant - nasal labial +)
(n consonant - nasal alveolar +)
(nx consonant - nasal alveolar +)
(ng consonant - nasal velar +)
(p consonant - stop labial -)
(r consonant - approximant alveolar +)
(s consonant - fricative alveolar -)
(sh consonant - fricative palatal -)
(t consonant - stop alveolar -)
(th consonant - fricative dental -)
(v consonant - fricative labial +)
(w consonant - approximant velar +)
(y consonant - approximant palatal +)
(z consonant - fricative alveolar +)
(zh consonant - fricative palatal +)
(pau silence -)
))))
(provide 'darpa_phones)
|
b5b0c67d37c75cc83ce30eece2b61b6c3ef262d963968c6bb44bbdd13fcb5207 | arachne-framework/factui | basic_rules.cljc | (ns factui.bench.basic-rules
(:require
#?(:cljs [factui.api :as api :include-macros true]
:clj [factui.api :as api])
#?(:clj [clara.rules :as cr]
:cljs [clara.rules :as cr :include-macros true])
[clojure.pprint :refer [pprint]]
#?(:clj [clojure.test :as t :refer [deftest is testing run-tests]]
:cljs [cljs.test :as t :refer-macros [deftest is testing run-tests]])
#?(:cljs [factui.facts :as f :refer [Datom]]
:clj [factui.facts :as f]))
#?(:clj (:import [factui.facts Datom])))
(cr/defrule person-book-topics
"If a person has read a book on a topic, they must like this topic (why not)"
[:person/has-read [{:keys [e v]}] (= e ?p) (= v ?b)]
[:book/topic [{:keys [e v]}] (= e ?b) (= v ?topic)]
=>
(api/transact! [{:db/id ?p
:person/likes ?topic}]))
(cr/defrule people-make-friends-from-books
"If two people have read the same book, they must be friends (wat)"
[:person/has-read [{:keys [e v]}] (= e ?p1) (= v ?b)]
[:person/has-read [{:keys [e v]}] (= e ?p2) (= v ?b)]
[:test (not= ?p1 ?p2)]
=>
(api/transact! [{:db/id ?p1
:person/friends ?p2}]))
(cr/defrule friends-are-always-mutual
"Friends are mutual"
[:person/friends [{:keys [e v]}] (= e ?p1) (= v ?p2)]
[:not [:person/friends [{:keys [e v]}] (= e ?p2) (= v ?p1)]]
=>
(api/transact! [{:db/id ?p2
:person/friends ?p1}]))
(cr/defquery person-by-id
[:?pid]
[:person/id [{:keys [e v]}] (= e ?p) (= v ?pid)]
[:person/name [{:keys [e v]}] (= e ?p) (= v ?name)]
[:person/age [{:keys [e v]}] (= e ?p) (= v ?age)]
[:person/friends [{:keys [e v]}] (= e ?p) (= v ?friends)]
[:person/likes [{:keys [e v]}] (= e ?p) (= v ?likes)]
[:person/reading [{:keys [e v]}] (= e ?p) (= v ?reading)]
[:person/has-read [{:keys [e v]}] (= e ?p) (= v ?has-read)])
(cr/defquery book-by-id
[:?pid]
[:book/id [{:keys [e v]}] (= e ?p) (= v ?pid)]
[:book/title [{:keys [e v]}] (= e ?p) (= v ?title)]
[:book/topic [{:keys [e v]}] (= e ?p) (= v ?topic)])
(cr/defquery person-friends
[]
[:person/friends [{:keys [e v]}] (= e ?p) (= v ?p2)])
(cr/defquery dum-datoms
[]
[?d <- Datom])
| null | https://raw.githubusercontent.com/arachne-framework/factui/818ea79d7f84dfe80ad23ade0b6b2ed5bb1c6287/test/factui/bench/basic_rules.cljc | clojure | (ns factui.bench.basic-rules
(:require
#?(:cljs [factui.api :as api :include-macros true]
:clj [factui.api :as api])
#?(:clj [clara.rules :as cr]
:cljs [clara.rules :as cr :include-macros true])
[clojure.pprint :refer [pprint]]
#?(:clj [clojure.test :as t :refer [deftest is testing run-tests]]
:cljs [cljs.test :as t :refer-macros [deftest is testing run-tests]])
#?(:cljs [factui.facts :as f :refer [Datom]]
:clj [factui.facts :as f]))
#?(:clj (:import [factui.facts Datom])))
(cr/defrule person-book-topics
"If a person has read a book on a topic, they must like this topic (why not)"
[:person/has-read [{:keys [e v]}] (= e ?p) (= v ?b)]
[:book/topic [{:keys [e v]}] (= e ?b) (= v ?topic)]
=>
(api/transact! [{:db/id ?p
:person/likes ?topic}]))
(cr/defrule people-make-friends-from-books
"If two people have read the same book, they must be friends (wat)"
[:person/has-read [{:keys [e v]}] (= e ?p1) (= v ?b)]
[:person/has-read [{:keys [e v]}] (= e ?p2) (= v ?b)]
[:test (not= ?p1 ?p2)]
=>
(api/transact! [{:db/id ?p1
:person/friends ?p2}]))
(cr/defrule friends-are-always-mutual
"Friends are mutual"
[:person/friends [{:keys [e v]}] (= e ?p1) (= v ?p2)]
[:not [:person/friends [{:keys [e v]}] (= e ?p2) (= v ?p1)]]
=>
(api/transact! [{:db/id ?p2
:person/friends ?p1}]))
(cr/defquery person-by-id
[:?pid]
[:person/id [{:keys [e v]}] (= e ?p) (= v ?pid)]
[:person/name [{:keys [e v]}] (= e ?p) (= v ?name)]
[:person/age [{:keys [e v]}] (= e ?p) (= v ?age)]
[:person/friends [{:keys [e v]}] (= e ?p) (= v ?friends)]
[:person/likes [{:keys [e v]}] (= e ?p) (= v ?likes)]
[:person/reading [{:keys [e v]}] (= e ?p) (= v ?reading)]
[:person/has-read [{:keys [e v]}] (= e ?p) (= v ?has-read)])
(cr/defquery book-by-id
[:?pid]
[:book/id [{:keys [e v]}] (= e ?p) (= v ?pid)]
[:book/title [{:keys [e v]}] (= e ?p) (= v ?title)]
[:book/topic [{:keys [e v]}] (= e ?p) (= v ?topic)])
(cr/defquery person-friends
[]
[:person/friends [{:keys [e v]}] (= e ?p) (= v ?p2)])
(cr/defquery dum-datoms
[]
[?d <- Datom])
|
|
8569a13dd57d88dc37e7671f0d2c3a0d31accd37d9a74fbbc08ab16680a4442d | janestreet/core_kernel | total_map.ml | include Total_map_intf
module Stable = struct
open Core.Core_stable
module V1 = struct
type ('key, 'a, 'cmp, 'enum) t = ('key, 'a, 'cmp) Map.V1.t
module type S =
Stable_V1_S with type ('key, 'a, 'cmp, 'enum) total_map := ('key, 'a, 'cmp, 'enum) t
module type For_include_functor =
Stable_V1_For_include_functor
with type ('key, 'a, 'cmp, 'enum) Total_map.total_map := ('key, 'a, 'cmp, 'enum) t
module Make_with_witnesses (Key : Key_with_witnesses) = struct
module Key = struct
include Key
include Comparable.V1.Make (Key)
end
type comparator_witness = Key.comparator_witness
type enumeration_witness = Key.enumeration_witness
type nonrec 'a t = 'a Key.Map.t [@@deriving bin_io, sexp, compare]
end
module Make_for_include_functor_with_witnesses (Key : Key_with_witnesses) = struct
module Total_map = Make_with_witnesses (Key)
end
end
end
open! Core
open! Import
module Enumeration = Enumeration
type ('key, 'a, 'cmp, 'enum) t = ('key, 'a, 'cmp, 'enum) Stable.V1.t
module type S_plain =
S_plain with type ('key, 'a, 'cmp, 'enum) total_map := ('key, 'a, 'cmp, 'enum) t
module type For_include_functor_plain =
For_include_functor_plain
with type ('key, 'a, 'cmp, 'enum) Total_map.total_map := ('key, 'a, 'cmp, 'enum) t
module type S = S with type ('key, 'a, 'cmp, 'enum) total_map := ('key, 'a, 'cmp, 'enum) t
module type For_include_functor =
For_include_functor
with type ('key, 'a, 'cmp, 'enum) Total_map.total_map := ('key, 'a, 'cmp, 'enum) t
let to_map t = t
let key_not_in_enumeration t key =
failwiths
~here:[%here]
"Key was not provided in the enumeration given to [Total_map.Make]"
key
(Map.comparator t).sexp_of_t
;;
let change t k ~f =
Map.update t k ~f:(function
| Some x -> f x
| None -> key_not_in_enumeration t k)
;;
let find t k =
try Map.find_exn t k with
| _ -> key_not_in_enumeration t k
;;
let pair t1 t2 key = function
| `Left _ -> key_not_in_enumeration t2 key
| `Right _ -> key_not_in_enumeration t1 key
| `Both (v1, v2) -> v1, v2
;;
let iter2 t1 t2 ~f =
Map.iter2 t1 t2 ~f:(fun ~key ~data ->
let v1, v2 = pair t1 t2 key data in
f ~key v1 v2)
;;
let fold2 t1 t2 ~init ~f =
Map.fold2 t1 t2 ~init ~f:(fun ~key ~data acc ->
let v1, v2 = pair t1 t2 key data in
f ~key v1 v2 acc)
;;
let map2 t1 t2 ~f =
Map.merge t1 t2 ~f:(fun ~key v ->
let v1, v2 = pair t1 t2 key v in
Some (f v1 v2))
;;
let set t key data = Map.set t ~key ~data
module Sequence3 (A : Applicative.S3) = struct
let sequence t =
List.fold
(Map.to_alist t)
~init:(A.return (Map.Using_comparator.empty ~comparator:(Map.comparator t)))
~f:(fun acc (key, data) ->
A.map2 acc data ~f:(fun acc data -> Map.set acc ~key ~data))
;;
end
module Sequence2 (A : Applicative.S2) = Sequence3 (Applicative.S2_to_S3 (A))
module Sequence (A : Applicative) = Sequence2 (Applicative.S_to_S2 (A))
include struct
open Map
let data = data
let for_all = for_all
let for_alli = for_alli
let iter = iter
let iter_keys = iter_keys
let iteri = iteri
let map = map
let mapi = mapi
let fold = fold
let fold_right = fold_right
let to_alist = to_alist
end
module Make_plain_with_witnesses (Key : Key_plain_with_witnesses) = struct
module Key = struct
include Key
include Comparable.Make_plain_using_comparator (Key)
end
type comparator_witness = Key.comparator_witness
type enumeration_witness = Key.enumeration_witness
type 'a t = 'a Key.Map.t [@@deriving sexp_of, compare, equal]
let create f =
List.fold Key.all ~init:Key.Map.empty ~f:(fun t key -> Map.set t ~key ~data:(f key))
;;
let create_const x = create (fun _ -> x)
include Applicative.Make (struct
type nonrec 'a t = 'a t
let return = create_const
let apply t1 t2 = map2 t1 t2 ~f:(fun f x -> f x)
let map = `Custom map
end)
end
module Make_for_include_functor_plain_with_witnesses (Key : Key_plain_with_witnesses) =
struct
module Total_map = Make_plain_with_witnesses (Key)
end
module Make_with_witnesses (Key : Key_with_witnesses) = struct
module Key = struct
include Key
include Comparable.Make_binable_using_comparator (Key)
end
type 'a t = 'a Key.Map.t [@@deriving sexp, bin_io, compare, equal]
include (
Make_plain_with_witnesses
(Key) :
module type of Make_plain_with_witnesses (Key)
with module Key := Key
with type 'a t := 'a t)
let all_set = Key.Set.of_list Key.all
let validate_map_from_serialization map =
let keys = Map.key_set map in
let keys_minus_all = Set.diff keys all_set in
let all_minus_keys = Set.diff all_set keys in
Validate.maybe_raise
(Validate.of_list
[ (if Set.is_empty keys_minus_all
then Validate.pass
else
Validate.fails
"map from serialization has keys not provided in the enumeration"
keys_minus_all
[%sexp_of: Key.Set.t])
; (if Set.is_empty all_minus_keys
then Validate.pass
else
Validate.fails
"map from serialization doesn't have keys it should have"
all_minus_keys
[%sexp_of: Key.Set.t])
])
;;
let t_of_sexp a_of_sexp sexp =
let t = t_of_sexp a_of_sexp sexp in
validate_map_from_serialization t;
t
;;
include Bin_prot.Utils.Make_binable1_without_uuid [@alert "-legacy"] (struct
type nonrec 'a t = 'a t
module Binable = Key.Map
let to_binable x = x
let of_binable x =
validate_map_from_serialization x;
x
;;
end)
end
module Make_for_include_functor_with_witnesses (Key : Key_with_witnesses) = struct
module Total_map = Make_with_witnesses (Key)
end
module Make_plain (Key : Key_plain) = Make_plain_with_witnesses (struct
include Key
include Comparable.Make_plain (Key)
include Enumeration.Make (Key)
end)
module Make_for_include_functor_plain (Key : Key_plain) = struct
module Total_map = Make_plain (Key)
end
module Make (Key : Key) = Make_with_witnesses (struct
include Key
include Comparable.Make_binable (Key)
include Enumeration.Make (Key)
end)
module Make_for_include_functor (Key : Key) = struct
module Total_map = Make (Key)
end
| null | https://raw.githubusercontent.com/janestreet/core_kernel/597299d11c2ee99f219592d89a5890f8f0b6dfe7/total_map/src/total_map.ml | ocaml | include Total_map_intf
module Stable = struct
open Core.Core_stable
module V1 = struct
type ('key, 'a, 'cmp, 'enum) t = ('key, 'a, 'cmp) Map.V1.t
module type S =
Stable_V1_S with type ('key, 'a, 'cmp, 'enum) total_map := ('key, 'a, 'cmp, 'enum) t
module type For_include_functor =
Stable_V1_For_include_functor
with type ('key, 'a, 'cmp, 'enum) Total_map.total_map := ('key, 'a, 'cmp, 'enum) t
module Make_with_witnesses (Key : Key_with_witnesses) = struct
module Key = struct
include Key
include Comparable.V1.Make (Key)
end
type comparator_witness = Key.comparator_witness
type enumeration_witness = Key.enumeration_witness
type nonrec 'a t = 'a Key.Map.t [@@deriving bin_io, sexp, compare]
end
module Make_for_include_functor_with_witnesses (Key : Key_with_witnesses) = struct
module Total_map = Make_with_witnesses (Key)
end
end
end
open! Core
open! Import
module Enumeration = Enumeration
type ('key, 'a, 'cmp, 'enum) t = ('key, 'a, 'cmp, 'enum) Stable.V1.t
module type S_plain =
S_plain with type ('key, 'a, 'cmp, 'enum) total_map := ('key, 'a, 'cmp, 'enum) t
module type For_include_functor_plain =
For_include_functor_plain
with type ('key, 'a, 'cmp, 'enum) Total_map.total_map := ('key, 'a, 'cmp, 'enum) t
module type S = S with type ('key, 'a, 'cmp, 'enum) total_map := ('key, 'a, 'cmp, 'enum) t
module type For_include_functor =
For_include_functor
with type ('key, 'a, 'cmp, 'enum) Total_map.total_map := ('key, 'a, 'cmp, 'enum) t
let to_map t = t
let key_not_in_enumeration t key =
failwiths
~here:[%here]
"Key was not provided in the enumeration given to [Total_map.Make]"
key
(Map.comparator t).sexp_of_t
;;
let change t k ~f =
Map.update t k ~f:(function
| Some x -> f x
| None -> key_not_in_enumeration t k)
;;
let find t k =
try Map.find_exn t k with
| _ -> key_not_in_enumeration t k
;;
let pair t1 t2 key = function
| `Left _ -> key_not_in_enumeration t2 key
| `Right _ -> key_not_in_enumeration t1 key
| `Both (v1, v2) -> v1, v2
;;
let iter2 t1 t2 ~f =
Map.iter2 t1 t2 ~f:(fun ~key ~data ->
let v1, v2 = pair t1 t2 key data in
f ~key v1 v2)
;;
let fold2 t1 t2 ~init ~f =
Map.fold2 t1 t2 ~init ~f:(fun ~key ~data acc ->
let v1, v2 = pair t1 t2 key data in
f ~key v1 v2 acc)
;;
let map2 t1 t2 ~f =
Map.merge t1 t2 ~f:(fun ~key v ->
let v1, v2 = pair t1 t2 key v in
Some (f v1 v2))
;;
let set t key data = Map.set t ~key ~data
module Sequence3 (A : Applicative.S3) = struct
let sequence t =
List.fold
(Map.to_alist t)
~init:(A.return (Map.Using_comparator.empty ~comparator:(Map.comparator t)))
~f:(fun acc (key, data) ->
A.map2 acc data ~f:(fun acc data -> Map.set acc ~key ~data))
;;
end
module Sequence2 (A : Applicative.S2) = Sequence3 (Applicative.S2_to_S3 (A))
module Sequence (A : Applicative) = Sequence2 (Applicative.S_to_S2 (A))
include struct
open Map
let data = data
let for_all = for_all
let for_alli = for_alli
let iter = iter
let iter_keys = iter_keys
let iteri = iteri
let map = map
let mapi = mapi
let fold = fold
let fold_right = fold_right
let to_alist = to_alist
end
module Make_plain_with_witnesses (Key : Key_plain_with_witnesses) = struct
module Key = struct
include Key
include Comparable.Make_plain_using_comparator (Key)
end
type comparator_witness = Key.comparator_witness
type enumeration_witness = Key.enumeration_witness
type 'a t = 'a Key.Map.t [@@deriving sexp_of, compare, equal]
let create f =
List.fold Key.all ~init:Key.Map.empty ~f:(fun t key -> Map.set t ~key ~data:(f key))
;;
let create_const x = create (fun _ -> x)
include Applicative.Make (struct
type nonrec 'a t = 'a t
let return = create_const
let apply t1 t2 = map2 t1 t2 ~f:(fun f x -> f x)
let map = `Custom map
end)
end
(* Variant of [Make_plain_with_witnesses] shaped for [include functor]:
   the result is wrapped in a [Total_map] submodule. *)
module Make_for_include_functor_plain_with_witnesses (Key : Key_plain_with_witnesses) =
struct
  module Total_map = Make_plain_with_witnesses (Key)
end
(* Like [Make_plain_with_witnesses], but additionally binable: both the
   sexp and bin_io readers validate that a deserialised map is total,
   i.e. binds exactly the keys of [Key.all]. *)
module Make_with_witnesses (Key : Key_with_witnesses) = struct
  module Key = struct
    include Key
    include Comparable.Make_binable_using_comparator (Key)
  end

  type 'a t = 'a Key.Map.t [@@deriving sexp, bin_io, compare, equal]

  (* Reuse the plain functor's implementation, destructively
     substituting [Key] and ['a t] so no duplicate types escape. *)
  include (
    Make_plain_with_witnesses
      (Key) :
      module type of Make_plain_with_witnesses (Key)
    with module Key := Key
    with type 'a t := 'a t)

  let all_set = Key.Set.of_list Key.all

  (* Raise if [map]'s key set differs from the full enumeration, in
     either direction. *)
  let validate_map_from_serialization map =
    let keys = Map.key_set map in
    let keys_minus_all = Set.diff keys all_set in
    let all_minus_keys = Set.diff all_set keys in
    Validate.maybe_raise
      (Validate.of_list
         [ (if Set.is_empty keys_minus_all
            then Validate.pass
            else
              Validate.fails
                "map from serialization has keys not provided in the enumeration"
                keys_minus_all
                [%sexp_of: Key.Set.t])
         ; (if Set.is_empty all_minus_keys
            then Validate.pass
            else
              Validate.fails
                "map from serialization doesn't have keys it should have"
                all_minus_keys
                [%sexp_of: Key.Set.t])
         ])
  ;;

  (* Shadow the derived [t_of_sexp] with a validating wrapper. *)
  let t_of_sexp a_of_sexp sexp =
    let t = t_of_sexp a_of_sexp sexp in
    validate_map_from_serialization t;
    t
  ;;

  (* bin_io goes through [Key.Map]'s binable instance, validating
     totality on read. *)
  include Bin_prot.Utils.Make_binable1_without_uuid [@alert "-legacy"] (struct
      type nonrec 'a t = 'a t

      module Binable = Key.Map

      let to_binable x = x

      let of_binable x =
        validate_map_from_serialization x;
        x
      ;;
    end)
end
(* [include functor]-shaped wrapper around [Make_with_witnesses]. *)
module Make_for_include_functor_with_witnesses (Key : Key_with_witnesses) = struct
  module Total_map = Make_with_witnesses (Key)
end
(* Derive the comparator and enumeration witnesses automatically, then
   build a plain total map. *)
module Make_plain (Key : Key_plain) = Make_plain_with_witnesses (struct
    include Key
    include Comparable.Make_plain (Key)
    include Enumeration.Make (Key)
  end)
(* [include functor]-shaped wrapper around [Make_plain]. *)
module Make_for_include_functor_plain (Key : Key_plain) = struct
  module Total_map = Make_plain (Key)
end
(* Derive witnesses automatically, then build a binable total map. *)
module Make (Key : Key) = Make_with_witnesses (struct
    include Key
    include Comparable.Make_binable (Key)
    include Enumeration.Make (Key)
  end)
(* [include functor]-shaped wrapper around [Make]. *)
module Make_for_include_functor (Key : Key) = struct
  module Total_map = Make (Key)
end
|
|
7a83a08ce70b5f6b5c5f733a247d9aab34b8887e183fff98ad25359f8476da0f | goblint/analyzer | topDown.ml | * Top down solver using box / warrow . This is superseded by td3 but kept as a simple version without term & space ( & incremental ) .
open Prelude
open Analyses
open Constraints
open Messages
module WP =
functor (S:EqConstrSys) ->
functor (HM:Hashtbl.S with type key = S.v) ->
struct
open SolverBox.Warrow (S.Dom)
include Generic.SolverStats (S) (HM)
(* Sets of solver variables (used for influence bookkeeping). *)
module VS = Set.Make (S.Var)

(* Pairs of variables, (side-effect origin, target), usable as a
   hashtable key via the derived [eq]/[hash]. *)
module P =
struct
  type t = S.Var.t * S.Var.t [@@deriving eq, hash]
end

module HPM = Hashtbl.Make (P)
(* [solve st vs] computes a post-solution for the variables [vs] given
   the initial side effects [st], by recursive top-down iteration.
   Widening points are boxed (widen/narrow); side-effected variables
   are accumulated in [sidevs] and re-solved round by round.
   Fix: two comment lines in this block had lost their [(* *)] markers
   during extraction (the disabled destabilize trace and the TODO in
   [eq]); they are restored as comments so the code parses again. *)
let solve st vs =
  let stable = HM.create 10 in
  let infl = HM.create 10 in (* y -> xs *)
  let set = HM.create 10 in (* y -> xs *)
  let sidevs = HM.create 10 in (* side-effected variables *)
  let called = HM.create 10 in
  let rho = HM.create 10 in
  let rho' = HPM.create 10 in (* x,y -> d *)
  let wpoint = HM.create 10 in
  let add_infl y x =
    if tracing then trace "sol2" "add_infl %a %a\n" S.Var.pretty_trace y S.Var.pretty_trace x;
    HM.replace infl y (VS.add x (try HM.find infl y with Not_found -> VS.empty))
  in
  let add_set x y d =
    HM.replace set y (VS.add x (try HM.find set y with Not_found -> VS.empty));
    HPM.add rho' (x,y) d;
    HM.replace sidevs y ()
  in
  (* a variable is a side-effect target iff something wrote to it *)
  let is_side x = HM.mem set x in
  let rec destabilize x =
    (* if tracing then trace "sol2" "destabilize %a\n" S.Var.pretty_trace x; *)
    let w = HM.find_default infl x VS.empty in
    HM.replace infl x VS.empty;
    VS.iter (fun y ->
        HM.remove stable y;
        if tracing then trace "sol2" "destabilize %a\n" S.Var.pretty_trace y;
        if not (HM.mem called y) then destabilize y) w
  and solve x =
    if tracing then trace "sol2" "solve %a, called: %b, stable: %b\n" S.Var.pretty_trace x (HM.mem called x) (HM.mem stable x);
    if not (HM.mem called x || HM.mem stable x) then (
      HM.replace stable x ();
      HM.replace called x ();
      let wpx = HM.mem wpoint x in
      init x;
      let old = HM.find rho x in
      let tmp' = eq x (eval x) (side x) in
      let tmp = S.Dom.join tmp' (sides x) in
      if tracing then trace "sol" "Var: %a\n" S.Var.pretty_trace x ;
      if tracing then trace "sol" "Contrib:%a\n" S.Dom.pretty tmp;
      (* side-effected variables are only widened; widening points are boxed *)
      let tmp = if is_side x then S.Dom.widen old (S.Dom.join old tmp) else if wpx then box old tmp else tmp in
      HM.remove called x;
      if not (S.Dom.equal old tmp) then (
        if tracing then if is_side x then trace "sol2" "solve side: old = %a, tmp = %a, widen = %a\n" S.Dom.pretty old S.Dom.pretty tmp S.Dom.pretty (S.Dom.widen old (S.Dom.join old tmp));
        update_var_event x old tmp;
        if tracing then trace "sol" "New Value:%a\n\n" S.Dom.pretty tmp;
        if tracing then trace "sol2" "new value for %a (wpx: %b, is_side: %b) is %a. Old value was %a\n" S.Var.pretty_trace x (HM.mem rho x) (is_side x) S.Dom.pretty tmp S.Dom.pretty old;
        HM.replace rho x tmp;
        destabilize x;
      );
      (solve[@tailcall]) x
    )
  and eq x get set =
    if tracing then trace "sol2" "eq %a\n" S.Var.pretty_trace x;
    eval_rhs_event x;
    match S.system x with
    | None -> S.Dom.bot ()
    | Some f ->
      let effects = ref Set.empty in
      let sidef y d =
        if not (Set.mem y !effects) then (
          (* TODO needed? tests also work without this... *)
          effects := Set.add y !effects
        );
        set y d
      in
      f get sidef
  and eval x y =
    if tracing then trace "sol2" "eval %a ## %a\n" S.Var.pretty_trace x S.Var.pretty_trace y;
    get_var_event y;
    if HM.mem called y || S.system y = None then HM.replace wpoint y ();
    solve y;
    add_infl y x;
    HM.find rho y
  and sides x =
    (* join all side effects written into x so far *)
    let w = try HM.find set x with Not_found -> VS.empty in
    let d = Enum.fold (fun d y -> let r = try S.Dom.join d (HPM.find rho' (y,x)) with Not_found -> d in if tracing then trace "sol2" "sides: side %a from %a: %a\n" S.Var.pretty_trace x S.Var.pretty_trace y S.Dom.pretty r; r) (S.Dom.bot ()) (VS.enum w) in
    if tracing then trace "sol2" "sides %a ## %a\n" S.Var.pretty_trace x S.Dom.pretty d;
    d
  and side x y d =
    if tracing then trace "sol2" "side %a ## %a (wpx: %b) ## %a\n" S.Var.pretty_trace x S.Var.pretty_trace y (HM.mem rho y) S.Dom.pretty d;
    let old = try HPM.find rho' (x,y) with Not_found -> S.Dom.bot () in
    if not (S.Dom.equal old d) then (
      add_set x y (S.Dom.join old d);
      HM.remove stable y;
      solve y;
    )
  and init x =
    if tracing then trace "sol2" "init %a\n" S.Var.pretty_trace x;
    if not (HM.mem rho x) then (
      new_var_event x;
      HM.replace rho x (S.Dom.bot ())
    )
  in
  let set_start (x,d) =
    if tracing then trace "sol2" "set_start %a ## %a\n" S.Var.pretty_trace x S.Dom.pretty d;
    init x;
    add_set x x d;
    solve x
  in
  start_event ();
  List.iter set_start st;
  List.iter init vs;
  List.iter solve vs;
  let keys h = HM.fold (fun k _ a -> k::a) h [] in
  let n = ref 1 in
  (* iterate until there are no more new side-effects *)
  let rec solve_sidevs () =
    let gs = keys sidevs in
    HM.clear sidevs;
    if gs <> [] then (
      if tracing then trace "sol2" "Round %d: %d side-effected variables to solve\n" !n (List.length gs);
      incr n;
      List.iter solve gs;
      List.iter solve vs;
      solve_sidevs ()
    )
  in
  solve_sidevs ();
  stop_event ();
  HM.clear stable;
  HM.clear infl ;
  HM.clear set ;
  HPM.clear rho' ;
  rho
end
(* Register this solver under the name "topdown". *)
let _ =
  Selector.add_solver ("topdown", (module EqIncrSolverFromEqSolver (WP)));
| null | https://raw.githubusercontent.com/goblint/analyzer/b9f527f6d3066d548c5551716366fbed8acecf35/src/solvers/topDown.ml | ocaml | y -> xs
y -> xs
side-effected variables
x,y -> d
iterate until there are no more new side-effects | * Top down solver using box / warrow . This is superseded by td3 but kept as a simple version without term & space ( & incremental ) .
open Prelude
open Analyses
open Constraints
open Messages
module WP =
functor (S:EqConstrSys) ->
functor (HM:Hashtbl.S with type key = S.v) ->
struct
open SolverBox.Warrow (S.Dom)
include Generic.SolverStats (S) (HM)
(* Sets of solver variables (used for influence bookkeeping). *)
module VS = Set.Make (S.Var)

(* Pairs of variables, (side-effect origin, target), usable as a
   hashtable key via the derived [eq]/[hash]. *)
module P =
struct
  type t = S.Var.t * S.Var.t [@@deriving eq, hash]
end

module HPM = Hashtbl.Make (P)
(* [solve st vs] computes a post-solution for the variables [vs] given
   the initial side effects [st], by recursive top-down iteration.
   Fixes for extraction damage in this copy: the bindings for [infl],
   [set], [sidevs] and [rho'] were dropped (they are referenced by
   [add_infl]/[add_set]/[sides]/[solve_sidevs] below but never
   defined), and two comment lines had lost their [(* *)] markers;
   both are restored. *)
let solve st vs =
  let stable = HM.create 10 in
  let infl = HM.create 10 in (* y -> xs *)
  let set = HM.create 10 in (* y -> xs *)
  let sidevs = HM.create 10 in (* side-effected variables *)
  let called = HM.create 10 in
  let rho = HM.create 10 in
  let rho' = HPM.create 10 in (* x,y -> d *)
  let wpoint = HM.create 10 in
  let add_infl y x =
    if tracing then trace "sol2" "add_infl %a %a\n" S.Var.pretty_trace y S.Var.pretty_trace x;
    HM.replace infl y (VS.add x (try HM.find infl y with Not_found -> VS.empty))
  in
  let add_set x y d =
    HM.replace set y (VS.add x (try HM.find set y with Not_found -> VS.empty));
    HPM.add rho' (x,y) d;
    HM.replace sidevs y ()
  in
  (* a variable is a side-effect target iff something wrote to it *)
  let is_side x = HM.mem set x in
  let rec destabilize x =
    (* if tracing then trace "sol2" "destabilize %a\n" S.Var.pretty_trace x; *)
    let w = HM.find_default infl x VS.empty in
    HM.replace infl x VS.empty;
    VS.iter (fun y ->
        HM.remove stable y;
        if tracing then trace "sol2" "destabilize %a\n" S.Var.pretty_trace y;
        if not (HM.mem called y) then destabilize y) w
  and solve x =
    if tracing then trace "sol2" "solve %a, called: %b, stable: %b\n" S.Var.pretty_trace x (HM.mem called x) (HM.mem stable x);
    if not (HM.mem called x || HM.mem stable x) then (
      HM.replace stable x ();
      HM.replace called x ();
      let wpx = HM.mem wpoint x in
      init x;
      let old = HM.find rho x in
      let tmp' = eq x (eval x) (side x) in
      let tmp = S.Dom.join tmp' (sides x) in
      if tracing then trace "sol" "Var: %a\n" S.Var.pretty_trace x ;
      if tracing then trace "sol" "Contrib:%a\n" S.Dom.pretty tmp;
      (* side-effected variables are only widened; widening points are boxed *)
      let tmp = if is_side x then S.Dom.widen old (S.Dom.join old tmp) else if wpx then box old tmp else tmp in
      HM.remove called x;
      if not (S.Dom.equal old tmp) then (
        if tracing then if is_side x then trace "sol2" "solve side: old = %a, tmp = %a, widen = %a\n" S.Dom.pretty old S.Dom.pretty tmp S.Dom.pretty (S.Dom.widen old (S.Dom.join old tmp));
        update_var_event x old tmp;
        if tracing then trace "sol" "New Value:%a\n\n" S.Dom.pretty tmp;
        if tracing then trace "sol2" "new value for %a (wpx: %b, is_side: %b) is %a. Old value was %a\n" S.Var.pretty_trace x (HM.mem rho x) (is_side x) S.Dom.pretty tmp S.Dom.pretty old;
        HM.replace rho x tmp;
        destabilize x;
      );
      (solve[@tailcall]) x
    )
  and eq x get set =
    if tracing then trace "sol2" "eq %a\n" S.Var.pretty_trace x;
    eval_rhs_event x;
    match S.system x with
    | None -> S.Dom.bot ()
    | Some f ->
      let effects = ref Set.empty in
      let sidef y d =
        if not (Set.mem y !effects) then (
          (* TODO needed? tests also work without this... *)
          effects := Set.add y !effects
        );
        set y d
      in
      f get sidef
  and eval x y =
    if tracing then trace "sol2" "eval %a ## %a\n" S.Var.pretty_trace x S.Var.pretty_trace y;
    get_var_event y;
    if HM.mem called y || S.system y = None then HM.replace wpoint y ();
    solve y;
    add_infl y x;
    HM.find rho y
  and sides x =
    (* join all side effects written into x so far *)
    let w = try HM.find set x with Not_found -> VS.empty in
    let d = Enum.fold (fun d y -> let r = try S.Dom.join d (HPM.find rho' (y,x)) with Not_found -> d in if tracing then trace "sol2" "sides: side %a from %a: %a\n" S.Var.pretty_trace x S.Var.pretty_trace y S.Dom.pretty r; r) (S.Dom.bot ()) (VS.enum w) in
    if tracing then trace "sol2" "sides %a ## %a\n" S.Var.pretty_trace x S.Dom.pretty d;
    d
  and side x y d =
    if tracing then trace "sol2" "side %a ## %a (wpx: %b) ## %a\n" S.Var.pretty_trace x S.Var.pretty_trace y (HM.mem rho y) S.Dom.pretty d;
    let old = try HPM.find rho' (x,y) with Not_found -> S.Dom.bot () in
    if not (S.Dom.equal old d) then (
      add_set x y (S.Dom.join old d);
      HM.remove stable y;
      solve y;
    )
  and init x =
    if tracing then trace "sol2" "init %a\n" S.Var.pretty_trace x;
    if not (HM.mem rho x) then (
      new_var_event x;
      HM.replace rho x (S.Dom.bot ())
    )
  in
  let set_start (x,d) =
    if tracing then trace "sol2" "set_start %a ## %a\n" S.Var.pretty_trace x S.Dom.pretty d;
    init x;
    add_set x x d;
    solve x
  in
  start_event ();
  List.iter set_start st;
  List.iter init vs;
  List.iter solve vs;
  let keys h = HM.fold (fun k _ a -> k::a) h [] in
  let n = ref 1 in
  (* iterate until there are no more new side-effects *)
  let rec solve_sidevs () =
    let gs = keys sidevs in
    HM.clear sidevs;
    if gs <> [] then (
      if tracing then trace "sol2" "Round %d: %d side-effected variables to solve\n" !n (List.length gs);
      incr n;
      List.iter solve gs;
      List.iter solve vs;
      solve_sidevs ()
    )
  in
  solve_sidevs ();
  stop_event ();
  HM.clear stable;
  HM.clear infl ;
  HM.clear set ;
  HPM.clear rho' ;
  rho
end
(* Register this solver under the name "topdown". *)
let _ =
  Selector.add_solver ("topdown", (module EqIncrSolverFromEqSolver (WP)));
|
2eba7f3823a14fb9e9f21845cea62ee4d74fa051795e8a48c3d8259233582cd2 | avsm/ocaml-ssh | mpl_typechk.ml |
* Copyright ( c ) 2005 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* $ I d : mpl_typechk.ml , v 1.34 2006/02/16 18:44:39 avsm Exp $
(*
 * Copyright (c) 2005 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* $Id: mpl_typechk.ml,v 1.34 2006/02/16 18:44:39 avsm Exp $
*)
open Mpl_syntaxtree
open Mpl_utils
open Printf
exception Type_error of string
module L = Mpl_location
module B = Mpl_bits
(* Types for variables *)
module Var = struct
(* Integer widths supported on the wire. *)
type isz =
  |I8
  |I16
  |I32
  |I64
(* Base types for statements that target language needs support for *)
type b =
  |UInt of isz
  |String
  |Bool
  |Opaque
(* Higher level constructs *)
type value_attr = {
  mutable av_min: int option;    (* minimum permitted value *)
  mutable av_max: int option;    (* maximum permitted value *)
  mutable av_const: expr option; (* constant-folded fixed value *)
  mutable av_value: expr option; (* expression tying this field's value *)
  mutable av_variant: ((expr * string) list) option; (* match -> tag name *)
  mutable av_default: expr option; (* default when unspecified *)
  mutable av_bitvars: int list;
  mutable av_bitops: B.t list;
  mutable av_bitdummy: bool;     (* true for compiler-inserted bit padding *)
  av_bound: bool;                (* true once consumed by a classify *)
}
type array_attr = {
  mutable aa_align: int option;
}
(* Typed variable binding in the checking environment. *)
type t =
  |Packet of (string * (string list))
  |Value of (string * b * value_attr)
  |Array of (string * b * array_attr)
  |Class of string list
  |Label
(* Typed statement tree produced by the checker. *)
type x =
  | S_var of (id * expr option * t) (* id,size,type *)
  | S_class of (id * b * (expr * expr * expr option * xs) list) (* id,ty,(match,label,guard,xs) *)
  | S_array of (id * expr * xs) (* id,int size,xs *)
  | S_unit
and xs = x list
(* Fresh [Value] node with every optional attribute unset.
   [bitdummy] marks compiler-inserted bit padding fields. *)
let new_value ?(bitdummy=false) vty v =
  let attrs = {
    av_min = None; av_max = None;
    av_const = None; av_value = None;
    av_variant = None; av_default = None;
    av_bitvars = []; av_bitops = [];
    av_bitdummy = bitdummy; av_bound = false;
  } in
  Value (vty, v, attrs)
(* Fresh [Array] node with no alignment constraint yet. *)
let new_array vty v =
  Array (vty, v, {aa_align=None})
(* Debug rendering of a variable type.  [so] renders an option with
   "N" for [None]; [fn] renders the base type. *)
let to_string =
  let so fn = function |None -> "N" |Some x -> fn x in
  let soe = so string_of_expr in
  let soi = so string_of_int in
  let sov = so (fun x -> String.concat ", " (List.map (fun (a,b) ->
    sprintf "%s->%s" (string_of_expr a) b) x)) in
  let fn = function
    |UInt I8 -> "byte"
    |UInt I16 -> "uint16"
    |UInt I32 -> "uint32"
    |UInt I64 -> "uint64"
    |String -> "string"
    |Bool -> "bool"
    |Opaque -> "opaque"
  in function
  |Packet (id,args) ->
    sprintf "Packet(%s: %s)" id (String.concat "," args)
  |Value (vty,b,a) ->
    sprintf "Value(%s:%s) [Mn:%s Mx:%s C:%s Vl:%s Vx:%s BV:%s BO:%s BD:%s]" (fn b) vty
      (soi a.av_min) (soi a.av_max) (soe a.av_const) (soe a.av_value)
      (sov a.av_variant) (String.concat "," (List.map string_of_int a.av_bitvars)) ""
      (if a.av_bitdummy then "Y" else "N")
  |Array (vty,b,a) -> sprintf "Array(%s:%s) [Align:%s]" (fn b) vty (soi a.aa_align)
  |Class x -> sprintf "Class(%s)" (String.concat "" x)
  |Label -> sprintf "Label"
open Printer_utils.Printer
(* Pretty-print a typed statement list [x list] through printer [e],
   recursing into classify arms (indented) and continuing with the
   remaining statements. *)
let rec print_x e = function
  |S_var (id,exo,t) :: r->
    let s = match exo with |None -> "" |Some x -> sprintf "[%s]" (string_of_expr x) in
    e.p (sprintf "Var (%s%s): %s" id s (to_string t));
    print_x e r
  |S_class (id,idty,l) :: r ->
    e.p (sprintf "Class (%s): %s" id (to_string (new_value "xxx" idty)));
    list_iter_indent e (fun e (ex,id,guard,sts) ->
      let g = match guard with |None -> ""| Some e -> sprintf "when %s " (string_of_expr e) in
      e.p (sprintf "| %s:%s %s->" (string_of_expr ex) (string_of_expr id) g);
      print_x e sts;
    ) l;
    print_x e r
  |S_unit :: r -> e.p "Unit"; print_x e r
  |S_array (id, sz, xs) :: r ->
    e.p (sprintf "Array (%s)" id); print_x e r
  |[] -> ()
end
(* Type checking environment *)
type env = {
t_sdefs: (string, statements) Hashtbl.t;
t_tdefs: (string, (L.t * string)) Hashtbl.t;
root: Mpl_syntaxtree.packet;
statevars: (string * Var.b) list;
vars: (string * Var.t) list;
bit_accum: int;
sizes: (id, unit) Hashtbl.t;
offsets: (id, unit) Hashtbl.t;
custom: (id, unit) Hashtbl.t;
}
let log = Logger.log_verbose

(* Bind variable [id] with type [ty] in the (functional) environment. *)
let bind_var env id ty =
  {env with vars=(id,ty)::env.vars}

(* Look up a variable's type; [None] if unbound. *)
let get_var_type env id =
  assoc_opt id env.vars

(* Advance the running bit-field offset by [a] bits, modulo a byte. *)
let bind_accum env a =
  {env with bit_accum=(env.bit_accum+a) mod 8}

(* Reset the bit-field offset to a byte boundary. *)
let bind_reset_accum env =
  {env with bit_accum=0}

(* Dump the variable environment and bit accumulator (debugging). *)
let dump_env env =
  log (sprintf "[tenv] {%s} acc=%d" (String.concat ", "
    (List.map (fun (a,b) ->
      sprintf "%s:%s" a (Var.to_string b)) env.vars)) env.bit_accum)
(* Types for expressions *)
module Expr = struct
(* Types an MPL expression can have. *)
type t =
  |Int
  |String
  |Bool
let to_string = function
  |Int -> "int"
  |String -> "string"
  |Bool -> "bool"
(* Signature of a built-in function: name, return type, argument
   types. *)
type fdef = {
  fname: string;
  fret: t;
  fargs: t list;
}
let builtin_funcs = [
  {fname="offset"; fret=Int; fargs=[String]};
  {fname="sizeof"; fret=Int; fargs=[String]};
  {fname="remaining"; fret=Int; fargs=[]};
]
(* Infer the type of an expression under [env]; raises [Type_error] on
   mismatch.  [unmarshal] selects the stricter unmarshalling mode in
   which offset()/sizeof() arguments must already be bound.
   Fix: [err] previously passed [string_of_expr ex] where the format
   string expects the actual type and vice versa, printing e.g.
   "got type <expr> (<type>)"; the argument order now matches
   [typeof_statevar] below. *)
let rec typeof ?(unmarshal=false) (env:env) =
  let terr x = raise (Type_error (sprintf "expr: %s" x)) in
  (* check every expression in [l] against the expected type [ext] *)
  let tmatch (ext:t) l =
    let err ex ety =
      terr (sprintf "expecting type %s, got type %s (%s)"
        (to_string ext) (to_string ety) (string_of_expr ex)) in
    List.iter (fun x -> let tx = typeof ~unmarshal:unmarshal env x in if tx <> ext then err x tx) l
  in function
  |True -> Bool
  |False -> Bool
  |String_constant _ -> String
  |Int_constant _ -> Int
  |Range (a,b) -> tmatch Int [a;b]; Int
  |And (a,b) -> tmatch Bool [a;b]; Bool
  |Or (a,b) -> tmatch Bool [a;b]; Bool
  |Not a -> tmatch Bool [a]; Bool
  |Greater (a,b) -> tmatch Int [a;b]; Bool
  |Less (a,b) -> tmatch Int [a;b]; Bool
  |Greater_or_equal (a,b) -> tmatch Int [a;b]; Bool
  |Less_or_equal (a,b) -> tmatch Int [a;b]; Bool
  |Equals (a,b) -> tmatch Int [a;b]; Bool
  |Plus (a,b) -> tmatch Int [a;b]; Int
  |Minus (a,b) -> tmatch Int [a;b]; Int
  |Multiply (a,b) -> tmatch Int [a;b]; Int
  |Divide (a,b) -> tmatch Int [a;b]; Int
  |Function_call (nm, arg) -> begin
      let targ nm =
        terr (sprintf "Function '%s' requires argument" nm) in
      match nm,arg with
      |"offset",Some v -> begin
          (* record that v's offset is needed by the generated code *)
          Hashtbl.replace env.offsets v ();
          match get_var_type env v with
          |None when unmarshal -> terr (sprintf "Unknown variable '%s' in offset()" v)
          |None -> Int (* XXX we check values later *)
          |Some id -> Int
        end
      |"offset",None -> targ "offset"
      |"sizeof",Some v -> begin
          (* record that v's size is needed by the generated code *)
          Hashtbl.replace env.sizes v ();
          match get_var_type env v with
          |None when unmarshal -> terr (sprintf "Unknown variable '%s' in sizeof()" v)
          |None -> Int
          |Some id -> Int
        end
      |"array_length",None -> targ "array_length"
      |"array_length",Some v -> begin
          Int
        end
      |"sizeof",None -> targ "sizeof"
      |"remaining",None ->
        Int;
      |"remaining",Some _ ->
        terr ("Function 'remaining()' should not have any arguments")
      |_,_ -> terr (sprintf "Unknown function '%s'" nm)
    end
  |Identifier id -> begin
      let module V = Var in
      (* only integer-valued variables may appear in expressions *)
      match get_var_type env id with
      |Some V.Value(vty,V.UInt _,_) -> Int
      |Some x ->
        terr (sprintf "identifier %s has non-integer type %s" id (Var.to_string x))
      |None ->
        terr (sprintf "unknown variable %s" id)
    end
(* Type an expression that may only reference state variables (used by
   classify guards): no ranges and no function calls are permitted.
   Raises [Type_error] on mismatch. *)
let rec typeof_statevar (env:env) =
  let terr x = raise (Type_error (sprintf "statevar: %s" x)) in
  (* check every expression in [l] against the expected type [ext] *)
  let tmatch (ext:t) l =
    let err ex ety =
      terr (sprintf "expecting type %s, got type %s (%s)"
        (to_string ext) (to_string ety) (string_of_expr ex)) in
    List.iter (fun x -> let tx = typeof_statevar env x in if tx <> ext then err x tx) l
  in function
  |True -> Bool
  |False -> Bool
  |String_constant _ -> String
  |Int_constant _ -> Int
  |Range (a,b) -> terr "Ranges not allowed in classify guards"
  |And (a,b) -> tmatch Bool [a;b]; Bool
  |Or (a,b) -> tmatch Bool [a;b]; Bool
  |Not a -> tmatch Bool [a]; Bool
  |Greater (a,b) -> tmatch Int [a;b]; Bool
  |Less (a,b) -> tmatch Int [a;b]; Bool
  |Greater_or_equal (a,b) -> tmatch Int [a;b]; Bool
  |Less_or_equal (a,b) -> tmatch Int [a;b]; Bool
  |Equals (a,b) -> tmatch Int [a;b]; Bool
  |Plus (a,b) -> tmatch Int [a;b]; Int
  |Minus (a,b) -> tmatch Int [a;b]; Int
  |Multiply (a,b) -> tmatch Int [a;b]; Int
  |Divide (a,b) -> tmatch Int [a;b]; Int
  |Function_call (nm, arg) -> terr "Function calls not allowed in classify guards"
  |Identifier id -> begin
      let module V = Var in
      match assoc_opt id env.statevars with
      |Some (V.UInt _) -> Int
      |Some V.Bool -> Bool
      |Some _ -> terr "internal error: typeof_statevar"
      |None -> terr (sprintf "unknown state variable '%s'" id)
    end
(* Ensure expression is a constant *)
(* [check_const e] raises [Type_error] unless [e] is built purely from
   literals and operators, i.e. can be evaluated at compile time. *)
let check_const e =
  let rec const = function
    |True |False |String_constant _ |Int_constant _ -> true
    |Range (_,_) |Function_call _ |Identifier _ -> false
    |Not a -> const a
    |And (a,b) |Or (a,b)
    |Greater (a,b) |Less (a,b)
    |Greater_or_equal (a,b) |Less_or_equal (a,b)
    |Equals (a,b)
    |Plus (a,b) |Minus (a,b) |Multiply (a,b) |Divide (a,b) ->
      (const a) && (const b)
  in
  if not (const e) then
    raise (Type_error (sprintf "expr '%s' is not constant"
      (string_of_expr e)))
(* Non-raising predicate form of [check_const]. *)
let is_const e =
  try
    let () = check_const e in
    true
  with Type_error _ -> false
(* Convert expression to a constant int *)
(* Constant-fold [e] to an int; only integer literals combined with
   [+ - * /] qualify.  Raises [Type_error] otherwise. *)
let to_const_int e =
  let terr e = raise (Type_error
    (sprintf "int_expr_fold: unable to constant fold (%s) to type int"
      (string_of_expr e))) in
  let rec fn = function
    |Int_constant x -> x
    |Range _ as e -> terr e
    |Plus (a,b) -> (fn a) + (fn b)
    |Minus (a,b) -> (fn a) - (fn b)
    |Multiply (a,b) -> (fn a) * (fn b)
    |Divide (a,b) -> (fn a) / (fn b)
    |_ as e -> terr e
  in
  fn e
(* Convert expression to a constant string *)
(* Constant-fold [e] to a string; only string literals qualify.
   Raises [Type_error] otherwise. *)
let to_const_string e =
  match e with
  |String_constant s -> s
  |_ ->
    raise (Type_error
      (sprintf "string_expr_fold: unable to constant fold (%s) to type string"
        (string_of_expr e)))
(* Extract a list of variable identifiers from an expression *)
(* Extract a list of variable identifiers from an expression *)
(* Accumulator-based walk; identifiers are returned in reverse
   traversal order and may contain duplicates. *)
let get_variables =
  let rec fn acc = function
    |Identifier id -> id :: acc
    |Range (a,b) -> fn2 acc a b
    |And (a,b) -> fn2 acc a b
    |Or (a,b) -> fn2 acc a b
    |Not a -> fn acc a
    |Greater (a,b) -> fn2 acc a b
    |Less (a,b) -> fn2 acc a b
    |Greater_or_equal (a,b) -> fn2 acc a b
    |Less_or_equal (a,b) -> fn2 acc a b
    |Equals (a,b) -> fn2 acc a b
    |Plus (a,b) -> fn2 acc a b
    |Minus (a,b) -> fn2 acc a b
    |Multiply (a,b) -> fn2 acc a b
    |Divide (a,b) -> fn2 acc a b
    |Function_call _ |String_constant _ |Int_constant _
    |True | False -> acc
  and fn2 acc a b = let acc' = fn acc a in fn acc' b
  in fn []
(* [get_functions n fnn] collects, for every call to function [n]
   inside the expression, the result of applying [fnn] to the call's
   (optional) argument. *)
let get_functions n fnn =
  let rec fn acc = function
    |Function_call (nm,argo) ->
      if n = nm then (fnn argo) :: acc else acc
    |Range (a,b) -> fn2 acc a b
    |And (a,b) -> fn2 acc a b
    |Or (a,b) -> fn2 acc a b
    |Not a -> fn acc a
    |Greater (a,b) -> fn2 acc a b
    |Less (a,b) -> fn2 acc a b
    |Greater_or_equal (a,b) -> fn2 acc a b
    |Less_or_equal (a,b) -> fn2 acc a b
    |Equals (a,b) -> fn2 acc a b
    |Plus (a,b) -> fn2 acc a b
    |Minus (a,b) -> fn2 acc a b
    |Multiply (a,b) -> fn2 acc a b
    |Divide (a,b) -> fn2 acc a b
    |Identifier _ |String_constant _ |Int_constant _
    |True | False -> acc
  and fn2 acc a b = let acc' = fn acc a in fn acc' b
  in fn []
(* Arguments of all sizeof(...) calls in an expression; [must] asserts
   each call carries an argument. *)
let get_sizeof =
  get_functions "sizeof" (must (fun x -> x))

(* Arguments of all offset(...) calls in an expression. *)
let get_offset =
  get_functions "offset" (must (fun x -> x))
end
module E = Expr
module V = Var
module Types = struct
  (* Mapping of user-exposed types to our basic types. Different
     user-exposed types may have different packing formats, but _must_
     map onto one of the basic types indicated above. The backend will
     transform these basic types into the desired language
     (e.g. C/Java/OCaml/Python)
     Fix: the opening comment delimiter had been lost during
     extraction (and the text duplicated), leaving the module
     syntactically invalid; the comment is restored. *)
  let of_string = function
    |"string8"|"string32" -> Some V.String
    |"mpint" -> Some V.Opaque
    |"bit"|"byte" -> Some (V.UInt V.I8)
    |"uint16" -> Some (V.UInt V.I16)
    |"uint32" -> Some (V.UInt V.I32)
    |"uint64" -> Some (V.UInt V.I64)
    |"boolean" -> Some V.Bool
    |"dns_label"|"dns_label_comp" -> Some V.Opaque
    |_ -> None

  (* Plain fixed-width integers need no custom marshalling code;
     everything else does. *)
  let is_custom = function
    |"bit"|"byte"|"uint16"|"uint32"|"uint64" -> false
    |_ -> true
end
module T = Types
(* Unmarshalling type checking *)
let rec typeof env (xs:statements) : (env * Var.xs) =
let env,xs = List.fold_left (fun (env,axs) (loc,s) ->
let terr x = raise (Type_error (sprintf "statement%s %s"
(Mpl_location.string_of_location loc) x)) in
(* Mark a variable as not being free any more *)
let tdup env x = match get_var_type env x with
|None -> () |Some _ -> terr (sprintf "Duplicate variable '%s'" x) in
let check_bit_accum env =
if env.bit_accum mod 8 != 0 then
terr (sprintf "Bit-fields must be byte-aligned, but ends on %d bits"
env.bit_accum);
bind_reset_accum env;
in
let renv, rxs = match s with
|Unit ->
env, V.S_unit
|Packet (id, pmod, args) ->
tdup env id;
(* XXX check that the pmod and args are valid, needs external interface files *)
env, (V.S_var (id, None, V.Packet ((String.capitalize pmod), args)))
|Variable (id, ty, sz, attrs) -> begin
First make sure variable has n't already been bound
tdup env id;
(* Figure out the variable's type *)
let varty =
match (ty,sz) with
|"label", Some _ ->
terr "label types cannot be arrays"
|"label", None ->
if List.length attrs > 0 then
terr "label types cannot have attributes";
V.Label
|t, None -> begin
if ty = "bit" then terr "Bit fields must specify a size";
let tty = match T.of_string t with
|None -> terr ("unknown type " ^ t)
|Some x -> x in
if T.is_custom t then Hashtbl.replace env.custom t ();
V.new_value ty tty
end
|t, Some sz -> begin
(* ensure array size is of type int *)
let _ = try
if E.typeof ~unmarshal:true env sz <> E.Int then
terr (sprintf "Array size (%s) is not an integer type" (string_of_expr sz));
with Type_error x -> terr x in
match (t, T.of_string t) with
|"bit", Some x ->
let szi = E.to_const_int sz in
if szi < 1 then
terr (sprintf "Bitfield size %d is too small, min 1" szi);
if szi > 15 then
terr (sprintf "Bitfield size %d is too long, max 15" szi);
V.new_value ty x
|_, None -> terr ("unknown type " ^ t)
|_, Some x -> begin
let avs = E.get_variables sz in
List.iter (fun vid -> match get_var_type env vid with
|None -> terr ("typeof: " ^ id);
|Some (V.Value (_,_,{V.av_bound=true}))
|Some (V.Value (_,_,{V.av_value=Some _})) ->
()
|_ ->
terr (sprintf "Variable '%s' must have a value attribute to tie it to '%s'" vid id)
) avs;
Var.new_array ty x
end
end;
in
(* Check variable attributes *)
let varty = List.fold_left (fun varty attr -> match varty,attr with
|V.Packet _ as o, _ -> o
|V.Class _, _ ->
terr "Internal error, V.Class"
|V.Label, _ ->
terr "Label types cannot have attributes"
|V.Array (_,_, {V.aa_align=Some _}), Align e ->
terr "Duplicate attribute 'align'"
|V.Value (_,_, {V.av_min=Some _}), Min e ->
terr "Duplicate attribute 'min'"
|V.Value (_,_, {V.av_max=Some _}), Max e ->
terr "Duplicate attribute 'max'"
|V.Value (_,_, {V.av_const=Some _}), Const e ->
terr "Duplicate attribute 'const'"
|V.Value (_,_, {V.av_value=Some _}), Value e ->
terr "Duplicate attribute 'value'"
|V.Value (_,_, {V.av_default=Some _}), Default e ->
terr "Duplicate attribute 'default'"
|V.Array (_,V.UInt V.I8, ({V.aa_align=None} as a)) as o, Align e ->
a.V.aa_align <- Some (E.to_const_int e); o
|V.Array _, _ ->
terr "This attribute is not compatible with array variables"
|_, Align _ ->
terr "Attribute 'align' not valid except with arrays"
|V.Value (_,V.UInt _, ({V.av_min=None} as a)) as o, Min e ->
a.V.av_min <- Some (E.to_const_int e); o
|V.Value _, Min e ->
terr "Attribute 'min' must be used with int variables"
|V.Value (_,V.UInt _, ({V.av_max=None} as a)) as o, Max e ->
a.V.av_max <- Some (E.to_const_int e); o
|V.Value _, Max e ->
terr "Attribute 'max' must be used with int variables"
|V.Value (_,vt, ({V.av_value=None} as a)) as o, Value e ->
let () = match (E.typeof env e), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf "Attribute 'value' types dont match (%s and %s)"
(E.to_string b) (V.to_string o)) in
a.V.av_value <- Some e; o
|V.Value (_,vt, ({V.av_default=None} as a)) as o, Default e -> begin
E.check_const e;
let () = match (E.typeof env e), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf "Attribute 'default' types dont match (%s and %s)"
(E.to_string b) (V.to_string o)) in
a.V.av_default <- Some e;
o
end
|V.Value (_,vt, ({V.av_const=None} as a)) as o, Const e -> begin
E.check_const e;
let () = match (E.typeof env e), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf "Attribute 'const' types dont match (%s and %s)"
(E.to_string b) (V.to_string o)) in
a.V.av_const <- Some e;
o
end
|V.Value (vty,vt, ({V.av_min=None; av_max=None; av_const=None; av_value=None; av_variant=None} as a)) as o, Variant (x,def) -> begin
let h = Hashtbl.create 1 in
List.iter (fun (m,r) ->
(* Variant tag Unknown is reserved *)
if String.lowercase r = "unknown" then
terr "Variant tag 'Unknown' is reserved";
(* All variant matches must be constants and the strings unique *)
let _ = if Hashtbl.mem h r then
terr (sprintf "Variant match '%s' is duplicated" r)
else
Hashtbl.add h r () in
E.check_const m;
match (E.typeof env m), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf
"Attribute 'variant' matches must all have type '%s', not '%s'"
(V.to_string o) (E.to_string b))
) x;
a.V.av_variant <- Some x;
a.V.av_default <- def;
o
end
|_ as a, Variant _ ->
terr (sprintf "Cannot mix attribute 'variant' with other attributes (%s)" (V.to_string a))
) varty attrs in
(* Record variable in the environment *)
let env = bind_var env id varty in
(* Check bit alignments *)
let env = match (ty,sz) with
|"bit", Some sz ->
let env = bind_accum env (Expr.to_const_int sz) in
env
|_ ->
check_bit_accum env
in
let vr = V.S_var (id, sz, varty) in
env, vr
end
|Array (id, sz, sts) ->
(* Check we are on a byte boundary *)
let env = check_bit_accum env in
(* Make sure the id is unique *)
tdup env id;
(* Just bind the id as a Label in our environment to reserve the name *)
let env = bind_var env id V.Label in
(* Check that the array size expression is an Int *)
let _ = match Expr.typeof env sz with
|E.Int -> ()
|_ -> terr (sprintf "Array size (%s) is not of type int" (string_of_expr sz))
in
(* Check that the vars used in the size expr have associated values *)
List.iter (fun var ->
match get_var_type env var with
|None -> terr "internal error: no type for var in Array"
|Some (V.Value (_,_, {V.av_value=None})) ->
terr (sprintf "Must specify 'value' attribute for variable '%s'" var)
|Some (V.Value (_,_, ({V.av_max=Some _}|{V.av_const=Some _}|{V.av_min=Some _}))) ->
terr (sprintf "Cannot have attributes min/max/const on variables referenced in array sizes")
|Some _ -> ()
) (E.get_variables sz);
let aenv,vrxs = typeof env sts in
(* Check that the array block is bit aligned *)
let _ = check_bit_accum aenv in
(* Return original env, not the array statement env! *)
let vr = V.S_array (id, sz, vrxs) in
env, vr
|Classify (id, l) -> begin
(* Make sure that the classified identifier exists *)
let idty = match get_var_type env id with
|None -> terr (sprintf "Variable '%s' unknown for classification" id)
|Some (V.Value (_,_, {V.av_bound=true})) ->
terr "Classify variable has already been previously bound to another classify";
(*	|Some (V.Value (_,_, {V.av_value=Some _})) ->
	terr "Classify cannot bind to a variable with a 'value' attribute" *)
|Some (V.Value (_,_, ({V.av_max=Some _}|{V.av_const=Some _}|{V.av_min=Some _}))) ->
terr (sprintf "Cannot have attributes min/max/const on classified variables")
|Some ty -> ty in
(* No permanent variables are recorded in a classification right now *)
(* Remap the classified variable as bound *)
let envv = List.fold_left (fun a (i,b) ->
let x = match b with
|V.Value (vid,sz,at) as o ->
if id = i then V.Value (vid,sz,{at with V.av_bound=true})
else o
|o -> o
in
(i,x) :: a
) [] env.vars in
let env = {env with vars=envv} in
let cenvs,cxs = List.split (List.fold_left (fun a (ex,id,guard,xs) ->
may (fun g ->
let gt = E.typeof_statevar env g in
match gt with
|E.Bool -> ()
|_ -> terr (sprintf "Classify guard '%s' must be of type bool" (string_of_expr g))) guard;
let exty = E.typeof env ex in
let _ = match (idty, exty) with
|V.Class _,_ -> terr ("internal error, V.Class")
|V.Value(_,V.UInt _, _), E.Int
|V.Value(_,V.String, _), E.String
|V.Value(_,V.Bool, _), E.Bool
-> true
|V.Packet _, _
|V.Value(_), _
|V.Array(_), _
|V.Label, _
-> terr (sprintf "Classify type (%s) does not match expression (%s)"
(Var.to_string idty) (string_of_expr ex))
in
let e,x = typeof env xs in
let x = (ex,id,guard,x) in
(e,x) :: a
) [] l) in
(* Check that all sub-environments are equally bit-aligned *)
let acl = list_unique (List.map (fun e -> e.bit_accum) cenvs) in
let ac = if List.length acl > 1 then terr (sprintf "Classify elements are not equally bit-aligned (%s)"
(String.concat "," (List.map string_of_int acl))) else List.hd acl in
(* Add the bit alignments to the environment, but no variables are bound *)
let idty' = match idty with |V.Value (_,x,_) -> x |_ -> terr "idty" in
let vr = V.S_class (id,idty', cxs) in
{env with bit_accum=ac}, vr
end in
renv, (rxs::axs)
) (env,[]) xs in
env, (List.rev xs)
(* [resolve_defs env xs] expands user-defined typedefs and struct
   definitions in the statement list [xs] before type checking proper:
   - a variable whose type names a typedef is rewritten to the underlying
     basic type name;
   - a variable whose type names a struct is replaced by one Variable per
     struct field, each renamed "<id>_<field>".
   @raise Type_error on duplicate typedefs, typedefs naming unknown basic
   types, or structs containing non-variable entries. *)
let resolve_defs env xs =
  (* all ids must be unique *)
  (* XXX TODO check for overlapping typedef/structs, and that struct statements
  only contain basic Variables with no external references *)
  let nm = Hashtbl.create 1 in
  Hashtbl.iter (fun id (loc,t) ->
    (* NOTE(review): leftover debug trace; prints every typedef to stderr
       each time resolve_defs runs *)
    prerr_endline (sprintf "[dbg] typedef %s -> %s" id t);
    if Hashtbl.mem nm id then raise
      (Type_error (sprintf "%s duplicate typedef %s" (L.string_of_location loc) id));
    Hashtbl.add nm id ();
    (* the right-hand side of a typedef must name a known basic type *)
    match T.of_string t with
    |None -> raise (Type_error (sprintf "%s unknown basic type %s for typedef %s" (L.string_of_location loc) t id))
    |Some _ -> ()
  ) env.t_tdefs;
  (* structurally rewrite the statement tree *)
  let rec fn = function
    | (loc,Variable (id,ty,ex,at)) as o :: r ->
      if Hashtbl.mem env.t_tdefs ty then begin
        (* typedef: substitute the underlying basic type name *)
        let _,t = Hashtbl.find env.t_tdefs ty in
        (loc,Variable (id,t,ex,at)) :: (fn r)
      end else begin
        if Hashtbl.mem env.t_sdefs ty then begin
          (* struct: inline each field as "<id>_<field>" at this location *)
          let xst = Hashtbl.find env.t_sdefs ty in
          let xst = List.map (fun (_,x) -> match x with
            |Variable (vid,t,ex,at) ->
              (loc, Variable ((id ^ "_" ^ vid), t, ex, at))
            |_ -> raise (Type_error (sprintf "struct %s contains non-variable entry" ty))
          ) xst in
          xst @ (fn r)
        end else
          o :: (fn r)
      end
    | (loc,Classify (id, l)) :: r ->
      (* recurse into each classification arm's statement list *)
      let l = List.map (fun (a,b,c,xsc) -> (a,b,c,(fn xsc))) l in
      (loc,Classify (id,l)) :: (fn r)
    | (loc,Array (id, ex, xsa)) :: r -> (loc,Array (id, ex, (fn xsa))) :: (fn r)
    | (loc, Packet _) as o :: r -> o :: (fn r)
    | (loc,Unit) :: r -> (loc,Unit) :: (fn r)
    | [] -> [] in
  fn xs
(* Type-check every packet definition in [ps], returning the checked
   (environment, statements) pairs in definition order.
   @raise Type_error on duplicate state variables or any failure while
   checking a packet body. *)
let typecheck ps =
  List.map (fun packet ->
    let terr msg = raise (Type_error (sprintf "packet %s%s %s" packet.name
      (L.string_of_location packet.loc) msg)) in
    (* Collect the packet's state variables, rejecting duplicate names. *)
    let seen = Hashtbl.create 1 in
    let record arg =
      let id, ty = match arg with
        |P_bool id -> id, Var.Bool
        |P_int id -> id, Var.UInt Var.I8
      in
      if Hashtbl.mem seen id then
        terr (sprintf "Duplicate state variable '%s'" id)
      else
        Hashtbl.add seen id ty
    in
    List.iter record packet.args;
    let svars = Hashtbl.fold (fun k v acc -> (k, v) :: acc) seen [] in
    (* Fresh per-packet environment; typedef/struct tables come from [ps]. *)
    let env = { root=packet; statevars=svars; vars=[]; bit_accum=0;
                sizes=Hashtbl.create 1; offsets=Hashtbl.create 1;
                t_tdefs=ps.tdefs; t_sdefs=ps.sdefs; custom=Hashtbl.create 1 } in
    typeof env (resolve_defs env packet.body)
  ) ps.pdefs
| null | https://raw.githubusercontent.com/avsm/ocaml-ssh/26577d1501e7a43e4b520239b08da114c542eda4/mpl/mpl_typechk.ml | ocaml | Types for variables
Base types for statements that target language needs support for
Higher level constructs
id,size,type
id,ty,(match,label,guard,xs)
id,int size,xs
Type checking environment
Types for expressions
XXX we check values later
Ensure expression is a constant
Convert expression to a constant int
Convert expression to a constant string
Extract a list of variable identifiers from an expression
Unmarshalling type checking
Mark a variable as not being free any more
XXX check that the pmod and args are valid, needs external interface files
Figure out the variable's type
ensure array size is of type int
Check variable attributes
Variant tag Unknown is reserved
All variant matches must be constants and the strings unique
Record variable in the environment
Check bit alignments
Check we are on a byte boundary
Make sure the id is unique
Just bind the id as a Label in our environment to reserve the name
Check that the array size expression is an Int
Check that the vars used in the size expr have associated values
Check that the array block is bit aligned
Return original env, not the array statement env!
Make sure that the classified identifier exists
No permanent variables are recorded in a classification right now
Remap the classified variable as bound
Check that all sub-environments are equally bit-aligned
Add the bit alignments to the environment, but no variables are bound
all ids must be unique
XXX TODO check for overlapping typedef/structs, and that struct statements
only contain basic Variables with no external references
send this environment to each packet |
(*
 * Copyright (c) 2005 Anil Madhavapeddy <>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *
 * $Id: mpl_typechk.ml,v 1.34 2006/02/16 18:44:39 avsm Exp $
 *)
open Mpl_syntaxtree
open Mpl_utils
open Printf
exception Type_error of string
module L = Mpl_location
module B = Mpl_bits
module Var = struct
type isz =
|I8
|I16
|I32
|I64
type b =
|UInt of isz
|String
|Bool
|Opaque
type value_attr = {
mutable av_min: int option;
mutable av_max: int option;
mutable av_const: expr option;
mutable av_value: expr option;
mutable av_variant: ((expr * string) list) option;
mutable av_default: expr option;
mutable av_bitvars: int list;
mutable av_bitops: B.t list;
mutable av_bitdummy: bool;
av_bound: bool;
}
type array_attr = {
mutable aa_align: int option;
}
type t =
|Packet of (string * (string list))
|Value of (string * b * value_attr)
|Array of (string * b * array_attr)
|Class of string list
|Label
type x =
| S_unit
and xs = x list
let new_value ?(bitdummy=false) vty v =
Value (vty, v, {av_min=None; av_max=None; av_const=None; av_value=None;
av_variant=None; av_bound=false; av_bitvars=[]; av_bitdummy=bitdummy;
av_bitops=[]; av_default=None})
let new_array vty v =
Array (vty, v, {aa_align=None})
let to_string =
let so fn = function |None -> "N" |Some x -> fn x in
let soe = so string_of_expr in
let soi = so string_of_int in
let sov = so (fun x -> String.concat ", " (List.map (fun (a,b) ->
sprintf "%s->%s" (string_of_expr a) b) x)) in
let fn = function
|UInt I8 -> "byte"
|UInt I16 -> "uint16"
|UInt I32 -> "uint32"
|UInt I64 -> "uint64"
|String -> "string"
|Bool -> "bool"
|Opaque -> "opaque"
in function
|Packet (id,args) ->
sprintf "Packet(%s: %s)" id (String.concat "," args)
|Value (vty,b,a) ->
sprintf "Value(%s:%s) [Mn:%s Mx:%s C:%s Vl:%s Vx:%s BV:%s BO:%s BD:%s]" (fn b) vty
(soi a.av_min) (soi a.av_max) (soe a.av_const) (soe a.av_value)
(sov a.av_variant) (String.concat "," (List.map string_of_int a.av_bitvars)) ""
(if a.av_bitdummy then "Y" else "N")
|Array (vty,b,a) -> sprintf "Array(%s:%s) [Align:%s]" (fn b) vty (soi a.aa_align)
|Class x -> sprintf "Class(%s)" (String.concat "" x)
|Label -> sprintf "Label"
open Printer_utils.Printer
let rec print_x e = function
|S_var (id,exo,t) :: r->
let s = match exo with |None -> "" |Some x -> sprintf "[%s]" (string_of_expr x) in
e.p (sprintf "Var (%s%s): %s" id s (to_string t));
print_x e r
|S_class (id,idty,l) :: r ->
e.p (sprintf "Class (%s): %s" id (to_string (new_value "xxx" idty)));
list_iter_indent e (fun e (ex,id,guard,sts) ->
let g = match guard with |None -> ""| Some e -> sprintf "when %s " (string_of_expr e) in
e.p (sprintf "| %s:%s %s->" (string_of_expr ex) (string_of_expr id) g);
print_x e sts;
) l;
print_x e r
|S_unit :: r -> e.p "Unit"; print_x e r
|S_array (id, sz, xs) :: r ->
e.p (sprintf "Array (%s)" id); print_x e r
|[] -> ()
end
(* Type-checking environment threaded through [typeof]. *)
type env = {
  t_sdefs: (string, statements) Hashtbl.t;     (* struct definitions, by name *)
  t_tdefs: (string, (L.t * string)) Hashtbl.t; (* typedefs: name -> (location, basic type name) *)
  root: Mpl_syntaxtree.packet;                 (* packet currently being checked *)
  statevars: (string * Var.b) list;            (* packet state variables and their basic types *)
  vars: (string * Var.t) list;                 (* bound variables, innermost binding first *)
  bit_accum: int;                              (* bits accumulated since the last byte boundary (mod 8) *)
  sizes: (id, unit) Hashtbl.t;                 (* ids referenced via sizeof() *)
  offsets: (id, unit) Hashtbl.t;               (* ids referenced via offset() *)
  custom: (id, unit) Hashtbl.t;                (* non-built-in type names encountered *)
}
(* Verbose logger used by [dump_env]. *)
let log = Logger.log_verbose
(* [bind_var env id ty] is [env] with [id] bound to [ty] (shadows any
   earlier binding of [id]). *)
let bind_var env id ty =
  {env with vars=(id,ty)::env.vars}
(* [get_var_type env id] is the type bound to [id], or [None] if unbound. *)
let get_var_type env id =
  assoc_opt id env.vars
(* [bind_accum env a] advances the bit accumulator by [a] bits, wrapping
   at a byte boundary. *)
let bind_accum env a =
  {env with bit_accum=(env.bit_accum+a) mod 8}
(* Reset the bit accumulator to a byte boundary. *)
let bind_reset_accum env =
  {env with bit_accum=0}
(* Debug dump of the bound variables and current bit accumulator. *)
let dump_env env =
  log (sprintf "[tenv] {%s} acc=%d" (String.concat ", "
    (List.map (fun (a,b) ->
      sprintf "%s:%s" a (Var.to_string b)) env.vars)) env.bit_accum)
module Expr = struct
type t =
|Int
|String
|Bool
let to_string = function
|Int -> "int"
|String -> "string"
|Bool -> "bool"
type fdef = {
fname: string;
fret: t;
fargs: t list;
}
let builtin_funcs = [
{fname="offset"; fret=Int; fargs=[String]};
{fname="sizeof"; fret=Int; fargs=[String]};
{fname="remaining"; fret=Int; fargs=[]};
]
let rec typeof ?(unmarshal=false) (env:env) =
let terr x = raise (Type_error (sprintf "expr: %s" x)) in
let tmatch (ext:t) l =
let err ex ety =
terr (sprintf "expecting type %s, got type %s (%s)"
(to_string ext) (string_of_expr ex) (to_string ety)) in
List.iter (fun x -> let tx = typeof ~unmarshal:unmarshal env x in if tx <> ext then err x tx) l
in function
|True -> Bool
|False -> Bool
|String_constant _ -> String
|Int_constant _ -> Int
|Range (a,b) -> tmatch Int [a;b]; Int
|And (a,b) -> tmatch Bool [a;b]; Bool
|Or (a,b) -> tmatch Bool [a;b]; Bool
|Not a -> tmatch Bool [a]; Bool
|Greater (a,b) -> tmatch Int [a;b]; Bool
|Less (a,b) -> tmatch Int [a;b]; Bool
|Greater_or_equal (a,b) -> tmatch Int [a;b]; Bool
|Less_or_equal (a,b) -> tmatch Int [a;b]; Bool
|Equals (a,b) -> tmatch Int [a;b]; Bool
|Plus (a,b) -> tmatch Int [a;b]; Int
|Minus (a,b) -> tmatch Int [a;b]; Int
|Multiply (a,b) -> tmatch Int [a;b]; Int
|Divide (a,b) -> tmatch Int [a;b]; Int
|Function_call (nm, arg) -> begin
let targ nm =
terr (sprintf "Function '%s' requires argument" nm) in
match nm,arg with
|"offset",Some v -> begin
Hashtbl.replace env.offsets v ();
match get_var_type env v with
|None when unmarshal -> terr (sprintf "Unknown variable '%s' in offset()" v)
|Some id -> Int
end
|"offset",None -> targ "offset"
|"sizeof",Some v -> begin
Hashtbl.replace env.sizes v ();
match get_var_type env v with
|None when unmarshal -> terr (sprintf "Unknown variable '%s' in sizeof()" v)
|None -> Int
|Some id -> Int
end
|"array_length",None -> targ "array_length"
|"array_length",Some v -> begin
Int
end
|"sizeof",None -> targ "sizeof"
|"remaining",None ->
Int;
|"remaining",Some _ ->
terr ("Function 'remaining()' should not have any arguments")
|_,_ -> terr (sprintf "Unknown function '%s'" nm)
end
|Identifier id -> begin
let module V = Var in
match get_var_type env id with
|Some V.Value(vty,V.UInt _,_) -> Int
|Some x ->
terr (sprintf "identifier %s has non-integer type %s" id (Var.to_string x))
|None ->
terr (sprintf "unknown variable %s" id)
end
let rec typeof_statevar (env:env) =
let terr x = raise (Type_error (sprintf "statevar: %s" x)) in
let tmatch (ext:t) l =
let err ex ety =
terr (sprintf "expecting type %s, got type %s (%s)"
(to_string ext) (to_string ety) (string_of_expr ex)) in
List.iter (fun x -> let tx = typeof_statevar env x in if tx <> ext then err x tx) l
in function
|True -> Bool
|False -> Bool
|String_constant _ -> String
|Int_constant _ -> Int
|Range (a,b) -> terr "Ranges not allowed in classify guards"
|And (a,b) -> tmatch Bool [a;b]; Bool
|Or (a,b) -> tmatch Bool [a;b]; Bool
|Not a -> tmatch Bool [a]; Bool
|Greater (a,b) -> tmatch Int [a;b]; Bool
|Less (a,b) -> tmatch Int [a;b]; Bool
|Greater_or_equal (a,b) -> tmatch Int [a;b]; Bool
|Less_or_equal (a,b) -> tmatch Int [a;b]; Bool
|Equals (a,b) -> tmatch Int [a;b]; Bool
|Plus (a,b) -> tmatch Int [a;b]; Int
|Minus (a,b) -> tmatch Int [a;b]; Int
|Multiply (a,b) -> tmatch Int [a;b]; Int
|Divide (a,b) -> tmatch Int [a;b]; Int
|Function_call (nm, arg) -> terr "Function calls not allowed in classify guards"
|Identifier id -> begin
let module V = Var in
match assoc_opt id env.statevars with
|Some (V.UInt _) -> Int
|Some V.Bool -> Bool
|Some _ -> terr "internal error: typeof_statevar"
|None -> terr (sprintf "unknown state variable '%s'" id)
end
let check_const e =
let rec fn = function
|True -> true
|False -> true
|String_constant _ -> true
|Int_constant _ -> true
|Range (a,b) -> false
|And (a,b) -> fn2 a b
|Or (a,b) -> fn2 a b
|Not a -> fn a
|Greater (a,b) -> fn2 a b
|Less (a,b) -> fn2 a b
|Greater_or_equal (a,b) -> fn2 a b
|Less_or_equal (a,b) -> fn2 a b
|Equals (a,b) -> fn2 a b
|Plus (a,b) -> fn2 a b
|Minus (a,b) -> fn2 a b
|Multiply (a,b) -> fn2 a b
|Divide (a,b) -> fn2 a b
|Function_call _ -> false
|Identifier _ -> false
and fn2 a b = (fn a) && (fn b) in
if not (fn e) then
raise (Type_error (sprintf "expr '%s' is not constant"
(string_of_expr e)))
let is_const e =
try check_const e; true with
Type_error _ -> false
let to_const_int e =
let terr e = raise (Type_error
(sprintf "int_expr_fold: unable to constant fold (%s) to type int"
(string_of_expr e))) in
let rec fn = function
|Int_constant x -> x
|Range _ as e -> terr e
|Plus (a,b) -> (fn a) + (fn b)
|Minus (a,b) -> (fn a) - (fn b)
|Multiply (a,b) -> (fn a) * (fn b)
|Divide (a,b) -> (fn a) / (fn b)
|_ as e -> terr e
in
fn e
let to_const_string e =
let terr e = raise (Type_error
(sprintf "string_expr_fold: unable to constant fold (%s) to type string"
(string_of_expr e))) in
let rec fn = function
|String_constant x -> x
|_ as e -> terr e
in fn e
let get_variables =
let rec fn acc = function
|Identifier id -> id :: acc
|Range (a,b) -> fn2 acc a b
|And (a,b) -> fn2 acc a b
|Or (a,b) -> fn2 acc a b
|Not a -> fn acc a
|Greater (a,b) -> fn2 acc a b
|Less (a,b) -> fn2 acc a b
|Greater_or_equal (a,b) -> fn2 acc a b
|Less_or_equal (a,b) -> fn2 acc a b
|Equals (a,b) -> fn2 acc a b
|Plus (a,b) -> fn2 acc a b
|Minus (a,b) -> fn2 acc a b
|Multiply (a,b) -> fn2 acc a b
|Divide (a,b) -> fn2 acc a b
|Function_call _ |String_constant _ |Int_constant _
|True | False -> acc
and fn2 acc a b = let acc' = fn acc a in fn acc' b
in fn []
let get_functions n fnn =
let rec fn acc = function
|Function_call (nm,argo) ->
if n = nm then (fnn argo) :: acc else acc
|Range (a,b) -> fn2 acc a b
|And (a,b) -> fn2 acc a b
|Or (a,b) -> fn2 acc a b
|Not a -> fn acc a
|Greater (a,b) -> fn2 acc a b
|Less (a,b) -> fn2 acc a b
|Greater_or_equal (a,b) -> fn2 acc a b
|Less_or_equal (a,b) -> fn2 acc a b
|Equals (a,b) -> fn2 acc a b
|Plus (a,b) -> fn2 acc a b
|Minus (a,b) -> fn2 acc a b
|Multiply (a,b) -> fn2 acc a b
|Divide (a,b) -> fn2 acc a b
|Identifier _ |String_constant _ |Int_constant _
|True | False -> acc
and fn2 acc a b = let acc' = fn acc a in fn acc' b
in fn []
let get_sizeof =
get_functions "sizeof" (must (fun x -> x))
let get_offset =
get_functions "offset" (must (fun x -> x))
end
module E = Expr
module V = Var
module Types = struct
  (* Mapping of user-exposed wire types onto the checker's basic types.
     Different user-exposed types may use different packing formats, but
     each must map onto one of the basic types in [Var]; the backend then
     renders those basic types in the target language. *)

  (* [of_string s] is the basic type denoted by the surface type name [s],
     or [None] for an unknown name. *)
  let of_string name =
    match name with
    |"bit" |"byte" -> Some (V.UInt V.I8)
    |"uint16" -> Some (V.UInt V.I16)
    |"uint32" -> Some (V.UInt V.I32)
    |"uint64" -> Some (V.UInt V.I64)
    |"string8" |"string32" -> Some V.String
    |"boolean" -> Some V.Bool
    |"mpint" |"dns_label" |"dns_label_comp" -> Some V.Opaque
    |_ -> None

  (* [is_custom s] is [true] unless [s] is one of the built-in integer
     type names. *)
  let is_custom name =
    match name with
    |"bit" |"byte" |"uint16" |"uint32" |"uint64" -> false
    |_ -> true
end
module T = Types
let rec typeof env (xs:statements) : (env * Var.xs) =
let env,xs = List.fold_left (fun (env,axs) (loc,s) ->
let terr x = raise (Type_error (sprintf "statement%s %s"
(Mpl_location.string_of_location loc) x)) in
let tdup env x = match get_var_type env x with
|None -> () |Some _ -> terr (sprintf "Duplicate variable '%s'" x) in
let check_bit_accum env =
if env.bit_accum mod 8 != 0 then
terr (sprintf "Bit-fields must be byte-aligned, but ends on %d bits"
env.bit_accum);
bind_reset_accum env;
in
let renv, rxs = match s with
|Unit ->
env, V.S_unit
|Packet (id, pmod, args) ->
tdup env id;
env, (V.S_var (id, None, V.Packet ((String.capitalize pmod), args)))
|Variable (id, ty, sz, attrs) -> begin
(* First make sure variable hasn't already been bound *)
tdup env id;
let varty =
match (ty,sz) with
|"label", Some _ ->
terr "label types cannot be arrays"
|"label", None ->
if List.length attrs > 0 then
terr "label types cannot have attributes";
V.Label
|t, None -> begin
if ty = "bit" then terr "Bit fields must specify a size";
let tty = match T.of_string t with
|None -> terr ("unknown type " ^ t)
|Some x -> x in
if T.is_custom t then Hashtbl.replace env.custom t ();
V.new_value ty tty
end
|t, Some sz -> begin
let _ = try
if E.typeof ~unmarshal:true env sz <> E.Int then
terr (sprintf "Array size (%s) is not an integer type" (string_of_expr sz));
with Type_error x -> terr x in
match (t, T.of_string t) with
|"bit", Some x ->
let szi = E.to_const_int sz in
if szi < 1 then
terr (sprintf "Bitfield size %d is too small, min 1" szi);
if szi > 15 then
terr (sprintf "Bitfield size %d is too long, max 15" szi);
V.new_value ty x
|_, None -> terr ("unknown type " ^ t)
|_, Some x -> begin
let avs = E.get_variables sz in
List.iter (fun vid -> match get_var_type env vid with
|None -> terr ("typeof: " ^ id);
|Some (V.Value (_,_,{V.av_bound=true}))
|Some (V.Value (_,_,{V.av_value=Some _})) ->
()
|_ ->
terr (sprintf "Variable '%s' must have a value attribute to tie it to '%s'" vid id)
) avs;
Var.new_array ty x
end
end;
in
let varty = List.fold_left (fun varty attr -> match varty,attr with
|V.Packet _ as o, _ -> o
|V.Class _, _ ->
terr "Internal error, V.Class"
|V.Label, _ ->
terr "Label types cannot have attributes"
|V.Array (_,_, {V.aa_align=Some _}), Align e ->
terr "Duplicate attribute 'align'"
|V.Value (_,_, {V.av_min=Some _}), Min e ->
terr "Duplicate attribute 'min'"
|V.Value (_,_, {V.av_max=Some _}), Max e ->
terr "Duplicate attribute 'max'"
|V.Value (_,_, {V.av_const=Some _}), Const e ->
terr "Duplicate attribute 'const'"
|V.Value (_,_, {V.av_value=Some _}), Value e ->
terr "Duplicate attribute 'value'"
|V.Value (_,_, {V.av_default=Some _}), Default e ->
terr "Duplicate attribute 'default'"
|V.Array (_,V.UInt V.I8, ({V.aa_align=None} as a)) as o, Align e ->
a.V.aa_align <- Some (E.to_const_int e); o
|V.Array _, _ ->
terr "This attribute is not compatible with array variables"
|_, Align _ ->
terr "Attribute 'align' not valid except with arrays"
|V.Value (_,V.UInt _, ({V.av_min=None} as a)) as o, Min e ->
a.V.av_min <- Some (E.to_const_int e); o
|V.Value _, Min e ->
terr "Attribute 'min' must be used with int variables"
|V.Value (_,V.UInt _, ({V.av_max=None} as a)) as o, Max e ->
a.V.av_max <- Some (E.to_const_int e); o
|V.Value _, Max e ->
terr "Attribute 'max' must be used with int variables"
|V.Value (_,vt, ({V.av_value=None} as a)) as o, Value e ->
let () = match (E.typeof env e), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf "Attribute 'value' types dont match (%s and %s)"
(E.to_string b) (V.to_string o)) in
a.V.av_value <- Some e; o
|V.Value (_,vt, ({V.av_default=None} as a)) as o, Default e -> begin
E.check_const e;
let () = match (E.typeof env e), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf "Attribute 'default' types dont match (%s and %s)"
(E.to_string b) (V.to_string o)) in
a.V.av_default <- Some e;
o
end
|V.Value (_,vt, ({V.av_const=None} as a)) as o, Const e -> begin
E.check_const e;
let () = match (E.typeof env e), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf "Attribute 'const' types dont match (%s and %s)"
(E.to_string b) (V.to_string o)) in
a.V.av_const <- Some e;
o
end
|V.Value (vty,vt, ({V.av_min=None; av_max=None; av_const=None; av_value=None; av_variant=None} as a)) as o, Variant (x,def) -> begin
let h = Hashtbl.create 1 in
List.iter (fun (m,r) ->
if String.lowercase r = "unknown" then
terr "Variant tag 'Unknown' is reserved";
let _ = if Hashtbl.mem h r then
terr (sprintf "Variant match '%s' is duplicated" r)
else
Hashtbl.add h r () in
E.check_const m;
match (E.typeof env m), vt with
|E.Int, V.UInt _
|E.String, V.String
|E.Bool, V.Bool -> ()
|_ as b,_ -> terr (sprintf
"Attribute 'variant' matches must all have type '%s', not '%s'"
(V.to_string o) (E.to_string b))
) x;
a.V.av_variant <- Some x;
a.V.av_default <- def;
o
end
|_ as a, Variant _ ->
terr (sprintf "Cannot mix attribute 'variant' with other attributes (%s)" (V.to_string a))
) varty attrs in
let env = bind_var env id varty in
let env = match (ty,sz) with
|"bit", Some sz ->
let env = bind_accum env (Expr.to_const_int sz) in
env
|_ ->
check_bit_accum env
in
let vr = V.S_var (id, sz, varty) in
env, vr
end
|Array (id, sz, sts) ->
let env = check_bit_accum env in
tdup env id;
let env = bind_var env id V.Label in
let _ = match Expr.typeof env sz with
|E.Int -> ()
|_ -> terr (sprintf "Array size (%s) is not of type int" (string_of_expr sz))
in
List.iter (fun var ->
match get_var_type env var with
|None -> terr "internal error: no type for var in Array"
|Some (V.Value (_,_, {V.av_value=None})) ->
terr (sprintf "Must specify 'value' attribute for variable '%s'" var)
|Some (V.Value (_,_, ({V.av_max=Some _}|{V.av_const=Some _}|{V.av_min=Some _}))) ->
terr (sprintf "Cannot have attributes min/max/const on variables referenced in array sizes")
|Some _ -> ()
) (E.get_variables sz);
let aenv,vrxs = typeof env sts in
let _ = check_bit_accum aenv in
let vr = V.S_array (id, sz, vrxs) in
env, vr
|Classify (id, l) -> begin
let idty = match get_var_type env id with
|None -> terr (sprintf "Variable '%s' unknown for classification" id)
|Some (V.Value (_,_, {V.av_bound=true})) ->
terr "Classify variable has already been previously bound to another classify";
(*	|Some (V.Value (_,_, {V.av_value=Some _})) ->
	terr "Classify cannot bind to a variable with a 'value' attribute" *)
|Some (V.Value (_,_, ({V.av_max=Some _}|{V.av_const=Some _}|{V.av_min=Some _}))) ->
terr (sprintf "Cannot have attributes min/max/const on classified variables")
|Some ty -> ty in
let envv = List.fold_left (fun a (i,b) ->
let x = match b with
|V.Value (vid,sz,at) as o ->
if id = i then V.Value (vid,sz,{at with V.av_bound=true})
else o
|o -> o
in
(i,x) :: a
) [] env.vars in
let env = {env with vars=envv} in
let cenvs,cxs = List.split (List.fold_left (fun a (ex,id,guard,xs) ->
may (fun g ->
let gt = E.typeof_statevar env g in
match gt with
|E.Bool -> ()
|_ -> terr (sprintf "Classify guard '%s' must be of type bool" (string_of_expr g))) guard;
let exty = E.typeof env ex in
let _ = match (idty, exty) with
|V.Class _,_ -> terr ("internal error, V.Class")
|V.Value(_,V.UInt _, _), E.Int
|V.Value(_,V.String, _), E.String
|V.Value(_,V.Bool, _), E.Bool
-> true
|V.Packet _, _
|V.Value(_), _
|V.Array(_), _
|V.Label, _
-> terr (sprintf "Classify type (%s) does not match expression (%s)"
(Var.to_string idty) (string_of_expr ex))
in
let e,x = typeof env xs in
let x = (ex,id,guard,x) in
(e,x) :: a
) [] l) in
let acl = list_unique (List.map (fun e -> e.bit_accum) cenvs) in
let ac = if List.length acl > 1 then terr (sprintf "Classify elements are not equally bit-aligned (%s)"
(String.concat "," (List.map string_of_int acl))) else List.hd acl in
let idty' = match idty with |V.Value (_,x,_) -> x |_ -> terr "idty" in
let vr = V.S_class (id,idty', cxs) in
{env with bit_accum=ac}, vr
end in
renv, (rxs::axs)
) (env,[]) xs in
env, (List.rev xs)
(* [resolve_defs env xs] expands typedefs and inlines struct definitions
   in the statement list [xs] prior to type checking:
   - a variable whose type names a typedef is rewritten to the underlying
     basic type name;
   - a variable whose type names a struct is replaced by one Variable per
     struct field, each renamed "<id>_<field>".
   @raise Type_error on duplicate typedefs, typedefs of unknown basic
   types, or structs containing non-variable entries. *)
let resolve_defs env xs =
  (* Validate the typedef table: ids unique, targets are basic types.
     (A leftover "[dbg] typedef ..." trace that printed every typedef to
     stderr on each invocation has been removed.) *)
  let nm = Hashtbl.create 1 in
  Hashtbl.iter (fun id (loc,t) ->
    if Hashtbl.mem nm id then raise
      (Type_error (sprintf "%s duplicate typedef %s" (L.string_of_location loc) id));
    Hashtbl.add nm id ();
    match T.of_string t with
    |None -> raise (Type_error (sprintf "%s unknown basic type %s for typedef %s" (L.string_of_location loc) t id))
    |Some _ -> ()
  ) env.t_tdefs;
  (* Structurally rewrite the statement tree. *)
  let rec fn = function
    | (loc,Variable (id,ty,ex,at)) as o :: r ->
      if Hashtbl.mem env.t_tdefs ty then begin
        (* typedef: substitute the underlying basic type name *)
        let _,t = Hashtbl.find env.t_tdefs ty in
        (loc,Variable (id,t,ex,at)) :: (fn r)
      end else begin
        if Hashtbl.mem env.t_sdefs ty then begin
          (* struct: inline each field, prefixed by the variable id *)
          let xst = Hashtbl.find env.t_sdefs ty in
          let xst = List.map (fun (_,x) -> match x with
            |Variable (vid,t,ex,at) ->
              (loc, Variable ((id ^ "_" ^ vid), t, ex, at))
            |_ -> raise (Type_error (sprintf "struct %s contains non-variable entry" ty))
          ) xst in
          xst @ (fn r)
        end else
          o :: (fn r)
      end
    | (loc,Classify (id, l)) :: r ->
      (* recurse into each classification arm's statement list *)
      let l = List.map (fun (a,b,c,xsc) -> (a,b,c,(fn xsc))) l in
      (loc,Classify (id,l)) :: (fn r)
    | (loc,Array (id, ex, xsa)) :: r -> (loc,Array (id, ex, (fn xsa))) :: (fn r)
    | (loc, Packet _) as o :: r -> o :: (fn r)
    | (loc,Unit) :: r -> (loc,Unit) :: (fn r)
    | [] -> [] in
  fn xs
(* Type-check every packet definition in [ps], returning the checked
   (environment, statements) pairs in definition order.
   @raise Type_error on duplicate state variables or any failure while
   checking a packet body. *)
let typecheck ps =
  List.map (fun packet ->
    (* error helper: prefix messages with the packet name and location *)
    let terr x = raise (Type_error (sprintf "packet %s%s %s" packet.name
      (L.string_of_location packet.loc) x)) in
    (* collect the packet's state variables, rejecting duplicate names *)
    let h = Hashtbl.create 1 in
    List.iter (fun b ->
      let id,ty = match b with
      |P_bool id -> id, Var.Bool
      |P_int id -> id, Var.UInt Var.I8
      in if Hashtbl.mem h id then terr (sprintf "Duplicate state variable '%s'" id)
      else Hashtbl.add h id ty) packet.args;
    let svars = Hashtbl.fold (fun k v a -> (k,v) :: a) h [] in
    (* fresh per-packet environment; typedef/struct tables come from [ps] *)
    let env = {root=packet; statevars=svars; vars=[]; bit_accum=0; sizes=Hashtbl.create 1; offsets=Hashtbl.create 1;
      t_tdefs=ps.tdefs; t_sdefs=ps.sdefs; custom=Hashtbl.create 1} in
    (* expand typedefs/structs, then type-check the resulting body *)
    typeof env (resolve_defs env packet.body)
  ) ps.pdefs
|
82f8f5de4d80f74ed1038b3035a543c516e7ae3b9ef2ab630f49fe22c4907305 | aws-beam/aws-erlang | aws_lambda.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
See -beam/aws-codegen for more details .
%% @doc Lambda
%%
%% Overview
%%
%% Lambda is a compute service that lets you run code without provisioning or
%% managing servers.
%%
%% Lambda runs your code on a high-availability compute infrastructure and
%% performs all of the administration of the compute resources, including
%% server and operating system maintenance, capacity provisioning and
%% automatic scaling, code monitoring and logging. With Lambda, you can run
%% code for virtually any type of application or backend service. For more
%% information about the Lambda service, see What is Lambda in the Lambda
%% Developer Guide.
%%
The Lambda API Reference provides information about each of the API
%% methods, including details about the parameters in each API request and
%% response.
%%
You can use Software Development Kits ( SDKs ) , Integrated Development
%% Environment (IDE) Toolkits, and command line tools to access the API. For
installation instructions , see Tools for Amazon Web Services .
%%
%% For a list of Region-specific endpoints that Lambda supports, see Lambda
endpoints and quotas in the Amazon Web Services General Reference ..
%%
%% When making the API calls, you will need to authenticate your request by
%% providing a signature. Lambda supports signature version 4. For more
%% information, see Signature Version 4 signing process in the Amazon Web
%% Services General Reference..
%%
%% CA certificates
%%
%% Because Amazon Web Services SDKs use the CA certificates from your
%% computer, changes to the certificates on the Amazon Web Services servers
%% can cause connection failures when you attempt to use an SDK. You can
%% prevent these failures by keeping your computer's CA certificates and
%% operating system up-to-date. If you encounter this issue in a corporate
%% environment and do not manage your own computer, you might need to ask an
%% administrator to assist with the update process. The following list shows
%% minimum operating system and Java versions:
%%
%% <ul> <li> Microsoft Windows versions that have updates from January 2005
%% or later installed contain at least one of the required CAs in their trust
%% list.
%%
%% </li> <li> Mac OS X 10.4 with Java for Mac OS X 10.4 Release 5 (February
%% 2007), Mac OS X 10.5 (October 2007), and later versions contain at least
%% one of the required CAs in their trust list.
%%
%% </li> <li> Red Hat Enterprise Linux 5 (March 2007), 6, and 7 and CentOS 5,
%% 6, and 7 all contain at least one of the required CAs in their default
%% trusted CA list.
%%
%% </li> <li> Java 1.4.2_12 (May 2006), 5 Update 2 (March 2005), and all
%% later versions, including Java 6 (December 2006), 7, and 8, contain at
%% least one of the required CAs in their default trusted CA list.
%%
%% </li> </ul> When accessing the Lambda management console or Lambda API
%% endpoints, whether through browsers or programmatically, you will need to
%% ensure your client machines support any of the following CAs:
%%
%% <ul> <li> Amazon Root CA 1
%%
%% </li> <li> Starfield Services Root Certificate Authority - G2
%%
%% </li> <li> Starfield Class 2 Certification Authority
%%
%% </li> </ul> Root certificates from the first two authorities are available
%% from Amazon trust services, but keeping your computer up-to-date is the
%% more straightforward solution. To learn more about ACM-provided
%% certificates, see Amazon Web Services Certificate Manager FAQs.
-module(aws_lambda).
-export([add_layer_version_permission/4,
add_layer_version_permission/5,
add_permission/3,
add_permission/4,
create_alias/3,
create_alias/4,
create_code_signing_config/2,
create_code_signing_config/3,
create_event_source_mapping/2,
create_event_source_mapping/3,
create_function/2,
create_function/3,
create_function_url_config/3,
create_function_url_config/4,
delete_alias/4,
delete_alias/5,
delete_code_signing_config/3,
delete_code_signing_config/4,
delete_event_source_mapping/3,
delete_event_source_mapping/4,
delete_function/3,
delete_function/4,
delete_function_code_signing_config/3,
delete_function_code_signing_config/4,
delete_function_concurrency/3,
delete_function_concurrency/4,
delete_function_event_invoke_config/3,
delete_function_event_invoke_config/4,
delete_function_url_config/3,
delete_function_url_config/4,
delete_layer_version/4,
delete_layer_version/5,
delete_provisioned_concurrency_config/3,
delete_provisioned_concurrency_config/4,
get_account_settings/1,
get_account_settings/3,
get_account_settings/4,
get_alias/3,
get_alias/5,
get_alias/6,
get_code_signing_config/2,
get_code_signing_config/4,
get_code_signing_config/5,
get_event_source_mapping/2,
get_event_source_mapping/4,
get_event_source_mapping/5,
get_function/2,
get_function/4,
get_function/5,
get_function_code_signing_config/2,
get_function_code_signing_config/4,
get_function_code_signing_config/5,
get_function_concurrency/2,
get_function_concurrency/4,
get_function_concurrency/5,
get_function_configuration/2,
get_function_configuration/4,
get_function_configuration/5,
get_function_event_invoke_config/2,
get_function_event_invoke_config/4,
get_function_event_invoke_config/5,
get_function_url_config/2,
get_function_url_config/4,
get_function_url_config/5,
get_layer_version/3,
get_layer_version/5,
get_layer_version/6,
get_layer_version_by_arn/2,
get_layer_version_by_arn/4,
get_layer_version_by_arn/5,
get_layer_version_policy/3,
get_layer_version_policy/5,
get_layer_version_policy/6,
get_policy/2,
get_policy/4,
get_policy/5,
get_provisioned_concurrency_config/3,
get_provisioned_concurrency_config/5,
get_provisioned_concurrency_config/6,
get_runtime_management_config/2,
get_runtime_management_config/4,
get_runtime_management_config/5,
invoke/3,
invoke/4,
invoke_async/3,
invoke_async/4,
list_aliases/2,
list_aliases/4,
list_aliases/5,
list_code_signing_configs/1,
list_code_signing_configs/3,
list_code_signing_configs/4,
list_event_source_mappings/1,
list_event_source_mappings/3,
list_event_source_mappings/4,
list_function_event_invoke_configs/2,
list_function_event_invoke_configs/4,
list_function_event_invoke_configs/5,
list_function_url_configs/2,
list_function_url_configs/4,
list_function_url_configs/5,
list_functions/1,
list_functions/3,
list_functions/4,
list_functions_by_code_signing_config/2,
list_functions_by_code_signing_config/4,
list_functions_by_code_signing_config/5,
list_layer_versions/2,
list_layer_versions/4,
list_layer_versions/5,
list_layers/1,
list_layers/3,
list_layers/4,
list_provisioned_concurrency_configs/2,
list_provisioned_concurrency_configs/4,
list_provisioned_concurrency_configs/5,
list_tags/2,
list_tags/4,
list_tags/5,
list_versions_by_function/2,
list_versions_by_function/4,
list_versions_by_function/5,
publish_layer_version/3,
publish_layer_version/4,
publish_version/3,
publish_version/4,
put_function_code_signing_config/3,
put_function_code_signing_config/4,
put_function_concurrency/3,
put_function_concurrency/4,
put_function_event_invoke_config/3,
put_function_event_invoke_config/4,
put_provisioned_concurrency_config/3,
put_provisioned_concurrency_config/4,
put_runtime_management_config/3,
put_runtime_management_config/4,
remove_layer_version_permission/5,
remove_layer_version_permission/6,
remove_permission/4,
remove_permission/5,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_alias/4,
update_alias/5,
update_code_signing_config/3,
update_code_signing_config/4,
update_event_source_mapping/3,
update_event_source_mapping/4,
update_function_code/3,
update_function_code/4,
update_function_configuration/3,
update_function_configuration/4,
update_function_event_invoke_config/3,
update_function_event_invoke_config/4,
update_function_url_config/3,
update_function_url_config/4]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Adds permissions to the resource-based policy of a version of an
%% Lambda layer.
%%
%% Use this action to grant layer usage permission to other accounts. You can
%% grant permission to a single account, all accounts in an organization, or
%% all Amazon Web Services accounts.
%%
%% To revoke permission, call `RemoveLayerVersionPermission' with the
%% statement ID that you specified when you added it.
add_layer_version_permission(Client, LayerName, VersionNumber, Input) ->
    add_layer_version_permission(Client, LayerName, VersionNumber, Input, []).

add_layer_version_permission(Client, LayerName, VersionNumber, Input0, Options0) ->
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
            "/versions/", aws_util:encode_uri(VersionNumber), "/policy"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% RevisionId travels in the query string, not the request body.
    {Query_, Input} =
        aws_request:build_headers([{<<"RevisionId">>, <<"RevisionId">>}], Input0),
    request(Client, post, Path, Query_, [], Input, Options, 201).
%% @doc Grants an Amazon Web Service, Amazon Web Services account, or Amazon
%% Web Services organization permission to use a function.
%%
%% You can apply the policy at the function level, or specify a qualifier to
%% restrict access to a single version or alias. If you use a qualifier, the
%% invoker must use the full Amazon Resource Name (ARN) of that version or
%% alias to invoke the function. Note: Lambda does not support adding
%% policies to version $LATEST.
%%
%% To grant permission to another account, specify the account ID as the
%% `Principal'. To grant permission to an organization defined in
%% Organizations, specify the organization ID as the `PrincipalOrgID'.
%% For Amazon Web Services, the principal is a domain-style identifier that
%% the service defines, such as `s3.amazonaws.com' or
%% `sns.amazonaws.com'. For Amazon Web Services, you can also specify the
%% ARN of the associated resource as the `SourceArn'. If you grant
%% permission to a service principal without specifying the source, other
%% accounts could potentially configure resources in their account to invoke
%% your Lambda function.
%%
%% This operation adds a statement to a resource-based permissions policy for
%% the function. For more information about function policies, see Using
%% resource-based policies for Lambda.
add_permission(Client, FunctionName, Input) ->
    add_permission(Client, FunctionName, Input, []).

add_permission(Client, FunctionName, Input0, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/policy"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% An optional Qualifier in the input becomes a query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, post, Path, Query_, [], Input, Options, 201).
%% @doc Creates an alias for a Lambda function version.
%%
%% Use aliases to provide clients with a function identifier that you can
%% update to invoke a different version.
%%
%% You can also map an alias to split invocation requests between two
%% versions. Use the `RoutingConfig' parameter to specify a second
%% version and the percentage of invocation requests that it receives.
create_alias(Client, FunctionName, Input) ->
    create_alias(Client, FunctionName, Input, []).

create_alias(Client, FunctionName, Input, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/aliases"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% No query parameters for this operation; the alias definition is the body.
    request(Client, post, Path, [], [], Input, Options, 201).
%% @doc Creates a code signing configuration.
%%
%% A code signing configuration defines a list of allowed signing profiles
%% and defines the code-signing validation policy (action to be taken if
%% deployment validation checks fail).
create_code_signing_config(Client, Input) ->
    create_code_signing_config(Client, Input, []).

create_code_signing_config(Client, Input, Options0) ->
    Path = ["/2020-04-22/code-signing-configs/"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, Path, [], [], Input, Options, 201).
%% @doc Creates a mapping between an event source and an Lambda function.
%%
%% Lambda reads items from the event source and invokes the function.
%%
%% For details about how to configure different event sources, see the
%% following topics.
%%
%% <ul> <li> Amazon DynamoDB Streams
%%
%% </li> <li> Amazon Kinesis
%%
%% </li> <li> Amazon SQS
%%
%% </li> <li> Amazon MQ and RabbitMQ
%%
%% </li> <li> Amazon MSK
%%
%% </li> <li> Apache Kafka
%%
%% </li> </ul> The following error handling options are available only for
%% stream sources (DynamoDB and Kinesis):
%%
%% <ul> <li> `BisectBatchOnFunctionError' – If the function returns an
%% error, split the batch in two and retry.
%%
%% </li> <li> `DestinationConfig' – Send discarded records to an Amazon
%% SQS queue or Amazon SNS topic.
%%
%% </li> <li> `MaximumRecordAgeInSeconds' – Discard records older than
%% the specified age. The default value is infinite (-1). When set to
%% infinite (-1), failed records are retried until the record expires.
%%
%% </li> <li> `MaximumRetryAttempts' – Discard records after the
%% specified number of retries. The default value is infinite (-1). When set
%% to infinite (-1), failed records are retried until the record expires.
%%
%% </li> <li> `ParallelizationFactor' – Process multiple batches from
%% each shard concurrently.
%%
%% </li> </ul> For information about which configuration parameters apply to
%% each event source, see the following topics.
%%
%% <ul> <li> Amazon DynamoDB Streams
%%
%% </li> <li> Amazon Kinesis
%%
%% </li> <li> Amazon SQS
%%
%% </li> <li> Amazon MQ and RabbitMQ
%%
%% </li> <li> Amazon MSK
%%
%% </li> <li> Apache Kafka
%%
%% </li> </ul>
create_event_source_mapping(Client, Input) ->
    create_event_source_mapping(Client, Input, []).

create_event_source_mapping(Client, Input, Options0) ->
    Path = ["/2015-03-31/event-source-mappings/"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Mapping creation is asynchronous; the service answers 202 Accepted.
    request(Client, post, Path, [], [], Input, Options, 202).
%% @doc Creates a Lambda function.
%%
%% To create a function, you need a deployment package and an execution role.
%% The deployment package is a .zip file archive or container image that
%% contains your function code. The execution role grants the function
%% permission to use Amazon Web Services, such as Amazon CloudWatch Logs for
%% log streaming and X-Ray for request tracing.
%%
%% If the deployment package is a container image, then you set the package
%% type to `Image'. For a container image, the code property must include
%% the URI of a container image in the Amazon ECR registry. You do not need
%% to specify the handler and runtime properties.
%%
%% If the deployment package is a .zip file archive, then you set the package
%% type to `Zip'. For a .zip file archive, the code property specifies
%% the location of the .zip file. You must also specify the handler and
%% runtime properties. The code in the deployment package must be compatible
%% with the target instruction set architecture of the function (`x86-64'
%% or `arm64'). If you do not specify the architecture, then the default
%% value is `x86-64'.
%%
%% When you create a function, Lambda provisions an instance of the function
%% and its supporting resources. If your function connects to a VPC, this
%% process can take a minute or so. During this time, you can't invoke or
%% modify the function. The `State', `StateReason', and
%% `StateReasonCode' fields in the response from
%% `GetFunctionConfiguration' indicate when the function is ready to
%% invoke. For more information, see Lambda function states.
%%
%% A function has an unpublished version, and can have published versions and
%% aliases. The unpublished version changes when you update your
%% function's code and configuration. A published version is a snapshot
%% of your function code and configuration that can't be changed. An
%% alias is a named resource that maps to a version, and can be changed to
%% map to a different version. Use the `Publish' parameter to create
%% version `1' of your function from its initial configuration.
%%
%% The other parameters let you configure version-specific and function-level
%% settings. You can modify version-specific settings later with
%% `UpdateFunctionConfiguration'. Function-level settings apply to both
%% the unpublished and published versions of the function, and include tags
%% (`TagResource') and per-function concurrency limits
%% (`PutFunctionConcurrency').
%%
%% You can use code signing if your deployment package is a .zip file
%% archive. To enable code signing for this function, specify the ARN of a
%% code-signing configuration. When a user attempts to deploy a code package
%% with `UpdateFunctionCode', Lambda checks that the code package has a
%% valid signature from a trusted publisher. The code-signing configuration
%% includes set of signing profiles, which define the trusted publishers for
%% this function.
%%
%% If another Amazon Web Services account or an Amazon Web Service invokes
%% your function, use `AddPermission' to grant permission by creating a
%% resource-based Identity and Access Management (IAM) policy. You can grant
%% permissions at the function level, on a version, or on an alias.
%%
%% To invoke your function directly, use `Invoke'. To invoke your
%% function in response to events in other Amazon Web Services, create an
%% event source mapping (`CreateEventSourceMapping'), or configure a
%% function trigger in the other service. For more information, see Invoking
%% Lambda functions.
create_function(Client, Input) ->
    create_function(Client, Input, []).

create_function(Client, Input, Options0) ->
    Path = ["/2015-03-31/functions"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, Path, [], [], Input, Options, 201).
%% @doc Creates a Lambda function URL with the specified configuration
%% parameters.
%%
%% A function URL is a dedicated HTTP(S) endpoint that you can use to invoke
%% your function.
create_function_url_config(Client, FunctionName, Input) ->
    create_function_url_config(Client, FunctionName, Input, []).

create_function_url_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% An optional Qualifier in the input becomes a query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, post, Path, Query_, [], Input, Options, 201).
%% @doc Deletes a Lambda function alias.
delete_alias(Client, FunctionName, Name, Input) ->
    delete_alias(Client, FunctionName, Name, Input, []).

delete_alias(Client, FunctionName, Name, Input, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/aliases/", aws_util:encode_uri(Name), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, delete, Path, [], [], Input, Options, 204).
%% @doc Deletes the code signing configuration.
%%
%% You can delete the code signing configuration only if no function is using
%% it.
delete_code_signing_config(Client, CodeSigningConfigArn, Input) ->
    delete_code_signing_config(Client, CodeSigningConfigArn, Input, []).

delete_code_signing_config(Client, CodeSigningConfigArn, Input, Options0) ->
    Path = ["/2020-04-22/code-signing-configs/",
            aws_util:encode_uri(CodeSigningConfigArn), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, delete, Path, [], [], Input, Options, 204).
%% @doc Deletes an event source mapping.
%%
%% You can get the identifier of a mapping from the output of
%% `ListEventSourceMappings'.
%%
%% When you delete an event source mapping, it enters a `Deleting' state
%% and might not be completely deleted for several seconds.
delete_event_source_mapping(Client, UUID, Input) ->
    delete_event_source_mapping(Client, UUID, Input, []).

delete_event_source_mapping(Client, UUID, Input, Options0) ->
    Path = ["/2015-03-31/event-source-mappings/", aws_util:encode_uri(UUID), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Deletion is asynchronous; the service answers 202 Accepted.
    request(Client, delete, Path, [], [], Input, Options, 202).
%% @doc Deletes a Lambda function.
%%
%% To delete a specific function version, use the `Qualifier' parameter.
%% Otherwise, all versions and aliases are deleted.
%%
%% To delete Lambda event source mappings that invoke a function, use
%% `DeleteEventSourceMapping'. For Amazon Web Services and resources that
%% invoke your function directly, delete the trigger in the service where you
%% originally configured it.
delete_function(Client, FunctionName, Input) ->
    delete_function(Client, FunctionName, Input, []).

delete_function(Client, FunctionName, Input0, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% An optional Qualifier in the input becomes a query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Removes the code signing configuration from the function.
delete_function_code_signing_config(Client, FunctionName, Input) ->
    delete_function_code_signing_config(Client, FunctionName, Input, []).

delete_function_code_signing_config(Client, FunctionName, Input, Options0) ->
    Path = ["/2020-06-30/functions/", aws_util:encode_uri(FunctionName),
            "/code-signing-config"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, delete, Path, [], [], Input, Options, 204).
%% @doc Removes a concurrent execution limit from a function.
delete_function_concurrency(Client, FunctionName, Input) ->
    delete_function_concurrency(Client, FunctionName, Input, []).

delete_function_concurrency(Client, FunctionName, Input, Options0) ->
    Path = ["/2017-10-31/functions/", aws_util:encode_uri(FunctionName),
            "/concurrency"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, delete, Path, [], [], Input, Options, 204).
%% @doc Deletes the configuration for asynchronous invocation for a function,
%% version, or alias.
%%
%% To configure options for asynchronous invocation, use
%% `PutFunctionEventInvokeConfig'.
delete_function_event_invoke_config(Client, FunctionName, Input) ->
    delete_function_event_invoke_config(Client, FunctionName, Input, []).

delete_function_event_invoke_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName),
            "/event-invoke-config"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% An optional Qualifier in the input becomes a query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Deletes a Lambda function URL.
%%
%% When you delete a function URL, you can't recover it. Creating a new
%% function URL results in a different URL address.
delete_function_url_config(Client, FunctionName, Input) ->
    delete_function_url_config(Client, FunctionName, Input, []).

delete_function_url_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% An optional Qualifier in the input becomes a query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Deletes a version of an Lambda layer.
%%
%% Deleted versions can no longer be viewed or added to functions. To avoid
%% breaking functions, a copy of the version remains in Lambda until no
%% functions refer to it.
delete_layer_version(Client, LayerName, VersionNumber, Input) ->
    delete_layer_version(Client, LayerName, VersionNumber, Input, []).

delete_layer_version(Client, LayerName, VersionNumber, Input, Options0) ->
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
            "/versions/", aws_util:encode_uri(VersionNumber), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, delete, Path, [], [], Input, Options, 204).
%% @doc Deletes the provisioned concurrency configuration for a function.
delete_provisioned_concurrency_config(Client, FunctionName, Input) ->
    delete_provisioned_concurrency_config(Client, FunctionName, Input, []).

delete_provisioned_concurrency_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName),
            "/provisioned-concurrency"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% An optional Qualifier in the input becomes a query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Retrieves details about your account's limits and usage in an
%% Amazon Web Services Region.
get_account_settings(Client)
  when is_map(Client) ->
    get_account_settings(Client, #{}, #{}).

get_account_settings(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_account_settings(Client, QueryMap, HeadersMap, []).

get_account_settings(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2016-08-19/account-settings/"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Returns details about a Lambda function alias.
get_alias(Client, FunctionName, Name)
  when is_map(Client) ->
    get_alias(Client, FunctionName, Name, #{}, #{}).

get_alias(Client, FunctionName, Name, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_alias(Client, FunctionName, Name, QueryMap, HeadersMap, []).

get_alias(Client, FunctionName, Name, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/aliases/", aws_util:encode_uri(Name), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Returns information about the specified code signing configuration.
get_code_signing_config(Client, CodeSigningConfigArn)
  when is_map(Client) ->
    get_code_signing_config(Client, CodeSigningConfigArn, #{}, #{}).

get_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, []).

get_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2020-04-22/code-signing-configs/",
            aws_util:encode_uri(CodeSigningConfigArn), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Returns details about an event source mapping.
%%
%% You can get the identifier of a mapping from the output of
%% `ListEventSourceMappings'.
get_event_source_mapping(Client, UUID)
  when is_map(Client) ->
    get_event_source_mapping(Client, UUID, #{}, #{}).

get_event_source_mapping(Client, UUID, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_event_source_mapping(Client, UUID, QueryMap, HeadersMap, []).

get_event_source_mapping(Client, UUID, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/event-source-mappings/", aws_util:encode_uri(UUID), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Returns information about the function or function version, with a
%% link to download the deployment package that's valid for 10 minutes.
%%
%% If you specify a function version, only details that are specific to that
%% version are returned.
get_function(Client, FunctionName)
  when is_map(Client) ->
    get_function(Client, FunctionName, #{}, #{}).

get_function(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function(Client, FunctionName, QueryMap, HeadersMap, []).

get_function(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    %% Qualifier is optional; omit it from the query string when absent.
    Qualifier = maps:get(<<"Qualifier">>, QueryMap, undefined),
    Query_ = [{<<"Qualifier">>, Qualifier} || Qualifier =/= undefined],
    request(Client, get, Path, Query_, [], undefined, Options, 200).
%% @doc Returns the code signing configuration for the specified function.
get_function_code_signing_config(Client, FunctionName)
  when is_map(Client) ->
    get_function_code_signing_config(Client, FunctionName, #{}, #{}).

get_function_code_signing_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_code_signing_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_code_signing_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2020-06-30/functions/", aws_util:encode_uri(FunctionName),
            "/code-signing-config"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Returns details about the reserved concurrency configuration for a
%% function.
%%
%% To set a concurrency limit for a function, use
%% `PutFunctionConcurrency'.
get_function_concurrency(Client, FunctionName)
  when is_map(Client) ->
    get_function_concurrency(Client, FunctionName, #{}, #{}).

get_function_concurrency(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_concurrency(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_concurrency(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName),
            "/concurrency"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    request(Client, get, Path, [], [], undefined, Options, 200).
%% @doc Returns the version-specific settings of a Lambda function or
%% version.
%%
%% The output includes only options that can vary between versions of a
%% function. To modify these settings, use `UpdateFunctionConfiguration'.
%%
%% To get all of a function's details, including function-level settings,
%% use `GetFunction'.
get_function_configuration(Client, FunctionName)
  when is_map(Client) ->
    get_function_configuration(Client, FunctionName, #{}, #{}).

get_function_configuration(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_configuration(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_configuration(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/configuration"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}
      ],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves the configuration for asynchronous invocation for a
%% function, version, or alias.
%%
%% To configure options for asynchronous invocation, use
%% `PutFunctionEventInvokeConfig'.
get_function_event_invoke_config(Client, FunctionName)
  when is_map(Client) ->
    get_function_event_invoke_config(Client, FunctionName, #{}, #{}).

get_function_event_invoke_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_event_invoke_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_event_invoke_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName), "/event-invoke-config"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}
      ],
    %% Keep only the query parameters that were actually provided.
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns details about a Lambda function URL.
get_function_url_config(Client, FunctionName)
  when is_map(Client) ->
    get_function_url_config(Client, FunctionName, #{}, #{}).

get_function_url_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_url_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_url_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}
      ],
    %% Omit the Qualifier when the caller did not set one.
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns information about a version of an Lambda layer, with a link
%% to download the layer archive that's valid for 10 minutes.
get_layer_version(Client, LayerName, VersionNumber)
  when is_map(Client) ->
    get_layer_version(Client, LayerName, VersionNumber, #{}, #{}).

get_layer_version(Client, LayerName, VersionNumber, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_layer_version(Client, LayerName, VersionNumber, QueryMap, HeadersMap, []).

get_layer_version(Client, LayerName, VersionNumber, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName), "/versions/", aws_util:encode_uri(VersionNumber), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    Query = [],
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns information about a version of an Lambda layer, with a link
%% to download the layer archive that's valid for 10 minutes.
get_layer_version_by_arn(Client, Arn)
  when is_map(Client) ->
    get_layer_version_by_arn(Client, Arn, #{}, #{}).

get_layer_version_by_arn(Client, Arn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_layer_version_by_arn(Client, Arn, QueryMap, HeadersMap, []).

get_layer_version_by_arn(Client, Arn, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% The layer version is looked up by ARN via a fixed query-style path.
    Path = ["/2018-10-31/layers?find=LayerVersion"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Arn">>, Arn}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the permission policy for a version of an Lambda layer.
%%
%% For more information, see `AddLayerVersionPermission'.
get_layer_version_policy(Client, LayerName, VersionNumber)
  when is_map(Client) ->
    get_layer_version_policy(Client, LayerName, VersionNumber, #{}, #{}).

get_layer_version_policy(Client, LayerName, VersionNumber, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_layer_version_policy(Client, LayerName, VersionNumber, QueryMap, HeadersMap, []).

get_layer_version_policy(Client, LayerName, VersionNumber, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName), "/versions/", aws_util:encode_uri(VersionNumber), "/policy"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    Query = [],
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the resource-based IAM policy for a function, version, or
%% alias.
get_policy(Client, FunctionName)
  when is_map(Client) ->
    get_policy(Client, FunctionName, #{}, #{}).

get_policy(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_policy(Client, FunctionName, QueryMap, HeadersMap, []).

get_policy(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/policy"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}
      ],
    %% Only send the Qualifier when the caller supplied one.
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves the provisioned concurrency configuration for a
%% function's alias or version.
get_provisioned_concurrency_config(Client, FunctionName, Qualifier)
  when is_map(Client) ->
    get_provisioned_concurrency_config(Client, FunctionName, Qualifier, #{}, #{}).

get_provisioned_concurrency_config(Client, FunctionName, Qualifier, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_provisioned_concurrency_config(Client, FunctionName, Qualifier, QueryMap, HeadersMap, []).

get_provisioned_concurrency_config(Client, FunctionName, Qualifier, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName), "/provisioned-concurrency"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    %% Qualifier is a required positional argument here, but the undefined
    %% filter is kept for uniformity with the other operations.
    QueryPairs =
      [
        {<<"Qualifier">>, Qualifier}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves the runtime management configuration for a function's
%% version.
%%
%% If the runtime update mode is Manual, this includes the ARN of the
%% runtime version and the runtime update mode. If the runtime update mode
%% is Auto or Function update, this includes the runtime update mode and
%% `null' is returned for the ARN. For more information, see Runtime
%% updates.
get_runtime_management_config(Client, FunctionName)
  when is_map(Client) ->
    get_runtime_management_config(Client, FunctionName, #{}, #{}).

get_runtime_management_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_runtime_management_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_runtime_management_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2021-07-20/functions/", aws_util:encode_uri(FunctionName), "/runtime-management-config"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Invokes a Lambda function.
%%
%% You can invoke a function synchronously (and wait for the response), or
%% asynchronously. To invoke a function asynchronously, set
%% `InvocationType' to `Event'.
%%
%% For synchronous invocation, details about the function response, including
%% errors, are included in the response body and headers. For either
%% invocation type, you can find more information in the execution log and
%% trace.
%%
%% When an error occurs, your function may be invoked multiple times. Retry
%% behavior varies by error type, client, event source, and invocation type.
%% For example, if you invoke a function asynchronously and it returns an
%% error, Lambda executes the function up to two more times. For more
%% information, see Error handling and automatic retries in Lambda.
%%
%% For asynchronous invocation, Lambda adds events to a queue before sending
%% them to your function. If your function does not have enough capacity to
%% keep up with the queue, events may be lost. Occasionally, your function
%% may receive the same event multiple times, even if no error occurs. To
%% retain events that were not processed, configure your function with a
%% dead-letter queue.
%%
%% The status code in the API response doesn't reflect function errors.
%% Error codes are reserved for errors that prevent your function from
%% executing, such as permissions errors, quota errors, or issues with your
%% function's code and configuration. For example, Lambda returns
%% `TooManyRequestsException' if running the function would cause you to
%% exceed a concurrency limit at either the account level
%% (`ConcurrentInvocationLimitExceeded') or function level
%% (`ReservedFunctionConcurrentInvocationLimitExceeded').
%%
%% For functions with a long timeout, your client might disconnect during
%% synchronous invocation while it waits for a response. Configure your HTTP
%% client, SDK, firewall, proxy, or operating system to allow for long
%% connections with timeout or keep-alive settings.
%%
%% This operation requires permission for the lambda:InvokeFunction action.
%% For details on how to set up permissions for cross-account invocations,
%% see Granting function access to other accounts.
invoke(Client, FunctionName, Input) ->
    invoke(Client, FunctionName, Input, []).

invoke(Client, FunctionName, Input0, Options0) ->
    Method = post,
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/invocations"],
    %% `undefined' accepts any success status; the function's own status is
    %% reported via headers and the response body.
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the invocation options out of the input map into HTTP headers.
    HeadersMapping = [
                      {<<"X-Amz-Client-Context">>, <<"ClientContext">>},
                      {<<"X-Amz-Invocation-Type">>, <<"InvocationType">>},
                      {<<"X-Amz-Log-Type">>, <<"LogType">>}
                     ],
    {Headers, Input1} = aws_request:build_headers(HeadersMapping, Input0),
    CustomHeaders = [],
    %% Move the optional Qualifier into the query string.
    QueryMapping = [
                    {<<"Qualifier">>, <<"Qualifier">>}
                   ],
    {Query, Input} = aws_request:build_headers(QueryMapping, Input1),
    case request(Client, Method, Path, Query, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode) of
        {ok, Body0, {_, ResponseHeaders, _} = Response} ->
            %% Fold selected response headers back into the decoded body so
            %% callers see them alongside the payload.
            ResponseHeadersParams =
                [
                 {<<"X-Amz-Executed-Version">>, <<"ExecutedVersion">>},
                 {<<"X-Amz-Function-Error">>, <<"FunctionError">>},
                 {<<"X-Amz-Log-Result">>, <<"LogResult">>}
                ],
            MergeHeader =
                fun({HeaderName, Key}, Acc) ->
                        case lists:keyfind(HeaderName, 1, ResponseHeaders) of
                            false -> Acc;
                            {_, Value} -> Acc#{Key => Value}
                        end
                end,
            Body = lists:foldl(MergeHeader, Body0, ResponseHeadersParams),
            {ok, Body, Response};
        Result ->
            Result
    end.
%% @doc For asynchronous function invocation, use `Invoke'.
%%
%% Invokes a function asynchronously.
invoke_async(Client, FunctionName, Input) ->
    invoke_async(Client, FunctionName, Input, []).

invoke_async(Client, FunctionName, Input0, Options0) ->
    Method = post,
    Path = ["/2014-11-13/functions/", aws_util:encode_uri(FunctionName), "/invoke-async/"],
    %% 202 Accepted: the event is queued, not executed synchronously.
    SuccessStatusCode = 202,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    CustomHeaders = [],
    Query = [],
    %% The input map is sent unchanged as the request body.
    request(Client, Method, Path, Query, CustomHeaders ++ Headers, Input0, Options, SuccessStatusCode).
%% @doc Returns a list of aliases for a Lambda function.
list_aliases(Client, FunctionName)
  when is_map(Client) ->
    list_aliases(Client, FunctionName, #{}, #{}).

list_aliases(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_aliases(Client, FunctionName, QueryMap, HeadersMap, []).

list_aliases(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/aliases"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"FunctionVersion">>, maps:get(<<"FunctionVersion">>, QueryMap, undefined)},
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    %% Keep only the pagination/filter parameters the caller supplied.
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of code signing configurations.
%%
%% A request returns up to 10,000 configurations per call. You can use the
%% `MaxItems' parameter to return fewer configurations per call.
list_code_signing_configs(Client)
  when is_map(Client) ->
    list_code_signing_configs(Client, #{}, #{}).

list_code_signing_configs(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_code_signing_configs(Client, QueryMap, HeadersMap, []).

list_code_signing_configs(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2020-04-22/code-signing-configs/"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists event source mappings.
%%
%% Specify an `EventSourceArn' to show only event source mappings for a
%% single event source.
list_event_source_mappings(Client)
  when is_map(Client) ->
    list_event_source_mappings(Client, #{}, #{}).

list_event_source_mappings(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_event_source_mappings(Client, QueryMap, HeadersMap, []).

list_event_source_mappings(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/event-source-mappings/"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"EventSourceArn">>, maps:get(<<"EventSourceArn">>, QueryMap, undefined)},
        {<<"FunctionName">>, maps:get(<<"FunctionName">>, QueryMap, undefined)},
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves a list of configurations for asynchronous invocation for a
%% function.
%%
%% To configure options for asynchronous invocation, use
%% `PutFunctionEventInvokeConfig'.
list_function_event_invoke_configs(Client, FunctionName)
  when is_map(Client) ->
    list_function_event_invoke_configs(Client, FunctionName, #{}, #{}).

list_function_event_invoke_configs(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_function_event_invoke_configs(Client, FunctionName, QueryMap, HeadersMap, []).

list_function_event_invoke_configs(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName), "/event-invoke-config/list"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of Lambda function URLs for the specified function.
list_function_url_configs(Client, FunctionName)
  when is_map(Client) ->
    list_function_url_configs(Client, FunctionName, #{}, #{}).

list_function_url_configs(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_function_url_configs(Client, FunctionName, QueryMap, HeadersMap, []).

list_function_url_configs(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/urls"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of Lambda functions, with the version-specific
%% configuration of each.
%%
%% Lambda returns up to 50 functions per call.
%%
%% Set `FunctionVersion' to `ALL' to include all published versions
%% of each function in addition to the unpublished version.
%%
%% The `ListFunctions' operation returns a subset of the
%% `FunctionConfiguration' fields. To get the additional fields (State,
%% StateReasonCode, StateReason, LastUpdateStatus, LastUpdateStatusReason,
%% LastUpdateStatusReasonCode, RuntimeVersionConfig) for a function or
%% version, use `GetFunction'.
list_functions(Client)
  when is_map(Client) ->
    list_functions(Client, #{}, #{}).

list_functions(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_functions(Client, QueryMap, HeadersMap, []).

list_functions(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"FunctionVersion">>, maps:get(<<"FunctionVersion">>, QueryMap, undefined)},
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MasterRegion">>, maps:get(<<"MasterRegion">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc List the functions that use the specified code signing configuration.
%%
%% You can use this method prior to deleting a code signing configuration, to
%% verify that no functions are using it.
list_functions_by_code_signing_config(Client, CodeSigningConfigArn)
  when is_map(Client) ->
    list_functions_by_code_signing_config(Client, CodeSigningConfigArn, #{}, #{}).

list_functions_by_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_functions_by_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, []).

list_functions_by_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2020-04-22/code-signing-configs/", aws_util:encode_uri(CodeSigningConfigArn), "/functions"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists the versions of an Lambda layer.
%%
%% Versions that have been deleted aren't listed. Specify a runtime
%% identifier to list only versions that indicate that they're compatible
%% with that runtime. Specify a compatible architecture to include only layer
%% versions that are compatible with that architecture.
list_layer_versions(Client, LayerName)
  when is_map(Client) ->
    list_layer_versions(Client, LayerName, #{}, #{}).

list_layer_versions(Client, LayerName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_layer_versions(Client, LayerName, QueryMap, HeadersMap, []).

list_layer_versions(Client, LayerName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName), "/versions"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"CompatibleArchitecture">>, maps:get(<<"CompatibleArchitecture">>, QueryMap, undefined)},
        {<<"CompatibleRuntime">>, maps:get(<<"CompatibleRuntime">>, QueryMap, undefined)},
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists Lambda layers and shows information about the latest version of
%% each.
%%
%% Specify a runtime identifier to list only layers that indicate that
%% they're compatible with that runtime. Specify a compatible
%% architecture to include only layers that are compatible with that
%% instruction set architecture.
list_layers(Client)
  when is_map(Client) ->
    list_layers(Client, #{}, #{}).

list_layers(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_layers(Client, QueryMap, HeadersMap, []).

list_layers(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2018-10-31/layers"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"CompatibleArchitecture">>, maps:get(<<"CompatibleArchitecture">>, QueryMap, undefined)},
        {<<"CompatibleRuntime">>, maps:get(<<"CompatibleRuntime">>, QueryMap, undefined)},
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves a list of provisioned concurrency configurations for a
%% function.
list_provisioned_concurrency_configs(Client, FunctionName)
  when is_map(Client) ->
    list_provisioned_concurrency_configs(Client, FunctionName, #{}, #{}).

list_provisioned_concurrency_configs(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_provisioned_concurrency_configs(Client, FunctionName, QueryMap, HeadersMap, []).

list_provisioned_concurrency_configs(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    %% The fixed List=ALL query selects the list variant of this endpoint.
    Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName), "/provisioned-concurrency?List=ALL"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a function's tags.
%%
%% You can also view tags with `GetFunction'.
list_tags(Client, Resource)
  when is_map(Client) ->
    list_tags(Client, Resource, #{}, #{}).

list_tags(Client, Resource, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_tags(Client, Resource, QueryMap, HeadersMap, []).

list_tags(Client, Resource, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2017-03-31/tags/", aws_util:encode_uri(Resource), ""],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    %% `undefined' accepts any success status code for this endpoint.
    SuccessStatusCode = undefined,
    Headers = [],
    Query = [],
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of versions, with the version-specific configuration
%% of each.
%%
%% Lambda returns up to 50 versions per call.
list_versions_by_function(Client, FunctionName)
  when is_map(Client) ->
    list_versions_by_function(Client, FunctionName, #{}, #{}).

list_versions_by_function(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_versions_by_function(Client, FunctionName, QueryMap, HeadersMap, []).

list_versions_by_function(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/versions"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    SuccessStatusCode = 200,
    Headers = [],
    QueryPairs =
      [
        {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
        {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}
      ],
    Query = lists:filter(fun({_, Value}) -> Value =/= undefined end, QueryPairs),
    request(Client, get, Path, Query, Headers, undefined, Options, SuccessStatusCode).
%% @doc Creates an Lambda layer from a ZIP archive.
%%
%% Each time you call `PublishLayerVersion' with the same layer name, a
%% new version is created.
%%
%% Add layers to your function with `CreateFunction' or
%% `UpdateFunctionConfiguration'.
publish_layer_version(Client, LayerName, Input) ->
    publish_layer_version(Client, LayerName, Input, []).

publish_layer_version(Client, LayerName, Input0, Options0) ->
    Method = post,
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName), "/versions"],
    %% 201 Created on success.
    SuccessStatusCode = 201,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    CustomHeaders = [],
    Query = [],
    %% The input map is sent unchanged as the request body.
    request(Client, Method, Path, Query, CustomHeaders ++ Headers, Input0, Options, SuccessStatusCode).
%% @doc Creates a version from the current code and configuration of a
%% function.
%%
%% Use versions to create a snapshot of your function code and configuration
%% that doesn't change.
%%
%% Lambda doesn't publish a version if the function's configuration
%% and code haven't changed since the last version. Use
%% `UpdateFunctionCode' or `UpdateFunctionConfiguration' to update
%% the function before publishing a version.
%%
%% Clients can invoke versions directly or with an alias. To create an alias,
%% use `CreateAlias'.
publish_version(Client, FunctionName, Input) ->
    publish_version(Client, FunctionName, Input, []).

publish_version(Client, FunctionName, Input0, Options0) ->
    Method = post,
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/versions"],
    %% 201 Created on success.
    SuccessStatusCode = 201,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    CustomHeaders = [],
    Query = [],
    %% The input map is sent unchanged as the request body.
    request(Client, Method, Path, Query, CustomHeaders ++ Headers, Input0, Options, SuccessStatusCode).
%% @doc Update the code signing configuration for the function.
%%
%% Changes to the code signing configuration take effect the next time a user
%% tries to deploy a code package to the function.
put_function_code_signing_config(Client, FunctionName, Input) ->
    put_function_code_signing_config(Client, FunctionName, Input, []).

put_function_code_signing_config(Client, FunctionName, Input0, Options0) ->
    Method = put,
    Path = ["/2020-06-30/functions/", aws_util:encode_uri(FunctionName), "/code-signing-config"],
    SuccessStatusCode = 200,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    CustomHeaders = [],
    Query = [],
    %% The input map is sent unchanged as the request body.
    request(Client, Method, Path, Query, CustomHeaders ++ Headers, Input0, Options, SuccessStatusCode).
%% @doc Sets the maximum number of simultaneous executions for a function,
%% and reserves capacity for that concurrency level.
%%
%% Concurrency settings apply to the function as a whole, including all
%% published versions and the unpublished version. Reserving concurrency both
%% ensures that your function has capacity to process the specified number of
%% events simultaneously, and prevents it from scaling beyond that level. Use
%% `GetFunction' to see the current setting for a function.
%%
%% Use `GetAccountSettings' to see your Regional concurrency limit. You
%% can reserve concurrency for as many functions as you like, as long as you
%% leave at least 100 simultaneous executions unreserved for functions that
%% aren't configured with a per-function limit. For more information, see
%% Lambda function scaling.
put_function_concurrency(Client, FunctionName, Input) ->
    put_function_concurrency(Client, FunctionName, Input, []).

put_function_concurrency(Client, FunctionName, Input0, Options0) ->
    Method = put,
    Path = ["/2017-10-31/functions/", aws_util:encode_uri(FunctionName), "/concurrency"],
    SuccessStatusCode = 200,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    CustomHeaders = [],
    Query = [],
    %% The input map is sent unchanged as the request body.
    request(Client, Method, Path, Query, CustomHeaders ++ Headers, Input0, Options, SuccessStatusCode).
%% @doc Configures options for asynchronous invocation on a function,
%% version, or alias.
%%
%% If a configuration already exists for a function, version, or alias, this
%% operation overwrites it. If you exclude any settings, they are removed. To
set one option without affecting existing settings for other options , use
%% `UpdateFunctionEventInvokeConfig'.
%%
%% By default, Lambda retries an asynchronous invocation twice if the
%% function returns an error. It retains events in a queue for up to six
%% hours. When an event fails all processing attempts or stays in the
%% asynchronous invocation queue for too long, Lambda discards it. To retain
%% discarded events, configure a dead-letter queue with
%% `UpdateFunctionConfiguration'.
%%
%% To send an invocation record to a queue, topic, function, or event bus,
%% specify a destination. You can configure separate destinations for
%% successful invocations (on-success) and events that fail all processing
%% attempts (on-failure). You can configure destinations in addition to or
%% instead of a dead-letter queue.
%% Convenience wrapper: same operation with an empty option list.
put_function_event_invoke_config(Client, FunctionName, Input) ->
    put_function_event_invoke_config(Client, FunctionName, Input, []).

put_function_event_invoke_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName),
            "/event-invoke-config"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the Qualifier member out of the input map into the query string.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, put, Path, Query_, [], Input, Options, 200).
%% @doc Adds a provisioned concurrency configuration to a function's
%% alias or version.
%% Convenience wrapper: same operation with an empty option list.
put_provisioned_concurrency_config(Client, FunctionName, Input) ->
    put_provisioned_concurrency_config(Client, FunctionName, Input, []).

put_provisioned_concurrency_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName),
            "/provisioned-concurrency"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the Qualifier member out of the input map into the query string.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    %% 202 Accepted: provisioning happens asynchronously on the service side.
    request(Client, put, Path, Query_, [], Input, Options, 202).
%% @doc Sets the runtime management configuration for a function's
%% version.
%%
%% For more information, see Runtime updates.
%% Convenience wrapper: same operation with an empty option list.
put_runtime_management_config(Client, FunctionName, Input) ->
    put_runtime_management_config(Client, FunctionName, Input, []).

put_runtime_management_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2021-07-20/functions/", aws_util:encode_uri(FunctionName),
            "/runtime-management-config"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the Qualifier member out of the input map into the query string.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, put, Path, Query_, [], Input, Options, 200).
%% @doc Removes a statement from the permissions policy for a version of an
%% Lambda layer.
%%
%% For more information, see `AddLayerVersionPermission'.
%% Convenience wrapper: same operation with an empty option list.
remove_layer_version_permission(Client, LayerName, StatementId, VersionNumber, Input) ->
    remove_layer_version_permission(Client, LayerName, StatementId, VersionNumber, Input, []).

remove_layer_version_permission(Client, LayerName, StatementId, VersionNumber, Input0, Options0) ->
    Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
            "/versions/", aws_util:encode_uri(VersionNumber),
            "/policy/", aws_util:encode_uri(StatementId), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the RevisionId member out of the input map into the query string.
    {Query_, Input} =
        aws_request:build_headers([{<<"RevisionId">>, <<"RevisionId">>}], Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Revokes function-use permission from an Amazon Web Service or another
%% Amazon Web Services account.
%%
%% You can get the ID of the statement from the output of `GetPolicy'.
%% Convenience wrapper: same operation with an empty option list.
remove_permission(Client, FunctionName, StatementId, Input) ->
    remove_permission(Client, FunctionName, StatementId, Input, []).

remove_permission(Client, FunctionName, StatementId, Input0, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/policy/", aws_util:encode_uri(StatementId), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift Qualifier and RevisionId out of the input map into the query
    %% string.
    QueryMapping = [{<<"Qualifier">>, <<"Qualifier">>},
                    {<<"RevisionId">>, <<"RevisionId">>}],
    {Query_, Input} = aws_request:build_headers(QueryMapping, Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Adds tags to a function.
%% Convenience wrapper: same operation with an empty option list.
tag_resource(Client, Resource, Input) ->
    tag_resource(Client, Resource, Input, []).

tag_resource(Client, Resource, Input0, Options0) ->
    Path = ["/2017-03-31/tags/", aws_util:encode_uri(Resource), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% This operation uses no query parameters and no custom headers.
    request(Client, post, Path, [], [], Input0, Options, 204).
%% @doc Removes tags from a function.
%% Convenience wrapper: same operation with an empty option list.
untag_resource(Client, Resource, Input) ->
    untag_resource(Client, Resource, Input, []).

untag_resource(Client, Resource, Input0, Options0) ->
    Path = ["/2017-03-31/tags/", aws_util:encode_uri(Resource), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the TagKeys member out of the input map into the `tagKeys'
    %% query-string parameter.
    {Query_, Input} =
        aws_request:build_headers([{<<"tagKeys">>, <<"TagKeys">>}], Input0),
    request(Client, delete, Path, Query_, [], Input, Options, 204).
%% @doc Updates the configuration of a Lambda function alias.
%% Convenience wrapper: same operation with an empty option list.
update_alias(Client, FunctionName, Name, Input) ->
    update_alias(Client, FunctionName, Name, Input, []).

update_alias(Client, FunctionName, Name, Input0, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/aliases/", aws_util:encode_uri(Name), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% This operation uses no query parameters and no custom headers.
    request(Client, put, Path, [], [], Input0, Options, 200).
%% @doc Update the code signing configuration.
%%
%% Changes to the code signing configuration take effect the next time a user
%% tries to deploy a code package to the function.
%% Convenience wrapper: same operation with an empty option list.
update_code_signing_config(Client, CodeSigningConfigArn, Input) ->
    update_code_signing_config(Client, CodeSigningConfigArn, Input, []).

update_code_signing_config(Client, CodeSigningConfigArn, Input0, Options0) ->
    Path = ["/2020-04-22/code-signing-configs/",
            aws_util:encode_uri(CodeSigningConfigArn), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% This operation uses no query parameters and no custom headers.
    request(Client, put, Path, [], [], Input0, Options, 200).
%% @doc Updates an event source mapping.
%%
%% You can change the function that Lambda invokes, or pause invocation and
%% resume later from the same location.
%%
%% For details about how to configure different event sources, see the
%% following topics.
%%
%% <ul> <li> Amazon DynamoDB Streams
%%
%% </li> <li> Amazon Kinesis
%%
%% </li> <li> Amazon SQS
%%
%% </li> <li> Amazon MQ and RabbitMQ
%%
%% </li> <li> Amazon MSK
%%
%% </li> <li> Apache Kafka
%%
%% </li> </ul> The following error handling options are available only for
%% stream sources (DynamoDB and Kinesis):
%%
%% <ul> <li> `BisectBatchOnFunctionError' – If the function returns an
%% error, split the batch in two and retry.
%%
%% </li> <li> `DestinationConfig' – Send discarded records to an Amazon
%% SQS queue or Amazon SNS topic.
%%
%% </li> <li> `MaximumRecordAgeInSeconds' – Discard records older than
%% the specified age. The default value is infinite (-1). When set to
%% infinite (-1), failed records are retried until the record expires
%%
%% </li> <li> `MaximumRetryAttempts' – Discard records after the
%% specified number of retries. The default value is infinite (-1). When set
%% to infinite (-1), failed records are retried until the record expires.
%%
%% </li> <li> `ParallelizationFactor' – Process multiple batches from
%% each shard concurrently.
%%
%% </li> </ul> For information about which configuration parameters apply to
%% each event source, see the following topics.
%%
%% <ul> <li> Amazon DynamoDB Streams
%%
%% </li> <li> Amazon Kinesis
%%
%% </li> <li> Amazon SQS
%%
%% </li> <li> Amazon MQ and RabbitMQ
%%
%% </li> <li> Amazon MSK
%%
%% </li> <li> Apache Kafka
%%
%% </li> </ul>
%% Convenience wrapper: same operation with an empty option list.
update_event_source_mapping(Client, UUID, Input) ->
    update_event_source_mapping(Client, UUID, Input, []).

update_event_source_mapping(Client, UUID, Input0, Options0) ->
    Path = ["/2015-03-31/event-source-mappings/",
            aws_util:encode_uri(UUID), ""],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% 202 Accepted: the mapping update is applied asynchronously.
    %% No query parameters and no custom headers for this operation.
    request(Client, put, Path, [], [], Input0, Options, 202).
%% @doc Updates a Lambda function's code.
%%
%% If code signing is enabled for the function, the code package must be
%% signed by a trusted publisher. For more information, see Configuring code
%% signing for Lambda.
%%
%% If the function's package type is `Image', then you must specify
%% the code package in `ImageUri' as the URI of a container image in the
%% Amazon ECR registry.
%%
%% If the function's package type is `Zip', then you must specify the
%% deployment package as a .zip file archive. Enter the Amazon S3 bucket and
%% key of the code .zip file location. You can also provide the function code
%% inline using the `ZipFile' field.
%%
%% The code in the deployment package must be compatible with the target
%% instruction set architecture of the function (`x86-64' or
%% `arm64').
%%
%% The function's code is locked when you publish a version. You
%% can't modify the code of a published version, only the unpublished
%% version.
%%
%% For a function defined as a container image, Lambda resolves the image tag
%% to an image digest. In Amazon ECR, if you update the image tag to a new
%% image, Lambda does not automatically update the function.
%% Convenience wrapper: same operation with an empty option list.
update_function_code(Client, FunctionName, Input) ->
    update_function_code(Client, FunctionName, Input, []).

update_function_code(Client, FunctionName, Input0, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/code"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% This operation uses no query parameters and no custom headers.
    request(Client, put, Path, [], [], Input0, Options, 200).
%% @doc Modify the version-specific settings of a Lambda function.
%%
%% When you update a function, Lambda provisions an instance of the function
%% and its supporting resources. If your function connects to a VPC, this
%% process can take a minute. During this time, you can't modify the
%% function, but you can still invoke it. The `LastUpdateStatus',
%% `LastUpdateStatusReason', and `LastUpdateStatusReasonCode' fields
%% in the response from `GetFunctionConfiguration' indicate when the
%% update is complete and the function is processing events with the new
%% configuration. For more information, see Lambda function states.
%%
%% These settings can vary between versions of a function and are locked when
%% you publish a version. You can't modify the configuration of a
%% published version, only the unpublished version.
%%
%% To configure function concurrency, use `PutFunctionConcurrency'. To
%% grant invoke permissions to an Amazon Web Services account or Amazon Web
%% Service, use `AddPermission'.
%% Convenience wrapper: same operation with an empty option list.
update_function_configuration(Client, FunctionName, Input) ->
    update_function_configuration(Client, FunctionName, Input, []).

update_function_configuration(Client, FunctionName, Input0, Options0) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/configuration"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% This operation uses no query parameters and no custom headers.
    request(Client, put, Path, [], [], Input0, Options, 200).
%% @doc Updates the configuration for asynchronous invocation for a function,
%% version, or alias.
%%
%% To configure options for asynchronous invocation, use
%% `PutFunctionEventInvokeConfig'.
%% Convenience wrapper: same operation with an empty option list.
update_function_event_invoke_config(Client, FunctionName, Input) ->
    update_function_event_invoke_config(Client, FunctionName, Input, []).

update_function_event_invoke_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName),
            "/event-invoke-config"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the Qualifier member out of the input map into the query string.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    %% Note: POST here (partial update), unlike the PUT used by
    %% put_function_event_invoke_config.
    request(Client, post, Path, Query_, [], Input, Options, 200).
%% @doc Updates the configuration for a Lambda function URL.
%% Convenience wrapper: same operation with an empty option list.
update_function_url_config(Client, FunctionName, Input) ->
    update_function_url_config(Client, FunctionName, Input, []).

update_function_url_config(Client, FunctionName, Input0, Options0) ->
    Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName),
            "/url"],
    %% Transport defaults are prepended, so they take precedence over any
    %% caller-supplied values for the same keys.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    %% Lift the Qualifier member out of the input map into the query string.
    {Query_, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, put, Path, Query_, [], Input, Options, 200).
%%====================================================================
%% Internal functions
%%====================================================================
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
    {ok, {integer(), list()}} |
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map(),
    Error :: map().
%% Wrap the actual HTTP round trip in a closure so that aws_request can
%% apply its retry policy (driven by Options) around it.
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    Run = fun() ->
              do_request(Client, Method, Path, Query, Headers0, Input,
                         Options, SuccessStatusCode)
          end,
    aws_request:request(Run, Options).
%% Build, sign, and execute one HTTP request against the Lambda endpoint,
%% then normalize the hackney response via handle_response/3.
%%
%% Client is an aws_client map; Method is an atom (get/put/post/delete);
%% Path is an iolist; Query is a proplist of query-string pairs; Headers0
%% are operation-specific headers; Input is the payload map (or undefined);
%% Options is the proplist assembled by the operation wrappers.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
%% Tag the client with the service name used for request signing.
Client1 = Client#{service => <<"lambda">>},
Host = build_host(<<"lambda">>, Client1),
URL0 = build_url(Host, Path, Client1),
URL = aws_request:add_query(URL0, Query),
AdditionalHeaders1 = [ {<<"Host">>, Host}
    , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
    ],
%% Either pass the caller-supplied binary body through untouched, or
%% JSON-encode the input map.
Payload =
  case proplists:get_value(send_body_as_binary, Options) of
    true ->
      maps:get(<<"Body">>, Input, <<"">>);
    false ->
      encode_payload(Input)
  end,
%% Optionally append the SHA-256 content-hash header (off by default).
AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
  true ->
    add_checksum_hash_header(AdditionalHeaders1, Payload);
  false ->
    AdditionalHeaders1
  end,
%% Header merge and signing must happen after the payload is final, since
%% the signature covers both headers and body.
Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
MethodBin = aws_request:method_to_binary(Method),
SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepend an X-Amz-CheckSum-SHA256 header carrying the base64-encoded
%% SHA-256 digest of Body. Existing headers are kept unchanged.
add_checksum_hash_header(Headers, Body) ->
    Digest = crypto:hash(sha256, Body),
    [{<<"X-Amz-CheckSum-SHA256">>, base64:encode(Digest)} | Headers].
%% Normalize a hackney response into {ok, ...} / {error, ...} tuples.
%%
%% 3-tuples are bodiless responses; 4-tuples carry a hackney client from
%% which the body is read. Any of 200/202/204/206 is accepted as success in
%% addition to the operation-specific SuccessStatusCode. A 503 is mapped to
%% the atom `service_unavailable' so aws_request can retry it.
%%
%% Fix: the comment inside the 503 clause had lost its `%%' prefix, which
%% made it a bare (uncompilable) expression; it is restored as a comment.
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    {error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
        %% An empty body on a successful status decodes to an empty map.
        {ok, <<>>} when StatusCode =:= 200;
                        StatusCode =:= SuccessStatusCode ->
            {ok, #{}, {StatusCode, ResponseHeaders, Client}};
        {ok, Body} ->
            Result = case DecodeBody of
                         true ->
                             try
                                 jsx:decode(Body)
                             catch
                                 Error:Reason:Stack ->
                                     erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                             end;
                         false -> #{<<"Body">> => Body}
                     end,
            {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled
    {error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    {ok, Body} = hackney:body(Client),
    try
        DecodedError = jsx:decode(Body),
        {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
        Error:Reason:Stack ->
            erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
handle_response({error, Reason}, _, _DecodeBody) ->
    {error, Reason}.
%% Compute the request host. A `local' region short-circuits to the
%% configured endpoint (or "localhost"); otherwise the host is
%% prefix.region.endpoint.
build_host(_Prefix, #{region := <<"local">>, endpoint := Endpoint}) ->
    Endpoint;
build_host(_Prefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(Prefix, #{region := Region, endpoint := Endpoint}) ->
    Parts = [Prefix, Region, Endpoint],
    aws_util:binary_join(Parts, <<".">>).
%% Assemble the full request URL: proto://host:port/path.
build_url(Host, Path0, Client) ->
    Scheme = aws_client:proto(Client),
    Port = aws_client:port(Client),
    PathBin = erlang:iolist_to_binary(Path0),
    aws_util:binary_join([Scheme, <<"://">>, Host, <<":">>, Port, PathBin],
                         <<"">>).
-spec encode_payload(undefined | map()) -> binary().
%% JSON-encode the request payload; an absent (undefined) payload becomes
%% the empty binary.
encode_payload(Payload) ->
    case Payload of
        undefined -> <<>>;
        Input -> jsx:encode(Input)
    end.
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_lambda.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
@doc Lambda
Overview
Lambda is a compute service that lets you run code without provisioning or
managing servers.
Lambda runs your code on a high-availability compute infrastructure and
performs all of the administration of the compute resources, including
server and operating system maintenance, capacity provisioning and
automatic scaling, code monitoring and logging. With Lambda, you can run
code for virtually any type of application or backend service. For more
information about the Lambda service, see What is Lambda in the Lambda
Developer Guide.
methods, including details about the parameters in each API request and
response.
Environment (IDE) Toolkits, and command line tools to access the API. For
For a list of Region-specific endpoints that Lambda supports, see Lambda
When making the API calls, you will need to authenticate your request by
Services General Reference..
can cause connection failures when you attempt to use an SDK. You can
operating system up-to-date. If you encounter this issue in a corporate
environment and do not manage your own computer, you might need to ask an
administrator to assist with the update process. The following list shows
list.
endpoints, whether through browsers or programmatically, you will need to
ensure your client machines support any of the following CAs:
</li> <li> Starfield Services Root Certificate Authority - G2
====================================================================
API
====================================================================
@doc Adds permissions to the resource-based policy of a version of an
Lambda layer.
Use this action to grant layer usage permission to other accounts. You can
grant permission to a single account, all accounts in an organization, or
To revoke permission, call `RemoveLayerVersionPermission' with the
statement ID that you specified when you added it.
You can apply the policy at the function level, or specify a qualifier to
restrict access to a single version or alias. If you use a qualifier, the
alias to invoke the function. Note: Lambda does not support adding
policies to version $LATEST.
To grant permission to another account, specify the account ID as the
`Principal'. To grant permission to an organization defined in
the service defines, such as `s3.amazonaws.com' or
permission to a service principal without specifying the source, other
accounts could potentially configure resources in their account to invoke
your Lambda function.
This operation adds a statement to a resource-based permissions policy for
the function. For more information about function policies, see Using
resource-based policies for Lambda.
@doc Creates an alias for a Lambda function version.
Use aliases to provide clients with a function identifier that you can
update to invoke a different version.
version and the percentage of invocation requests that it receives.
@doc Creates a code signing configuration.
A code signing configuration defines a list of allowed signing profiles
and defines the code-signing validation policy (action to be taken if
deployment validation checks fail).
@doc Creates a mapping between an event source and an Lambda function.
Lambda reads items from the event source and invokes the function.
For details about how to configure different event sources, see the
following topics.
</li> <li> Apache Kafka
</li> </ul> The following error handling options are available only for
<ul> <li> `BisectBatchOnFunctionError' – If the function returns an
</li> <li> `MaximumRecordAgeInSeconds' – Discard records older than
the specified age. The default value is infinite (-1). When set to
infinite (-1), failed records are retried until the record expires
</li> <li> `MaximumRetryAttempts' – Discard records after the
specified number of retries. The default value is infinite (-1). When set
to infinite (-1), failed records are retried until the record expires.
</li> <li> `ParallelizationFactor' – Process multiple batches from
each shard concurrently.
</li> </ul> For information about which configuration parameters apply to
each event source, see the following topics.
</li> <li> Apache Kafka
</li> </ul>
@doc Creates a Lambda function.
To create a function, you need a deployment package and an execution role.
The deployment package is a .zip file archive or container image that
contains your function code. The execution role grants the function
If the deployment package is a container image, then you set the package
type to `Image'. For a container image, the code property must include
to specify the handler and runtime properties.
If the deployment package is a .zip file archive, then you set the package
type to `Zip'. For a .zip file archive, the code property specifies
the location of the .zip file. You must also specify the handler and
runtime properties. The code in the deployment package must be compatible
with the target instruction set architecture of the function (`x86-64'
or `arm64'). If you do not specify the architecture, then the default
value is `x86-64'.
When you create a function, Lambda provisions an instance of the function
`StateReasonCode' fields in the response from
`GetFunctionConfiguration' indicate when the function is ready to
invoke. For more information, see Lambda function states.
A function has an unpublished version, and can have published versions and
aliases. The unpublished version changes when you update your
function's code and configuration. A published version is a snapshot
of your function code and configuration that can't be changed. An
alias is a named resource that maps to a version, and can be changed to
map to a different version. Use the `Publish' parameter to create
version `1' of your function from its initial configuration.
The other parameters let you configure version-specific and function-level
settings. You can modify version-specific settings later with
the unpublished and published versions of the function, and include tags
(`PutFunctionConcurrency').
You can use code signing if your deployment package is a .zip file
code-signing configuration. When a user attempts to deploy a code package
valid signature from a trusted publisher. The code-signing configuration
includes set of signing profiles, which define the trusted publishers for
this function.
permissions at the function level, on a version, or on an alias.
To invoke your function directly, use `Invoke'. To invoke your
event source mapping (`CreateEventSourceMapping'), or configure a
function trigger in the other service. For more information, see Invoking
Lambda functions.
@doc Creates a Lambda function URL with the specified configuration
parameters.
your function.
@doc Deletes a Lambda function alias.
@doc Deletes the code signing configuration.
You can delete the code signing configuration only if no function is using
it.
@doc Deletes an event source mapping.
You can get the identifier of a mapping from the output of
`ListEventSourceMappings'.
When you delete an event source mapping, it enters a `Deleting' state
@doc Deletes a Lambda function.
To delete a specific function version, use the `Qualifier' parameter.
Otherwise, all versions and aliases are deleted.
To delete Lambda event source mappings that invoke a function, use
invoke your function directly, delete the trigger in the service where you
originally configured it.
@doc Removes the code signing configuration from the function.
@doc Removes a concurrent execution limit from a function.
@doc Deletes the configuration for asynchronous invocation for a function,
version, or alias.
To configure options for asynchronous invocation, use
`PutFunctionEventInvokeConfig'.
@doc Deletes a Lambda function URL.
When you delete a function URL, you can't recover it. Creating a new
function URL results in a different URL address.
@doc Deletes a version of an Lambda layer.
Deleted versions can no longer be viewed or added to functions. To avoid
breaking functions, a copy of the version remains in Lambda until no
functions refer to it.
@doc Deletes the provisioned concurrency configuration for a function.
@doc Retrieves details about your account's limits and usage in an
@doc Returns details about a Lambda function alias.
@doc Returns information about the specified code signing configuration.
@doc Returns details about an event source mapping.
You can get the identifier of a mapping from the output of
`ListEventSourceMappings'.
@doc Returns information about the function or function version, with a
If you specify a function version, only details that are specific to that
version are returned.
@doc Returns the code signing configuration for the specified function.
@doc Returns details about the reserved concurrency configuration for a
function.
To set a concurrency limit for a function, use
`PutFunctionConcurrency'.
@doc Returns the version-specific settings of a Lambda function or
version.
The output includes only options that can vary between versions of a
To get all of a function's details, including function-level settings,
use `GetFunction'.
@doc Retrieves the configuration for asynchronous invocation for a
function, version, or alias.
To configure options for asynchronous invocation, use
`PutFunctionEventInvokeConfig'.
@doc Returns details about a Lambda function URL.
@doc Returns information about a version of an Lambda layer, with a link
@doc Returns information about a version of an Lambda layer, with a link
@doc Returns the permission policy for a version of an Lambda layer.
For more information, see `AddLayerVersionPermission'.
alias.
@doc Retrieves the provisioned concurrency configuration for a
function's alias or version.
@doc Retrieves the runtime management configuration for a function's
version.
version and the runtime update mode. If the runtime update mode is Auto or
Function update, this includes the runtime update mode and `null' is
You can invoke a function synchronously (and wait for the response), or
asynchronously. To invoke a function asynchronously, set
For synchronous invocation, details about the function response, including
errors, are included in the response body and headers. For either
invocation type, you can find more information in the execution log and
trace.
%% When an error occurs, your function may be invoked multiple times. Retry
%% behavior varies by error type, client, event source, and invocation type.
%% For example, if you invoke a function asynchronously and it returns an
%% information, see Error handling and automatic retries in Lambda.
%% For asynchronous invocation, Lambda adds events to a queue before sending
%% them to your function. If your function does not have enough capacity to
%% keep up with the queue, events may be lost. Occasionally, your function
%% may receive the same event multiple times, even if no error occurs. To
%% retain events that were not processed, configure your function with a
%% dead-letter queue.
%% The status code in the API response doesn't reflect function errors.
%% Error codes are reserved for errors that prevent your function from
%% executing, such as permissions errors, quota errors, or issues with your
%% function's code and configuration. For example, Lambda returns
%% `TooManyRequestsException' if running the function would cause you to
%% exceed a concurrency limit at either the account level
%% (`ConcurrentInvocationLimitExceeded') or function level
%% (`ReservedFunctionConcurrentInvocationLimitExceeded').
%% For functions with a long timeout, your client might disconnect during
%% synchronous invocation while it waits for a response. Configure your HTTP
%% client, SDK, firewall, proxy, or operating system to allow for long
%% connections with timeout or keep-alive settings.
%% For details on how to set up permissions for cross-account invocations,
%% see Granting function access to other accounts.
%% @doc For asynchronous function invocation, use `Invoke'.
%% @doc Returns a list of aliases for a Lambda function.
%% @doc Returns a list of code signing configurations.
%% @doc Lists event source mappings.
%% Specify an `EventSourceArn' to show only event source mappings for a
%% single event source.
%% @doc Retrieves a list of configurations for asynchronous invocation for a
%% function.
%% To configure options for asynchronous invocation, use
%% `PutFunctionEventInvokeConfig'.
%% @doc Returns a list of Lambda function URLs for the specified function.
%% @doc Returns a list of Lambda functions, with the version-specific
%% configuration of each.
%% Set `FunctionVersion' to `ALL' to include all published versions
%% of each function in addition to the unpublished version.
%% LastUpdateStatusReasonCode, RuntimeVersionConfig) for a function or
%% version, use `GetFunction'.
%% @doc List the functions that use the specified code signing configuration.
%% You can use this method prior to deleting a code signing configuration, to
%% verify that no functions are using it.
%% @doc Lists the versions of an Lambda layer.
%% Versions that have been deleted aren't listed. Specify a runtime
%% identifier to list only versions that indicate that they're compatible
%% with that runtime. Specify a compatible architecture to include only layer
%% versions that are compatible with that architecture.
%% @doc Lists Lambda layers and shows information about the latest version of
%% each.
%% Specify a runtime identifier to list only layers that indicate that
%% they're compatible with that runtime. Specify a compatible
%% architecture to include only layers that are compatible with that
%% instruction set architecture.
%% @doc Retrieves a list of provisioned concurrency configurations for a
%% function.
%% @doc Returns a function's tags.
%% You can also view tags with `GetFunction'.
%% @doc Returns a list of versions, with the version-specific configuration
%% of each.
%% @doc Creates an Lambda layer from a ZIP archive.
%% Each time you call `PublishLayerVersion' with the same layer name, a
%% new version is created.
%% Add layers to your function with `CreateFunction' or
%% @doc Creates a version from the current code and configuration of a
%% function.
%% Use versions to create a snapshot of your function code and configuration
%% that doesn't change.
%% Lambda doesn't publish a version if the function's configuration
%% and code haven't changed since the last version. Use
%% the function before publishing a version.
%% Clients can invoke versions directly or with an alias. To create an alias,
%% use `CreateAlias'.
%% @doc Update the code signing configuration for the function.
%% Changes to the code signing configuration take effect the next time a user
%% tries to deploy a code package to the function.
%% @doc Sets the maximum number of simultaneous executions for a function,
%% and reserves capacity for that concurrency level.
%% Concurrency settings apply to the function as a whole, including all
%% published versions and the unpublished version. Reserving concurrency both
%% ensures that your function has capacity to process the specified number of
%% events simultaneously, and prevents it from scaling beyond that level. Use
%% `GetFunction' to see the current setting for a function.
%% Use `GetAccountSettings' to see your Regional concurrency limit. You
%% can reserve concurrency for as many functions as you like, as long as you
%% aren't configured with a per-function limit. For more information, see
%% Lambda function scaling.
%% @doc Configures options for asynchronous invocation on a function,
%% version, or alias.
%% If a configuration already exists for a function, version, or alias, this
%% operation overwrites it. If you exclude any settings, they are removed. To
%% `UpdateFunctionEventInvokeConfig'.
%% By default, Lambda retries an asynchronous invocation twice if the
%% hours. When an event fails all processing attempts or stays in the
%% asynchronous invocation queue for too long, Lambda discards it. To retain
%% discarded events, configure a dead-letter queue with
%% To send an invocation record to a queue, topic, function, or event bus,
%% specify a destination. You can configure separate destinations for
%% successful invocations (on-success) and events that fail all processing
%% attempts (on-failure). You can configure destinations in addition to or
%% instead of a dead-letter queue.
%% @doc Adds a provisioned concurrency configuration to a function's
%% alias or version.
%% @doc Sets the runtime management configuration for a function's
%% version.
%% For more information, see Runtime updates.
%% @doc Removes a statement from the permissions policy for a version of an
%% Lambda layer.
%% For more information, see `AddLayerVersionPermission'.
%% @doc Adds tags to a function.
%% @doc Removes tags from a function.
%% @doc Updates the configuration of a Lambda function alias.
%% @doc Update the code signing configuration.
%% Changes to the code signing configuration take effect the next time a user
%% tries to deploy a code package to the function.
%% @doc Updates an event source mapping.
%% You can change the function that Lambda invokes, or pause invocation and
%% resume later from the same location.
%% For details about how to configure different event sources, see the
%% following topics.
%% </li> <li> Apache Kafka
%% </li> </ul> The following error handling options are available only for
%% <ul> <li> `BisectBatchOnFunctionError' - If the function returns an
%% </li> <li> `MaximumRecordAgeInSeconds' - Discard records older than
%% the specified age. The default value is infinite (-1). When set to
%% infinite (-1), failed records are retried until the record expires
%% </li> <li> `MaximumRetryAttempts' - Discard records after the
%% specified number of retries. The default value is infinite (-1). When set
%% to infinite (-1), failed records are retried until the record expires.
%% </li> <li> `ParallelizationFactor' - Process multiple batches from
%% each shard concurrently.
%% </li> </ul> For information about which configuration parameters apply to
%% each event source, see the following topics.
%% </li> <li> Apache Kafka
%% </li> </ul>
%% @doc Updates a Lambda function's code.
%% If code signing is enabled for the function, the code package must be
%% signed by a trusted publisher. For more information, see Configuring code
%% signing for Lambda.
%% If the function's package type is `Image', then you must specify
%% If the function's package type is `Zip', then you must specify the
%% key of the code .zip file location. You can also provide the function code
%% The code in the deployment package must be compatible with the target
%% instruction set architecture of the function (`x86-64' or
%% `arm64').
%% The function's code is locked when you publish a version. You
%% can't modify the code of a published version, only the unpublished
%% version.
%% For a function defined as a container image, Lambda resolves the image tag
%% image, Lambda does not automatically update the function.
%% @doc Modify the version-specific settings of a Lambda function.
%% When you update a function, Lambda provisions an instance of the function
%% function, but you can still invoke it. The `LastUpdateStatus',
%% `LastUpdateStatusReason', and `LastUpdateStatusReasonCode' fields
%% in the response from `GetFunctionConfiguration' indicate when the
%% update is complete and the function is processing events with the new
%% configuration. For more information, see Lambda function states.
%% These settings can vary between versions of a function and are locked when
%% you publish a version. You can't modify the configuration of a
%% published version, only the unpublished version.
%% To configure function concurrency, use `PutFunctionConcurrency'. To
%% @doc Updates the configuration for asynchronous invocation for a function,
%% version, or alias.
%% To configure options for asynchronous invocation, use
%% `PutFunctionEventInvokeConfig'.
%% @doc Updates the configuration for a Lambda function URL.
%% ====================================================================
%% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
%% See https://github.com/aws-beam/aws-codegen for more details.
%% ====================================================================
%% The Lambda API Reference provides information about each of the API
%% methods. You can use Software Development Kits (SDKs), Integrated
%% Development Environments (IDEs), and command line tools; for
%% installation instructions, see Tools for Amazon Web Services.
%% For service endpoints, see Lambda endpoints and quotas in the Amazon
%% Web Services General Reference.
%% Requests must be signed by providing a signature. Lambda supports
%% signature version 4. For more information, see Signature Version 4
%% signing process in the Amazon Web Services General Reference.
%% CA certificates
%% Because Amazon Web Services SDKs use the CA certificates from your
%% computer, changes to the certificates on the Amazon Web Services servers
%% can cause connection failures. You can prevent these failures by keeping
%% your computer's CA certificates and operating system up to date. The
%% following list shows minimum operating system and Java versions:
%% <ul> <li> Microsoft Windows versions that have updates from January 2005
%% or later installed contain at least one of the required CAs in their trust
%% list.
%% </li> <li> Mac OS X 10.4 with Java for Release 5 (February
%% 2007), Mac OS X 10.5 (October 2007), and later versions contain at least
%% one of the required CAs in their trust list.
%% </li> <li> Red Hat Enterprise Linux 5 (March 2007), 6, and 7 and CentOS 5,
%% 6, and 7 all contain at least one of the required CAs in their default
%% trusted CA list.
%% </li> <li> Java 1.4.2_12 (May 2006), 5 Update 2 (March 2005), and all
%% later versions, including Java 6 (December 2006), 7, and 8, contain at
%% least one of the required CAs in their default trusted CA list.
%% </li> </ul> When accessing the Lambda management console or Lambda API
%% endpoints, ensure your client trusts one of the following CAs:
%% <ul> <li> Amazon Root CA 1
%% </li> <li> Starfield Class 2 Certification Authority
%% </li> </ul> Root certificates from the first two authorities are available
%% from Amazon trust services, but keeping your computer up-to-date is the
%% more straightforward solution. To learn more about ACM-provided
%% certificates, see Amazon Web Services Certificate Manager FAQs.
-module(aws_lambda).
-export([add_layer_version_permission/4,
add_layer_version_permission/5,
add_permission/3,
add_permission/4,
create_alias/3,
create_alias/4,
create_code_signing_config/2,
create_code_signing_config/3,
create_event_source_mapping/2,
create_event_source_mapping/3,
create_function/2,
create_function/3,
create_function_url_config/3,
create_function_url_config/4,
delete_alias/4,
delete_alias/5,
delete_code_signing_config/3,
delete_code_signing_config/4,
delete_event_source_mapping/3,
delete_event_source_mapping/4,
delete_function/3,
delete_function/4,
delete_function_code_signing_config/3,
delete_function_code_signing_config/4,
delete_function_concurrency/3,
delete_function_concurrency/4,
delete_function_event_invoke_config/3,
delete_function_event_invoke_config/4,
delete_function_url_config/3,
delete_function_url_config/4,
delete_layer_version/4,
delete_layer_version/5,
delete_provisioned_concurrency_config/3,
delete_provisioned_concurrency_config/4,
get_account_settings/1,
get_account_settings/3,
get_account_settings/4,
get_alias/3,
get_alias/5,
get_alias/6,
get_code_signing_config/2,
get_code_signing_config/4,
get_code_signing_config/5,
get_event_source_mapping/2,
get_event_source_mapping/4,
get_event_source_mapping/5,
get_function/2,
get_function/4,
get_function/5,
get_function_code_signing_config/2,
get_function_code_signing_config/4,
get_function_code_signing_config/5,
get_function_concurrency/2,
get_function_concurrency/4,
get_function_concurrency/5,
get_function_configuration/2,
get_function_configuration/4,
get_function_configuration/5,
get_function_event_invoke_config/2,
get_function_event_invoke_config/4,
get_function_event_invoke_config/5,
get_function_url_config/2,
get_function_url_config/4,
get_function_url_config/5,
get_layer_version/3,
get_layer_version/5,
get_layer_version/6,
get_layer_version_by_arn/2,
get_layer_version_by_arn/4,
get_layer_version_by_arn/5,
get_layer_version_policy/3,
get_layer_version_policy/5,
get_layer_version_policy/6,
get_policy/2,
get_policy/4,
get_policy/5,
get_provisioned_concurrency_config/3,
get_provisioned_concurrency_config/5,
get_provisioned_concurrency_config/6,
get_runtime_management_config/2,
get_runtime_management_config/4,
get_runtime_management_config/5,
invoke/3,
invoke/4,
invoke_async/3,
invoke_async/4,
list_aliases/2,
list_aliases/4,
list_aliases/5,
list_code_signing_configs/1,
list_code_signing_configs/3,
list_code_signing_configs/4,
list_event_source_mappings/1,
list_event_source_mappings/3,
list_event_source_mappings/4,
list_function_event_invoke_configs/2,
list_function_event_invoke_configs/4,
list_function_event_invoke_configs/5,
list_function_url_configs/2,
list_function_url_configs/4,
list_function_url_configs/5,
list_functions/1,
list_functions/3,
list_functions/4,
list_functions_by_code_signing_config/2,
list_functions_by_code_signing_config/4,
list_functions_by_code_signing_config/5,
list_layer_versions/2,
list_layer_versions/4,
list_layer_versions/5,
list_layers/1,
list_layers/3,
list_layers/4,
list_provisioned_concurrency_configs/2,
list_provisioned_concurrency_configs/4,
list_provisioned_concurrency_configs/5,
list_tags/2,
list_tags/4,
list_tags/5,
list_versions_by_function/2,
list_versions_by_function/4,
list_versions_by_function/5,
publish_layer_version/3,
publish_layer_version/4,
publish_version/3,
publish_version/4,
put_function_code_signing_config/3,
put_function_code_signing_config/4,
put_function_concurrency/3,
put_function_concurrency/4,
put_function_event_invoke_config/3,
put_function_event_invoke_config/4,
put_provisioned_concurrency_config/3,
put_provisioned_concurrency_config/4,
put_runtime_management_config/3,
put_runtime_management_config/4,
remove_layer_version_permission/5,
remove_layer_version_permission/6,
remove_permission/4,
remove_permission/5,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_alias/4,
update_alias/5,
update_code_signing_config/3,
update_code_signing_config/4,
update_event_source_mapping/3,
update_event_source_mapping/4,
update_function_code/3,
update_function_code/4,
update_function_configuration/3,
update_function_configuration/4,
update_function_event_invoke_config/3,
update_function_event_invoke_config/4,
update_function_url_config/3,
update_function_url_config/4]).
-include_lib("hackney/include/hackney_lib.hrl").
%% all Amazon Web Services accounts.
%% @doc Adds a statement to a layer version's permissions policy via
%% POST /2018-10-31/layers/{LayerName}/versions/{VersionNumber}/policy.
%% Success is HTTP 201; `RevisionId' is carried as a query parameter.
add_layer_version_permission(Client, LayerName, VersionNumber, Input) ->
  add_layer_version_permission(Client, LayerName, VersionNumber, Input, []).

add_layer_version_permission(Client, LayerName, VersionNumber, Input0, ExtraOpts) ->
  Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
          "/versions/", aws_util:encode_uri(VersionNumber), "/policy"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `RevisionId' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"RevisionId">>, <<"RevisionId">>}], Input0),
  request(Client, post, Path, Query, [], Input, Opts, 201).
%% @doc Grants an Amazon Web Service, Amazon Web Services account, or Amazon
%% Web Services organization permission to use a function.
%% invoker must use the full Amazon Resource Name (ARN) of that version or
%% Organizations, specify the organization ID as the `PrincipalOrgID'.
%% For Amazon Web Services, the principal is a domain-style identifier that
%% `sns.amazonaws.com'. For Amazon Web Services, you can also specify the
%% ARN of the associated resource as the `SourceArn'. If you grant
%% @doc Adds a statement to a function's resource-based permissions policy
%% via POST /2015-03-31/functions/{FunctionName}/policy.
%% Success is HTTP 201; `Qualifier' is carried as a query parameter.
add_permission(Client, FunctionName, Input) ->
  add_permission(Client, FunctionName, Input, []).

add_permission(Client, FunctionName, Input0, ExtraOpts) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/policy"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `Qualifier' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, post, Path, Query, [], Input, Opts, 201).
%% You can also map an alias to split invocation requests between two
%% versions. Use the `RoutingConfig' parameter to specify a second
%% @doc Creates an alias for a Lambda function version via
%% POST /2015-03-31/functions/{FunctionName}/aliases. Success is HTTP 201.
create_alias(Client, FunctionName, Input) ->
  create_alias(Client, FunctionName, Input, []).

create_alias(Client, FunctionName, Input, ExtraOpts) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/aliases"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% No query parameters or extra headers for this operation.
  request(Client, post, Path, [], [], Input, Opts, 201).
%% @doc Creates a code signing configuration via
%% POST /2020-04-22/code-signing-configs/. Success is HTTP 201.
create_code_signing_config(Client, Input) ->
  create_code_signing_config(Client, Input, []).

create_code_signing_config(Client, Input, ExtraOpts) ->
  Path = ["/2020-04-22/code-signing-configs/"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% No query parameters or extra headers for this operation.
  request(Client, post, Path, [], [], Input, Opts, 201).
%% <ul> <li> Amazon DynamoDB Streams
%% </li> <li> Amazon Kinesis
%% </li> <li> Amazon SQS
%% </li> <li> Amazon MQ and RabbitMQ
%% </li> <li> Amazon MSK
%% stream sources (DynamoDB and Kinesis):
%% error, split the batch in two and retry.
%% </li> <li> `DestinationConfig' - Send discarded records to an Amazon
%% SQS queue or Amazon SNS topic.
%% <ul> <li> Amazon DynamoDB Streams
%% </li> <li> Amazon Kinesis
%% </li> <li> Amazon SQS
%% </li> <li> Amazon MQ and RabbitMQ
%% </li> <li> Amazon MSK
%% @doc Creates a mapping between an event source and a Lambda function via
%% POST /2015-03-31/event-source-mappings/. Success is HTTP 202 (accepted).
create_event_source_mapping(Client, Input) ->
  create_event_source_mapping(Client, Input, []).

create_event_source_mapping(Client, Input, ExtraOpts) ->
  Path = ["/2015-03-31/event-source-mappings/"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% No query parameters or extra headers for this operation.
  request(Client, post, Path, [], [], Input, Opts, 202).
%% permission to use Amazon Web Services, such as Amazon CloudWatch Logs for
%% log streaming and X-Ray for request tracing.
%% the URI of a container image in the Amazon ECR registry. You do not need
%% and its supporting resources. If your function connects to a VPC, this
%% process can take a minute or so. During this time, you can't invoke or
%% modify the function. The `State', `StateReason', and
%% `UpdateFunctionConfiguration'. Function-level settings apply to both
%% (`TagResource') and per-function concurrency limits
%% archive. To enable code signing for this function, specify the ARN of a
%% with `UpdateFunctionCode', Lambda checks that the code package has a
%% If another Amazon Web Services account or an Amazon Web Service invokes
%% your function, use `AddPermission' to grant permission by creating a
%% resource-based Identity and Access Management (IAM) policy. You can grant
%% function in response to events in other Amazon Web Services, create an
%% @doc Creates a Lambda function via POST /2015-03-31/functions.
%% Success is HTTP 201.
create_function(Client, Input) ->
  create_function(Client, Input, []).

create_function(Client, Input, ExtraOpts) ->
  Path = ["/2015-03-31/functions"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% No query parameters or extra headers for this operation.
  request(Client, post, Path, [], [], Input, Opts, 201).
%% A function URL is a dedicated HTTP(S) endpoint that you can use to invoke
%% @doc Creates a function URL configuration via
%% POST /2021-10-31/functions/{FunctionName}/url. Success is HTTP 201;
%% `Qualifier' is carried as a query parameter.
create_function_url_config(Client, FunctionName, Input) ->
  create_function_url_config(Client, FunctionName, Input, []).

create_function_url_config(Client, FunctionName, Input0, ExtraOpts) ->
  Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `Qualifier' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, post, Path, Query, [], Input, Opts, 201).
%% @doc Deletes a Lambda function alias via
%% DELETE /2015-03-31/functions/{FunctionName}/aliases/{Name}.
%% Success is HTTP 204 (no content).
delete_alias(Client, FunctionName, Name, Input) ->
  delete_alias(Client, FunctionName, Name, Input, []).

delete_alias(Client, FunctionName, Name, Input, ExtraOpts) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
          "/aliases/", aws_util:encode_uri(Name), ""],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  request(Client, delete, Path, [], [], Input, Opts, 204).
%% @doc Deletes a code signing configuration via
%% DELETE /2020-04-22/code-signing-configs/{CodeSigningConfigArn}.
%% Success is HTTP 204 (no content).
delete_code_signing_config(Client, CodeSigningConfigArn, Input) ->
  delete_code_signing_config(Client, CodeSigningConfigArn, Input, []).

delete_code_signing_config(Client, CodeSigningConfigArn, Input, ExtraOpts) ->
  Path = ["/2020-04-22/code-signing-configs/",
          aws_util:encode_uri(CodeSigningConfigArn), ""],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  request(Client, delete, Path, [], [], Input, Opts, 204).
%% and might not be completely deleted for several seconds.
%% @doc Deletes an event source mapping via
%% DELETE /2015-03-31/event-source-mappings/{UUID}.
%% Success is HTTP 202 (accepted).
delete_event_source_mapping(Client, UUID, Input) ->
  delete_event_source_mapping(Client, UUID, Input, []).

delete_event_source_mapping(Client, UUID, Input, ExtraOpts) ->
  Path = ["/2015-03-31/event-source-mappings/", aws_util:encode_uri(UUID), ""],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  request(Client, delete, Path, [], [], Input, Opts, 202).
%% `DeleteEventSourceMapping'. For Amazon Web Services and resources that
%% @doc Deletes a Lambda function via
%% DELETE /2015-03-31/functions/{FunctionName}. Success is HTTP 204;
%% `Qualifier' is carried as a query parameter.
delete_function(Client, FunctionName, Input) ->
  delete_function(Client, FunctionName, Input, []).

delete_function(Client, FunctionName, Input0, ExtraOpts) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), ""],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `Qualifier' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, delete, Path, Query, [], Input, Opts, 204).
%% @doc Removes the code signing configuration from a function via
%% DELETE /2020-06-30/functions/{FunctionName}/code-signing-config.
%% Success is HTTP 204 (no content).
delete_function_code_signing_config(Client, FunctionName, Input) ->
  delete_function_code_signing_config(Client, FunctionName, Input, []).

delete_function_code_signing_config(Client, FunctionName, Input, ExtraOpts) ->
  Path = ["/2020-06-30/functions/", aws_util:encode_uri(FunctionName),
          "/code-signing-config"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  request(Client, delete, Path, [], [], Input, Opts, 204).
%% @doc Removes a function's reserved concurrency setting via
%% DELETE /2017-10-31/functions/{FunctionName}/concurrency.
%% Success is HTTP 204 (no content).
delete_function_concurrency(Client, FunctionName, Input) ->
  delete_function_concurrency(Client, FunctionName, Input, []).

delete_function_concurrency(Client, FunctionName, Input, ExtraOpts) ->
  Path = ["/2017-10-31/functions/", aws_util:encode_uri(FunctionName),
          "/concurrency"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  request(Client, delete, Path, [], [], Input, Opts, 204).
%% @doc Deletes the asynchronous-invocation configuration via
%% DELETE /2019-09-25/functions/{FunctionName}/event-invoke-config.
%% Success is HTTP 204; `Qualifier' is carried as a query parameter.
delete_function_event_invoke_config(Client, FunctionName, Input) ->
  delete_function_event_invoke_config(Client, FunctionName, Input, []).

delete_function_event_invoke_config(Client, FunctionName, Input0, ExtraOpts) ->
  Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName),
          "/event-invoke-config"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `Qualifier' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, delete, Path, Query, [], Input, Opts, 204).
%% @doc Deletes a function URL configuration via
%% DELETE /2021-10-31/functions/{FunctionName}/url.
%% Success is HTTP 204; `Qualifier' is carried as a query parameter.
delete_function_url_config(Client, FunctionName, Input) ->
  delete_function_url_config(Client, FunctionName, Input, []).

delete_function_url_config(Client, FunctionName, Input0, ExtraOpts) ->
  Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `Qualifier' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, delete, Path, Query, [], Input, Opts, 204).
%% @doc Deletes a version of a Lambda layer via
%% DELETE /2018-10-31/layers/{LayerName}/versions/{VersionNumber}.
%% Success is HTTP 204 (no content).
delete_layer_version(Client, LayerName, VersionNumber, Input) ->
  delete_layer_version(Client, LayerName, VersionNumber, Input, []).

delete_layer_version(Client, LayerName, VersionNumber, Input, ExtraOpts) ->
  Path = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
          "/versions/", aws_util:encode_uri(VersionNumber), ""],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  request(Client, delete, Path, [], [], Input, Opts, 204).
%% @doc Deletes a provisioned concurrency configuration via
%% DELETE /2019-09-30/functions/{FunctionName}/provisioned-concurrency.
%% Success is HTTP 204; `Qualifier' is carried as a query parameter.
delete_provisioned_concurrency_config(Client, FunctionName, Input) ->
  delete_provisioned_concurrency_config(Client, FunctionName, Input, []).

delete_provisioned_concurrency_config(Client, FunctionName, Input0, ExtraOpts) ->
  Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName),
          "/provisioned-concurrency"],
  Opts = [{send_body_as_binary, false},
          {receive_body_as_binary, false},
          {append_sha256_content_hash, false}
          | ExtraOpts],
  %% Split `Qualifier' out of the input map into the query string.
  {Query, Input} =
    aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, delete, Path, Query, [], Input, Opts, 204).
%% Amazon Web Services Region.
%% @doc Retrieves account-level limits and usage via
%% GET /2016-08-19/account-settings/. Success is HTTP 200.
get_account_settings(Client)
  when is_map(Client) ->
    get_account_settings(Client, #{}, #{}).

get_account_settings(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_account_settings(Client, QueryMap, HeadersMap, []).

get_account_settings(Client, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2016-08-19/account-settings/"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    %% GET request with no body, no query parameters, no extra headers.
    request(Client, get, Path, [], [], undefined, Opts, 200).
%% @doc Returns details about a Lambda function alias via
%% GET /2015-03-31/functions/{FunctionName}/aliases/{Name}.
%% Success is HTTP 200.
get_alias(Client, FunctionName, Name)
  when is_map(Client) ->
    get_alias(Client, FunctionName, Name, #{}, #{}).

get_alias(Client, FunctionName, Name, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_alias(Client, FunctionName, Name, QueryMap, HeadersMap, []).

get_alias(Client, FunctionName, Name, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
            "/aliases/", aws_util:encode_uri(Name), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    request(Client, get, Path, [], [], undefined, Opts, 200).
%% @doc Returns information about a code signing configuration via
%% GET /2020-04-22/code-signing-configs/{CodeSigningConfigArn}.
%% Success is HTTP 200.
get_code_signing_config(Client, CodeSigningConfigArn)
  when is_map(Client) ->
    get_code_signing_config(Client, CodeSigningConfigArn, #{}, #{}).

get_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, []).

get_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2020-04-22/code-signing-configs/",
            aws_util:encode_uri(CodeSigningConfigArn), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    request(Client, get, Path, [], [], undefined, Opts, 200).
%% @doc Returns details about an event source mapping via
%% GET /2015-03-31/event-source-mappings/{UUID}. Success is HTTP 200.
get_event_source_mapping(Client, UUID)
  when is_map(Client) ->
    get_event_source_mapping(Client, UUID, #{}, #{}).

get_event_source_mapping(Client, UUID, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_event_source_mapping(Client, UUID, QueryMap, HeadersMap, []).

get_event_source_mapping(Client, UUID, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2015-03-31/event-source-mappings/", aws_util:encode_uri(UUID), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    request(Client, get, Path, [], [], undefined, Opts, 200).
%% link to download the deployment package that's valid for 10 minutes.
%% @doc Returns information about a function via
%% GET /2015-03-31/functions/{FunctionName}. Success is HTTP 200.
%% An optional `Qualifier' taken from `QueryMap' is sent as a query parameter.
get_function(Client, FunctionName)
  when is_map(Client) ->
    get_function(Client, FunctionName, #{}, #{}).

get_function(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function(Client, FunctionName, QueryMap, HeadersMap, []).

get_function(Client, FunctionName, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    %% Only send `Qualifier' when the caller actually supplied one.
    Query = case maps:get(<<"Qualifier">>, QueryMap, undefined) of
              undefined -> [];
              Qualifier -> [{<<"Qualifier">>, Qualifier}]
            end,
    request(Client, get, Path, Query, [], undefined, Opts, 200).
%% @doc Returns the code signing configuration for a function via
%% GET /2020-06-30/functions/{FunctionName}/code-signing-config.
%% Success is HTTP 200.
get_function_code_signing_config(Client, FunctionName)
  when is_map(Client) ->
    get_function_code_signing_config(Client, FunctionName, #{}, #{}).

get_function_code_signing_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_code_signing_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_code_signing_config(Client, FunctionName, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2020-06-30/functions/", aws_util:encode_uri(FunctionName),
            "/code-signing-config"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    request(Client, get, Path, [], [], undefined, Opts, 200).
%% @doc Returns a function's reserved concurrency setting via
%% GET /2019-09-30/functions/{FunctionName}/concurrency. Success is HTTP 200.
get_function_concurrency(Client, FunctionName)
  when is_map(Client) ->
    get_function_concurrency(Client, FunctionName, #{}, #{}).

get_function_concurrency(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_concurrency(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_concurrency(Client, FunctionName, QueryMap, HeadersMap, ExtraOpts)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(ExtraOpts) ->
    Path = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName),
            "/concurrency"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | ExtraOpts],
    request(Client, get, Path, [], [], undefined, Opts, 200).
%% @doc Returns the version-specific settings of a Lambda function. To modify
%% these settings, use `UpdateFunctionConfiguration'.
%% @doc GET /2015-03-31/functions/{FunctionName}/configuration.
%% Optional `Qualifier' is read from QueryMap when present.
get_function_configuration(Client, FunctionName)
  when is_map(Client) ->
    get_function_configuration(Client, FunctionName, #{}, #{}).

get_function_configuration(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_configuration(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_configuration(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/configuration"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2019-09-25/functions/{FunctionName}/event-invoke-config.
%% Optional `Qualifier' is read from QueryMap when present.
get_function_event_invoke_config(Client, FunctionName)
  when is_map(Client) ->
    get_function_event_invoke_config(Client, FunctionName, #{}, #{}).

get_function_event_invoke_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_event_invoke_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_event_invoke_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName), "/event-invoke-config"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2021-10-31/functions/{FunctionName}/url.
%% Optional `Qualifier' is read from QueryMap when present.
get_function_url_config(Client, FunctionName)
  when is_map(Client) ->
    get_function_url_config(Client, FunctionName, #{}, #{}).

get_function_url_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_function_url_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_function_url_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc Returns information about a version of a Lambda layer, with a link
%% to download the layer archive that's valid for 10 minutes.
%% @doc GET /2018-10-31/layers/{LayerName}/versions/{VersionNumber}.
%% Retrieves a specific version of a layer.
get_layer_version(Client, LayerName, VersionNumber)
  when is_map(Client) ->
    get_layer_version(Client, LayerName, VersionNumber, #{}, #{}).

get_layer_version(Client, LayerName, VersionNumber, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_layer_version(Client, LayerName, VersionNumber, QueryMap, HeadersMap, []).

get_layer_version(Client, LayerName, VersionNumber, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
               "/versions/", aws_util:encode_uri(VersionNumber), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    %% No query parameters or extra headers; expect HTTP 200 on success.
    request(Client, get, ReqPath, [], [], undefined, Opts, 200).
%% @doc Returns information about a version of a Lambda layer, with a link
%% to download the layer archive that's valid for 10 minutes.
%% @doc GET /2018-10-31/layers?find=LayerVersion with an `Arn' query parameter.
%% Looks a layer version up by its ARN instead of name/number.
get_layer_version_by_arn(Client, Arn)
  when is_map(Client) ->
    get_layer_version_by_arn(Client, Arn, #{}, #{}).

get_layer_version_by_arn(Client, Arn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_layer_version_by_arn(Client, Arn, QueryMap, HeadersMap, []).

get_layer_version_by_arn(Client, Arn, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2018-10-31/layers?find=LayerVersion"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    %% The ARN travels in the query string; drop it if it is undefined.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end,
                         [{<<"Arn">>, Arn}]),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2018-10-31/layers/{LayerName}/versions/{VersionNumber}/policy.
%% Retrieves the permission policy of a layer version.
get_layer_version_policy(Client, LayerName, VersionNumber)
  when is_map(Client) ->
    get_layer_version_policy(Client, LayerName, VersionNumber, #{}, #{}).

get_layer_version_policy(Client, LayerName, VersionNumber, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_layer_version_policy(Client, LayerName, VersionNumber, QueryMap, HeadersMap, []).

get_layer_version_policy(Client, LayerName, VersionNumber, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
               "/versions/", aws_util:encode_uri(VersionNumber), "/policy"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    %% No query parameters or extra headers; expect HTTP 200 on success.
    request(Client, get, ReqPath, [], [], undefined, Opts, 200).
%% @doc Returns the resource-based IAM policy for a function, version, or
%% alias.
%% @doc GET /2015-03-31/functions/{FunctionName}/policy.
%% Optional `Qualifier' is read from QueryMap when present.
get_policy(Client, FunctionName)
  when is_map(Client) ->
    get_policy(Client, FunctionName, #{}, #{}).

get_policy(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_policy(Client, FunctionName, QueryMap, HeadersMap, []).

get_policy(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/policy"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2019-09-30/functions/{FunctionName}/provisioned-concurrency.
%% `Qualifier' is a required argument and travels in the query string.
get_provisioned_concurrency_config(Client, FunctionName, Qualifier)
  when is_map(Client) ->
    get_provisioned_concurrency_config(Client, FunctionName, Qualifier, #{}, #{}).

get_provisioned_concurrency_config(Client, FunctionName, Qualifier, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_provisioned_concurrency_config(Client, FunctionName, Qualifier, QueryMap, HeadersMap, []).

get_provisioned_concurrency_config(Client, FunctionName, Qualifier, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName), "/provisioned-concurrency"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    %% Drop the qualifier if the caller passed `undefined'.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end,
                         [{<<"Qualifier">>, Qualifier}]),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% If the runtime update mode is Manual, this includes the ARN of the runtime
%% version; otherwise, null is returned for the ARN. For more information,
%% see Runtime updates.
%% @doc GET /2021-07-20/functions/{FunctionName}/runtime-management-config.
%% Optional `Qualifier' is read from QueryMap when present.
get_runtime_management_config(Client, FunctionName)
  when is_map(Client) ->
    get_runtime_management_config(Client, FunctionName, #{}, #{}).

get_runtime_management_config(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_runtime_management_config(Client, FunctionName, QueryMap, HeadersMap, []).

get_runtime_management_config(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2021-07-20/functions/", aws_util:encode_uri(FunctionName), "/runtime-management-config"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Qualifier">>, maps:get(<<"Qualifier">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc Invokes a Lambda function.
%% For asynchronous invocation, set `InvocationType' to `Event'. If a function
%% error occurs, Lambda executes the function up to two more times.
%% This operation requires permission for the lambda:InvokeFunction action.
%% @doc POST /2015-03-31/functions/{FunctionName}/invocations.
%% Selected input fields travel as HTTP headers and the `Qualifier' field as a
%% query parameter; selected response headers are merged back into the body map.
invoke(Client, FunctionName, Input) ->
    invoke(Client, FunctionName, Input, []).

invoke(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/invocations"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    %% Lift these fields out of the input body into request headers.
    HeaderMapping = [{<<"X-Amz-Client-Context">>, <<"ClientContext">>},
                     {<<"X-Amz-Invocation-Type">>, <<"InvocationType">>},
                     {<<"X-Amz-Log-Type">>, <<"LogType">>}],
    {ReqHeaders, Input1} = aws_request:build_headers(HeaderMapping, Input0),
    %% Lift `Qualifier' out of the input body into the query string.
    {Query, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input1),
    case request(Client, post, ReqPath, Query, ReqHeaders, Input, Opts, undefined) of
        {ok, Body0, {_, ResponseHeaders, _} = Response} ->
            %% Copy these response headers, when present, into the body map.
            HeaderToKey =
                fun({HeaderName, BodyKey}, Acc) ->
                        case lists:keyfind(HeaderName, 1, ResponseHeaders) of
                            false -> Acc;
                            {_, Value} -> Acc#{BodyKey => Value}
                        end
                end,
            Body = lists:foldl(HeaderToKey,
                               Body0,
                               [{<<"X-Amz-Executed-Version">>, <<"ExecutedVersion">>},
                                {<<"X-Amz-Function-Error">>, <<"FunctionError">>},
                                {<<"X-Amz-Log-Result">>, <<"LogResult">>}]),
            {ok, Body, Response};
        Other ->
            Other
    end.
%% @doc Invokes a function asynchronously.
%% @doc POST /2014-11-13/functions/{FunctionName}/invoke-async/.
%% Sends Input as the request body unchanged; expects HTTP 202 on success.
invoke_async(Client, FunctionName, Input) ->
    invoke_async(Client, FunctionName, Input, []).

invoke_async(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2014-11-13/functions/", aws_util:encode_uri(FunctionName), "/invoke-async/"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    request(Client, post, ReqPath, [], [], Input0, Opts, 202).
%% @doc GET /2015-03-31/functions/{FunctionName}/aliases.
%% Optional `FunctionVersion', `Marker' and `MaxItems' come from QueryMap.
list_aliases(Client, FunctionName)
  when is_map(Client) ->
    list_aliases(Client, FunctionName, #{}, #{}).

list_aliases(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_aliases(Client, FunctionName, QueryMap, HeadersMap, []).

list_aliases(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/aliases"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"FunctionVersion">>, maps:get(<<"FunctionVersion">>, QueryMap, undefined)},
         {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% A request returns up to 10,000 configurations per call. You can use the
%% `MaxItems' parameter to return fewer configurations per call.
%% @doc GET /2020-04-22/code-signing-configs/.
%% Optional `Marker' and `MaxItems' come from QueryMap.
list_code_signing_configs(Client)
  when is_map(Client) ->
    list_code_signing_configs(Client, #{}, #{}).

list_code_signing_configs(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_code_signing_configs(Client, QueryMap, HeadersMap, []).

list_code_signing_configs(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2020-04-22/code-signing-configs/"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2015-03-31/event-source-mappings/.
%% Optional `EventSourceArn', `FunctionName', `Marker' and `MaxItems' come
%% from QueryMap.
list_event_source_mappings(Client)
  when is_map(Client) ->
    list_event_source_mappings(Client, #{}, #{}).

list_event_source_mappings(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_event_source_mappings(Client, QueryMap, HeadersMap, []).

list_event_source_mappings(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2015-03-31/event-source-mappings/"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"EventSourceArn">>, maps:get(<<"EventSourceArn">>, QueryMap, undefined)},
         {<<"FunctionName">>, maps:get(<<"FunctionName">>, QueryMap, undefined)},
         {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2019-09-25/functions/{FunctionName}/event-invoke-config/list.
%% Optional `Marker' and `MaxItems' come from QueryMap.
list_function_event_invoke_configs(Client, FunctionName)
  when is_map(Client) ->
    list_function_event_invoke_configs(Client, FunctionName, #{}, #{}).

list_function_event_invoke_configs(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_function_event_invoke_configs(Client, FunctionName, QueryMap, HeadersMap, []).

list_function_event_invoke_configs(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName), "/event-invoke-config/list"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2021-10-31/functions/{FunctionName}/urls.
%% Optional `Marker' and `MaxItems' come from QueryMap.
list_function_url_configs(Client, FunctionName)
  when is_map(Client) ->
    list_function_url_configs(Client, FunctionName, #{}, #{}).

list_function_url_configs(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_function_url_configs(Client, FunctionName, QueryMap, HeadersMap, []).

list_function_url_configs(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/urls"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% Lambda returns up to 50 functions per call.
%% The `ListFunctions' operation returns a subset of the
%% `FunctionConfiguration' fields. To get the additional fields (State,
%% StateReasonCode, StateReason, LastUpdateStatus, LastUpdateStatusReason,
%% LastUpdateStatusReasonCode), use `GetFunction'.
%% @doc GET /2015-03-31/functions/.
%% Optional `FunctionVersion', `Marker', `MasterRegion' and `MaxItems' come
%% from QueryMap.
list_functions(Client)
  when is_map(Client) ->
    list_functions(Client, #{}, #{}).

list_functions(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_functions(Client, QueryMap, HeadersMap, []).

list_functions(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2015-03-31/functions/"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"FunctionVersion">>, maps:get(<<"FunctionVersion">>, QueryMap, undefined)},
         {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MasterRegion">>, maps:get(<<"MasterRegion">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2020-04-22/code-signing-configs/{CodeSigningConfigArn}/functions.
%% Optional `Marker' and `MaxItems' come from QueryMap.
list_functions_by_code_signing_config(Client, CodeSigningConfigArn)
  when is_map(Client) ->
    list_functions_by_code_signing_config(Client, CodeSigningConfigArn, #{}, #{}).

list_functions_by_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_functions_by_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, []).

list_functions_by_code_signing_config(Client, CodeSigningConfigArn, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2020-04-22/code-signing-configs/", aws_util:encode_uri(CodeSigningConfigArn), "/functions"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2018-10-31/layers/{LayerName}/versions.
%% Optional `CompatibleArchitecture', `CompatibleRuntime', `Marker' and
%% `MaxItems' come from QueryMap.
list_layer_versions(Client, LayerName)
  when is_map(Client) ->
    list_layer_versions(Client, LayerName, #{}, #{}).

list_layer_versions(Client, LayerName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_layer_versions(Client, LayerName, QueryMap, HeadersMap, []).

list_layer_versions(Client, LayerName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName), "/versions"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"CompatibleArchitecture">>, maps:get(<<"CompatibleArchitecture">>, QueryMap, undefined)},
         {<<"CompatibleRuntime">>, maps:get(<<"CompatibleRuntime">>, QueryMap, undefined)},
         {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2018-10-31/layers.
%% Optional `CompatibleArchitecture', `CompatibleRuntime', `Marker' and
%% `MaxItems' come from QueryMap.
list_layers(Client)
  when is_map(Client) ->
    list_layers(Client, #{}, #{}).

list_layers(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_layers(Client, QueryMap, HeadersMap, []).

list_layers(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2018-10-31/layers"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"CompatibleArchitecture">>, maps:get(<<"CompatibleArchitecture">>, QueryMap, undefined)},
         {<<"CompatibleRuntime">>, maps:get(<<"CompatibleRuntime">>, QueryMap, undefined)},
         {<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2019-09-30/functions/{FunctionName}/provisioned-concurrency?List=ALL.
%% Optional `Marker' and `MaxItems' come from QueryMap.
list_provisioned_concurrency_configs(Client, FunctionName)
  when is_map(Client) ->
    list_provisioned_concurrency_configs(Client, FunctionName, #{}, #{}).

list_provisioned_concurrency_configs(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_provisioned_concurrency_configs(Client, FunctionName, QueryMap, HeadersMap, []).

list_provisioned_concurrency_configs(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName), "/provisioned-concurrency?List=ALL"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
%% @doc GET /2017-03-31/tags/{Resource}.
%% Note: unlike most operations here, the expected status code is left as
%% `undefined', matching the original generated code.
list_tags(Client, Resource)
  when is_map(Client) ->
    list_tags(Client, Resource, #{}, #{}).

list_tags(Client, Resource, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_tags(Client, Resource, QueryMap, HeadersMap, []).

list_tags(Client, Resource, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2017-03-31/tags/", aws_util:encode_uri(Resource), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    request(Client, get, ReqPath, [], [], undefined, Opts, undefined).
%% Lambda returns up to 50 versions per call.
%% @doc GET /2015-03-31/functions/{FunctionName}/versions.
%% Optional `Marker' and `MaxItems' come from QueryMap.
list_versions_by_function(Client, FunctionName)
  when is_map(Client) ->
    list_versions_by_function(Client, FunctionName, #{}, #{}).

list_versions_by_function(Client, FunctionName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_versions_by_function(Client, FunctionName, QueryMap, HeadersMap, []).

list_versions_by_function(Client, FunctionName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    ReqPath = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/versions"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false}
            | Options0],
    Candidates =
        [{<<"Marker">>, maps:get(<<"Marker">>, QueryMap, undefined)},
         {<<"MaxItems">>, maps:get(<<"MaxItems">>, QueryMap, undefined)}],
    %% Drop query parameters the caller did not supply.
    Query = lists:filter(fun({_, V}) -> V =/= undefined end, Candidates),
    request(Client, get, ReqPath, Query, [], undefined, Opts, 200).
` UpdateFunctionConfiguration ' .
%% @doc POST /2018-10-31/layers/{LayerName}/versions.
%% Sends Input as the request body unchanged; expects HTTP 201 on success.
publish_layer_version(Client, LayerName, Input) ->
    publish_layer_version(Client, LayerName, Input, []).

publish_layer_version(Client, LayerName, Input0, Options0) ->
    ReqPath = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName), "/versions"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    request(Client, post, ReqPath, [], [], Input0, Opts, 201).
` UpdateFunctionCode ' or ` UpdateFunctionConfiguration ' to update
%% @doc POST /2015-03-31/functions/{FunctionName}/versions.
%% Sends Input as the request body unchanged; expects HTTP 201 on success.
publish_version(Client, FunctionName, Input) ->
    publish_version(Client, FunctionName, Input, []).

publish_version(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/versions"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    request(Client, post, ReqPath, [], [], Input0, Opts, 201).
%% @doc PUT /2020-06-30/functions/{FunctionName}/code-signing-config.
%% Sends Input as the request body unchanged; expects HTTP 200 on success.
put_function_code_signing_config(Client, FunctionName, Input) ->
    put_function_code_signing_config(Client, FunctionName, Input, []).

put_function_code_signing_config(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2020-06-30/functions/", aws_util:encode_uri(FunctionName), "/code-signing-config"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    request(Client, put, ReqPath, [], [], Input0, Opts, 200).
%% Lambda leaves at least 100 simultaneous executions unreserved for functions
%% that aren't configured with a per-function limit.
%% @doc PUT /2017-10-31/functions/{FunctionName}/concurrency.
%% Sends Input as the request body unchanged; expects HTTP 200 on success.
put_function_concurrency(Client, FunctionName, Input) ->
    put_function_concurrency(Client, FunctionName, Input, []).

put_function_concurrency(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2017-10-31/functions/", aws_util:encode_uri(FunctionName), "/concurrency"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    request(Client, put, ReqPath, [], [], Input0, Opts, 200).
%% To set one option without affecting existing settings for other options, use
%% `UpdateFunctionEventInvokeConfig'. When an invocation fails and the
%% function returns an error, it retains events in a queue for up to six
%% hours. To change how errors are handled, use
%% `UpdateFunctionConfiguration'.
%% @doc PUT /2019-09-25/functions/{FunctionName}/event-invoke-config.
%% The `Qualifier' field is lifted out of the input body into the query string.
put_function_event_invoke_config(Client, FunctionName, Input) ->
    put_function_event_invoke_config(Client, FunctionName, Input, []).

put_function_event_invoke_config(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName), "/event-invoke-config"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    {Query, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, put, ReqPath, Query, [], Input, Opts, 200).
%% @doc PUT /2019-09-30/functions/{FunctionName}/provisioned-concurrency.
%% The `Qualifier' field is lifted out of the input body into the query
%% string; expects HTTP 202 on success.
put_provisioned_concurrency_config(Client, FunctionName, Input) ->
    put_provisioned_concurrency_config(Client, FunctionName, Input, []).

put_provisioned_concurrency_config(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2019-09-30/functions/", aws_util:encode_uri(FunctionName), "/provisioned-concurrency"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    {Query, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, put, ReqPath, Query, [], Input, Opts, 202).
%% @doc PUT /2021-07-20/functions/{FunctionName}/runtime-management-config.
%% The `Qualifier' field is lifted out of the input body into the query string.
put_runtime_management_config(Client, FunctionName, Input) ->
    put_runtime_management_config(Client, FunctionName, Input, []).

put_runtime_management_config(Client, FunctionName, Input0, Options0) ->
    ReqPath = ["/2021-07-20/functions/", aws_util:encode_uri(FunctionName), "/runtime-management-config"],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    {Query, Input} =
        aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
    request(Client, put, ReqPath, Query, [], Input, Opts, 200).
%% @doc DELETE /2018-10-31/layers/{LayerName}/versions/{VersionNumber}/policy/{StatementId}.
%% The `RevisionId' field is lifted out of the input body into the query
%% string; expects HTTP 204 on success.
remove_layer_version_permission(Client, LayerName, StatementId, VersionNumber, Input) ->
    remove_layer_version_permission(Client, LayerName, StatementId, VersionNumber, Input, []).

remove_layer_version_permission(Client, LayerName, StatementId, VersionNumber, Input0, Options0) ->
    ReqPath = ["/2018-10-31/layers/", aws_util:encode_uri(LayerName),
               "/versions/", aws_util:encode_uri(VersionNumber),
               "/policy/", aws_util:encode_uri(StatementId), ""],
    Opts = [{send_body_as_binary, false},
            {receive_body_as_binary, false},
            {append_sha256_content_hash, false}
            | Options0],
    {Query, Input} =
        aws_request:build_headers([{<<"RevisionId">>, <<"RevisionId">>}], Input0),
    request(Client, delete, ReqPath, Query, [], Input, Opts, 204).
%% @doc Revokes function-use permission from an Amazon Web Service or another
%% Amazon Web Services account.
%% You can get the ID of the statement from the output of `GetPolicy'.
remove_permission(Client, FunctionName, StatementId, Input) ->
  remove_permission(Client, FunctionName, StatementId, Input, []).

%% Deletes the policy statement identified by StatementId; the optional
%% Qualifier and RevisionId input members are sent as query parameters.
remove_permission(Client, FunctionName, StatementId, Input0, Options0) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
          "/policy/", aws_util:encode_uri(StatementId), ""],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  QueryMapping = [{<<"Qualifier">>, <<"Qualifier">>},
                  {<<"RevisionId">>, <<"RevisionId">>}],
  {Query_, Input} = aws_request:build_headers(QueryMapping, Input0),
  request(Client, delete, Path, Query_, [], Input, Options, 204).
tag_resource(Client, Resource, Input) ->
  tag_resource(Client, Resource, Input, []).

%% Assigns tags to the Lambda resource identified by its ARN.
%% No query parameters; the tag map is sent as the JSON body.
tag_resource(Client, Resource, Input, Options0) ->
  Path = ["/2017-03-31/tags/", aws_util:encode_uri(Resource), ""],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  request(Client, post, Path, [], [], Input, Options, 204).
untag_resource(Client, Resource, Input) ->
  untag_resource(Client, Resource, Input, []).

%% Removes the given tag keys from a Lambda resource; the TagKeys input
%% member is rewritten into the `tagKeys' query parameter.
untag_resource(Client, Resource, Input0, Options0) ->
  Path = ["/2017-03-31/tags/", aws_util:encode_uri(Resource), ""],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  {Query_, Input} = aws_request:build_headers([{<<"tagKeys">>, <<"TagKeys">>}], Input0),
  request(Client, delete, Path, Query_, [], Input, Options, 204).
update_alias(Client, FunctionName, Name, Input) ->
  update_alias(Client, FunctionName, Name, Input, []).

%% Updates the configuration of the named function alias.
update_alias(Client, FunctionName, Name, Input, Options0) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName),
          "/aliases/", aws_util:encode_uri(Name), ""],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  request(Client, put, Path, [], [], Input, Options, 200).
update_code_signing_config(Client, CodeSigningConfigArn, Input) ->
  update_code_signing_config(Client, CodeSigningConfigArn, Input, []).

%% Updates the code-signing configuration identified by its ARN.
update_code_signing_config(Client, CodeSigningConfigArn, Input, Options0) ->
  Path = ["/2020-04-22/code-signing-configs/", aws_util:encode_uri(CodeSigningConfigArn), ""],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  request(Client, put, Path, [], [], Input, Options, 200).
%% <ul> <li> Amazon DynamoDB Streams
%% </li> <li> Amazon Kinesis
%% </li> <li> Amazon SQS
%% </li> <li> Amazon MQ and RabbitMQ
%% </li> <li> Amazon MSK
%% For stream sources (DynamoDB and Kinesis):
%% error, split the batch in two and retry.
%% </li> <li> `DestinationConfig' - Send discarded records to an Amazon
%% SQS queue or Amazon SNS topic.
%% <ul> <li> Amazon DynamoDB Streams
%% </li> <li> Amazon Kinesis
%% </li> <li> Amazon SQS
%% </li> <li> Amazon MQ and RabbitMQ
%% </li> <li> Amazon MSK
%% @doc Updates the configuration of the event source mapping identified
%% by UUID.
update_event_source_mapping(Client, UUID, Input) ->
  update_event_source_mapping(Client, UUID, Input, []).

update_event_source_mapping(Client, UUID, Input0, Options0) ->
  Method = put,
  Path = ["/2015-03-31/event-source-mappings/", aws_util:encode_uri(UUID), ""],
  %% 202 Accepted is the expected success status for this operation.
  SuccessStatusCode = 202,
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  Headers = [],
  Input1 = Input0,
  CustomHeaders = [],
  Input2 = Input1,
  Query_ = [],
  Input = Input2,
  request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% the code package in `ImageUri' as the URI of a container image in the
%% Amazon ECR registry.
%% deployment package as a .zip file archive. Enter the Amazon S3 bucket and
%% inline using the `ZipFile' field.
%% to an image digest. In Amazon ECR, if you update the image tag to a new
update_function_code(Client, FunctionName, Input) ->
  update_function_code(Client, FunctionName, Input, []).

%% Replaces the code of the named function.
update_function_code(Client, FunctionName, Input, Options0) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/code"],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  request(Client, put, Path, [], [], Input, Options, 200).
%% and its supporting resources. If your function connects to a VPC, this
%% process can take a minute. During this time, you can't modify the
%% grant invoke permissions to an Amazon Web Services account or Amazon Web
%% Service, use `AddPermission'.
update_function_configuration(Client, FunctionName, Input) ->
  update_function_configuration(Client, FunctionName, Input, []).

%% Modifies the version-specific settings of the named function.
update_function_configuration(Client, FunctionName, Input, Options0) ->
  Path = ["/2015-03-31/functions/", aws_util:encode_uri(FunctionName), "/configuration"],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  request(Client, put, Path, [], [], Input, Options, 200).
update_function_event_invoke_config(Client, FunctionName, Input) ->
  update_function_event_invoke_config(Client, FunctionName, Input, []).

%% Updates the asynchronous-invocation configuration of a function; the
%% optional Qualifier input member is sent as a query parameter.
update_function_event_invoke_config(Client, FunctionName, Input0, Options0) ->
  Path = ["/2019-09-25/functions/", aws_util:encode_uri(FunctionName), "/event-invoke-config"],
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  {Query_, Input} = aws_request:build_headers([{<<"Qualifier">>, <<"Qualifier">>}], Input0),
  request(Client, post, Path, Query_, [], Input, Options, 200).
%% @doc Updates the configuration for a Lambda function URL.
update_function_url_config(Client, FunctionName, Input) ->
  update_function_url_config(Client, FunctionName, Input, []).

update_function_url_config(Client, FunctionName, Input0, Options0) ->
  Method = put,
  Path = ["/2021-10-31/functions/", aws_util:encode_uri(FunctionName), "/url"],
  SuccessStatusCode = 200,
  Options = [{send_body_as_binary, false},
             {receive_body_as_binary, false},
             {append_sha256_content_hash, false}
             | Options0],
  Headers = [],
  Input1 = Input0,
  CustomHeaders = [],
  Input2 = Input1,
  %% The optional Qualifier input member is moved into the query string.
  QueryMapping = [
                   {<<"Qualifier">>, <<"Qualifier">>}
                 ],
  {Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
  request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% Internal functions
%% Shared entry point for all API operations above: wraps do_request/8 in
%% a fun and hands it to aws_request:request/2.
%% NOTE(review): aws_request:request/2 presumably applies retry policy
%% from Options — confirm in the aws_request module.
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
  {ok, {integer(), list()}} |
  {ok, Result, {integer(), list(), hackney:client()}} |
  {error, Error, {integer(), list(), hackney:client()}} |
  {error, term()} when
  Result :: map(),
  Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
  RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
  aws_request:request(RequestFun, Options).
%% Builds, signs (aws_request:sign_request/5) and executes one HTTP
%% request against the Lambda endpoint, then normalises the response.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
  Client1 = Client#{service => <<"lambda">>},
  Host = build_host(<<"lambda">>, Client1),
  URL0 = build_url(Host, Path, Client1),
  URL = aws_request:add_query(URL0, Query),
  AdditionalHeaders1 = [ {<<"Host">>, Host}
                       , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                       ],
  %% Either pass the raw <<"Body">> through untouched or JSON-encode the
  %% whole input map.
  Payload =
    case proplists:get_value(send_body_as_binary, Options) of
      true ->
        maps:get(<<"Body">>, Input, <<"">>);
      false ->
        encode_payload(Input)
    end,
  %% Optionally append an X-Amz-CheckSum-SHA256 header over the payload.
  AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                        true ->
                          add_checksum_hash_header(AdditionalHeaders1, Payload);
                        false ->
                          AdditionalHeaders1
                      end,
  Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
  MethodBin = aws_request:method_to_binary(Method),
  SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
  Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
  %% receive_body_as_binary=true means: hand the raw body back undecoded.
  DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
  handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepends an X-Amz-CheckSum-SHA256 header carrying the base64-encoded
%% SHA-256 digest of Body to the given header list.
add_checksum_hash_header(Headers, Body) ->
  Digest = crypto:hash(sha256, Body),
  [{<<"X-Amz-CheckSum-SHA256">>, base64:encode(Digest)} | Headers].
%% Maps a hackney response onto {ok, ...}/{error, ...} tuples.
%% 200/202/204/206 and the operation-specific SuccessStatusCode count as
%% success; everything else is an error.
%% Body-less response variant (3-tuple from hackney).
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    {error, {StatusCode, ResponseHeaders}};
%% Response with a body stream (4-tuple carrying the hackney client).
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
      %% An empty success body decodes to an empty map.
      {ok, <<>>} when StatusCode =:= 200;
                      StatusCode =:= SuccessStatusCode ->
        {ok, #{}, {StatusCode, ResponseHeaders, Client}};
      {ok, Body} ->
        Result = case DecodeBody of
                   true ->
                     try
                       jsx:decode(Body)
                     catch
                       Error:Reason:Stack ->
                         erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                     end;
                   false -> #{<<"Body">> => Body}
                 end,
        {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled
    {error, service_unavailable};
%% Any other status: the body is expected to be a JSON error document.
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    {ok, Body} = hackney:body(Client),
    try
      DecodedError = jsx:decode(Body),
      {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
      Error:Reason:Stack ->
        erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
%% Transport-level failure from hackney.
handle_response({error, Reason}, _, _DecodeBody) ->
    {error, Reason}.
%% Resolves the request host name. A region of <<"local">> targets the
%% configured custom endpoint (or localhost if none is set); otherwise the
%% host is "<prefix>.<region>.<endpoint>".
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
  Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
  <<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
  aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
%% Composes the full request URL: <proto>://<host>:<port><path>.
build_url(Host, RawPath, Client) ->
  aws_util:binary_join(
    [aws_client:proto(Client), <<"://">>, Host, <<":">>,
     aws_client:port(Client), erlang:iolist_to_binary(RawPath)],
    <<"">>).
-spec encode_payload(undefined | map()) -> binary().
%% Serialises the request body: an absent input becomes the empty binary,
%% a map is JSON-encoded with jsx.
encode_payload(Input) ->
  case Input of
    undefined -> <<>>;
    Map -> jsx:encode(Map)
  end.
|
37a51949240ec426ddfead293ce009712928164d48487c0308757c13ae11b5ac | ssor/erlangDemos | myapp_app.erl | %% @author {{author}}
myapp { { author } }
%% @doc Callbacks for the myapp application.
-module(myapp_app).
% -author("{{author}}").
-behaviour(application).
-export([start/2,stop/1]).
%% @spec start(_Type, _StartArgs) -> ServerRet
%% @doc application start callback for myapp.
start(_Type, _StartArgs) ->
    %% Log the start type and arguments supplied by the application controller.
    io:format("type => ~p _StartArgs => ~p~n",[_Type,_StartArgs]),
    %% Make sure the application's dependencies/paths are available first.
    myapp_deps:ensure(),
    io:format("start_link ->~n"),
    %% Start the root supervisor; its result is returned to the controller.
    myapp_sup:start_link().
%% @spec stop(_State) -> ServerRet
%% @doc application stop callback for myapp.
%% No explicit cleanup is needed here.
stop(_State) ->
    ok.
| null | https://raw.githubusercontent.com/ssor/erlangDemos/632cd905be2c4f275f1c1ae15238e711d7bb9147/myapp/src/myapp_app.erl | erlang | @author {{author}}
@doc Callbacks for the myapp application.
-author("{{author}}").
@doc application start callback for myapp.
@doc application stop callback for myapp. | myapp { { author } }
-module(myapp_app).
-behaviour(application).
-export([start/2,stop/1]).
, _ ) - > ServerRet
start(_Type, _StartArgs) ->
io:format("type => ~p _StartArgs => ~p~n",[_Type,_StartArgs]),
myapp_deps:ensure(),
io:format("start_link ->~n"),
myapp_sup:start_link().
@spec stop(_State ) - > ServerRet
stop(_State) ->
ok.
|
02aa44b436f4216b0b19cbecc090ffaa1f8372675208fd4757b6ff477c88a83d | SmallImprovements/spring-clean | Class.hs | module Java.Class (
getQualifyingClassName
) where
import Language.Java.Syntax
import Java.Helper (concatIdent, getPackageDeclName)
import Data.Maybe (listToMaybe)
-- | Builds the fully qualified name of the first type declared in a
-- compilation unit (package name plus \".ClassName\"); if the unit
-- declares no types, only the package name is returned.
getQualifyingClassName :: CompilationUnit -> String
getQualifyingClassName (CompilationUnit package _ typeDecls) =
  getPackageDeclName package ++ maybe "" getClassName (listToMaybe typeDecls)
-- | Extracts the simple name of a type declaration (class, enum or
-- interface), prefixed with a dot so it can be appended to a package name.
getClassName :: TypeDecl -> String
getClassName (ClassTypeDecl (ClassDecl _ name _ _ _ _)) =
  "." ++ concatIdent (Name [name])
getClassName (ClassTypeDecl (EnumDecl _ name _ _)) =
  "." ++ concatIdent (Name [name])
getClassName (InterfaceTypeDecl (InterfaceDecl _ name _ _ _)) =
  "." ++ concatIdent (Name [name])
| null | https://raw.githubusercontent.com/SmallImprovements/spring-clean/000b10dcb570847d60fed3eb4539781ab20ae5b5/src/Java/Class.hs | haskell | module Java.Class (
getQualifyingClassName
) where
import Language.Java.Syntax
import Java.Helper (concatIdent, getPackageDeclName)
import Data.Maybe (listToMaybe)
getQualifyingClassName :: CompilationUnit -> String
getQualifyingClassName (CompilationUnit package _ typeDecls) =
getPackageDeclName package ++ maybe "" getClassName (listToMaybe typeDecls)
getClassName :: TypeDecl -> String
getClassName (ClassTypeDecl (ClassDecl _ name _ _ _ _)) =
"." ++ concatIdent (Name [name])
getClassName (ClassTypeDecl (EnumDecl _ name _ _)) =
"." ++ concatIdent (Name [name])
getClassName (InterfaceTypeDecl (InterfaceDecl _ name _ _ _)) =
"." ++ concatIdent (Name [name])
|
|
df7dbf3b74fcb609201f436da363019435fe54addb8c2cd28b043fbc0288c797 | teknql/wing | walk_test.cljc | (ns wing.core.walk-test
(:require [wing.core.walk :as sut]
#?(:clj [clojure.test :as t :refer [deftest testing is]]
:cljs [cljs.test :as t :include-macros true :refer [deftest testing is]])
[clojure.walk :as walk]))
;; pathwalk-pre should visit nodes in the same order as clojure.walk/prewalk,
;; except that it never hands map keys or raw map-entries to the visitor,
;; and both walks must produce the same transformed structure.
(deftest pathwalk-pre-test
  (testing "calls in the same order as prewalk, minus map-keys + raw map-entries"
    (let [prewalk-calls       (transient [])
          sut-calls           (transient [])
          ;; increment ints so the walk visibly rewrites the structure
          transform           #(if (int? %)
                                 (inc %)
                                 %)
          ;; record prewalk visits, skipping map-entries and keywords
          ;; (keywords only occur as map keys in `data`)
          record-prewalk-call #(do (when-not (or (map-entry? %) (keyword? %))
                                     (conj! prewalk-calls %))
                                   (transform %))
          ;; pathwalk-pre's visitor receives [path node]; record the node
          record-sut-calls    #(do (conj! sut-calls %2)
                                   (transform %2))
          data                {:a 5 :b 6 :c {:d true
                                             :e [{:foo nil}]}}
          walk-result         (walk/prewalk record-prewalk-call data)
          sut-result          (sut/pathwalk-pre record-sut-calls data)
          prewalk-calls       (persistent! prewalk-calls)
          sut-calls           (persistent! sut-calls)]
      (is (= prewalk-calls
             sut-calls))
      (is (= walk-result sut-result)))))
;; Same contract as pathwalk-pre-test, but against clojure.walk/postwalk:
;; bottom-up visit order, minus map keys and raw map-entries.
(deftest pathwalk-post-test
  (testing "calls in the same order as postwalk, minus map-keys + raw map-entries"
    (let [postwalk-calls       (transient [])
          sut-calls            (transient [])
          ;; increment ints so the walk visibly rewrites the structure
          transform            #(if (int? %)
                                  (inc %)
                                  %)
          ;; record postwalk visits, skipping map-entries and keywords
          record-postwalk-call #(do (when-not (or (map-entry? %) (keyword? %))
                                      (conj! postwalk-calls %))
                                    (transform %))
          ;; pathwalk-post's visitor receives [path node]; record the node
          record-sut-calls     #(do (conj! sut-calls %2)
                                    (transform %2))
          data                 {:a 5 :b 6 :c {:d true
                                              :e [{:foo nil}]}}
          walk-result          (walk/postwalk record-postwalk-call data)
          sut-result           (sut/pathwalk-post record-sut-calls data)
          postwalk-calls       (persistent! postwalk-calls)
          sut-calls            (persistent! sut-calls)]
      (is (= postwalk-calls
             sut-calls))
      (is (= walk-result sut-result)))))
| null | https://raw.githubusercontent.com/teknql/wing/de8148bf48210eac0c0e3f8e31346b5b2ead39c2/test/wing/core/walk_test.cljc | clojure | (ns wing.core.walk-test
(:require [wing.core.walk :as sut]
#?(:clj [clojure.test :as t :refer [deftest testing is]]
:cljs [cljs.test :as t :include-macros true :refer [deftest testing is]])
[clojure.walk :as walk]))
(deftest pathwalk-pre-test
(testing "calls in the same order as prewalk, minus map-keys + raw map-entries"
(let [prewalk-calls (transient [])
sut-calls (transient [])
transform #(if (int? %)
(inc %)
%)
record-prewalk-call #(do (when-not (or (map-entry? %) (keyword? %))
(conj! prewalk-calls %))
(transform %))
record-sut-calls #(do (conj! sut-calls %2)
(transform %2))
data {:a 5 :b 6 :c {:d true
:e [{:foo nil}]}}
walk-result (walk/prewalk record-prewalk-call data)
sut-result (sut/pathwalk-pre record-sut-calls data)
prewalk-calls (persistent! prewalk-calls)
sut-calls (persistent! sut-calls)]
(is (= prewalk-calls
sut-calls))
(is (= walk-result sut-result)))))
(deftest pathwalk-post-test
(testing "calls in the same order as postwalk, minus map-keys + raw map-entries"
(let [postwalk-calls (transient [])
sut-calls (transient [])
transform #(if (int? %)
(inc %)
%)
record-postwalk-call #(do (when-not (or (map-entry? %) (keyword? %))
(conj! postwalk-calls %))
(transform %))
record-sut-calls #(do (conj! sut-calls %2)
(transform %2))
data {:a 5 :b 6 :c {:d true
:e [{:foo nil}]}}
walk-result (walk/postwalk record-postwalk-call data)
sut-result (sut/pathwalk-post record-sut-calls data)
postwalk-calls (persistent! postwalk-calls)
sut-calls (persistent! sut-calls)]
(is (= postwalk-calls
sut-calls))
(is (= walk-result sut-result)))))
|
|
57d12f603cf4b1cc359106329135591719528fe3ffb1d539c7cc74290c1f0bfc | blambo/accelerate-repa | Repa.hs | -- {-# LANGUAGE GADTs #-}
-- |
-- Module      : Data.Array.Accelerate.Repa
--
-- Maintainer  : <blambo+ >
--
-- This module implements the back-end for the accelerate EDSL
-- The current structure follows closely on
-- Data.Array.Accelerate.Interpreter
module Data.Array.Accelerate.Repa
( Arrays
, accToRepa
, compile
, exec
, run
)
where
import Data.Array.Accelerate
import qualified Data.Array.Accelerate.Smart as Smart
import Data.Array.Accelerate.Repa.Evaluations (evalAcc)
import Data.Array.Accelerate.Repa.Stencil (stencilDoc)
import Text.PrettyPrint
import qualified Data.Array.Repa as Repa
import GHC
import GHC.Paths (libdir)
import DynFlags
import Unsafe.Coerce
import System.IO
import System.Directory (removeFile)
-- | Using the Accelerate program given as an argument, run will compile,
-- execute and return the result of a given Accelerate program using Repa
-- for execution
--
-- Fix: the dataset-garbled signature lost its parameter line; restored as
-- `Smart.Acc a` (the value is passed straight to accToRepa, which needs
-- a Smart.Acc).  NOTE(review): confirm the exact type against upstream.
run :: (Arrays a, Repa.Shape sh, Repa.Repr r e)
    => Smart.Acc a -- ^ The Accelerate program
    -> IO (Repa.Array r sh e)
run acc = do
   -- Generate source code from the Accelerate program
   let src = accToRepa acc
   -- Write source code to a temporary file in /tmp
   (name, handle) <- openTempFile tempDir fileName
   hPutStr handle src
   hClose handle
   -- Perform GHC API operations
   result <- runGhc (Just libdir) $ do
      -- compile
      err <- compile name
      -- execute compiled, checking for error
      case err of
         Just errStr -> error errStr
         Nothing     -> exec modName fnName
   -- Delete temporary file
   removeFile name
   -- Return result of the Accelerate program
   return result
-- | Compiles the given file name with the GHC API.  Returns 'Nothing' on
-- success, or 'Just' an error message if module loading failed.
compile :: String -- ^ The source file
        -> Ghc (Maybe String)
compile path = do
   dflags <- getSessionDynFlags
   -- Interpreted, linked in memory, with optimisation level 2.
   setSessionDynFlags (dflags{
      optLevel = 2,
      ghcLink = LinkInMemory,
      hscTarget = HscInterpreted
      })
   target <- guessTarget path Nothing
   addTarget target
   r <- load LoadAllTargets
   return $ case r of
      Failed -> Just "Error in module loading"
      Succeeded -> Nothing
-- | Executes the given function in the given module, must already be
-- compiled and loaded
--
exec :: (Repa.Shape sh, Repa.Repr r e)
     => String -- ^ The module name
     -> String -- ^ The function name
     -> Ghc (Repa.Array r sh e)
exec modName fnName = do
   mod <- findModule (mkModuleName modName) Nothing
   setContext [mod] []
   value <- compileExpr (modName Prelude.++ "." Prelude.++ fnName)
   -- unsafeCoerce pins the dynamically loaded value to the caller's
   -- expected Repa array type; a mismatch is undefined behaviour.
   let value' = (unsafeCoerce value) :: Repa.Array r sh e
   return value'
-- | Converts an Accelerate program to a Repa program and returns the
-- source as a String
--
-- Fix: the garbled signature lost its parameter line; restored as
-- `Smart.Acc a`, matching the `Smart.convertAcc acc` call below.
accToRepa :: (Arrays a)
          => Smart.Acc a -- ^ The Accelerate program
          -> String
accToRepa acc = show $
   headD $$ (nest 1 (evalAcc (Smart.convertAcc acc)))
         $$ tailD
         $$ stencilDoc
-- | Document prefix emitted before the generated Repa code: language
-- pragmas, module header, imports, a main that prints the result, and the
-- binding for the generated entry function.
-- Fix: restored the garbled INLINE pragma to valid pragma syntax.
headD :: Doc
{-# INLINE headD #-}
headD =
   text "{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeOperators #-}" $+$
   text "{-# LANGUAGE FlexibleContexts #-}" $+$
   text "module" <+> text modName <+> text "where" $+$
   text "import Data.Array.Repa as Repa" $+$
   text "import Data.Bits -- required for Prim ops" $+$
   text "import Data.Char -- required for Prim ops" $+$
   text "import Data.Int -- required for Prim ops" $+$
   text "import Data.List (sortBy) -- required for permute" $+$
   text "import Data.Ord (compare) -- required for permute" $+$
   text " " $+$
   text "main = putStrLn $ show $" <+> text fnName $+$
   text fnName <+> equals
-- | Document suffix after the generated code (currently nothing).
-- Fix: restored the garbled INLINE pragmas to valid pragma syntax.
tailD :: Doc
{-# INLINE tailD #-}
tailD = empty

-- | Name of the generated module.
modName :: String
{-# INLINE modName #-}
modName = "RepaTest"

-- | Name of the generated entry function.
fnName :: String
{-# INLINE fnName #-}
fnName = "repa"

-- | File name the generated module is written to.
fileName :: String
{-# INLINE fileName #-}
fileName = modName Prelude.++ ".hs"

-- | Directory for the temporary source file.
tempDir :: FilePath
{-# INLINE tempDir #-}
tempDir = "/tmp/"
| null | https://raw.githubusercontent.com/blambo/accelerate-repa/5ea4d40ebcca50d5b952e8783a56749cea4431a4/Data/Array/Accelerate/Repa.hs | haskell | {-# LANGUAGE GADTs #-}
|
The current structure follows closely on
Data.Array.Accelerate.Interpreter
for execution
Write source code to temporary file in /tmp
compile
execute compiled, checking for error
Delete temporary file
| Compiles the given file name
^ The source file
| Executes the given function in the given module, must already be
compiled and loaded
^ The module name
^ The function name
source as a String
# INLINE modName # | Module : Data . Array . Accelerate .
Maintainer : < blambo+ >
This module implements the back - end for the accelerate EDSL
module Data.Array.Accelerate.Repa
( Arrays
, accToRepa
, compile
, exec
, run
)
where
import Data.Array.Accelerate
import qualified Data.Array.Accelerate.Smart as Smart
import Data.Array.Accelerate.Repa.Evaluations (evalAcc)
import Data.Array.Accelerate.Repa.Stencil (stencilDoc)
import Text.PrettyPrint
import qualified Data.Array.Repa as Repa
import GHC
import GHC.Paths (libdir)
import DynFlags
import Unsafe.Coerce
import System.IO
import System.Directory (removeFile)
| Using the Accelerate program given as an argument , run will compile ,
execute and return the result of a given Accelerate program using
run :: (Arrays a, Repa.Shape sh, Repa.Repr r e)
^ The Accelerate program
-> IO (Repa.Array r sh e)
run acc = do
Generate source code from Accelerate program
let src = accToRepa acc
(name, handle) <- openTempFile tempDir fileName
hPutStr handle src
hClose handle
Perform GHC API operations
result <- runGhc (Just libdir) $ do
err <- compile name
case err of
Just errStr -> error errStr
Nothing -> exec modName fnName
removeFile name
Return result of Accelerate program
return result
-> Ghc (Maybe String)
compile path = do
dflags <- getSessionDynFlags
setSessionDynFlags (dflags{
optLevel = 2,
ghcLink = LinkInMemory,
hscTarget = HscInterpreted
})
target <- guessTarget path Nothing
addTarget target
r <- load LoadAllTargets
return $ case r of
Failed -> Just "Error in module loading"
Succeeded -> Nothing
exec :: (Repa.Shape sh, Repa.Repr r e)
-> Ghc (Repa.Array r sh e)
exec modName fnName = do
mod <- findModule (mkModuleName modName) Nothing
setContext [mod] []
value <- compileExpr (modName Prelude.++ "." Prelude.++ fnName)
let value' = (unsafeCoerce value) :: Repa.Array r sh e
return value'
| Converts an Accelerate program to a Repa program and returns the
accToRepa :: (Arrays a)
^ The Accelerate program
-> String
accToRepa acc = show $
headD $$ (nest 1 (evalAcc (Smart.convertAcc acc)))
$$ tailD
$$ stencilDoc
headD :: Doc
# INLINE headD #
headD =
text "{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeOperators #-}" $+$
text "{-# LANGUAGE FlexibleContexts #-}" $+$
text "module" <+> text modName <+> text "where" $+$
text "import Data.Array.Repa as Repa" $+$
text "import Data.Bits -- required for Prim ops" $+$
text "import Data.Char -- required for Prim ops" $+$
text "import Data.Int -- required for Prim ops" $+$
text "import Data.List (sortBy) -- required for permute" $+$
text "import Data.Ord (compare) -- required for permute" $+$
text " " $+$
text "main = putStrLn $ show $" <+> text fnName $+$
text fnName <+> equals
tailD :: Doc
# INLINE tailD #
tailD = empty
modName :: String
modName = "RepaTest"
fnName :: String
# INLINE fnName #
fnName = "repa"
fileName :: String
# INLINE fileName #
fileName = modName Prelude.++ ".hs"
tempDir :: FilePath
# INLINE tempDir #
tempDir = "/tmp/"
|
389bbb9c3e23df9a4b01e7fc60a272b2eb77b30be87c945a958ff7da384d2da7 | cooldaemon/ermlia | ermlia_data_store.erl | @author < > [ / ]
@copyright 2008
%% @doc This module is store for key-value data.
Copyright 2008
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(ermlia_data_store).
-export([start_link/1, stop/1]).
-export([put/4, get/2]).
-export([dump/1]).
-export([clean/1]).
%% @doc Starts store server I, linked to the caller.
start_link(I) ->
  ermlia_ets_server:start_link(?MODULE, I).

%% @doc Stops the given store server.
stop(ServerRef) ->
  ermlia_ets_server:stop(ServerRef).

%% @doc Stores Value under Key in store I with the given TTL.
put(I, Key, Value, TTL) ->
  ermlia_ets_server:put(?MODULE, I, Key, Value, TTL).

%% @doc Looks up Key in store I.
get(I, Key) ->
  ermlia_ets_server:get(?MODULE, I, Key).

%% @doc Returns the contents of store I.
dump(I) ->
  ermlia_ets_server:dump(?MODULE, I).

%% @doc Delegates a clean pass on store I to ermlia_ets_server:clean/2.
clean(I) ->
  ermlia_ets_server:clean(?MODULE, I).
| null | https://raw.githubusercontent.com/cooldaemon/ermlia/fa5f8acf5965893b33f405de048e6d570d4aba53/src/ermlia_data_store.erl | erlang | @doc This module is store for key-value data.
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | @author < > [ / ]
@copyright 2008
Copyright 2008
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(ermlia_data_store).
-export([start_link/1, stop/1]).
-export([put/4, get/2]).
-export([dump/1]).
-export([clean/1]).
start_link(I) ->
ermlia_ets_server:start_link(?MODULE, I).
stop(ServerRef) ->
ermlia_ets_server:stop(ServerRef).
put(I, Key, Value, TTL) ->
ermlia_ets_server:put(?MODULE, I, Key, Value, TTL).
get(I, Key) ->
ermlia_ets_server:get(?MODULE, I, Key).
dump(I) ->
ermlia_ets_server:dump(?MODULE, I).
clean(I) ->
ermlia_ets_server:clean(?MODULE, I).
|
a9481b2057bb1262cd9fdc8f990d98ce1b3b67d7bd984be34bd3b56d0fa4346b | jeopard/haskell-checking-account | StatementSpec.hs | module Models.StatementSpec (spec) where
import Data.Scientific
import Data.Time.Calendar
import Data.UUID
import Test.Hspec
import Models.Statement
import Models.StatementDate
import qualified Models.Operation as O
-- Top-level spec; currently only the fromOperations tests.
spec :: Spec
spec = do
  fromOperationsSpec
-- Exercises Statement.fromOperations against the fixture operations
-- defined below; the date range is inclusive on both endpoints.
fromOperationsSpec :: Spec
fromOperationsSpec = do
  describe "fromOperations" $ do
    context "when all operations are between the dates" $ do
      it "creates a Statement with the correct data" $ do
        let startDate = fromGregorian 2018 5 1
            endDate = fromGregorian 2020 1 1
            result = fromOperations allOps startDate endDate
        (fromDate result) `shouldBe` startDate
        (toDate result) `shouldBe` endDate
        (statementDates result) `shouldBe` [sDateA, sDateBC, sDateD, sDateEF, sDateG, sDateH]

    context "when all operations are before the dates" $ do
      it "creates a Statement with the correct data" $ do
        let startDate = fromGregorian 2020 5 1
            endDate = fromGregorian 2021 1 1
            result = fromOperations allOps startDate endDate
        (fromDate result) `shouldBe` startDate
        (toDate result) `shouldBe` endDate
        (statementDates result) `shouldBe` []

    context "when all operations are after the dates" $ do
      it "creates a Statement with the correct data" $ do
        let startDate = fromGregorian 2010 5 1
            endDate = fromGregorian 2011 1 1
            result = fromOperations allOps startDate endDate
        (fromDate result) `shouldBe` startDate
        (toDate result) `shouldBe` endDate
        (statementDates result) `shouldBe` []

    -- boundary dates are included in the statement
    context "when some operations are on the dates" $ do
      it "creates a Statement including these operations" $ do
        let startDate = fromGregorian 2019 7 16
            endDate = fromGregorian 2019 7 23
            result = fromOperations allOps startDate endDate
        (fromDate result) `shouldBe` startDate
        (toDate result) `shouldBe` endDate
        (statementDates result) `shouldBe` [sDateBC, sDateD, sDateEF]

    context "when fromDate and toDate are on the same day" $ do
      it "creates a Statement including the operations of the day" $ do
        let startDate = fromGregorian 2019 7 16
            endDate = fromGregorian 2019 7 16
            result = fromOperations allOps startDate endDate
        (fromDate result) `shouldBe` startDate
        (toDate result) `shouldBe` endDate
        (statementDates result) `shouldBe` [sDateBC]
-- All fixture operations in chronological order.
allOps :: [O.Operation]
allOps = [opA, opB, opC, opD, opE, opF, opG, opH]
-- the only Operation data that matter here are the operation type, date, and amount,
-- so we'll just create one operation and use it as a default for creating others
-- Base fixture: a 100 credit on 2019-07-14; other operations override
-- individual fields via record update.
op :: O.Operation
op = let opId = read "c2cc10e1-57d6-4b6f-9899-38d972112d8c" :: UUID
     in O.Operation { O.operationId = opId
                    , O.accountId = accId
                    , O.operationType = O.Credit
                    , O.date = fromGregorian 2019 7 14
                    , O.amount = scientific 100 0
                    , O.description = "Amazon transaction" }

opA :: O.Operation
opA = op

-- End-of-day balance after opA: 100
sDateA :: StatementDate
sDateA = StatementDate (O.date opA) [opA] (scientific 100 0)

-- Debit of 45.50 on 2019-07-16
opB :: O.Operation
opB = op { O.operationId = read "df9abf28-78ce-483d-8de5-368d67763cb8" :: UUID
         , O.operationType = O.Debit
         , O.date = fromGregorian 2019 7 16
         , O.amount = scientific 455 (-1) }

-- Credit of 10 on the same day as opB
opC :: O.Operation
opC = op { O.operationId = read "7ce81ac6-1abb-4008-9009-dec28f95629c" :: UUID
         , O.operationType = O.Credit
         , O.date = fromGregorian 2019 7 16
         , O.amount = scientific 10 0 }
-- 100 - 45.50 + 10 = 64.50
-- Statement date for 2019-07-16 (opB and opC share the day).
sDateBC :: StatementDate
sDateBC = StatementDate { date = (O.date opB), operations = [opB, opC], endOfDayBalance= (scientific 645 (-1))}

-- Debit of 24 on 2019-07-20
opD :: O.Operation
opD = op { O.operationId = read "3657d672-92ed-430d-aed8-0bcd54b0a4d8" :: UUID
         , O.operationType = O.Debit
         , O.date = fromGregorian 2019 7 20
         , O.amount = scientific 24 0 }
-- 64.50 - 24 = 40.50
-- Statement date for 2019-07-20.
sDateD :: StatementDate
sDateD = StatementDate (O.date opD) [opD] (scientific 405 (-1))

-- Credit of 140.50 on 2019-07-23
opE :: O.Operation
opE = op { O.operationId = read "ca681fe0-1561-4604-b3a9-941c86c6916c" :: UUID
         , O.operationType = O.Credit
         , O.date = fromGregorian 2019 7 23
         , O.amount = scientific 1405 (-1) }

-- Debit of 53 on the same day as opE
opF :: O.Operation
opF = op { O.operationId = read "62814544-4aba-47b1-b36c-87d0d03e39b7" :: UUID
         , O.operationType = O.Debit
         , O.date = fromGregorian 2019 7 23
         , O.amount = scientific 53 0 }
-- 40.50 + 140.50 - 53 = 128.00
-- Statement date for 2019-07-23 (opE and opF share the day).
sDateEF :: StatementDate
sDateEF = StatementDate (O.date opE) [opE, opF] (scientific 128 0)

-- Debit of 22 on 2019-07-28
opG :: O.Operation
opG = op { O.operationId = read "6c181235-9d51-4a17-81c1-95fbae0e7c70" :: UUID
         , O.operationType = O.Debit
         , O.date = fromGregorian 2019 7 28
         , O.amount = scientific 22 0 }
-- 128 - 22 = 106
-- Statement date for 2019-07-28.
sDateG :: StatementDate
sDateG = StatementDate (O.date opG) [opG] (scientific 106 0)

-- Credit of 0.50 on 2019-07-31
opH :: O.Operation
opH = op { O.operationId = read "28f06237-a873-4311-b97f-7ead932896f5" :: UUID
         , O.operationType = O.Credit
         , O.date = fromGregorian 2019 7 31
         , O.amount = scientific 5 (-1) }
-- 106 + 0.50 = 106.50
-- Statement date for 2019-07-31.
sDateH :: StatementDate
sDateH = StatementDate (O.date opH) [opH] (scientific 1065 (-1))

-- Account id shared by every fixture operation.
accId :: UUID
accId = read "2ad34674-7825-49cc-b985-342eb48285c4" :: UUID
| null | https://raw.githubusercontent.com/jeopard/haskell-checking-account/27a889e507ad830ccb476a9663a5ab62aba8baa7/test/Models/StatementSpec.hs | haskell | module Models.StatementSpec (spec) where
import Data.Scientific
import Data.Time.Calendar
import Data.UUID
import Test.Hspec
import Models.Statement
import Models.StatementDate
import qualified Models.Operation as O
spec :: Spec
spec = do
fromOperationsSpec
fromOperationsSpec :: Spec
fromOperationsSpec = do
describe "fromOperations" $ do
context "when all operations are between the dates" $ do
it "creates a Statement with the correct data" $ do
let startDate = fromGregorian 2018 5 1
endDate = fromGregorian 2020 1 1
result = fromOperations allOps startDate endDate
(fromDate result) `shouldBe` startDate
(toDate result) `shouldBe` endDate
(statementDates result) `shouldBe` [sDateA, sDateBC, sDateD, sDateEF, sDateG, sDateH]
context "when all operations are before the dates" $ do
it "creates a Statement with the correct data" $ do
let startDate = fromGregorian 2020 5 1
endDate = fromGregorian 2021 1 1
result = fromOperations allOps startDate endDate
(fromDate result) `shouldBe` startDate
(toDate result) `shouldBe` endDate
(statementDates result) `shouldBe` []
context "when all operations are after the dates" $ do
it "creates a Statement with the correct data" $ do
let startDate = fromGregorian 2010 5 1
endDate = fromGregorian 2011 1 1
result = fromOperations allOps startDate endDate
(fromDate result) `shouldBe` startDate
(toDate result) `shouldBe` endDate
(statementDates result) `shouldBe` []
context "when some operations are on the dates" $ do
it "creates a Statement including these operations" $ do
let startDate = fromGregorian 2019 7 16
endDate = fromGregorian 2019 7 23
result = fromOperations allOps startDate endDate
(fromDate result) `shouldBe` startDate
(toDate result) `shouldBe` endDate
(statementDates result) `shouldBe` [sDateBC, sDateD, sDateEF]
context "when fromDate and toDate are on the same day" $ do
it "creates a Statement including the operations of the day" $ do
let startDate = fromGregorian 2019 7 16
endDate = fromGregorian 2019 7 16
result = fromOperations allOps startDate endDate
(fromDate result) `shouldBe` startDate
(toDate result) `shouldBe` endDate
(statementDates result) `shouldBe` [sDateBC]
allOps :: [O.Operation]
allOps = [opA, opB, opC, opD, opE, opF, opG, opH]
the only Operation data that matter here are the operation type , date , and amount ,
so we 'll just create one operation and use it as a default for creating others
op :: O.Operation
op = let opId = read "c2cc10e1-57d6-4b6f-9899-38d972112d8c" :: UUID
in O.Operation { O.operationId = opId
, O.accountId = accId
, O.operationType = O.Credit
, O.date = fromGregorian 2019 7 14
, O.amount = scientific 100 0
, O.description = "Amazon transaction" }
opA :: O.Operation
opA = op
sDateA :: StatementDate
sDateA = StatementDate (O.date opA) [opA] (scientific 100 0)
opB :: O.Operation
opB = op { O.operationId = read "df9abf28-78ce-483d-8de5-368d67763cb8" :: UUID
, O.operationType = O.Debit
, O.date = fromGregorian 2019 7 16
, O.amount = scientific 455 (-1) }
opC :: O.Operation
opC = op { O.operationId = read "7ce81ac6-1abb-4008-9009-dec28f95629c" :: UUID
, O.operationType = O.Credit
, O.date = fromGregorian 2019 7 16
, O.amount = scientific 10 0 }
100 - 45.50 + 10 = 64.50
sDateBC :: StatementDate
sDateBC = StatementDate { date = (O.date opB), operations = [opB, opC], endOfDayBalance= (scientific 645 (-1))}
opD :: O.Operation
opD = op { O.operationId = read "3657d672-92ed-430d-aed8-0bcd54b0a4d8" :: UUID
, O.operationType = O.Debit
, O.date = fromGregorian 2019 7 20
, O.amount = scientific 24 0 }
64.50 - 24 = 40.50
sDateD :: StatementDate
sDateD = StatementDate (O.date opD) [opD] (scientific 405 (-1))
opE :: O.Operation
opE = op { O.operationId = read "ca681fe0-1561-4604-b3a9-941c86c6916c" :: UUID
, O.operationType = O.Credit
, O.date = fromGregorian 2019 7 23
, O.amount = scientific 1405 (-1) }
opF :: O.Operation
opF = op { O.operationId = read "62814544-4aba-47b1-b36c-87d0d03e39b7" :: UUID
, O.operationType = O.Debit
, O.date = fromGregorian 2019 7 23
, O.amount = scientific 53 0 }
40.50 + 140.50 - 53 = 128.00
sDateEF :: StatementDate
sDateEF = StatementDate (O.date opE) [opE, opF] (scientific 128 0)
opG :: O.Operation
opG = op { O.operationId = read "6c181235-9d51-4a17-81c1-95fbae0e7c70" :: UUID
, O.operationType = O.Debit
, O.date = fromGregorian 2019 7 28
, O.amount = scientific 22 0 }
128 - 22 = 106
sDateG :: StatementDate
sDateG = StatementDate (O.date opG) [opG] (scientific 106 0)
opH :: O.Operation
opH = op { O.operationId = read "28f06237-a873-4311-b97f-7ead932896f5" :: UUID
, O.operationType = O.Credit
, O.date = fromGregorian 2019 7 31
, O.amount = scientific 5 (-1) }
106 + 0.50 = 106.50
sDateH :: StatementDate
sDateH = StatementDate (O.date opH) [opH] (scientific 1065 (-1))
accId :: UUID
accId = read "2ad34674-7825-49cc-b985-342eb48285c4" :: UUID
|
|
0f8f7edf5bea97b425505a94280abf4dd2d934fdbf740663441c9a113cf67319 | aziem/binaryninja-ocaml | functiongraph.ml | open Ctypes
open Foreign
module B = Ffi_bindings.Bindings(Ffi_generated_types)(Ffi_generated)
open Ffi_bindings
open B
type bn_functiongraph = Typedefs.bn_functiongraph Ctypes.structure Ctypes_static.ptr
type bn_functiongraphblock = Typedefs.bn_functiongraphblock Ctypes.structure Ctypes_static.ptr
type bn_function_graph_type = Typedefs.bn_function_graph_type =
| BN_NormalFunctionGraph
| BN_LowLevelILFunctionGraph
| BN_LiftedILFunctionGraph
| BN_LowLevelILSSAFormFunctionGraph
| BN_MediumLevelILFunctionGraph
| BN_MediumLevelILSSAFormFunctionGraph
| BN_MappedMediumLevelILFunctionGraph
| BN_MappedMediumLevelILSSAFormFunctionGraph
let create_function_graph f =
B.bn_create_function_graph f
let get_function g =
B.bn_get_function_for_function_graph g
let get_graph_blocks f =
let i = allocate size_t (Unsigned.Size_t.of_int 0) in
let lst = B.bn_get_function_graph_blocks f i in
let sz = Unsigned.Size_t.to_int !@i in
let rec loop acc i =
match i with
| _ when i=sz -> acc
| _ as n ->
let block = !@(lst +@ n) in
loop (block :: acc) (n+1)
in
loop [] 0
| null | https://raw.githubusercontent.com/aziem/binaryninja-ocaml/5773d791ebb717816b8c47863bce8122f39764b4/lib/functiongraph.ml | ocaml | open Ctypes
open Foreign
module B = Ffi_bindings.Bindings(Ffi_generated_types)(Ffi_generated)
open Ffi_bindings
open B
type bn_functiongraph = Typedefs.bn_functiongraph Ctypes.structure Ctypes_static.ptr
type bn_functiongraphblock = Typedefs.bn_functiongraphblock Ctypes.structure Ctypes_static.ptr
type bn_function_graph_type = Typedefs.bn_function_graph_type =
| BN_NormalFunctionGraph
| BN_LowLevelILFunctionGraph
| BN_LiftedILFunctionGraph
| BN_LowLevelILSSAFormFunctionGraph
| BN_MediumLevelILFunctionGraph
| BN_MediumLevelILSSAFormFunctionGraph
| BN_MappedMediumLevelILFunctionGraph
| BN_MappedMediumLevelILSSAFormFunctionGraph
let create_function_graph f =
B.bn_create_function_graph f
let get_function g =
B.bn_get_function_for_function_graph g
let get_graph_blocks f =
let i = allocate size_t (Unsigned.Size_t.of_int 0) in
let lst = B.bn_get_function_graph_blocks f i in
let sz = Unsigned.Size_t.to_int !@i in
let rec loop acc i =
match i with
| _ when i=sz -> acc
| _ as n ->
let block = !@(lst +@ n) in
loop (block :: acc) (n+1)
in
loop [] 0
|
|
8ff0d35035ef779ab0789f4c2b8032f5eba612379f9e2b0fe44b646a2ccc0837 | gchrupala/morfette | Assoc.hs | # LANGUAGE MultiParamTypeClasses , FunctionalDependencies
, FlexibleInstances #
, FlexibleInstances #-}
module GramLab.Data.Assoc ( Assoc
, fromAssoc
)
where
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
import qualified Data.List as List
class (Ord k) => Assoc assoc k v | assoc -> k v where
toList :: assoc -> [(k,v)]
fromList :: [(k,v)] -> assoc
fromAssoc :: (Assoc assoc2 k v) => assoc -> assoc2
fromAssoc = fromList . toList
instance (Ord k) => Assoc [(k,v)] k v where
toList = id
fromList = id
instance Assoc (IntMap.IntMap v) Int v where
toList = IntMap.toAscList
fromList = IntMap.fromList
instance (Ord k) => Assoc (Map.Map k v) k v where
toList = Map.toAscList
fromList = Map.fromList
| null | https://raw.githubusercontent.com/gchrupala/morfette/be40676c975d660bbb893953d354168506069862/src/GramLab/Data/Assoc.hs | haskell | # LANGUAGE MultiParamTypeClasses , FunctionalDependencies
, FlexibleInstances #
, FlexibleInstances #-}
module GramLab.Data.Assoc ( Assoc
, fromAssoc
)
where
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
import qualified Data.List as List
class (Ord k) => Assoc assoc k v | assoc -> k v where
toList :: assoc -> [(k,v)]
fromList :: [(k,v)] -> assoc
fromAssoc :: (Assoc assoc2 k v) => assoc -> assoc2
fromAssoc = fromList . toList
instance (Ord k) => Assoc [(k,v)] k v where
toList = id
fromList = id
instance Assoc (IntMap.IntMap v) Int v where
toList = IntMap.toAscList
fromList = IntMap.fromList
instance (Ord k) => Assoc (Map.Map k v) k v where
toList = Map.toAscList
fromList = Map.fromList
|
|
c2775eae2b221dd11a70e67dd7da57d7fc03716de0f7b6741f04f884600e4fc1 | eburlingame/arinc-parser | sample_records.clj | (ns arinc424.sample-records
(:require [clojure.test :refer :all]))
(def sample-records
[{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 113.1,
:vor-longitude -83.63869444444445,
:vor-name "WATERVILLE",
:vor-ndb-ident "VWV",
:dme-latitude 41.451486111111116,
:dme-longitude -83.63869444444445,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 41.451486111111116,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22156,
:subsection-code "",
:dme-elevation 664,
:station-declination (:west-of-true-north 2.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K7",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 116.4,
:vor-longitude -83.89472777777779,
:vor-name "VOLUNTEER",
:vor-ndb-ident "VXV",
:dme-latitude 35.90483888888889,
:dme-longitude -83.89472777777779,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 35.90483888888889,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22157,
:subsection-code "",
:dme-elevation 1290,
:station-declination (:west-of-true-north 3.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K1",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 116.0,
:vor-longitude -120.44463611111111,
:vor-name "YAKIMA",
:vor-ndb-ident "YKM",
:dme-latitude 46.57024166666667,
:dme-longitude -120.44463611111111,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 46.57024166666667,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22160,
:subsection-code "",
:dme-elevation 984,
:station-declination (:east-of-true-north 21.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K3",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 111.4,
:vor-longitude -97.38497777777778,
:vor-name "YANKTON",
:vor-ndb-ident "YKN",
:dme-latitude 42.918375,
:dme-longitude -97.38497777777778,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 42.918375,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22161,
:subsection-code "",
:dme-elevation 1301,
:station-declination (:east-of-true-north 7.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 109.0,
:vor-longitude -80.67466388888889,
:vor-name "YOUNGSTOWN",
:vor-ndb-ident "YNG",
:dme-latitude 41.331025000000004,
:dme-longitude -80.67466388888889,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 11, :year 17},
:vor-latitude 41.331025000000004,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22162,
:subsection-code "",
:dme-elevation 1159,
:station-declination (:west-of-true-north 5.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 112.8,
:vor-longitude -82.97833888888889,
:vor-name "YORK",
:vor-ndb-ident "YRK",
:dme-latitude 38.644133333333336,
:dme-longitude -82.97833888888889,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 38.644133333333336,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22163,
:subsection-code "",
:dme-elevation 1040,
:station-declination (:west-of-true-north 5.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 116.7,
:vor-longitude -79.29444444444444,
:vor-name "BIMINI",
:vor-ndb-ident "ZBV",
:dme-latitude 25.704166666666666,
:dme-longitude -79.29444444444444,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 12, :year 16},
:vor-latitude 25.704166666666666,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22164,
:subsection-code "",
:dme-elevation 10,
:station-declination (:west-of-true-north 4.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 113.2,
:vor-longitude -78.69784722222222,
:vor-name "FREEPORT",
:vor-ndb-ident "ZFP",
:dme-latitude 26.555347222222224,
:dme-longitude -78.69784722222222,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 26.555347222222224,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22165,
:subsection-code "",
:dme-elevation 7,
:station-declination (:west-of-true-north 3.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 112.7,
:vor-longitude -77.44642777777779,
:vor-name "NASSAU",
:vor-ndb-ident "ZQA",
:dme-latitude 25.025516666666665,
:dme-longitude -77.44642777777779,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 11, :year 17},
:vor-latitude 25.025516666666665,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22166,
:subsection-code "",
:dme-elevation 7,
:station-declination (:west-of-true-north 6.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 112.9,
:vor-longitude -77.37916666666666,
:vor-name "TREASURE CAY",
:vor-ndb-ident "ZTC",
:dme-latitude 26.734722222222224,
:dme-longitude -77.37916666666666,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 3, :year 17},
:vor-latitude 26.734722222222224,
:airport-icao-ident "",
:figure-of-merit :extended-high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22167,
:subsection-code "",
:dme-elevation 11,
:station-declination (:west-of-true-north 5.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K2",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 113.4,
:vor-longitude -109.15450833333334,
:vor-name "ZUNI",
:vor-ndb-ident "ZUN",
:dme-latitude 34.96575277777778,
:dme-longitude -109.15450833333334,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 34.96575277777778,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22168,
:subsection-code "",
:dme-elevation 6550,
:station-declination (:east-of-true-north 14.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 111.4,
:vor-longitude -81.8926,
:vor-name "ZANESVILLE",
:vor-ndb-ident "ZZV",
:dme-latitude 39.940861111111104,
:dme-longitude -81.8926,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 8, :year 16},
:vor-latitude 39.940861111111104,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22169,
:subsection-code "",
:dme-elevation 898,
:station-declination (:west-of-true-north 6.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}]) | null | https://raw.githubusercontent.com/eburlingame/arinc-parser/1bef86924aef21888c27301bf51af90262ec4c52/test/arinc424/sample_records.clj | clojure | (ns arinc424.sample-records
(:require [clojure.test :refer :all]))
(def sample-records
[{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 113.1,
:vor-longitude -83.63869444444445,
:vor-name "WATERVILLE",
:vor-ndb-ident "VWV",
:dme-latitude 41.451486111111116,
:dme-longitude -83.63869444444445,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 41.451486111111116,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22156,
:subsection-code "",
:dme-elevation 664,
:station-declination (:west-of-true-north 2.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K7",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 116.4,
:vor-longitude -83.89472777777779,
:vor-name "VOLUNTEER",
:vor-ndb-ident "VXV",
:dme-latitude 35.90483888888889,
:dme-longitude -83.89472777777779,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 35.90483888888889,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22157,
:subsection-code "",
:dme-elevation 1290,
:station-declination (:west-of-true-north 3.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K1",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 116.0,
:vor-longitude -120.44463611111111,
:vor-name "YAKIMA",
:vor-ndb-ident "YKM",
:dme-latitude 46.57024166666667,
:dme-longitude -120.44463611111111,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 46.57024166666667,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22160,
:subsection-code "",
:dme-elevation 984,
:station-declination (:east-of-true-north 21.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K3",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 111.4,
:vor-longitude -97.38497777777778,
:vor-name "YANKTON",
:vor-ndb-ident "YKN",
:dme-latitude 42.918375,
:dme-longitude -97.38497777777778,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 42.918375,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22161,
:subsection-code "",
:dme-elevation 1301,
:station-declination (:east-of-true-north 7.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 109.0,
:vor-longitude -80.67466388888889,
:vor-name "YOUNGSTOWN",
:vor-ndb-ident "YNG",
:dme-latitude 41.331025000000004,
:dme-longitude -80.67466388888889,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 11, :year 17},
:vor-latitude 41.331025000000004,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22162,
:subsection-code "",
:dme-elevation 1159,
:station-declination (:west-of-true-north 5.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 112.8,
:vor-longitude -82.97833888888889,
:vor-name "YORK",
:vor-ndb-ident "YRK",
:dme-latitude 38.644133333333336,
:dme-longitude -82.97833888888889,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 38.644133333333336,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22163,
:subsection-code "",
:dme-elevation 1040,
:station-declination (:west-of-true-north 5.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 116.7,
:vor-longitude -79.29444444444444,
:vor-name "BIMINI",
:vor-ndb-ident "ZBV",
:dme-latitude 25.704166666666666,
:dme-longitude -79.29444444444444,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 12, :year 16},
:vor-latitude 25.704166666666666,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22164,
:subsection-code "",
:dme-elevation 10,
:station-declination (:west-of-true-north 4.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 113.2,
:vor-longitude -78.69784722222222,
:vor-name "FREEPORT",
:vor-ndb-ident "ZFP",
:dme-latitude 26.555347222222224,
:dme-longitude -78.69784722222222,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 26.555347222222224,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22165,
:subsection-code "",
:dme-elevation 7,
:station-declination (:west-of-true-north 3.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 112.7,
:vor-longitude -77.44642777777779,
:vor-name "NASSAU",
:vor-ndb-ident "ZQA",
:dme-latitude 25.025516666666665,
:dme-longitude -77.44642777777779,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 11, :year 17},
:vor-latitude 25.025516666666665,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22166,
:subsection-code "",
:dme-elevation 7,
:station-declination (:west-of-true-north 6.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "MY",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 112.9,
:vor-longitude -77.37916666666666,
:vor-name "TREASURE CAY",
:vor-ndb-ident "ZTC",
:dme-latitude 26.734722222222224,
:dme-longitude -77.37916666666666,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 3, :year 17},
:vor-latitude 26.734722222222224,
:airport-icao-ident "",
:figure-of-merit :extended-high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22167,
:subsection-code "",
:dme-elevation 11,
:station-declination (:west-of-true-north 5.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K2",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 113.4,
:vor-longitude -109.15450833333334,
:vor-name "ZUNI",
:vor-ndb-ident "ZUN",
:dme-latitude 34.96575277777778,
:dme-longitude -109.15450833333334,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 5, :year 16},
:vor-latitude 34.96575277777778,
:airport-icao-ident "",
:figure-of-merit :high-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22168,
:subsection-code "",
:dme-elevation 6550,
:station-declination (:east-of-true-north 14.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}
{:icao-code "K5",
:frequency-protection-distance nil,
:dme-ident "",
:vor-ndb-freq 111.4,
:vor-longitude -81.8926,
:vor-name "ZANESVILLE",
:vor-ndb-ident "ZZV",
:dme-latitude 39.940861111111104,
:dme-longitude -81.8926,
:navaid-class nil,
:ils-dme-bias nil,
:cycle-data {:cycle 8, :year 16},
:vor-latitude 39.940861111111104,
:airport-icao-ident "",
:figure-of-merit :low-altitude-use,
:continuation-record-num 0,
:section-code "D",
:file-record-num 22169,
:subsection-code "",
:dme-elevation 898,
:station-declination (:west-of-true-north 6.0),
:record-type :standard,
:datum-code "NAW",
:customer-area-code :usa}]) |
|
ee817ef7deefdc01dc183e12ee17200255cdd7bf4fd9d8a660e2e6fb1ec84f27 | roehst/tapl-implementations | core.mli | module Core
Core typechecking and evaluation functions
Core typechecking and evaluation functions
*)
open Syntax
open Support.Error
val typeof : context -> term -> ty
type store
val emptystore : store
val shiftstore : int -> store -> store
val eval : context -> store -> term -> term * store
val kindof : context -> ty -> kind
val tyeqv : context -> ty -> ty -> bool
val simplifyty : context -> ty -> ty
val evalbinding : context -> store -> binding -> binding * store
| null | https://raw.githubusercontent.com/roehst/tapl-implementations/23c0dc505a8c0b0a797201a7e4e3e5b939dd8fdb/fullomega/core.mli | ocaml | module Core
Core typechecking and evaluation functions
Core typechecking and evaluation functions
*)
open Syntax
open Support.Error
val typeof : context -> term -> ty
type store
val emptystore : store
val shiftstore : int -> store -> store
val eval : context -> store -> term -> term * store
val kindof : context -> ty -> kind
val tyeqv : context -> ty -> ty -> bool
val simplifyty : context -> ty -> ty
val evalbinding : context -> store -> binding -> binding * store
|
|
ba61b830a5e6f3d692e1b763d26471039e6cc219c0d26e2fa3e72c5985fba767 | fp-works/2019-winter-Haskell-school | PartySpec.hs | import Data.Tree
import Employee
import Party
import Test.Hspec
main :: IO ()
main =
hspec $ do
describe "Party" $ do
let gl1 = GL [Emp {empName = "Emp 1", empFun = 10}] 10
let gl2 = GL [Emp {empName = "Emp 2", empFun = 15}] 15
describe "exercie 1" $ do
describe "1.1" $ do
it "should return a correct guestList with an added Employee" $ do
let initEL = [Emp {empName = "John", empFun = 8}]
let initGL = GL initEL 8
let newEmp = Emp {empName = "Tien", empFun = 15}
glCons newEmp initGL `shouldBe` (GL (newEmp : initEL) 23)
describe "1.2" $ do
it "should do mconcat to add to GuestLists" $ do
mconcat [gl1, gl2] `shouldBe`
(GL
[ Emp {empName = "Emp 1", empFun = 10}
, Emp {empName = "Emp 2", empFun = 15}
]
25)
describe "1.3" $ do
it "should return more fun guestList" $ do
moreFun gl1 gl2 `shouldBe` gl2
describe "exercie 2" $ do
it "should work on the treeFold" $ do
let mockTree =
Node
{ rootLabel = 1
, subForest =
[ Node {rootLabel = 5, subForest = []}
, Node {rootLabel = 3, subForest = []}
]
}
treeFold (\x y -> x + sum (y)) mockTree `shouldBe` 9
describe "exercie 3" $ do
it "should return a pair of optimal guestList with and without the boss" $ do
let boss = Emp {empName = "Boss", empFun = 20}
let employee1 = Emp {empName = "Emp1", empFun = 5}
let employee2 = Emp {empName = "Emp2", empFun = 17}
let gls = [(GL [employee1] 5, mempty), (GL [employee2] 17, mempty)]
(nextLevel boss gls) `shouldBe`
(GL [boss] 20, GL [employee1, employee2] 22)
describe "exercie 4" $ do
it "should return maxFun guest list from a company" $ do
(getFun . maxFun $ testCompany) `shouldBe` 26
| null | https://raw.githubusercontent.com/fp-works/2019-winter-Haskell-school/823b67f019b9e7bc0d3be36711c0cc7da4eba7d2/cis194/week8/tien/test/PartySpec.hs | haskell | import Data.Tree
import Employee
import Party
import Test.Hspec
main :: IO ()
main =
hspec $ do
describe "Party" $ do
let gl1 = GL [Emp {empName = "Emp 1", empFun = 10}] 10
let gl2 = GL [Emp {empName = "Emp 2", empFun = 15}] 15
describe "exercie 1" $ do
describe "1.1" $ do
it "should return a correct guestList with an added Employee" $ do
let initEL = [Emp {empName = "John", empFun = 8}]
let initGL = GL initEL 8
let newEmp = Emp {empName = "Tien", empFun = 15}
glCons newEmp initGL `shouldBe` (GL (newEmp : initEL) 23)
describe "1.2" $ do
it "should do mconcat to add to GuestLists" $ do
mconcat [gl1, gl2] `shouldBe`
(GL
[ Emp {empName = "Emp 1", empFun = 10}
, Emp {empName = "Emp 2", empFun = 15}
]
25)
describe "1.3" $ do
it "should return more fun guestList" $ do
moreFun gl1 gl2 `shouldBe` gl2
describe "exercie 2" $ do
it "should work on the treeFold" $ do
let mockTree =
Node
{ rootLabel = 1
, subForest =
[ Node {rootLabel = 5, subForest = []}
, Node {rootLabel = 3, subForest = []}
]
}
treeFold (\x y -> x + sum (y)) mockTree `shouldBe` 9
describe "exercie 3" $ do
it "should return a pair of optimal guestList with and without the boss" $ do
let boss = Emp {empName = "Boss", empFun = 20}
let employee1 = Emp {empName = "Emp1", empFun = 5}
let employee2 = Emp {empName = "Emp2", empFun = 17}
let gls = [(GL [employee1] 5, mempty), (GL [employee2] 17, mempty)]
(nextLevel boss gls) `shouldBe`
(GL [boss] 20, GL [employee1, employee2] 22)
describe "exercie 4" $ do
it "should return maxFun guest list from a company" $ do
(getFun . maxFun $ testCompany) `shouldBe` 26
|
|
95ac5179c4345e483d79e63f39d734bbba12440ee4ce6874f33b715d4121c22e | justin2004/git_to_rdf | project.clj | (defproject git_to_rdf "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "TODO"
:url "TODO"}
:dependencies [[org.clojure/clojure "1.10.0"]
[org.apache.jena/apache-jena-libs "4.5.0" :extension "pom"]
[org.clojure/data.csv "1.0.1"]
[org.clojure/data.json "2.4.0"]
[progrock "0.1.2"]
[metosin/jsonista "0.3.5"]
[org.clojure/tools.cli "1.0.206"]
]
:main git-to-rdf.core
:uberjar {:aot [git-to-rdf.core]}
:clean-targets ^{:protect false} ["/app/target"]
:jvm-opts ["-Dorg.slf4j.simpleLogger.logFile=git_to_rdf.log"]
:resource-paths ["sparql-anything-0.8.0.jar"]
:repl-options {:init-ns git-to-rdf.core})
| null | https://raw.githubusercontent.com/justin2004/git_to_rdf/708e5d86fd723642482e0685f0cf73f554c21529/project.clj | clojure | (defproject git_to_rdf "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "TODO"
:url "TODO"}
:dependencies [[org.clojure/clojure "1.10.0"]
[org.apache.jena/apache-jena-libs "4.5.0" :extension "pom"]
[org.clojure/data.csv "1.0.1"]
[org.clojure/data.json "2.4.0"]
[progrock "0.1.2"]
[metosin/jsonista "0.3.5"]
[org.clojure/tools.cli "1.0.206"]
]
:main git-to-rdf.core
:uberjar {:aot [git-to-rdf.core]}
:clean-targets ^{:protect false} ["/app/target"]
:jvm-opts ["-Dorg.slf4j.simpleLogger.logFile=git_to_rdf.log"]
:resource-paths ["sparql-anything-0.8.0.jar"]
:repl-options {:init-ns git-to-rdf.core})
|
|
ca77bfbfcf82c9ec78d5932cde6f517453235fab2441d0e166a9af484f71c518 | kowainik/piece-of-cake-slayer | Schema.hs | -- | Helper functions to create and drop database from @.sql@ files.
module Piece.Db.Schema
( prepareDb
) where
import CakeSlayer.Db (WithDb, executeRaw_)
| Prepare data base for the testing environment :
1 . Drop all existing tables .
2 . Created tables from scratch .
1. Drop all existing tables.
2. Created tables from scratch.
-}
prepareDb :: (WithDb env m) => m ()
prepareDb = teardownDb >> setupDb >> seedDb
-- | Create tables from the @sql/schema.sql@ file.
setupDb :: (WithDb env m) => m ()
setupDb = executeFile "sql/schema.sql"
-- | Insert values from the @sql/seed.sql@ file.
seedDb :: (WithDb env m) => m ()
seedDb = executeFile "sql/seed.sql"
-- | Delete tables using the @sql/drop.sql@ file.
teardownDb :: (WithDb env m) => m ()
teardownDb = executeFile "sql/drop.sql"
executeFile :: (WithDb env m) => FilePath -> m ()
executeFile path = do
sqlStatements <- readFile path
executeRaw_ (fromString sqlStatements)
| null | https://raw.githubusercontent.com/kowainik/piece-of-cake-slayer/98c44dae5cf8ffce896292beb042c315800ed0ae/src/Piece/Db/Schema.hs | haskell | | Helper functions to create and drop database from @.sql@ files.
| Create tables from the @sql/schema.sql@ file.
| Insert values from the @sql/seed.sql@ file.
| Delete tables using the @sql/drop.sql@ file. |
module Piece.Db.Schema
( prepareDb
) where
import CakeSlayer.Db (WithDb, executeRaw_)
| Prepare data base for the testing environment :
1 . Drop all existing tables .
2 . Created tables from scratch .
1. Drop all existing tables.
2. Created tables from scratch.
-}
prepareDb :: (WithDb env m) => m ()
prepareDb = teardownDb >> setupDb >> seedDb
setupDb :: (WithDb env m) => m ()
setupDb = executeFile "sql/schema.sql"
seedDb :: (WithDb env m) => m ()
seedDb = executeFile "sql/seed.sql"
teardownDb :: (WithDb env m) => m ()
teardownDb = executeFile "sql/drop.sql"
executeFile :: (WithDb env m) => FilePath -> m ()
executeFile path = do
sqlStatements <- readFile path
executeRaw_ (fromString sqlStatements)
|
372cce1838e93e25c77f84894d90cedea8d6a0f99add6e17ff810cfd5ccde8d8 | ogaml/ogaml | OS_impl_osx.ml |
type os =
| Windows
| Linux
| OSX
let os = OSX
let resources_dir = Cocoa.resource_path () ^ "/"
let canonical_path s =
if Sys.file_exists s then
Cocoa.realpath s
else
invalid_arg ("File not found : " ^ s)
| null | https://raw.githubusercontent.com/ogaml/ogaml/5e74597521abf7ba2833a9247e55780eabfbab78/src/core/OS_impl_osx.ml | ocaml |
type os =
| Windows
| Linux
| OSX
let os = OSX
let resources_dir = Cocoa.resource_path () ^ "/"
let canonical_path s =
if Sys.file_exists s then
Cocoa.realpath s
else
invalid_arg ("File not found : " ^ s)
|
|
2a3ddbf4d2b9b1b3225f051455845bf9a5325db4ada300021ab5aae10c81e2d5 | returntocorp/ocaml-tree-sitter-core | Json_rule_adapter.mli |
Convert between tree - sitter 's representation of variants
e.g. { " type " : " SYMBOL " , " name " : " foo " } and atd 's convention
e.g. [ " SYMBOL " , " foo " ] .
This is used in Tree_sitter.atd .
Convert between tree-sitter's representation of variants
e.g. {"type": "SYMBOL", "name": "foo"} and atd's convention
e.g. ["SYMBOL", "foo"].
This is used in Tree_sitter.atd.
*)
type json = Yojson.Safe.t
val normalize : json -> json
val restore : json -> json
| null | https://raw.githubusercontent.com/returntocorp/ocaml-tree-sitter-core/28f750bb894ea4c0a7f6b911e568ab9d731cc0b5/src/gen/lib/Json_rule_adapter.mli | ocaml |
Convert between tree - sitter 's representation of variants
e.g. { " type " : " SYMBOL " , " name " : " foo " } and atd 's convention
e.g. [ " SYMBOL " , " foo " ] .
This is used in Tree_sitter.atd .
Convert between tree-sitter's representation of variants
e.g. {"type": "SYMBOL", "name": "foo"} and atd's convention
e.g. ["SYMBOL", "foo"].
This is used in Tree_sitter.atd.
*)
type json = Yojson.Safe.t
val normalize : json -> json
val restore : json -> json
|
|
46e1bf6325dbc5e9f06147d9d41661030dda395b7d5d98ea113b861c33b6102f | haskell/parsec | Text.hs | {-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
Module : Text . Parsec . String
-- Copyright : (c) Antoine Latter 2011
-- License : BSD-style (see the file libraries/parsec/LICENSE)
--
-- Maintainer :
-- Stability : provisional
-- Portability : portable
--
-- Convenience definitions for working with 'Text.Text'.
--
-----------------------------------------------------------------------------
module Text.Parsec.Text
( Parser, GenParser, parseFromFile
) where
import qualified Data.Text as Text
import qualified Data.Text.IO as T
import Text.Parsec.Prim
import Text.Parsec.Error
type Parser = Parsec Text.Text ()
type GenParser st = Parsec Text.Text st
-- | @parseFromFile p filePath@ runs a strict text parser @p@ on the
input read from @filePath@ using ' Data . Text . IO.readFile ' . Returns either a ' ParseError '
-- ('Left') or a value of type @a@ ('Right').
--
-- > main = do{ result <- parseFromFile numbers "digits.txt"
-- > ; case result of
-- > Left err -> print err
-- > Right xs -> print (sum xs)
-- > }
--
@since 3.1.14.0
parseFromFile :: Parser a -> FilePath -> IO (Either ParseError a)
parseFromFile p fname
= do input <- T.readFile fname
return (runP p () fname input)
| null | https://raw.githubusercontent.com/haskell/parsec/88cc9e006ddf944da74285471bd7120237d8c191/src/Text/Parsec/Text.hs | haskell | # LANGUAGE Safe #
---------------------------------------------------------------------------
|
Copyright : (c) Antoine Latter 2011
License : BSD-style (see the file libraries/parsec/LICENSE)
Maintainer :
Stability : provisional
Portability : portable
Convenience definitions for working with 'Text.Text'.
---------------------------------------------------------------------------
| @parseFromFile p filePath@ runs a strict text parser @p@ on the
('Left') or a value of type @a@ ('Right').
> main = do{ result <- parseFromFile numbers "digits.txt"
> ; case result of
> Left err -> print err
> Right xs -> print (sum xs)
> }
|
Module : Text . Parsec . String
module Text.Parsec.Text
( Parser, GenParser, parseFromFile
) where
import qualified Data.Text as Text
import qualified Data.Text.IO as T
import Text.Parsec.Prim
import Text.Parsec.Error
type Parser = Parsec Text.Text ()
type GenParser st = Parsec Text.Text st
input read from @filePath@ using ' Data . Text . IO.readFile ' . Returns either a ' ParseError '
@since 3.1.14.0
parseFromFile :: Parser a -> FilePath -> IO (Either ParseError a)
parseFromFile p fname
= do input <- T.readFile fname
return (runP p () fname input)
|
09b21145c8fd0bdc5483fcee290cdea0bef4a44b19d701634be98d83d9f5be68 | ppaml-op3/insomnia | Summary.hs | | Intermediate results collected in the course of the Insomnia→FΩ translation .
module Insomnia.ToF.Summary where
import Data.Monoid (Endo)
import Unbound.Generics.LocallyNameless (Embed)
import qualified FOmega.Syntax as F
import qualified FOmega.SemanticSig as F
| An unpacked form of an ' FOmega . SemanticSig . AbstractSig ' .
∃ αs : κs . fs : Σs
--
-- We make use of the fact that a 'SigSummary' is a 'Monoid'.
--
type ExposedAbstractSig v = ([(F.TyVar, Embed F.Kind)], [(v, F.SemanticSig)])
-- | The result of translating a signature, in unpacked form.
type SigSummary = ExposedAbstractSig F.Field
| A module summary is a triple ( sigSummary , fieldAssigns , )
-- the signature summary gives the type of the module. The field
-- assignments provide a list of terms that will be used to populate
the module record . The termCtx sets up the environment for the
-- field assigns (for example by unpacking existentials, etc). The
-- abstract type variables from the sig summary are assumed to scope
-- over the field assigns and will be packed when the module record is
-- constructed.
--
-- example:
-- @@@
-- (([α:⋆], "M" : ..., "x" : [:α])
-- , ["M" = m, "x" = x]
-- , unpack α,m = M in let x = {val = m.z.val} in •)
-- @@@
-- is the mod summary for something like:
-- @@@
module { module M : { type t ; z : t } = ... ; = M.z }
-- @@@
-- and will be packaged up as the term
-- @@@
-- unpack α,m = M in
-- let x = {val = m.z.val} in
-- pack α, {"M" = m, "x" = x}
-- as ∃α:⋆.{"M" : ..., "x" : [:α]}
-- @@@
--
-- (We partition the term into a field assignment and a termCtx in
-- order to avoid some gratuitous intermediate unpacking and repacking
-- of existential types. Ie, we're doing some of the commuting
-- conversions for unpack-let, unpack-unpack, and let-let at
-- construction-time.)
--
Note that this type is also a Monoid . So we can build up ModSummary values incrementally
type ModSummary' a = (SigSummary, [(F.Field, F.Term)], Endo a)
type ModSummary = ModSummary' F.Term
| null | https://raw.githubusercontent.com/ppaml-op3/insomnia/5fc6eb1d554e8853d2fc929a957c7edce9e8867d/src/Insomnia/ToF/Summary.hs | haskell |
We make use of the fact that a 'SigSummary' is a 'Monoid'.
| The result of translating a signature, in unpacked form.
the signature summary gives the type of the module. The field
assignments provide a list of terms that will be used to populate
field assigns (for example by unpacking existentials, etc). The
abstract type variables from the sig summary are assumed to scope
over the field assigns and will be packed when the module record is
constructed.
example:
@@@
(([α:⋆], "M" : ..., "x" : [:α])
, ["M" = m, "x" = x]
, unpack α,m = M in let x = {val = m.z.val} in •)
@@@
is the mod summary for something like:
@@@
@@@
and will be packaged up as the term
@@@
unpack α,m = M in
let x = {val = m.z.val} in
pack α, {"M" = m, "x" = x}
as ∃α:⋆.{"M" : ..., "x" : [:α]}
@@@
(We partition the term into a field assignment and a termCtx in
order to avoid some gratuitous intermediate unpacking and repacking
of existential types. Ie, we're doing some of the commuting
conversions for unpack-let, unpack-unpack, and let-let at
construction-time.)
| | Intermediate results collected in the course of the Insomnia→FΩ translation .
module Insomnia.ToF.Summary where
import Data.Monoid (Endo)
import Unbound.Generics.LocallyNameless (Embed)
import qualified FOmega.Syntax as F
import qualified FOmega.SemanticSig as F
| An unpacked form of an ' FOmega . SemanticSig . AbstractSig ' .
∃ αs : κs . fs : Σs
type ExposedAbstractSig v = ([(F.TyVar, Embed F.Kind)], [(v, F.SemanticSig)])
type SigSummary = ExposedAbstractSig F.Field
| A module summary is a triple ( sigSummary , fieldAssigns , )
the module record . The termCtx sets up the environment for the
module { module M : { type t ; z : t } = ... ; = M.z }
Note that this type is also a Monoid . So we can build up ModSummary values incrementally
type ModSummary' a = (SigSummary, [(F.Field, F.Term)], Endo a)
type ModSummary = ModSummary' F.Term
|
b117314949585f6924e827790fa484e671ac266a1b3b6eda4e66f18ff960a5ca | nikita-volkov/rebase | Bind.hs | module Rebase.Data.Functor.Bind
(
module Data.Functor.Bind
)
where
import Data.Functor.Bind
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Data/Functor/Bind.hs | haskell | module Rebase.Data.Functor.Bind
(
module Data.Functor.Bind
)
where
import Data.Functor.Bind
|
|
77e7c46bc41ec6e188a86ee918ffae06536f22bec729cb7ed7bfb3d8c9f2bb95 | phantomics/seed | package.lisp | package.lisp
(defpackage #:demo-blog
(:use #:cl))
| null | https://raw.githubusercontent.com/phantomics/seed/f128969c671c078543574395d6b23a1a5f2723f8/demo-blog/package.lisp | lisp | package.lisp
(defpackage #:demo-blog
(:use #:cl))
|
|
d2398cb366358436157e7e090dbe796ebc31c942a484b2493f8c07405d7fa80a | frenchy64/fully-satisfies | everyp_test.clj | (ns io.github.frenchy64.fully-satisfies.everyp-test
(:refer-clojure :exclude [every-pred])
(:require [clojure.test :refer [is]]
[clojure.math.combinatorics :as comb]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[io.github.frenchy64.fully-satisfies.never :refer [never?]]
[io.github.frenchy64.fully-satisfies.uncaught-testing-contexts :refer [testing deftest]]
[io.github.frenchy64.fully-satisfies.everyp :refer [everyp every-pred]]))
(defn everyp-reference [& ps]
(fn [& args] (every? #(every? % args) ps)))
TODO order of operations
TODO test zero arity
(deftest everyp-test
(doseq [i (range 7)
false-args (map set (comb/subsets (range i)))
:let [args (map (fn [i] (if (false-args i) true false))
(range i))]
false-preds (map set (comb/subsets (range i)))
:let [preds (map (fn [i] (if (false-preds i) true? false?))
(range i))]]
(is (= (apply (apply everyp preds) args)
(apply (apply every-pred preds) args)
(apply (apply everyp-reference preds) args))
[args preds]))
FIXME
(doseq [everyp [everyp every-pred everyp-reference]]
(testing everyp
(testing "found match"
(doseq [v [true 1 42 :a 'a]
vs [[v]
[false v]
[nil v false]
[false nil v]]
ps [[identity]
[never? identity]
[identity never?]
[never? identity never?]]]
(is (true? (apply (apply everyp ps) vs)))))
(testing "no match"
(doseq [ret-gen [[false]
[nil]
[false nil]]
pred-returns (map (fn [i] (repeatedly i #(rand-nth ret-gen)))
(range 1 7))
args (map (fn [i] (range i))
(range 6))]
(is (false? (apply (apply everyp (map constantly pred-returns))
args))))))))
| null | https://raw.githubusercontent.com/frenchy64/fully-satisfies/ccaa09a62f2f3454d856b4f47be40b0e628fea6b/test/io/github/frenchy64/fully_satisfies/everyp_test.clj | clojure | (ns io.github.frenchy64.fully-satisfies.everyp-test
(:refer-clojure :exclude [every-pred])
(:require [clojure.test :refer [is]]
[clojure.math.combinatorics :as comb]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[io.github.frenchy64.fully-satisfies.never :refer [never?]]
[io.github.frenchy64.fully-satisfies.uncaught-testing-contexts :refer [testing deftest]]
[io.github.frenchy64.fully-satisfies.everyp :refer [everyp every-pred]]))
(defn everyp-reference [& ps]
(fn [& args] (every? #(every? % args) ps)))
TODO order of operations
TODO test zero arity
(deftest everyp-test
(doseq [i (range 7)
false-args (map set (comb/subsets (range i)))
:let [args (map (fn [i] (if (false-args i) true false))
(range i))]
false-preds (map set (comb/subsets (range i)))
:let [preds (map (fn [i] (if (false-preds i) true? false?))
(range i))]]
(is (= (apply (apply everyp preds) args)
(apply (apply every-pred preds) args)
(apply (apply everyp-reference preds) args))
[args preds]))
FIXME
(doseq [everyp [everyp every-pred everyp-reference]]
(testing everyp
(testing "found match"
(doseq [v [true 1 42 :a 'a]
vs [[v]
[false v]
[nil v false]
[false nil v]]
ps [[identity]
[never? identity]
[identity never?]
[never? identity never?]]]
(is (true? (apply (apply everyp ps) vs)))))
(testing "no match"
(doseq [ret-gen [[false]
[nil]
[false nil]]
pred-returns (map (fn [i] (repeatedly i #(rand-nth ret-gen)))
(range 1 7))
args (map (fn [i] (range i))
(range 6))]
(is (false? (apply (apply everyp (map constantly pred-returns))
args))))))))
|
|
cb8f67a9de87412d776936c230d14e8e81e3d07b9e10b5e6b899ed7ae2bcc0d6 | windorg/app-old | Reply.hs | module Web.Controller.Reply where
import Data.Functor (void)
import qualified Data.Set as Set
import Debug.Trace (traceShowId)
import qualified Optics
import Web.Controller.Authorization
import Web.Controller.Prelude
import Web.Helper.Common
import Web.Helper.ReplySource
import Web.View.Reply.Edit
import Web.View.Reply.New
import Prelude (read)
instance (Controller CardController, Controller InboxController) => Controller ReplyController where
action NewReplyAction{cardUpdateId, replySourceSerialized} = do
ensureIsUser -- will redirect to login when logged out
accessDeniedUnless =<< userCanReply cardUpdateId
let replySource = read (cs replySourceSerialized)
let reply =
(newRecord :: Reply)
|> set #cardUpdateId cardUpdateId
setModal NewView{..}
jumpToReplySource replySource
action EditReplyAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanEdit @Reply replyId
let replySource = read (cs replySourceSerialized)
reply <- fetch replyId
setModal EditView{..}
jumpToReplySource replySource
action UpdateReplyAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanEdit @Reply replyId
let replySource = read (cs replySourceSerialized)
reply <- fetch replyId
reply
|> buildReply
|> ifValid \case
Left reply -> do
setModal EditView{..}
jumpToReplySource replySource
Right reply -> do
reply <- reply |> updateRecord
redirectToReplySource replySource
action CreateReplyAction{cardUpdateId, replySourceSerialized} = do
accessDeniedUnless =<< userCanReply cardUpdateId
cardOwner <- getOwnerById @CardUpdate cardUpdateId
let replySource = read (cs replySourceSerialized)
let reply =
(newRecord :: Reply)
|> set #cardUpdateId cardUpdateId
|> set #authorId (Just currentUserId)
reply
|> buildReply
|> traceShowId
|> ifValid \case
Left reply -> do
setModal NewView{..}
jumpToReplySource replySource
Right reply -> do
reply <- reply |> createRecord
cardUpdate <- fetch cardUpdateId
-- When replying to a thread: subscribe the user to that thread
cardUpdate <-
if (currentUserId /= cardOwner)
then
cardUpdate
|> Optics.over #settings_ (#subscribers Optics.%~ Set.insert currentUserId)
|> updateRecord
else pure cardUpdate
-- Send out notifications
let subscribers' = Set.insert cardOwner (cardUpdate ^. #settings_ % #subscribers)
forM_ subscribers' \subscriber -> do
-- ON DELETE CASCADE won't work on subscribers, so we just delete the subscriber when we know
-- the user doesn't exist anymore
subscriberExists <- query @User |> filterWhere (#id, subscriber) |> fetchExists
if subscriberExists
then when (subscriber /= currentUserId) do
let subscriptionUpdate =
(newRecord :: SubscriptionUpdate)
|> set #subscriberId subscriber
|> set #updateKind SukReply
-- TODO: not sure why we fill both cardUpdateId and replyId
|> set #cardUpdateId (Just cardUpdateId)
|> set #replyId (Just (get #id reply))
subscriptionUpdate <- subscriptionUpdate |> createRecord
pure ()
else
cardUpdate
|> Optics.over #settings_ (#subscribers Optics.%~ Set.delete subscriber)
|> updateRecord
|> void
redirectToReplySource replySource
action DeleteReplyAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanDelete @Reply replyId
let replySource = read (cs replySourceSerialized)
reply <- fetch replyId
deleteRecord reply
redirectToReplySource replySource
action UpdateMarkReplyAsReadAction{replySourceSerialized, replyId} = do
-- TODO: am I sure this should be userCanView? Probably not.
accessDeniedUnless =<< userCanView @Reply replyId
mbUpdate <-
query @SubscriptionUpdate
|> filterWhere (#subscriberId, currentUserId)
|> filterWhere (#replyId, Just replyId)
|> filterWhere (#updateKind, SukReply)
|> fetchOneOrNothing
forM_ mbUpdate \update ->
update |> set #isRead True |> updateRecord
let replySource = read (cs replySourceSerialized)
redirectToReplySource replySource
buildReply reply =
reply
|> fill @'["content"]
|> Optics.over #settings_ \settings ->
(settings :: ReplySettings)
{ visibility = if paramOrDefault False "private" then VisibilityPrivate else VisibilityPublic
}
| null | https://raw.githubusercontent.com/windorg/app-old/ed9c5322c8ab8a0275bdcd479be12a3f230da8c9/Web/Controller/Reply.hs | haskell | will redirect to login when logged out
When replying to a thread: subscribe the user to that thread
Send out notifications
ON DELETE CASCADE won't work on subscribers, so we just delete the subscriber when we know
the user doesn't exist anymore
TODO: not sure why we fill both cardUpdateId and replyId
TODO: am I sure this should be userCanView? Probably not. | module Web.Controller.Reply where
import Data.Functor (void)
import qualified Data.Set as Set
import Debug.Trace (traceShowId)
import qualified Optics
import Web.Controller.Authorization
import Web.Controller.Prelude
import Web.Helper.Common
import Web.Helper.ReplySource
import Web.View.Reply.Edit
import Web.View.Reply.New
import Prelude (read)
instance (Controller CardController, Controller InboxController) => Controller ReplyController where
action NewReplyAction{cardUpdateId, replySourceSerialized} = do
accessDeniedUnless =<< userCanReply cardUpdateId
let replySource = read (cs replySourceSerialized)
let reply =
(newRecord :: Reply)
|> set #cardUpdateId cardUpdateId
setModal NewView{..}
jumpToReplySource replySource
action EditReplyAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanEdit @Reply replyId
let replySource = read (cs replySourceSerialized)
reply <- fetch replyId
setModal EditView{..}
jumpToReplySource replySource
action UpdateReplyAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanEdit @Reply replyId
let replySource = read (cs replySourceSerialized)
reply <- fetch replyId
reply
|> buildReply
|> ifValid \case
Left reply -> do
setModal EditView{..}
jumpToReplySource replySource
Right reply -> do
reply <- reply |> updateRecord
redirectToReplySource replySource
action CreateReplyAction{cardUpdateId, replySourceSerialized} = do
accessDeniedUnless =<< userCanReply cardUpdateId
cardOwner <- getOwnerById @CardUpdate cardUpdateId
let replySource = read (cs replySourceSerialized)
let reply =
(newRecord :: Reply)
|> set #cardUpdateId cardUpdateId
|> set #authorId (Just currentUserId)
reply
|> buildReply
|> traceShowId
|> ifValid \case
Left reply -> do
setModal NewView{..}
jumpToReplySource replySource
Right reply -> do
reply <- reply |> createRecord
cardUpdate <- fetch cardUpdateId
cardUpdate <-
if (currentUserId /= cardOwner)
then
cardUpdate
|> Optics.over #settings_ (#subscribers Optics.%~ Set.insert currentUserId)
|> updateRecord
else pure cardUpdate
let subscribers' = Set.insert cardOwner (cardUpdate ^. #settings_ % #subscribers)
forM_ subscribers' \subscriber -> do
subscriberExists <- query @User |> filterWhere (#id, subscriber) |> fetchExists
if subscriberExists
then when (subscriber /= currentUserId) do
let subscriptionUpdate =
(newRecord :: SubscriptionUpdate)
|> set #subscriberId subscriber
|> set #updateKind SukReply
|> set #cardUpdateId (Just cardUpdateId)
|> set #replyId (Just (get #id reply))
subscriptionUpdate <- subscriptionUpdate |> createRecord
pure ()
else
cardUpdate
|> Optics.over #settings_ (#subscribers Optics.%~ Set.delete subscriber)
|> updateRecord
|> void
redirectToReplySource replySource
action DeleteReplyAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanDelete @Reply replyId
let replySource = read (cs replySourceSerialized)
reply <- fetch replyId
deleteRecord reply
redirectToReplySource replySource
action UpdateMarkReplyAsReadAction{replySourceSerialized, replyId} = do
accessDeniedUnless =<< userCanView @Reply replyId
mbUpdate <-
query @SubscriptionUpdate
|> filterWhere (#subscriberId, currentUserId)
|> filterWhere (#replyId, Just replyId)
|> filterWhere (#updateKind, SukReply)
|> fetchOneOrNothing
forM_ mbUpdate \update ->
update |> set #isRead True |> updateRecord
let replySource = read (cs replySourceSerialized)
redirectToReplySource replySource
buildReply reply =
reply
|> fill @'["content"]
|> Optics.over #settings_ \settings ->
(settings :: ReplySettings)
{ visibility = if paramOrDefault False "private" then VisibilityPrivate else VisibilityPublic
}
|
564f09171b226d796299a52f5667f2636780004966372ee5146d741b6efa2cf9 | ralsei/bingus | info.rkt | #lang info
(define collection "bingus-quickscript")
(define pkg-desc "A DrRacket Quickscript to run Bingus")
(define version "0.0001")
(define deps '("bingus-lib"
"base"
"quickscript"))
| null | https://raw.githubusercontent.com/ralsei/bingus/c08cf67534088a19987c6c35bf7006d9a3a39171/bingus-quickscript/info.rkt | racket | #lang info
(define collection "bingus-quickscript")
(define pkg-desc "A DrRacket Quickscript to run Bingus")
(define version "0.0001")
(define deps '("bingus-lib"
"base"
"quickscript"))
|
|
cd68566fe0674b49fe7da4a8a175761d94a536882ca0a58bfed6ed4e9eeb09cf | acieroid/scala-am | countA-1.scm | (letrec ((count (lambda (n) (if (= n 0) "done" (count (- n 1))))))
(count 10))
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/changesBenevolPaper/countA-1.scm | scheme | (letrec ((count (lambda (n) (if (= n 0) "done" (count (- n 1))))))
(count 10))
|
|
95ec9300fab4c7bf518b39cce8b13bde339163e53ca059b6fa605ab39cfc1987 | tip-org/tools | SplitFormulas.hs | -- Split up formulas into smaller parts.
1 . forall x1 .. xn . ( t & u ) = = = > ( forall x1 .. xn . t ) & ( forall x1 .. xn ) . u
2 . forall x1 .. xn . ( t = u ) = = = > ( forall x1 .. xn . t = > u ) & ( forall x1 .. xn . u = > t )
-- if t, u have boolean type.
module Tip.Pass.SplitFormulas where
import Tip.Types
import Tip.Core
splitFormulas :: Ord a => Theory a -> Theory a
splitFormulas thy =
thy { thy_asserts = concatMap splitForm (thy_asserts thy) }
where
splitForm form =
[form{fm_body = body} | body <- split (fm_body form)]
split (Quant info Forall xs body) =
map (Quant info Forall xs) (split body)
split (Builtin And :@: ts) =
concatMap split ts
split (Builtin Equal :@: ts@(t:_))
| exprType t == BuiltinType Boolean =
[ Builtin Implies :@: [t, u] | t <- ts, u <- ts, t /= u]
split t = [t]
| null | https://raw.githubusercontent.com/tip-org/tools/34350072587bd29157d18331eb895a1b2819555f/tip-lib/src/Tip/Pass/SplitFormulas.hs | haskell | Split up formulas into smaller parts.
if t, u have boolean type. | 1 . forall x1 .. xn . ( t & u ) = = = > ( forall x1 .. xn . t ) & ( forall x1 .. xn ) . u
2 . forall x1 .. xn . ( t = u ) = = = > ( forall x1 .. xn . t = > u ) & ( forall x1 .. xn . u = > t )
module Tip.Pass.SplitFormulas where
import Tip.Types
import Tip.Core
splitFormulas :: Ord a => Theory a -> Theory a
splitFormulas thy =
thy { thy_asserts = concatMap splitForm (thy_asserts thy) }
where
splitForm form =
[form{fm_body = body} | body <- split (fm_body form)]
split (Quant info Forall xs body) =
map (Quant info Forall xs) (split body)
split (Builtin And :@: ts) =
concatMap split ts
split (Builtin Equal :@: ts@(t:_))
| exprType t == BuiltinType Boolean =
[ Builtin Implies :@: [t, u] | t <- ts, u <- ts, t /= u]
split t = [t]
|
1a451d036a81f16445c333e4b2544261c269ba21fb975ad6b70e7e3edf47fd27 | hamidreza-s/Tnesia | tnesia_common_bench_SUITE.erl | -module(tnesia_common_bench_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include("tnesia.hrl").
-record(tnesia_record, {id, value}).
-define(VALUE(ByteSize), [X || X <- lists:seq(1, ByteSize)]).
-define(TIMELINE(Int), list_to_binary("timeline-" ++ integer_to_list(Int))).
-define(DEBUG(Format, Args), ct:print(default, 50, Format, Args)).
%%====================================================================
%% CT Callbacks
%%====================================================================
%%--------------------------------------------------------------------
%% suite | groups | all
%%--------------------------------------------------------------------
suite() -> [].
groups() ->
[
{light_benchmark, [sequential], [get_ready, write_records, read_records]},
{normal_benchmark, [sequential], [get_ready, write_records, read_records]},
{heavy_benchmark, [sequential], [get_ready, write_records, read_records]}
].
all() ->
[
{group, light_benchmark},
{group, normal_benchmark},
{group, heavy_benchmark}
].
%%--------------------------------------------------------------------
%% init_per_suite | end_per_suite
%%--------------------------------------------------------------------
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
%%--------------------------------------------------------------------
%% init_per_group | end_per_group
%%--------------------------------------------------------------------
init_per_group(light_benchmark, Config) ->
TnesiaBenchConfig = {tnesia_bench_config,
[
{read_concurrency, 1},
{read_total_queries, 1000},
{read_count_limit, 50},
{read_time_length, {10, second}},
{write_concurrency, 1},
{write_total_queries, 10000},
{write_record_bytesize, 32}
]},
[TnesiaBenchConfig|Config];
init_per_group(normal_benchmark, Config) ->
TnesiaBenchConfig = {tnesia_bench_config,
[
{read_concurrency, 2},
{read_total_queries, 1000},
{read_count_limit, 50},
{read_time_length, {10, second}},
{write_concurrency, 2},
{write_total_queries, 10000},
{write_record_bytesize, 32}
]},
[TnesiaBenchConfig|Config];
init_per_group(heavy_benchmark, Config) ->
TnesiaBenchConfig = {tnesia_bench_config,
[
{read_concurrency, 4},
{read_total_queries, 1000},
{read_count_limit, 50},
{read_time_length, {10, second}},
{write_concurrency, 4},
{write_total_queries, 10000},
{write_record_bytesize, 32}
]},
[TnesiaBenchConfig|Config];
init_per_group(_GroupName, Config) ->
Config.
end_per_group(GroupName, Config) ->
print_report(GroupName, Config),
Config.
%%--------------------------------------------------------------------
init_per_testcase | end_per_testcase
%%--------------------------------------------------------------------
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(_TestCase, Config) ->
Config.
%%====================================================================
%% Test Cases
%%====================================================================
%%--------------------------------------------------------------------
%% get_ready
%%--------------------------------------------------------------------
get_ready(_Config) ->
tnesia_lib:delete_table(),
application:stop(tnesia),
application:start(tnesia),
ok.
%%--------------------------------------------------------------------
%% write_records
%%--------------------------------------------------------------------
write_records(Config) ->
TnesiaBenchConfig = ?config(tnesia_bench_config, Config),
WriteConcurrency = ?config(write_concurrency, TnesiaBenchConfig),
WriteRecordByteSize = ?config(write_record_bytesize, TnesiaBenchConfig),
WriteTotalQueries = ?config(write_total_queries, TnesiaBenchConfig),
WriteTotalQueriesPerThread = WriteTotalQueries / WriteConcurrency,
QueryInfo = [{write_record_bytesize, WriteRecordByteSize}],
Self = self(),
T1 = tnesia_lib:get_micro_timestamp(now()),
lists:foreach(
fun(ThreadNumber) ->
spawn(fun() ->
writer_loop(
Self,
ThreadNumber,
WriteTotalQueriesPerThread,
QueryInfo
)
end)
end,
lists:seq(1, WriteConcurrency)
),
ThreadsResult = wait_for_result(WriteConcurrency),
T2 = tnesia_lib:get_micro_timestamp(now()),
TimeDiff = micro_to_second(T2 - T1),
WriterResult = {write_result, ThreadsResult},
TimeResult = {time_result, [
{start, T1},
{finish, T2},
{diff, TimeDiff}
]},
Result = [WriterResult, TimeResult],
TnesiaWriteResult = {tnesia_write_result, Result},
SavedConfig = raw_saved_config(Config),
NewConfig = [TnesiaWriteResult|SavedConfig],
{save_config, NewConfig}.
%%--------------------------------------------------------------------
%% read_records
%%--------------------------------------------------------------------
read_records(Config) ->
TnesiaBenchConfig = ?config(tnesia_bench_config, Config),
ReadConcurrency = ?config(read_concurrency, TnesiaBenchConfig),
ReadTotalQueries = ?config(read_total_queries, TnesiaBenchConfig),
ReadTotalQueriesPerThread = ReadTotalQueries / ReadConcurrency,
ReadCountLimit = ?config(read_count_limit, TnesiaBenchConfig),
ReadTimeLength = ?config(read_time_length, TnesiaBenchConfig),
RawSavedConfig = raw_saved_config(Config),
TnesiaWriteResult = proplists:get_value(tnesia_write_result, RawSavedConfig),
TnesiaWriteTimes = proplists:get_value(time_result, TnesiaWriteResult),
TnesiaWriteStart = proplists:get_value(start, TnesiaWriteTimes),
TnesiaWriteFinish = proplists:get_value(finish, TnesiaWriteTimes),
log("times:~n ~ p " , [ { s , TnesiaWriteStart , f , } ] ) ,
QueryInfo = [
{time_start, TnesiaWriteStart},
{time_finish, TnesiaWriteFinish},
{read_total_queries, ReadTotalQueriesPerThread},
{read_count_limit, ReadCountLimit},
{read_time_length, ReadTimeLength}
],
Self = self(),
T1 = tnesia_lib:get_micro_timestamp(now()),
lists:foreach(
fun(ThreadNumber) ->
spawn(fun() ->
reader_loop(
Self,
ThreadNumber,
ReadTotalQueriesPerThread,
QueryInfo
)
end)
end,
lists:seq(1, ReadConcurrency)
),
ThreadsResult = wait_for_result(ReadConcurrency),
T2 = tnesia_lib:get_micro_timestamp(now()),
TimeDiff = micro_to_second(T2 - T1),
ReaderResult = {read_result, ThreadsResult},
TimeResult = {time_result, [
{start, T1},
{finish, T2},
{diff, TimeDiff}
]},
Result = [ReaderResult, TimeResult],
TnesiaReadResult = {tnesia_read_result, Result},
SavedConfig = raw_saved_config(Config),
NewConfig = [TnesiaReadResult|SavedConfig],
{save_config, NewConfig}.
%%====================================================================
Workers
%%====================================================================
%%--------------------------------------------------------------------
%% writer_loop
%%--------------------------------------------------------------------
writer_loop(CallerPID, ThreadNumber, WriteTotalQueries, QueryInfo)
when WriteTotalQueries > 0 ->
Timeline = ?TIMELINE(ThreadNumber),
RecordByteSize = proplists:get_value(write_record_bytesize, QueryInfo),
Record = #tnesia_record{
id = WriteTotalQueries,
value = ?VALUE(RecordByteSize)
},
tnesia_api:write(
Timeline,
Record
),
writer_loop(CallerPID, ThreadNumber, WriteTotalQueries - 1, QueryInfo);
writer_loop(CallerPID, ThreadNumber, _WriteTotalQueries, _QueryInfo) ->
CallerPID ! {finish, {tread, ThreadNumber}}.
%%--------------------------------------------------------------------
%% reader_loop
%%--------------------------------------------------------------------
%% Benchmark reader worker. Issues `RemainingReadQueries' range queries
%% against the timeline derived from `ThreadNumber', then reports back.
%% Each query covers a window of `read_time_length' starting at a random
%% point inside the span covered by the preceding write phase.
%%
%% NOTE(review): random:uniform/1 requires its argument to be >= 1, so this
%% assumes time_finish > time_start — true whenever the write phase ran.
reader_loop(CallerPID, ThreadNumber, RemainingReadQueries, QueryInfo)
  when RemainingReadQueries > 0 ->
    Timeline = ?TIMELINE(ThreadNumber),
    TimeStart = proplists:get_value(time_start, QueryInfo),
    TimeFinish = proplists:get_value(time_finish, QueryInfo),
    ReadTimeLength = proplists:get_value(read_time_length, QueryInfo),
    ReadTimeLengthValue = get_micro_second(ReadTimeLength),
    X = TimeFinish - TimeStart,
    TimeSince = random:uniform(X) + TimeStart,
    TimeTill = TimeSince + ReadTimeLengthValue,
    %% BUGFIX: the argument list of this debug call was left uncommented
    %% while the call itself was commented out, breaking compilation.
    %% log("start - end: ~p - ~p~nsince - till: ~p - ~p",
    %%     [TimeStart, TimeFinish, TimeSince, TimeTill]),
    _QueryResult = tnesia_api:query_fetch(
      [
       {timeline, Timeline},
       {since, TimeSince},
       {till, TimeTill},
       {order, asc},
       {limit, unlimited}
      ]
     ),
    %% BUGFIX: this debug call referenced the unbound variable `QueryResult'
    %% (the binding above is `_QueryResult'); keep it disabled like the one
    %% above so the module compiles.
    %% log("read query result:~n~p", [_QueryResult]),
    reader_loop(CallerPID, ThreadNumber, RemainingReadQueries - 1, QueryInfo);
reader_loop(CallerPID, ThreadNumber, _RemainingReadQueries, _QueryInfo) ->
    CallerPID ! {finish, {thread, ThreadNumber}}.
%%--------------------------------------------------------------------
wait_for_result
%%--------------------------------------------------------------------
%% Collect one {finish, Result} message per expected worker and return the
%% list of results, most recently received first. Messages that are not
%% tagged `finish' are consumed and ignored.
wait_for_result(Concurrency) ->
    wait_for_result(Concurrency, []).

wait_for_result(Remaining, Acc) when Remaining > 0 ->
    receive
        {finish, Result} ->
            wait_for_result(Remaining - 1, [Result | Acc]);
        _Ignored ->
            wait_for_result(Remaining, Acc)
    end;
wait_for_result(_Remaining, Acc) ->
    Acc.
%%====================================================================
Utilities
%%====================================================================
%%--------------------------------------------------------------------
%% log
%%--------------------------------------------------------------------
%% Print a formatted message to the common_test log at verbosity level 50.
log(Fmt, Args) ->
    ct:print(default, 50, Fmt, Args).
%%--------------------------------------------------------------------
print_report
%%--------------------------------------------------------------------
%% Print the benchmark summary for a finished test group: the group's
%% configuration plus the write and read results accumulated via
%% save_config by the test cases.
print_report(GroupName, Config) ->
    BenchConfig = ?config(tnesia_bench_config, Config),
    BenchResult = raw_saved_config(Config),
    WriteResult = ?config(tnesia_write_result, BenchResult),
    ReadResult = ?config(tnesia_read_result, BenchResult),
    Format =
        "Benchmark: ~p~n" ++
        "--------------------------- ~nConfig:~n~p~n" ++
        "--------------------------- ~nWrite Result:~n~p~n" ++
        "--------------------------- ~nRead Result:~n~p~n",
    ct:print(default, 50, Format,
             [GroupName, BenchConfig, WriteResult, ReadResult]).
%%--------------------------------------------------------------------
%% raw_saved_config
%%--------------------------------------------------------------------
%% Return the config saved by the previous test case in this group, or []
%% when nothing has been saved yet (e.g. the first case in the group).
raw_saved_config(Config) ->
    case ?config(saved_config, Config) of
        {_PreviousCase, Saved} -> Saved;
        _None -> []
    end.
%%--------------------------------------------------------------------
%% get_micro_second
%%--------------------------------------------------------------------
%% Convert an {Amount, Unit} duration (Unit :: second | minute) into
%% microseconds. Any other unit raises function_clause.
get_micro_second({Seconds, second}) ->
    Seconds * 1000000;
get_micro_second({Minutes, minute}) ->
    Minutes * 60 * 1000000.
%%--------------------------------------------------------------------
micro_to_second
%%--------------------------------------------------------------------
%% Express a microsecond count as a {Float, second} duration tuple
%% (the inverse direction of get_micro_second/1).
micro_to_second(Micros) ->
    Seconds = Micros / 1000000,
    {Seconds, second}.
| null | https://raw.githubusercontent.com/hamidreza-s/Tnesia/52a3a88a52b423fbc5959978c0bba61654166d47/bench/tnesia_common_bench_SUITE.erl | erlang | ====================================================================
CT Callbacks
====================================================================
--------------------------------------------------------------------
suite | groups | all
--------------------------------------------------------------------
--------------------------------------------------------------------
init_per_suite | end_per_suite
--------------------------------------------------------------------
--------------------------------------------------------------------
init_per_group | end_per_group
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
====================================================================
Test Cases
====================================================================
--------------------------------------------------------------------
get_ready
--------------------------------------------------------------------
--------------------------------------------------------------------
write_records
--------------------------------------------------------------------
--------------------------------------------------------------------
read_records
--------------------------------------------------------------------
====================================================================
====================================================================
--------------------------------------------------------------------
writer_loop
--------------------------------------------------------------------
--------------------------------------------------------------------
reader_loop
--------------------------------------------------------------------
log("start - end: ~p - ~p~nsince - till: ~p - ~p",
--------------------------------------------------------------------
--------------------------------------------------------------------
====================================================================
====================================================================
--------------------------------------------------------------------
log
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
raw_saved_config
--------------------------------------------------------------------
--------------------------------------------------------------------
get_micro_second
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | -module(tnesia_common_bench_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include("tnesia.hrl").
-record(tnesia_record, {id, value}).
-define(VALUE(ByteSize), [X || X <- lists:seq(1, ByteSize)]).
-define(TIMELINE(Int), list_to_binary("timeline-" ++ integer_to_list(Int))).
-define(DEBUG(Format, Args), ct:print(default, 50, Format, Args)).
suite() -> [].
groups() ->
[
{light_benchmark, [sequential], [get_ready, write_records, read_records]},
{normal_benchmark, [sequential], [get_ready, write_records, read_records]},
{heavy_benchmark, [sequential], [get_ready, write_records, read_records]}
].
all() ->
[
{group, light_benchmark},
{group, normal_benchmark},
{group, heavy_benchmark}
].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(light_benchmark, Config) ->
TnesiaBenchConfig = {tnesia_bench_config,
[
{read_concurrency, 1},
{read_total_queries, 1000},
{read_count_limit, 50},
{read_time_length, {10, second}},
{write_concurrency, 1},
{write_total_queries, 10000},
{write_record_bytesize, 32}
]},
[TnesiaBenchConfig|Config];
init_per_group(normal_benchmark, Config) ->
TnesiaBenchConfig = {tnesia_bench_config,
[
{read_concurrency, 2},
{read_total_queries, 1000},
{read_count_limit, 50},
{read_time_length, {10, second}},
{write_concurrency, 2},
{write_total_queries, 10000},
{write_record_bytesize, 32}
]},
[TnesiaBenchConfig|Config];
init_per_group(heavy_benchmark, Config) ->
TnesiaBenchConfig = {tnesia_bench_config,
[
{read_concurrency, 4},
{read_total_queries, 1000},
{read_count_limit, 50},
{read_time_length, {10, second}},
{write_concurrency, 4},
{write_total_queries, 10000},
{write_record_bytesize, 32}
]},
[TnesiaBenchConfig|Config];
init_per_group(_GroupName, Config) ->
Config.
end_per_group(GroupName, Config) ->
print_report(GroupName, Config),
Config.
init_per_testcase | end_per_testcase
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(_TestCase, Config) ->
Config.
get_ready(_Config) ->
tnesia_lib:delete_table(),
application:stop(tnesia),
application:start(tnesia),
ok.
write_records(Config) ->
TnesiaBenchConfig = ?config(tnesia_bench_config, Config),
WriteConcurrency = ?config(write_concurrency, TnesiaBenchConfig),
WriteRecordByteSize = ?config(write_record_bytesize, TnesiaBenchConfig),
WriteTotalQueries = ?config(write_total_queries, TnesiaBenchConfig),
WriteTotalQueriesPerThread = WriteTotalQueries / WriteConcurrency,
QueryInfo = [{write_record_bytesize, WriteRecordByteSize}],
Self = self(),
T1 = tnesia_lib:get_micro_timestamp(now()),
lists:foreach(
fun(ThreadNumber) ->
spawn(fun() ->
writer_loop(
Self,
ThreadNumber,
WriteTotalQueriesPerThread,
QueryInfo
)
end)
end,
lists:seq(1, WriteConcurrency)
),
ThreadsResult = wait_for_result(WriteConcurrency),
T2 = tnesia_lib:get_micro_timestamp(now()),
TimeDiff = micro_to_second(T2 - T1),
WriterResult = {write_result, ThreadsResult},
TimeResult = {time_result, [
{start, T1},
{finish, T2},
{diff, TimeDiff}
]},
Result = [WriterResult, TimeResult],
TnesiaWriteResult = {tnesia_write_result, Result},
SavedConfig = raw_saved_config(Config),
NewConfig = [TnesiaWriteResult|SavedConfig],
{save_config, NewConfig}.
read_records(Config) ->
TnesiaBenchConfig = ?config(tnesia_bench_config, Config),
ReadConcurrency = ?config(read_concurrency, TnesiaBenchConfig),
ReadTotalQueries = ?config(read_total_queries, TnesiaBenchConfig),
ReadTotalQueriesPerThread = ReadTotalQueries / ReadConcurrency,
ReadCountLimit = ?config(read_count_limit, TnesiaBenchConfig),
ReadTimeLength = ?config(read_time_length, TnesiaBenchConfig),
RawSavedConfig = raw_saved_config(Config),
TnesiaWriteResult = proplists:get_value(tnesia_write_result, RawSavedConfig),
TnesiaWriteTimes = proplists:get_value(time_result, TnesiaWriteResult),
TnesiaWriteStart = proplists:get_value(start, TnesiaWriteTimes),
TnesiaWriteFinish = proplists:get_value(finish, TnesiaWriteTimes),
log("times:~n ~ p " , [ { s , TnesiaWriteStart , f , } ] ) ,
QueryInfo = [
{time_start, TnesiaWriteStart},
{time_finish, TnesiaWriteFinish},
{read_total_queries, ReadTotalQueriesPerThread},
{read_count_limit, ReadCountLimit},
{read_time_length, ReadTimeLength}
],
Self = self(),
T1 = tnesia_lib:get_micro_timestamp(now()),
lists:foreach(
fun(ThreadNumber) ->
spawn(fun() ->
reader_loop(
Self,
ThreadNumber,
ReadTotalQueriesPerThread,
QueryInfo
)
end)
end,
lists:seq(1, ReadConcurrency)
),
ThreadsResult = wait_for_result(ReadConcurrency),
T2 = tnesia_lib:get_micro_timestamp(now()),
TimeDiff = micro_to_second(T2 - T1),
ReaderResult = {read_result, ThreadsResult},
TimeResult = {time_result, [
{start, T1},
{finish, T2},
{diff, TimeDiff}
]},
Result = [ReaderResult, TimeResult],
TnesiaReadResult = {tnesia_read_result, Result},
SavedConfig = raw_saved_config(Config),
NewConfig = [TnesiaReadResult|SavedConfig],
{save_config, NewConfig}.
Workers
writer_loop(CallerPID, ThreadNumber, WriteTotalQueries, QueryInfo)
when WriteTotalQueries > 0 ->
Timeline = ?TIMELINE(ThreadNumber),
RecordByteSize = proplists:get_value(write_record_bytesize, QueryInfo),
Record = #tnesia_record{
id = WriteTotalQueries,
value = ?VALUE(RecordByteSize)
},
tnesia_api:write(
Timeline,
Record
),
writer_loop(CallerPID, ThreadNumber, WriteTotalQueries - 1, QueryInfo);
writer_loop(CallerPID, ThreadNumber, _WriteTotalQueries, _QueryInfo) ->
CallerPID ! {finish, {tread, ThreadNumber}}.
reader_loop(CallerPID, ThreadNumber, RemainingReadQueries, QueryInfo)
when RemainingReadQueries > 0 ->
Timeline = ?TIMELINE(ThreadNumber),
TimeStart = proplists:get_value(time_start, QueryInfo),
TimeFinish = proplists:get_value(time_finish, QueryInfo),
ReadTimeLength = proplists:get_value(read_time_length, QueryInfo),
ReadTimeLengthValue = get_micro_second(ReadTimeLength),
X = TimeFinish - TimeStart,
TimeSince = random:uniform(X) + TimeStart,
TimeTill = TimeSince + ReadTimeLengthValue,
[ TimeStart , TimeFinish , TimeSince , TimeTill ] ) ,
_QueryResult = tnesia_api:query_fetch(
[
{timeline, Timeline},
{since, TimeSince},
{till, TimeTill},
{order, asc},
{limit, unlimited}
]
),
log("read query result:~n ~ p " , [ QueryResult ] ) ,
reader_loop(CallerPID, ThreadNumber, RemainingReadQueries - 1, QueryInfo);
reader_loop(CallerPID, ThreadNumber, _RemainingReadQueries, _QueryInfo) ->
CallerPID ! {finish, {thread, ThreadNumber}}.
wait_for_result
wait_for_result(WriteConcurrency) ->
wait_for_result(WriteConcurrency, []).
wait_for_result(WriteConcurrency, State)
when WriteConcurrency > 0 ->
receive
{finish, Result} ->
wait_for_result(WriteConcurrency - 1, [Result|State]);
_ ->
wait_for_result(WriteConcurrency, State)
end;
wait_for_result(_WriteConcurrency, State) ->
State.
Utilities
log(Format, Arguments) ->
ct:print(default, 50, Format, Arguments).
print_report
print_report(GroupName, Config) ->
TnesiaBenchConfig = ?config(tnesia_bench_config, Config),
TnesiaBenchResult = raw_saved_config(Config),
TnesiaWriteResult = ?config(tnesia_write_result, TnesiaBenchResult),
TnesiaReadResult = ?config(tnesia_read_result, TnesiaBenchResult),
ct:print(
default,
50,
"Benchmark: ~p~n" ++
"--------------------------- ~nConfig:~n~p~n" ++
"--------------------------- ~nWrite Result:~n~p~n" ++
"--------------------------- ~nRead Result:~n~p~n",
[GroupName, TnesiaBenchConfig, TnesiaWriteResult, TnesiaReadResult]
).
raw_saved_config(Config) ->
case ?config(saved_config, Config) of
{_SuiteName, SavedConfig} -> SavedConfig;
_ -> []
end.
get_micro_second({Int, second}) ->
Int * 1000000;
get_micro_second({Int, minute}) ->
Int * 60 * 1000000.
micro_to_second
micro_to_second(Micro) ->
{Micro / 1000000, second}.
|
3879b6e6e2740a8af013221c63eaefe46109a533cb9609ebb14d0208b302ccf2 | gulige/neuroevo | gstk_editor.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% ------------------------------------------------------------
%% Basic Editor Type
%% ------------------------------------------------------------
-module(gstk_editor).
-compile([{nowarn_deprecated_function,{gs,assq,2}},
{nowarn_deprecated_function,{gs,error,2}},
{nowarn_deprecated_function,{gs,val,2}}]).
%%------------------------------------------------------------------------------
%% CANVAS OPTIONS
%%
%% Attributes:
activebg Color
anchor n , w , s , e , nw , , ne , sw , center
%% bc Color
%% bg Color
%% bw Wth
%% data Data
%% fg Color
%% font Font
%% height Int
highlightbg Color
%% highlightbw Wth
%% highlightfg Color
%% hscroll Bool | top | bottom
insertbg Color
%% insertbw Wth
insertpos { Row , Col}|'end ' ( Row : 1 .. , Col : 0 .. )
%% justify left|right|center
padx Int ( Pixels )
pady Int ( Pixels )
%% relief Relief
%% scrollbg Color
%% scrollfg Color
%% selectbg Color
%% selectbw Width
%% selectfg Color
%% vscroll Bool | left | right
%% width Int
%% wrap none | char | word
%% x Int
%% y Int
%%
%%
%% Commands:
%% clear
del { FromIdx , ToIdx }
enable
%% file String
get { FromIdx , ToIdx } = > Text
insert { Index , = [ insert,{Row , lineend},end,{Row , Col } ]
%% setfocus Bool
%%
%% Events:
%% buttonpress [Bool | {Bool, Data}]
%% buttonrelease [Bool | {Bool, Data}]
%% destroy [Bool | {Bool, Data}]
%% enter [Bool | {Bool, Data}]
%% focus [Bool | {Bool, Data}]
%% keypress [Bool | {Bool, Data}]
keyrelease [ Bool | { Bool , Data } ]
%% leave [Bool | {Bool, Data}]
%% motion [Bool | {Bool, Data}]
%%
Read Options :
%% children
%% id
%% parent
%% type
%%
.t tag names 2.7 - > red blue ( blue is the colour )
.t tag add blue 2.1 2.10 tag the text
%.t tag configure blue -foregr blue create tag
% .t index end -> MaxRows.cols
% .t yview moveto (Row-1)/MaxRows
-export([create/3, config/3, read/3, delete/2,event/5,option/5,read_option/5]).
-include("gstk.hrl").
%%-----------------------------------------------------------------------------
%% MANDATORY INTERFACE FUNCTIONS
%%-----------------------------------------------------------------------------
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : create/3
%% Purpose : Create a widget of the type defined in this module.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Create the editor widget: a frame (MainW) containing a Tk text widget
%% (MainW ++ ".z") plus scrollbars. Registers the widget in the database,
%% then builds and runs the configure/place commands derived from Opts.
%% Returns ok, or {error, Reason} if the options could not be translated.
create(DB, Gstkid, Opts) ->
    MainW = gstk_generic:mk_tkw_child(DB,Gstkid),
    %% The text widget proper lives in the ".z" child of the main frame.
    Editor = lists:append(MainW,".z"),
    {Vscroll, Hscroll, NewOpts} = gstk_generic:parse_scrolls(Opts),
    %% misc holds {ColorNo, Color|Font} tag pairs; tag 1 is pre-seeded white
    %% (matches the "c1" foreground tag configured below).
    WidgetD = #so{main=MainW, object=Editor,
                  hscroll=Hscroll, vscroll=Vscroll,misc=[{1,white}]},
    NGstkid=Gstkid#gstkid{widget=MainW, widget_data=WidgetD},
    gstk_db:insert_widget(DB,NGstkid),
    MandatoryCmd = ["so_create text ", MainW],
    case gstk:call(MandatoryCmd) of
        {result, _} ->
            SimplePreCmd = [MainW, " conf"],
            PlacePreCmd = [";place ", MainW],
            case gstk_generic:make_command(NewOpts, NGstkid, MainW, SimplePreCmd,
                                           PlacePreCmd, DB,Editor) of
                {error,Reason} -> {error,Reason};
                Cmd ->
                    gstk:exec(Cmd),
                    %% Cosmetic defaults: sunken relief on the editor and
                    %% both scrollbars, plus the white foreground tag "c1".
                    gstk:exec(
                      [Editor," conf -bo 2 -relief sunken -highlightth 2;",
                       MainW,".sy conf -rel sunken -bo 2;",
                       MainW,".pad.sx conf -rel sunken -bo 2;",
                       Editor, " tag co c1 -for white;"]),
                    ok
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : config/3
%% Purpose : Configure a widget of the type defined in this module.
: DB - The Database
- The gstkid of the widget
%% Opts - A list of options for configuring the widget
%%
%% Return : [true | {bad_result, Reason}]
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Reconfigure an existing editor widget with Options. Scrollbar options
%% are expanded first (only when present); everything is then translated
%% into one Tk configure/place command and executed.
config(DB, Gstkid, Options) ->
    #gstkid{widget = Frame, widget_data = SO} = Gstkid,
    TextWidget = SO#so.object,
    EffectiveOpts =
        case {gs:assq(vscroll, Options), gs:assq(hscroll, Options)} of
            {false, false} ->
                %% No scrollbar option given: pass the options through.
                Options;
            _Some ->
                gstk_generic:parse_scrolls(Gstkid, Options)
        end,
    gstk_generic:mk_cmd_and_exec(EffectiveOpts, Gstkid, Frame,
                                 [Frame, " conf"], [";place ", Frame],
                                 DB, TextWidget).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : read/3
Purpose : Read one option from a widget
: DB - The Database
- The gstkid of the widget
%% Opt - An option to read
%%
Return : [ OptionValue | { bad_result , Reason } ]
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Read a single option value from the widget; delegates to the generic
%% reader with the Tk text-widget path as the target object.
read(DB, Gstkid, Opt) ->
    #gstkid{widget_data = SO} = Gstkid,
    gstk_generic:read_option(DB, Gstkid, Opt, SO#so.object).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : delete/2
%% Purpose : Delete widget from databas and return tkwidget to destroy
: DB - The Database
- The gstkid of the widget
%%
%% Return : TkWidget to destroy
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Remove the widget from the database and return the Tk widget path so
%% the caller can destroy the on-screen widget.
delete(DB, #gstkid{widget = TkWidget} = Gstkid) ->
    gstk_db:delete_widget(DB, Gstkid),
    TkWidget.
%% Forward widget events to the generic event handler unchanged.
event(DB, Gstkid, EventType, EventData, Args) ->
    gstk_generic:event(DB, Gstkid, EventType, EventData, Args).
%%-----------------------------------------------------------------------------
%% MANDATORY FUNCTIONS
%%-----------------------------------------------------------------------------
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : option/5
%% Purpose : Take care of options
: Option - An option tuple
- The gstkid of the widget
MainW - The main tk - widget
%% Editor - The Editor tk-widget
%% DB - The Database
%%
Return : A tuple { OptionType , OptionCmd }
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Translate one config option into a Tk command fragment.
%% Returns {c, IoList} (a command), {c, NewGstkid, IoList} (a command plus
%% an updated widget id, used when a new tag was allocated), none,
%% {error, Reason}, or invalid_option for unrecognised options.
option(Option, Gstkid, _MainW, DB, Editor) ->
    case Option of
        {font,Font} when is_tuple(Font) ->
            gstk_db:insert_opt(DB,Gstkid,Option),
            {c, [Editor, " conf -font ", gstk_font:choose_ascii(DB,Font)]};
        %% Range-scoped options: allocate a tag and apply it to [Start,End).
        {font_style, {{Start,End},Font}} -> % should be only style
            {Tag,Ngstkid} = get_style_tag(DB,Editor,Font,Gstkid),
            gstk_db:update_widget(DB,Ngstkid),
            {c, Ngstkid, [Editor, " tag ad ", Tag, " ", p_index(Start), " ",
                          p_index(End)]};
        {fg, {{Start,End},Color}} ->
            {Tag,Ngstkid} = get_color_tag(Editor,Color,Gstkid),
            gstk_db:update_widget(DB,Ngstkid),
            {c, Ngstkid, [Editor, " tag ad ", Tag, " ", p_index(Start), " ",
                          p_index(End)]};
        {padx, Pad} -> {c, [Editor," conf -padx ",gstk:to_ascii(Pad)]};
        {pady, Pad} -> {c, [Editor," conf -pady ",gstk:to_ascii(Pad)]};
        {selection, {From, To}} ->
            {c, [Editor," tag ad sel ",p_index(From)," ", p_index(To)]};
        {vscrollpos, Row} ->
            %% Tk's yview takes a fraction, so scale Row by the row count.
            {MaxRow,_Col} = ret_ed_index([Editor," ind end"]),
            {c, [Editor, " yv mo ",gstk:to_ascii(Row/MaxRow)]};
        {wrap, How} ->
            {c, [Editor, " conf -wrap ", gstk:to_ascii(How)]};
        {fg, Color} ->
            {c, [Editor, " conf -fg ", gstk:to_color(Color)]};
        {insertbw, Wth} ->
            {c, [Editor, " conf -insertbo ", gstk:to_ascii(Wth)]};
        {insertbg, Color} ->
            {c, [Editor, " conf -insertba ", gstk:to_color(Color)]};
        {insertpos, Index} ->
            {c, [Editor, " m s insert ", p_index(Index)]};
        {insert, {Index, Text}} ->
            {c, [Editor, " ins ", p_index(Index), " ", gstk:to_ascii(Text)]};
        {del, {From, To}} ->
            {c, [Editor, " del ", p_index(From), " ", p_index(To)]};
        {overwrite, {Index, Text}} ->
            %% Delete as many characters as Text is long, then insert Text.
            AI = p_index(Index),
            Len = gstk:to_ascii(lists:flatlength(Text)),
            {c, [Editor, " del ",AI," \"",AI,"+",Len,"c\";",
                 Editor, " ins ",AI," ", gstk:to_ascii(Text)]};
        clear -> {c, [Editor, " delete 1.0 end"]};
        {load, File} ->
            %% [92,92] is the regex "\\": normalise backslashes to "/".
            F2 = re:replace(File, [92,92], "/", [global,{return,list}]),
            case gstk:call(["ed_load ", Editor, " ", gstk:to_ascii(F2)]) of
                {result, _} -> none;
                {bad_result,Re} ->
                    {error,{no_such_file,editor,load,F2,Re}}
            end;
        {save, File} ->
            F2 = re:replace(File, [92,92], "/", [global,{return,list}]),
            case gstk:call(["ed_save ",Editor," ",gstk:to_ascii(F2)]) of
                {result, _} -> none;
                {bad_result,Re} ->
                    {error,{no_such_file,editor,save,F2,Re}}
            end;
        {enable, true} -> {c, [Editor, " conf -state normal"]};
        {enable, false} -> {c, [Editor, " conf -state disabled"]};
        {setfocus, true} -> {c, ["focus ", Editor]};
        {setfocus, false} -> {c, ["focus ."]};
        _ -> invalid_option
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : read_option/5
%% Purpose : Take care of a read option
%% Return : The value of the option or invalid_option
[ OptionValue | { bad_result , Reason } ]
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Resolve one read option by querying the Tk text widget (via tcl2erl
%% return-value parsers) or the widget database.
%% Returns the option's value, or {bad_result, ...} for unknown options.
read_option(Option,GstkId,_MainW,DB,Editor) ->
    case Option of
        font -> gstk_db:opt(DB,GstkId,font,undefined);
        padx -> tcl2erl:ret_atom([Editor," cg -padx"]);
        pady -> tcl2erl:ret_atom([Editor," cg -pady"]);
        enable -> tcl2erl:ret_enable([Editor," cg -st"]);
        fg -> tcl2erl:ret_color([Editor," cg -fg"]);
        {fg, Pos} ->
            %% Color at a position: last "c<N>" tag on the char wins;
            %% fall back to the widget's foreground color when untagged.
            L=tcl2erl:ret_list([Editor," tag nam ", p_index(Pos)]),
            SO = GstkId#gstkid.widget_data,
            case last_tag_val(undefined, $c, L, SO#so.misc) of
                undefined -> tcl2erl:ret_color([Editor," cg -fg"]);
                Color -> Color
            end;
        {font_style, Pos} ->
            %% Same lookup for "f<N>" (font/style) tags.
            L=tcl2erl:ret_list([Editor," tag nam ", p_index(Pos)]),
            SO = GstkId#gstkid.widget_data,
            case last_tag_val(undefined, $f, L, SO#so.misc) of
                undefined -> 'my style? nyi';
                Style -> Style
            end;
        selection -> ret_ed_indexes([Editor," tag ne sel 1.0"]);
        char_height -> tcl2erl:ret_int([Editor, " cg -he"]);
        char_width -> tcl2erl:ret_int([Editor, " cg -wi"]);
        insertbg -> tcl2erl:ret_color([Editor," cg -insertba"]);
        insertbw -> tcl2erl:ret_int([Editor," cg -insertbo"]);
        insertpos -> ret_ed_index([Editor, " ind insert"]);
        setfocus -> tcl2erl:ret_focus(Editor, "focus");
        wrap -> tcl2erl:ret_atom([Editor," cg -wrap"]);
        %% Tk's "end" index is one row past the last line, hence the -1.
        size -> {MaxRow,_Col} = ret_ed_index([Editor," ind end"]),
                MaxRow-1;
        vscrollpos ->
            %% Convert the yview top fraction back to a 1-based row number.
            {MaxRow,_Col} = ret_ed_index([Editor," ind end"]),
            [Top,_Bot] = tcl2erl:ret_list([Editor," yvi"]),
            round(Top*(MaxRow-1))+1;
        {get, {From, To}} ->
            tcl2erl:ret_str([Editor, " get ", p_index(From), " ", p_index(To)]);
        _ -> {bad_result, {GstkId#gstkid.objtype, invalid_option, Option}}
    end.
%%------------------------------------------------------------------------------
%% PRIMITIVES
%%------------------------------------------------------------------------------
%% Translate a GS editor index into a Tk text-widget index string.
%% Clause order matters: {Row, lineend} must be tried before the generic
%% {Row, Col} tuple pattern.
p_index({Row, lineend}) ->
    [$", gstk:to_ascii(Row), ".1 lineend", $"];
p_index({Row, Col}) ->
    [gstk:to_ascii(Row), $., gstk:to_ascii(Col)];
p_index(insert) ->
    "insert";
p_index('end') ->
    "end";
p_index(Bad) ->
    gs:error("bad index in editor: ~w~n", [Bad]),
    0.
%% Run Cmd and parse the reply as a Tk "Row.Col" text index.
%% Returns {Row, Col}, {bad_result, ...} on a parse failure, or the raw
%% failure term when the call itself fails.
ret_ed_index(Cmd) ->
    case gstk:call(Cmd) of
        {result, Reply} ->
            case io_lib:fread("~d.~d", Reply) of
                {ok, [Row, Col], []} ->
                    {Row, Col};
                Unparsable ->
                    {bad_result, Unparsable}
            end;
        CallFailure ->
            CallFailure
    end.
%% Run Cmd and parse the reply as a pair of Tk "Row.Col" indices.
%% Returns {{Row1, Col1}, {Row2, Col2}}, undefined for an empty reply
%% (e.g. no selection), or a bad_result/failure term otherwise.
ret_ed_indexes(Cmd) ->
    case gstk:call(Cmd) of
        {result, ""} ->
            undefined;
        {result, Reply} ->
            case io_lib:fread("~d.~d ~d.~d", Reply) of
                {ok, [Row1, Col1, Row2, Col2], []} ->
                    {{Row1, Col1}, {Row2, Col2}};
                Unparsable ->
                    {bad_result, Unparsable}
            end;
        CallFailure ->
            CallFailure
    end.
%%----------------------------------------------------------------------
Returns : { Tag text ( ) , NewGstkId }
%%----------------------------------------------------------------------
%% The misc field of the so record is a list of {ColorNo, Color|Font|...}
%% Allocate a fresh foreground-color tag "c<N>" for Color, configure it in
%% the Tk text widget, and record {N, Color} in the widget's misc list.
%% Tags are deliberately never reused: Tk tag priority follows creation
%% order, so sharing an existing tag would spoil the priority ordering.
%% Returns {TagString, UpdatedGstkid}.
get_color_tag(Editor, Color, Gstkid) ->
    SO = Gstkid#gstkid.widget_data,
    Tags = SO#so.misc,
    {HighestNo, _} = lists:max(Tags),
    NewNo = HighestNo + 1,
    TagStr = ["c", gstk:to_ascii(NewNo)],
    gstk:exec([Editor, " tag co ", TagStr, " -for ", gstk:to_color(Color)]),
    NewSO = SO#so{misc = [{NewNo, Color} | Tags]},
    {TagStr, Gstkid#gstkid{widget_data = NewSO}}.
%% Allocate a fresh font/style tag "f<N>" for Style, configure it in the
%% Tk text widget, and record {N, Style} in the widget's misc list.
%% Like get_color_tag/3, tags are never reused (priority ordering).
%% Returns {TagString, UpdatedGstkid}.
get_style_tag(DB, Editor, Style, Gstkid) ->
    SO = Gstkid#gstkid.widget_data,
    Tags = SO#so.misc,
    {HighestNo, _} = lists:max(Tags),
    NewNo = HighestNo + 1,
    TagStr = ["f", gstk:to_ascii(NewNo)],
    %% Configures the tag's font (should eventually carry style only).
    gstk:exec([Editor, " tag co ", TagStr, " -font ",
               gstk_font:choose_ascii(DB, Style)]),
    NewSO = SO#so{misc = [{NewNo, Style} | Tags]},
    {TagStr, Gstkid#gstkid{widget_data = NewSO}}.
%%----------------------------------------------------------------------
%% Purpose: Given a list of tags for a char, return its visible color
%% (that is that last color tag in the list).
%%----------------------------------------------------------------------
%% Walk a char's tag list and return the value bound to the LAST tag whose
%% name starts with Chr (e.g. $c for color tags, $f for font tags), looking
%% the numeric suffix up in TagDict. Returns the initial accumulator when
%% no matching tag is present.
last_tag_val(Acc, _Chr, [], _TagDict) ->
    Acc;
last_tag_val(Acc, Chr, [Tag | Rest], TagDict) ->
    NextAcc =
        case atom_to_list(Tag) of
            [Chr | Digits] ->
                last_tag_val(gs:val(list_to_integer(Digits), TagDict),
                             Chr, Rest, TagDict);
            _Unrelated ->
                last_tag_val(Acc, Chr, Rest, TagDict)
        end,
    NextAcc.
%%% ----- Done -----
| null | https://raw.githubusercontent.com/gulige/neuroevo/09e67928c2417f2b27ec6522acc82f8b3c844949/apps/gs/src/gstk_editor.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
------------------------------------------------------------
Basic Editor Type
------------------------------------------------------------
------------------------------------------------------------------------------
CANVAS OPTIONS
Attributes:
bc Color
bg Color
bw Wth
data Data
fg Color
font Font
height Int
highlightbw Wth
highlightfg Color
hscroll Bool | top | bottom
insertbw Wth
justify left|right|center
relief Relief
scrollbg Color
scrollfg Color
selectbg Color
selectbw Width
selectfg Color
vscroll Bool | left | right
width Int
wrap none | char | word
x Int
y Int
Commands:
clear
file String
setfocus Bool
Events:
buttonpress [Bool | {Bool, Data}]
buttonrelease [Bool | {Bool, Data}]
destroy [Bool | {Bool, Data}]
enter [Bool | {Bool, Data}]
focus [Bool | {Bool, Data}]
keypress [Bool | {Bool, Data}]
leave [Bool | {Bool, Data}]
motion [Bool | {Bool, Data}]
children
id
parent
type
.t tag configure blue -foregr blue create tag
.t index end -> MaxRows.cols
.t yview moveto (Row-1)/MaxRows
-----------------------------------------------------------------------------
MANDATORY INTERFACE FUNCTIONS
-----------------------------------------------------------------------------
Purpose : Create a widget of the type defined in this module.
Purpose : Configure a widget of the type defined in this module.
Opts - A list of options for configuring the widget
Return : [true | {bad_result, Reason}]
Opt - An option to read
Purpose : Delete widget from databas and return tkwidget to destroy
Return : TkWidget to destroy
-----------------------------------------------------------------------------
MANDATORY FUNCTIONS
-----------------------------------------------------------------------------
Purpose : Take care of options
Editor - The Editor tk-widget
DB - The Database
should be only style
Purpose : Take care of a read option
Return : The value of the option or invalid_option
------------------------------------------------------------------------------
PRIMITIVES
------------------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
The misc field of the so record is a list of {ColorNo, Color|Font|...}
false -> % don't reuse tags, priority order spoils that
false -> % don't reuse tags, priority order spoils that
should be style only
----------------------------------------------------------------------
Purpose: Given a list of tags for a char, return its visible color
(that is that last color tag in the list).
----------------------------------------------------------------------
----- Done ----- | Copyright Ericsson AB 1996 - 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(gstk_editor).
-compile([{nowarn_deprecated_function,{gs,assq,2}},
{nowarn_deprecated_function,{gs,error,2}},
{nowarn_deprecated_function,{gs,val,2}}]).
activebg Color
anchor n , w , s , e , nw , , ne , sw , center
highlightbg Color
insertbg Color
insertpos { Row , Col}|'end ' ( Row : 1 .. , Col : 0 .. )
padx Int ( Pixels )
pady Int ( Pixels )
del { FromIdx , ToIdx }
enable
get { FromIdx , ToIdx } = > Text
insert { Index , = [ insert,{Row , lineend},end,{Row , Col } ]
keyrelease [ Bool | { Bool , Data } ]
Read Options :
.t tag names 2.7 - > red blue ( blue is the colour )
.t tag add blue 2.1 2.10 tag the text
-export([create/3, config/3, read/3, delete/2,event/5,option/5,read_option/5]).
-include("gstk.hrl").
Function : create/3
create(DB, Gstkid, Opts) ->
MainW = gstk_generic:mk_tkw_child(DB,Gstkid),
Editor = lists:append(MainW,".z"),
{Vscroll, Hscroll, NewOpts} = gstk_generic:parse_scrolls(Opts),
WidgetD = #so{main=MainW, object=Editor,
hscroll=Hscroll, vscroll=Vscroll,misc=[{1,white}]},
NGstkid=Gstkid#gstkid{widget=MainW, widget_data=WidgetD},
gstk_db:insert_widget(DB,NGstkid),
MandatoryCmd = ["so_create text ", MainW],
case gstk:call(MandatoryCmd) of
{result, _} ->
SimplePreCmd = [MainW, " conf"],
PlacePreCmd = [";place ", MainW],
case gstk_generic:make_command(NewOpts, NGstkid, MainW, SimplePreCmd,
PlacePreCmd, DB,Editor) of
{error,Reason} -> {error,Reason};
Cmd ->
gstk:exec(Cmd),
gstk:exec(
[Editor," conf -bo 2 -relief sunken -highlightth 2;",
MainW,".sy conf -rel sunken -bo 2;",
MainW,".pad.sx conf -rel sunken -bo 2;",
Editor, " tag co c1 -for white;"]),
ok
end
end.
Function : config/3
: DB - The Database
- The gstkid of the widget
config(DB, Gstkid, Options) ->
SO = Gstkid#gstkid.widget_data,
MainW = Gstkid#gstkid.widget,
Editor = SO#so.object,
NewOpts =
case {gs:assq(vscroll,Options),gs:assq(hscroll,Options)} of
{false,false} -> Options;
_ -> gstk_generic:parse_scrolls(Gstkid, Options)
end,
SimplePreCmd = [MainW, " conf"],
PlacePreCmd = [";place ", MainW],
gstk_generic:mk_cmd_and_exec(NewOpts, Gstkid, MainW, SimplePreCmd,
PlacePreCmd, DB, Editor).
Function : read/3
Purpose : Read one option from a widget
: DB - The Database
- The gstkid of the widget
Return : [ OptionValue | { bad_result , Reason } ]
read(DB, Gstkid, Opt) ->
SO = Gstkid#gstkid.widget_data,
gstk_generic:read_option(DB, Gstkid, Opt,SO#so.object).
Function : delete/2
: DB - The Database
- The gstkid of the widget
delete(DB, Gstkid) ->
gstk_db:delete_widget(DB, Gstkid),
Gstkid#gstkid.widget.
event(DB, Gstkid, Etype, Edata, Args) ->
gstk_generic:event(DB, Gstkid, Etype, Edata, Args).
Function : option/5
: Option - An option tuple
- The gstkid of the widget
MainW - The main tk - widget
Return : A tuple { OptionType , OptionCmd }
option(Option, Gstkid, _MainW, DB, Editor) ->
case Option of
{font,Font} when is_tuple(Font) ->
gstk_db:insert_opt(DB,Gstkid,Option),
{c, [Editor, " conf -font ", gstk_font:choose_ascii(DB,Font)]};
{Tag,Ngstkid} = get_style_tag(DB,Editor,Font,Gstkid),
gstk_db:update_widget(DB,Ngstkid),
{c, Ngstkid, [Editor, " tag ad ", Tag, " ", p_index(Start), " ",
p_index(End)]};
{fg, {{Start,End},Color}} ->
{Tag,Ngstkid} = get_color_tag(Editor,Color,Gstkid),
gstk_db:update_widget(DB,Ngstkid),
{c, Ngstkid, [Editor, " tag ad ", Tag, " ", p_index(Start), " ",
p_index(End)]};
{padx, Pad} -> {c, [Editor," conf -padx ",gstk:to_ascii(Pad)]};
{pady, Pad} -> {c, [Editor," conf -pady ",gstk:to_ascii(Pad)]};
{selection, {From, To}} ->
{c, [Editor," tag ad sel ",p_index(From)," ", p_index(To)]};
{vscrollpos, Row} ->
{MaxRow,_Col} = ret_ed_index([Editor," ind end"]),
{c, [Editor, " yv mo ",gstk:to_ascii(Row/MaxRow)]};
{wrap, How} ->
{c, [Editor, " conf -wrap ", gstk:to_ascii(How)]};
{fg, Color} ->
{c, [Editor, " conf -fg ", gstk:to_color(Color)]};
{insertbw, Wth} ->
{c, [Editor, " conf -insertbo ", gstk:to_ascii(Wth)]};
{insertbg, Color} ->
{c, [Editor, " conf -insertba ", gstk:to_color(Color)]};
{insertpos, Index} ->
{c, [Editor, " m s insert ", p_index(Index)]};
{insert, {Index, Text}} ->
{c, [Editor, " ins ", p_index(Index), " ", gstk:to_ascii(Text)]};
{del, {From, To}} ->
{c, [Editor, " del ", p_index(From), " ", p_index(To)]};
{overwrite, {Index, Text}} ->
AI = p_index(Index),
Len = gstk:to_ascii(lists:flatlength(Text)),
{c, [Editor, " del ",AI," \"",AI,"+",Len,"c\";",
Editor, " ins ",AI," ", gstk:to_ascii(Text)]};
clear -> {c, [Editor, " delete 1.0 end"]};
{load, File} ->
F2 = re:replace(File, [92,92], "/", [global,{return,list}]),
case gstk:call(["ed_load ", Editor, " ", gstk:to_ascii(F2)]) of
{result, _} -> none;
{bad_result,Re} ->
{error,{no_such_file,editor,load,F2,Re}}
end;
{save, File} ->
F2 = re:replace(File, [92,92], "/", [global,{return,list}]),
case gstk:call(["ed_save ",Editor," ",gstk:to_ascii(F2)]) of
{result, _} -> none;
{bad_result,Re} ->
{error,{no_such_file,editor,save,F2,Re}}
end;
{enable, true} -> {c, [Editor, " conf -state normal"]};
{enable, false} -> {c, [Editor, " conf -state disabled"]};
{setfocus, true} -> {c, ["focus ", Editor]};
{setfocus, false} -> {c, ["focus ."]};
_ -> invalid_option
end.
Function : read_option/5
[ OptionValue | { bad_result , Reason } ]
read_option(Option,GstkId,_MainW,DB,Editor) ->
case Option of
font -> gstk_db:opt(DB,GstkId,font,undefined);
padx -> tcl2erl:ret_atom([Editor," cg -padx"]);
pady -> tcl2erl:ret_atom([Editor," cg -pady"]);
enable -> tcl2erl:ret_enable([Editor," cg -st"]);
fg -> tcl2erl:ret_color([Editor," cg -fg"]);
{fg, Pos} ->
L=tcl2erl:ret_list([Editor," tag nam ", p_index(Pos)]),
SO = GstkId#gstkid.widget_data,
case last_tag_val(undefined, $c, L, SO#so.misc) of
undefined -> tcl2erl:ret_color([Editor," cg -fg"]);
Color -> Color
end;
{font_style, Pos} ->
L=tcl2erl:ret_list([Editor," tag nam ", p_index(Pos)]),
SO = GstkId#gstkid.widget_data,
case last_tag_val(undefined, $f, L, SO#so.misc) of
undefined -> 'my style? nyi';
Style -> Style
end;
selection -> ret_ed_indexes([Editor," tag ne sel 1.0"]);
char_height -> tcl2erl:ret_int([Editor, " cg -he"]);
char_width -> tcl2erl:ret_int([Editor, " cg -wi"]);
insertbg -> tcl2erl:ret_color([Editor," cg -insertba"]);
insertbw -> tcl2erl:ret_int([Editor," cg -insertbo"]);
insertpos -> ret_ed_index([Editor, " ind insert"]);
setfocus -> tcl2erl:ret_focus(Editor, "focus");
wrap -> tcl2erl:ret_atom([Editor," cg -wrap"]);
size -> {MaxRow,_Col} = ret_ed_index([Editor," ind end"]),
MaxRow-1;
vscrollpos ->
{MaxRow,_Col} = ret_ed_index([Editor," ind end"]),
[Top,_Bot] = tcl2erl:ret_list([Editor," yvi"]),
round(Top*(MaxRow-1))+1;
{get, {From, To}} ->
tcl2erl:ret_str([Editor, " get ", p_index(From), " ", p_index(To)]);
_ -> {bad_result, {GstkId#gstkid.objtype, invalid_option, Option}}
end.
p_index({Line, lineend}) -> [$",gstk:to_ascii(Line), ".1 lineend",$"];
p_index({Line, Char}) -> [gstk:to_ascii(Line), $., gstk:to_ascii(Char)];
p_index(insert) -> "insert";
p_index('end') -> "end";
p_index(Idx) -> gs:error("bad index in editor: ~w~n",[Idx]),0.
ret_ed_index(Cmd) ->
case gstk:call(Cmd) of
{result, Val} ->
case io_lib:fread("~d.~d", Val) of
{ok, [Row,Col], []} -> {Row, Col};
Other -> {bad_result, Other}
end;
Bad_result -> Bad_result
end.
ret_ed_indexes(Cmd) ->
case gstk:call(Cmd) of
{result, ""} -> undefined;
{result, Val} ->
case io_lib:fread("~d.~d ~d.~d", Val) of
{ok, [Row1,Col1,Row2,Col2], []} -> {{Row1, Col1}, {Row2,Col2}};
Other -> {bad_result, Other}
end;
Bad_result -> Bad_result
end.
Returns : { Tag text ( ) , NewGstkId }
get_color_tag(Editor,Color,Gstkid) ->
SO = Gstkid#gstkid.widget_data,
Tags = SO#so.misc,
case lists:keysearch(Color, 2, Tags) of
{ value , { No , _ } } - > { [ " c",gstk : to_ascii(No ) ] , } ;
_Any ->
{No,_} = lists:max(Tags),
N=No+1,
SO2 = SO#so{misc=[{N,Color}|Tags]},
TagStr=["c",gstk:to_ascii(N)],
gstk:exec([Editor," tag co ",TagStr," -for ", gstk:to_color(Color)]),
{TagStr,Gstkid#gstkid{widget_data=SO2}}
end.
get_style_tag(DB,Editor,Style,Gstkid) ->
SO = Gstkid#gstkid.widget_data,
Tags = SO#so.misc,
case lists:keysearch(Style, 2, Tags) of
{ value , { No , _ } } - > { [ " f",gstk : to_ascii(No ) ] , } ;
_Any ->
{No,_} = lists:max(Tags),
N=No+1,
SO2 = SO#so{misc=[{N,Style}|Tags]},
TagStr=["f",gstk:to_ascii(N)],
gstk:exec([Editor," tag co ",TagStr," -font ",
{TagStr,Gstkid#gstkid{widget_data=SO2}}
end.
last_tag_val(TagVal, _Chr, [], _TagDict) -> TagVal;
last_tag_val(TagVal, Chr, [Tag|Ts],TagDict) ->
case atom_to_list(Tag) of
[Chr|ANo] ->
No = list_to_integer(ANo),
last_tag_val(gs:val(No, TagDict),Chr,Ts,TagDict);
_NoAcolor ->
last_tag_val(TagVal,Chr, Ts,TagDict)
end.
|
1d8e73e199721e84806792f2c5bf1e81b5d5f1ace9d73f1eb9d224e729df554b | pallet/pallet | converge_test.clj | (ns pallet.task.converge-test
(:require
[clojure.test :refer :all]
[pallet.api :refer [group-spec lift]]
[pallet.task.converge :refer :all]
[pallet.test-utils :refer :all]))
(def a (group-spec "a"))
(def b (group-spec "b"))
(with-private-vars [pallet.task.converge [build-args]]
(deftest build-args-test
(is (= [{a 1} :phase []]
(build-args ["pallet.task.converge-test/a" "1"])))
(is (= [{a 1 b 2} :phase []]
(build-args ["pallet.task.converge-test/a" "1"
"pallet.task.converge-test/b" "2"])))
(is (= [{a 1} :phase [:b]]
(build-args ["pallet.task.converge-test/a" "1" ":b"])))
(is (= [{a 1} :prefix "a" :phase [:b]]
(build-args ["a" "pallet.task.converge-test/a" "1" ":b"])))))
| null | https://raw.githubusercontent.com/pallet/pallet/30226008d243c1072dcfa1f27150173d6d71c36d/test/pallet/task/converge_test.clj | clojure | (ns pallet.task.converge-test
(:require
[clojure.test :refer :all]
[pallet.api :refer [group-spec lift]]
[pallet.task.converge :refer :all]
[pallet.test-utils :refer :all]))
(def a (group-spec "a"))
(def b (group-spec "b"))
(with-private-vars [pallet.task.converge [build-args]]
(deftest build-args-test
(is (= [{a 1} :phase []]
(build-args ["pallet.task.converge-test/a" "1"])))
(is (= [{a 1 b 2} :phase []]
(build-args ["pallet.task.converge-test/a" "1"
"pallet.task.converge-test/b" "2"])))
(is (= [{a 1} :phase [:b]]
(build-args ["pallet.task.converge-test/a" "1" ":b"])))
(is (= [{a 1} :prefix "a" :phase [:b]]
(build-args ["a" "pallet.task.converge-test/a" "1" ":b"])))))
|
|
b8465fdd8b02d9a81dab2a13286e7f7905c50ec566a4dda65c6f3d06087fd13b | sicmutils/sicmutils | deps.cljs | ;; These dependencies are required for the cljs build of the library. They are
;; also included as cljsjs dependencies in the build... I THINK the cljsjs
;; versions only matter for the externs they provide, but my confusion is on
;; full display here so please file an issue if you run into trouble.
{:npm-deps
{"complex.js" "^2.0.11"
"fraction.js" "^4.0.12"
"odex" "^2.0.4"}}
| null | https://raw.githubusercontent.com/sicmutils/sicmutils/77c64da79dd86fe490d9171585258a6a923655d5/src/deps.cljs | clojure | These dependencies are required for the cljs build of the library. They are
also included as cljsjs dependencies in the build... I THINK the cljsjs
versions only matter for the externs they provide, but my confusion is on
full display here so please file an issue if you run into trouble. | {:npm-deps
{"complex.js" "^2.0.11"
"fraction.js" "^4.0.12"
"odex" "^2.0.4"}}
|
2599da48f8ccec2f56d21ec9d9198f69e59265653edee85ec137f8c4eb2707c9 | uw-unsat/serval | xchg.rkt | #lang rosette
(require
"common.rkt")
(provide
xchg-r/m32-r32
xchg-r/m64-r64)
(define (interpret-xchg cpu dst src)
(define mm (cpu-memmgr cpu))
(when mm
(core:memmgr-atomic-begin mm))
(define temp (cpu-gpr-ref cpu dst))
(cpu-gpr-set! cpu dst (cpu-gpr-ref cpu src))
(cpu-gpr-set! cpu src temp)
(when mm
(core:memmgr-atomic-end mm)))
87 /r
(define-insn xchg-r/m32-r32 (dst src)
#:decode [((byte #x87) (/r reg r/m))
(list (gpr32-no-rex r/m) (gpr32-no-rex reg))]
[((rex/r r b) (byte #x87) (/r reg r/m))
(list (gpr32 b r/m) (gpr32 r reg))]
#:encode (list (rex/r src dst) (byte #x87) (/r src dst))
interpret-xchg)
(define-insn xchg-m32-r32 (dst src)
#:decode [((byte #x87) (modr/m (== (bv #b00 2)) reg r/m))
(list (register-indirect (gpr64-no-rex r/m) #f 32) (gpr32-no-rex reg))]
#:encode (let ([ed (register-encode dst)]
[es (register-encode src)])
(list (rex/r (car es) (first ed)) (byte #x87) (modr/m (second ed) (cdr es) (third ed)) (fourth ed)))
interpret-xchg)
; REX.W + 87 /r
(define-insn xchg-r/m64-r64 (dst src)
#:decode [((rex.w/r r b) (byte #x87) (/r reg r/m))
(list (gpr64 b r/m) (gpr64 r reg))]
#:encode (list (rex.w/r src dst) (byte #x87) (/r src dst))
interpret-xchg)
| null | https://raw.githubusercontent.com/uw-unsat/serval/be11ecccf03f81b8bd0557acf8385a6a5d4f51ed/serval/x86/interp/xchg.rkt | racket | REX.W + 87 /r | #lang rosette
(require
"common.rkt")
(provide
xchg-r/m32-r32
xchg-r/m64-r64)
(define (interpret-xchg cpu dst src)
(define mm (cpu-memmgr cpu))
(when mm
(core:memmgr-atomic-begin mm))
(define temp (cpu-gpr-ref cpu dst))
(cpu-gpr-set! cpu dst (cpu-gpr-ref cpu src))
(cpu-gpr-set! cpu src temp)
(when mm
(core:memmgr-atomic-end mm)))
87 /r
(define-insn xchg-r/m32-r32 (dst src)
#:decode [((byte #x87) (/r reg r/m))
(list (gpr32-no-rex r/m) (gpr32-no-rex reg))]
[((rex/r r b) (byte #x87) (/r reg r/m))
(list (gpr32 b r/m) (gpr32 r reg))]
#:encode (list (rex/r src dst) (byte #x87) (/r src dst))
interpret-xchg)
(define-insn xchg-m32-r32 (dst src)
#:decode [((byte #x87) (modr/m (== (bv #b00 2)) reg r/m))
(list (register-indirect (gpr64-no-rex r/m) #f 32) (gpr32-no-rex reg))]
#:encode (let ([ed (register-encode dst)]
[es (register-encode src)])
(list (rex/r (car es) (first ed)) (byte #x87) (modr/m (second ed) (cdr es) (third ed)) (fourth ed)))
interpret-xchg)
(define-insn xchg-r/m64-r64 (dst src)
#:decode [((rex.w/r r b) (byte #x87) (/r reg r/m))
(list (gpr64 b r/m) (gpr64 r reg))]
#:encode (list (rex.w/r src dst) (byte #x87) (/r src dst))
interpret-xchg)
|
2fdb53ca8f98b1271adb53d40b60f512e994a7ab79877ef6c6378a57b021b94c | rowangithub/DOrder | cav2014.ml | let rec loop i (j:int) k p n =
if i < n then
if (i >= p ) then loop (i+1) k k p n
else loop (i+1) j k p n
else j
let main p n =
let i = 0 in
let j = 0 in
let k = 3 in
let res = loop i j k p n in
if (p > 0 && n > p) then
assert (res = k)
else ()
let _ = main (-1) (-3) | null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/tests/icfp/cav2014.ml | ocaml | let rec loop i (j:int) k p n =
if i < n then
if (i >= p ) then loop (i+1) k k p n
else loop (i+1) j k p n
else j
let main p n =
let i = 0 in
let j = 0 in
let k = 3 in
let res = loop i j k p n in
if (p > 0 && n > p) then
assert (res = k)
else ()
let _ = main (-1) (-3) |
|
13c3da4134f5dd66baa6498a92bfc7721f08f8dd08ef26f8439333f9dbe77a9b | facebookincubator/hsthrift | AsyncTest.hs | Copyright ( c ) Facebook , Inc. and its affiliates .
# LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - type - defaults #
module AsyncTest (main) where
import Data.Monoid
import Test.HUnit
import TestRunner
import Util.Async
windowUnorderedReduceTest :: Test
windowUnorderedReduceTest = TestLabel "windowUnorderedReduce"
$ TestCase $ do
r0 <- windowUnorderedReduce 1 identity [1..10]
assertEqual "test0" 55 $ sum r0
r1 <- windowUnorderedReduce 3 identity [1..10]
assertEqual "test1" 55 $ sum r1
r2 <- windowUnorderedReduce 16 identity [1..10]
assertEqual "test2" 55 $ sum r2
where
identity :: Int -> IO (Sum Int)
identity a = return $ Sum a
main :: IO ()
main = testRunner $ TestList
[ windowUnorderedReduceTest
]
| null | https://raw.githubusercontent.com/facebookincubator/hsthrift/d3ff75d487e9d0c2904d18327373b603456e7a01/common/util/tests/AsyncTest.hs | haskell | # LANGUAGE OverloadedStrings # | Copyright ( c ) Facebook , Inc. and its affiliates .
# LANGUAGE CPP #
# OPTIONS_GHC -fno - warn - type - defaults #
module AsyncTest (main) where
import Data.Monoid
import Test.HUnit
import TestRunner
import Util.Async
windowUnorderedReduceTest :: Test
windowUnorderedReduceTest = TestLabel "windowUnorderedReduce"
$ TestCase $ do
r0 <- windowUnorderedReduce 1 identity [1..10]
assertEqual "test0" 55 $ sum r0
r1 <- windowUnorderedReduce 3 identity [1..10]
assertEqual "test1" 55 $ sum r1
r2 <- windowUnorderedReduce 16 identity [1..10]
assertEqual "test2" 55 $ sum r2
where
identity :: Int -> IO (Sum Int)
identity a = return $ Sum a
main :: IO ()
main = testRunner $ TestList
[ windowUnorderedReduceTest
]
|
3b9b79d0ec7afbbc08c6864ecdfbcd37784a933e87ae111c576215121da2ff5b | martinslota/protocell | text_format.mli | open Base
type t
type sort
val show_sort : sort -> string
type id = string
type serialization_error = Field_value.validation_error
val show_serialization_error : serialization_error -> string
type parse_error =
[ `Unexpected_character of char
| `Invalid_number_string of string
| `Identifier_expected
| `Nested_message_unfinished
| Byte_input.error ]
val show_parse_error : parse_error -> string
type decoding_error =
[ `Wrong_text_value_for_string_field of sort * string Field_value.typ
| `Wrong_text_value_for_int_field of sort * int Field_value.typ
| `Wrong_text_value_for_int32_field of sort * int32 Field_value.typ
| `Wrong_text_value_for_int64_field of sort * int64 Field_value.typ
| `Wrong_text_value_for_float_field of sort * float Field_value.typ
| `Wrong_text_value_for_bool_field of sort * bool Field_value.typ
| `Wrong_text_value_for_message_field of sort
| `Wrong_text_value_for_enum_field of sort
| `Unrecognized_enum_value of string
| `Multiple_oneof_fields_set of id list
| `Integer_outside_int_type_range of int64 ]
val show_decoding_error : decoding_error -> string
type deserialization_error =
[ parse_error
| decoding_error
| Field_value.validation_error ]
val show_deserialization_error : deserialization_error -> string
type parsed_message
val serialize_field
: id ->
'v Field_value.typ ->
'v ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_optional_field
: id ->
'v Field_value.typ ->
'v option ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_repeated_field
: id ->
'v Field_value.typ ->
'v list ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_message_field
: id ->
('v -> (string, ([> serialization_error] as 'e)) Result.t) ->
'v option ->
Byte_output.t ->
(unit, 'e) Result.t
val serialize_oneof_message_field
: id ->
('v -> (string, ([> serialization_error] as 'e)) Result.t) ->
'v ->
Byte_output.t ->
(unit, 'e) Result.t
val serialize_repeated_message_field
: id ->
('v -> (string, ([> serialization_error] as 'e)) Result.t) ->
'v list ->
Byte_output.t ->
(unit, 'e) Result.t
val serialize_enum_field
: id ->
('v -> string) ->
'v ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_repeated_enum_field
: id ->
('v -> string) ->
'v list ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val deserialize_message : Byte_input.t -> (parsed_message, [> parse_error]) Result.t
val decode_field
: id ->
'v Field_value.typ ->
parsed_message ->
('v, [> decoding_error | Field_value.validation_error]) Result.t
val decode_optional_field
: id ->
'v Field_value.typ ->
parsed_message ->
('v option, [> decoding_error | Field_value.validation_error]) Result.t
val decode_repeated_field
: id ->
'v Field_value.typ ->
parsed_message ->
('v list, [> decoding_error | Field_value.validation_error]) Result.t
val decode_message_field
: id ->
(string -> ('v, ([> deserialization_error] as 'e)) Result.t) ->
parsed_message ->
('v option, 'e) Result.t
val decode_oneof_message_field
: id ->
(string -> ('v, ([> deserialization_error] as 'e)) Result.t) ->
parsed_message ->
('v, 'e) Result.t
val decode_repeated_message_field
: id ->
(string -> ('v, ([> deserialization_error] as 'e)) Result.t) ->
parsed_message ->
('v list, 'e) Result.t
val decode_enum_field
: id ->
(string -> 'v option) ->
(unit -> 'v) ->
parsed_message ->
('v, [> deserialization_error]) Result.t
val decode_repeated_enum_field
: id ->
(string -> 'v option) ->
(unit -> 'v) ->
parsed_message ->
('v list, [> deserialization_error]) Result.t
val decode_oneof_field
: (id, parsed_message -> ('v, ([> deserialization_error] as 'e)) Result.t) List.Assoc.t ->
parsed_message ->
('v option, 'e) Result.t
| null | https://raw.githubusercontent.com/martinslota/protocell/62545c7fb63ff76c95449ba015e40e0c3e0d94a5/src/runtime/text_format.mli | ocaml | open Base
type t
type sort
val show_sort : sort -> string
type id = string
type serialization_error = Field_value.validation_error
val show_serialization_error : serialization_error -> string
type parse_error =
[ `Unexpected_character of char
| `Invalid_number_string of string
| `Identifier_expected
| `Nested_message_unfinished
| Byte_input.error ]
val show_parse_error : parse_error -> string
type decoding_error =
[ `Wrong_text_value_for_string_field of sort * string Field_value.typ
| `Wrong_text_value_for_int_field of sort * int Field_value.typ
| `Wrong_text_value_for_int32_field of sort * int32 Field_value.typ
| `Wrong_text_value_for_int64_field of sort * int64 Field_value.typ
| `Wrong_text_value_for_float_field of sort * float Field_value.typ
| `Wrong_text_value_for_bool_field of sort * bool Field_value.typ
| `Wrong_text_value_for_message_field of sort
| `Wrong_text_value_for_enum_field of sort
| `Unrecognized_enum_value of string
| `Multiple_oneof_fields_set of id list
| `Integer_outside_int_type_range of int64 ]
val show_decoding_error : decoding_error -> string
type deserialization_error =
[ parse_error
| decoding_error
| Field_value.validation_error ]
val show_deserialization_error : deserialization_error -> string
type parsed_message
val serialize_field
: id ->
'v Field_value.typ ->
'v ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_optional_field
: id ->
'v Field_value.typ ->
'v option ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_repeated_field
: id ->
'v Field_value.typ ->
'v list ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_message_field
: id ->
('v -> (string, ([> serialization_error] as 'e)) Result.t) ->
'v option ->
Byte_output.t ->
(unit, 'e) Result.t
val serialize_oneof_message_field
: id ->
('v -> (string, ([> serialization_error] as 'e)) Result.t) ->
'v ->
Byte_output.t ->
(unit, 'e) Result.t
val serialize_repeated_message_field
: id ->
('v -> (string, ([> serialization_error] as 'e)) Result.t) ->
'v list ->
Byte_output.t ->
(unit, 'e) Result.t
val serialize_enum_field
: id ->
('v -> string) ->
'v ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val serialize_repeated_enum_field
: id ->
('v -> string) ->
'v list ->
Byte_output.t ->
(unit, [> serialization_error]) Result.t
val deserialize_message : Byte_input.t -> (parsed_message, [> parse_error]) Result.t
val decode_field
: id ->
'v Field_value.typ ->
parsed_message ->
('v, [> decoding_error | Field_value.validation_error]) Result.t
val decode_optional_field
: id ->
'v Field_value.typ ->
parsed_message ->
('v option, [> decoding_error | Field_value.validation_error]) Result.t
val decode_repeated_field
: id ->
'v Field_value.typ ->
parsed_message ->
('v list, [> decoding_error | Field_value.validation_error]) Result.t
val decode_message_field
: id ->
(string -> ('v, ([> deserialization_error] as 'e)) Result.t) ->
parsed_message ->
('v option, 'e) Result.t
val decode_oneof_message_field
: id ->
(string -> ('v, ([> deserialization_error] as 'e)) Result.t) ->
parsed_message ->
('v, 'e) Result.t
val decode_repeated_message_field
: id ->
(string -> ('v, ([> deserialization_error] as 'e)) Result.t) ->
parsed_message ->
('v list, 'e) Result.t
val decode_enum_field
: id ->
(string -> 'v option) ->
(unit -> 'v) ->
parsed_message ->
('v, [> deserialization_error]) Result.t
val decode_repeated_enum_field
: id ->
(string -> 'v option) ->
(unit -> 'v) ->
parsed_message ->
('v list, [> deserialization_error]) Result.t
val decode_oneof_field
: (id, parsed_message -> ('v, ([> deserialization_error] as 'e)) Result.t) List.Assoc.t ->
parsed_message ->
('v option, 'e) Result.t
|
|
49a20fd5e9e95cb851c3026c31e58a013c3576cc09609200ef3303de1d13659a | thattommyhall/offline-4clojure | p91.clj | ;; Graph Connectivity - Hard
Given a graph , determine whether the graph is connected . A connected graph is such that a path exists between any two given nodes.<br/><br/>-Your function must return true if the graph is connected and false otherwise.<br/><br/>-You will be given a set of tuples representing the edges of a graph . Each member of a tuple being a vertex / node in the graph.<br/><br/>-Each edge is undirected ( can be traversed either direction ) .
;;
;; tags - graph-theory
;; restricted -
(ns offline-4clojure.p91
(:use clojure.test))
(def __
;; your solution here
)
(defn -main []
(are [soln] soln
(= true (__ #{[:a :a]}))
(= true (__ #{[:a :b]}))
(= false (__ #{[1 2] [2 3] [3 1]
[4 5] [5 6] [6 4]}))
(= true (__ #{[1 2] [2 3] [3 1]
[4 5] [5 6] [6 4] [3 4]}))
(= false (__ #{[:a :b] [:b :c] [:c :d]
[:x :y] [:d :a] [:b :e]}))
(= true (__ #{[:a :b] [:b :c] [:c :d]
[:x :y] [:d :a] [:b :e] [:x :a]}))
))
| null | https://raw.githubusercontent.com/thattommyhall/offline-4clojure/73e32fc6687816aea3c514767cef3916176589ab/src/offline_4clojure/p91.clj | clojure | Graph Connectivity - Hard
tags - graph-theory
restricted -
your solution here | Given a graph , determine whether the graph is connected . A connected graph is such that a path exists between any two given nodes.<br/><br/>-Your function must return true if the graph is connected and false otherwise.<br/><br/>-You will be given a set of tuples representing the edges of a graph . Each member of a tuple being a vertex / node in the graph.<br/><br/>-Each edge is undirected ( can be traversed either direction ) .
(ns offline-4clojure.p91
(:use clojure.test))
(def __
)
(defn -main []
(are [soln] soln
(= true (__ #{[:a :a]}))
(= true (__ #{[:a :b]}))
(= false (__ #{[1 2] [2 3] [3 1]
[4 5] [5 6] [6 4]}))
(= true (__ #{[1 2] [2 3] [3 1]
[4 5] [5 6] [6 4] [3 4]}))
(= false (__ #{[:a :b] [:b :c] [:c :d]
[:x :y] [:d :a] [:b :e]}))
(= true (__ #{[:a :b] [:b :c] [:c :d]
[:x :y] [:d :a] [:b :e] [:x :a]}))
))
|
86fe04aefe52ed3e1bfe6c57c68ac480435c5f897e8a95ad567c7ba80f623253 | damn/engine | input.clj | (ns engine.input
(:require [engine.graphics :refer (mouse-coords)])
(:import [com.badlogic.gdx Gdx Input Input$Buttons Input$Keys]))
- position / coords ( move to graphics ? )
(mouse-coords))
(defn- to-mouse-key [k]
(case k
:left Input$Buttons/LEFT
:right Input$Buttons/RIGHT))
(defn- is-mouse-button-down? [k] (.isButtonPressed (Gdx/input) (to-mouse-key k)))
(defn- is-mouse-pressed? [k] (.isButtonJustPressed (Gdx/input) (to-mouse-key k)))
(def is-leftbutton-down? (partial is-mouse-button-down? :left))
(def is-rightbutton-down? (partial is-mouse-button-down? :right))
(defn- fix-number-key
"Keys :0, :1, ... :9 are understood as NUM_0, NUM_1, ..."
[k]
(try
(let [is-num (Integer/parseInt (name k))]
(str "NUM_" (name k)))
(catch NumberFormatException e
(name k))))
(def ^:private to-keyboard-key
(memoize (fn [k]
(eval (symbol (str "com.badlogic.gdx.Input$Keys/" (fix-number-key k)))))))
(defn is-key-pressed?
TODO check if this docstring is still true .
"Since last call to this. So do not call this twice in one frame else it will return false."
[k]
(.isKeyJustPressed (Gdx/input) (to-keyboard-key k)))
(defn is-key-down? [k]
(.isKeyPressed (Gdx/input) (to-keyboard-key k)))
; when using is-...-pressed? it is probably useful also to check if is-...-consumed?
; for example a bug occured:
; waypoints menu opens with try-consume-..-pressed while is-...-pressed? closed it again in the same frame
; TODO maybe is-...-pressed? always checks if not consumed yet (so it is really 'consumed')
(def mousebutton {:pressed false
:consumed false})
(def ^:private state (atom {:left mousebutton
:right mousebutton}))
(defn- is-pressed? [button] (-> @state button :pressed))
(defn is-leftm-pressed? [] (is-pressed? :left))
(defn is-rightm-pressed? [] (is-pressed? :right))
(defn- is-consumed? [button] (-> @state button :consumed))
(defn is-leftm-consumed? [] (is-consumed? :left))
(defn is-rightm-consumed? [] (is-consumed? :right))
(defn- check-if-pressed [state button]
(assoc-in state [button :pressed] (is-mouse-pressed? button)))
(defn- resolve-consumed [state button]
(if (and (-> state button :consumed)
(not (is-mouse-button-down? button)))
(assoc-in state [button :consumed] false)
state))
(defn update-mousebutton-state []
(swap! state #(-> %
(check-if-pressed :left)
(resolve-consumed :left)
(check-if-pressed :right)
(resolve-consumed :right))))
(defn- try-consume-pressed [button]
(when (and (is-pressed? button)
(not (is-consumed? button)))
(swap! state assoc-in [button :consumed] true)))
; TODO instead of 'consumed' concept rather something like 'mouse-being-held' ?!
(defn try-consume-leftm-pressed
"If leftmouse was pressed this frame and not yet consumed, consumes it and returns true else returns nil.
It is consumed as long as the leftmouse-button is down."
[]
(try-consume-pressed :left))
(defn try-consume-rightm-pressed []
"If rightmouse was pressed this frame and not yet consumed, consumes it and returns true else returns nil.
It is consumed as long as the leftmouse-button is down."
(try-consume-pressed :right))
| null | https://raw.githubusercontent.com/damn/engine/3fff91e6f6610272b5d3a6f0ada6d89adb218397/src/engine/input.clj | clojure | when using is-...-pressed? it is probably useful also to check if is-...-consumed?
for example a bug occured:
waypoints menu opens with try-consume-..-pressed while is-...-pressed? closed it again in the same frame
TODO maybe is-...-pressed? always checks if not consumed yet (so it is really 'consumed')
TODO instead of 'consumed' concept rather something like 'mouse-being-held' ?! | (ns engine.input
(:require [engine.graphics :refer (mouse-coords)])
(:import [com.badlogic.gdx Gdx Input Input$Buttons Input$Keys]))
- position / coords ( move to graphics ? )
(mouse-coords))
(defn- to-mouse-key [k]
(case k
:left Input$Buttons/LEFT
:right Input$Buttons/RIGHT))
(defn- is-mouse-button-down? [k] (.isButtonPressed (Gdx/input) (to-mouse-key k)))
(defn- is-mouse-pressed? [k] (.isButtonJustPressed (Gdx/input) (to-mouse-key k)))
(def is-leftbutton-down? (partial is-mouse-button-down? :left))
(def is-rightbutton-down? (partial is-mouse-button-down? :right))
(defn- fix-number-key
"Keys :0, :1, ... :9 are understood as NUM_0, NUM_1, ..."
[k]
(try
(let [is-num (Integer/parseInt (name k))]
(str "NUM_" (name k)))
(catch NumberFormatException e
(name k))))
(def ^:private to-keyboard-key
(memoize (fn [k]
(eval (symbol (str "com.badlogic.gdx.Input$Keys/" (fix-number-key k)))))))
(defn is-key-pressed?
TODO check if this docstring is still true .
"Since last call to this. So do not call this twice in one frame else it will return false."
[k]
(.isKeyJustPressed (Gdx/input) (to-keyboard-key k)))
(defn is-key-down? [k]
(.isKeyPressed (Gdx/input) (to-keyboard-key k)))
(def mousebutton {:pressed false
:consumed false})
(def ^:private state (atom {:left mousebutton
:right mousebutton}))
(defn- is-pressed? [button] (-> @state button :pressed))
(defn is-leftm-pressed? [] (is-pressed? :left))
(defn is-rightm-pressed? [] (is-pressed? :right))
(defn- is-consumed? [button] (-> @state button :consumed))
(defn is-leftm-consumed? [] (is-consumed? :left))
(defn is-rightm-consumed? [] (is-consumed? :right))
(defn- check-if-pressed [state button]
(assoc-in state [button :pressed] (is-mouse-pressed? button)))
(defn- resolve-consumed [state button]
(if (and (-> state button :consumed)
(not (is-mouse-button-down? button)))
(assoc-in state [button :consumed] false)
state))
(defn update-mousebutton-state []
(swap! state #(-> %
(check-if-pressed :left)
(resolve-consumed :left)
(check-if-pressed :right)
(resolve-consumed :right))))
(defn- try-consume-pressed [button]
(when (and (is-pressed? button)
(not (is-consumed? button)))
(swap! state assoc-in [button :consumed] true)))
(defn try-consume-leftm-pressed
"If leftmouse was pressed this frame and not yet consumed, consumes it and returns true else returns nil.
It is consumed as long as the leftmouse-button is down."
[]
(try-consume-pressed :left))
(defn try-consume-rightm-pressed []
"If rightmouse was pressed this frame and not yet consumed, consumes it and returns true else returns nil.
It is consumed as long as the leftmouse-button is down."
(try-consume-pressed :right))
|
d94412bb658d6123c01306a868ead711a76b1562399327f3ec9fba70aa2ccea4 | jayrbolton/coursework | memo_change.hs | memoized coin change problem by
-- where
change ( 0,n ) = 0
-- change (c,n)
-- | v!!n <= c = min (mem_change c (n-1)) (1 + mem_change (c-(v!!n)) n)
-- | v!!n > c = mem_change c (n-1)
GetChange(0 , n ) = 0 \\
GetChange(C , n ) = min(GetChange(C , n-1 ) , 1 + GetChange(C - n , n ) ) \ \ \ \ \ & \text { if $ v(n ) \leq C$ } \\
--GetChange(C, n) = GetChange(C, n-1) \ \ \ \ \ & \text{ if $v(n) > C$}\\
vs = [1,5,10,25,100,200]
-- Generate our memoization matrix. There's probably a prettier way to do it.
matrix c v = map (\(n,cs) -> (map (\c -> (n,c)) cs)) (zip [1..n] (replicate n [c,c-1..1]))
where n = length v
change_matrix c v = map (map (\(x,y) -> ch y (take x v))) (matrix c v)
where
ch 0 _ = 0
ch c [1] = c
ch c v
| last v <= c = min (mchange c (init v)) (1 + mchange (c-(last v)) v)
| last v > c = mchange c (init v)
where n = length v
mchange c v = (change_matrix c v) !! (length v - 1) !! 0
change 0 _ = 0
change c [1] = c
change c v
| last v <= c = min (change c (init v)) (1 + change (c-(last v)) v)
| last v > c = change c (init v)
where n = length v
| null | https://raw.githubusercontent.com/jayrbolton/coursework/f0da276527d42a6751fb8d29c76de35ce358fe65/student_originated_software/analysis/final/memo_change.hs | haskell | where
change (c,n)
| v!!n <= c = min (mem_change c (n-1)) (1 + mem_change (c-(v!!n)) n)
| v!!n > c = mem_change c (n-1)
GetChange(C, n) = GetChange(C, n-1) \ \ \ \ \ & \text{ if $v(n) > C$}\\
Generate our memoization matrix. There's probably a prettier way to do it. | memoized coin change problem by
change ( 0,n ) = 0
GetChange(0 , n ) = 0 \\
GetChange(C , n ) = min(GetChange(C , n-1 ) , 1 + GetChange(C - n , n ) ) \ \ \ \ \ & \text { if $ v(n ) \leq C$ } \\
vs = [1,5,10,25,100,200]
matrix c v = map (\(n,cs) -> (map (\c -> (n,c)) cs)) (zip [1..n] (replicate n [c,c-1..1]))
where n = length v
change_matrix c v = map (map (\(x,y) -> ch y (take x v))) (matrix c v)
where
ch 0 _ = 0
ch c [1] = c
ch c v
| last v <= c = min (mchange c (init v)) (1 + mchange (c-(last v)) v)
| last v > c = mchange c (init v)
where n = length v
mchange c v = (change_matrix c v) !! (length v - 1) !! 0
change 0 _ = 0
change c [1] = c
change c v
| last v <= c = min (change c (init v)) (1 + change (c-(last v)) v)
| last v > c = change c (init v)
where n = length v
|
5fe31431a6efa20bfefa638fac08ddd35427f1acf9e94e3cfd18b27cb6110085 | tmfg/mmtis-national-access-point | localization_test.clj | (ns ote.localization-test
(:require [clojure.test :as t :refer [deftest is]]
[ote.localization :as localization]
[clojure.set :as set]
[clojure.java.io :as io]))
(def languages ["fi" "sv" "en"])
(defn- deep-key-paths [prefix-path m]
(reduce-kv (fn [key-paths key val]
if is map , recurse
(into key-paths
(if (map? val)
(deep-key-paths (conj prefix-path key) val)
[(conj prefix-path key)])))
#{} m))
(defn- load-test-edn []
(-> (slurp (str "test/resources/lang.edn"))
read-string))
(deftest handle-unsupported-tr-operation
(let [translations (load-test-edn)]
(is (= (#'localization/message (get-in translations [:unsupported-op]) {})
(str "{{unknown translation operation " :no-op "}}")))))
(deftest concatenate-tr-vec
(let [translations (load-test-edn)]
(is (= (#'localization/message (get-in translations [:vec]) {})
"This is a vector"))))
(deftest handle-tr-plurals
(let [translations (load-test-edn)]
(is (= (#'localization/message (get-in translations [:plural]) {:count 0})
"Got no results"))
(is (= (#'localization/message (get-in translations [:plural]) {:count 1})
"Got one result"))
(is (= (#'localization/message (get-in translations [:plural]) {:count 2})
"Got 2 results"))))
;; TODO: Needs cljs testing support for more thorough testing of this feature.
(deftest tr-markdown-cljs-only
(let [translations (load-test-edn)]
(is (thrown-with-msg?
clojure.lang.ExceptionInfo #"Markdown formatted translations not supported."
(#'localization/message (get-in translations [:markdown]) {})))))
(deftest all-languages-have-same-keys
(let [langs (atom {})]
(doseq [lang languages]
(localization/load-language! lang (fn [_ translation]
(swap! langs assoc lang (deep-key-paths [] translation)))))
(let [langs @langs
fi-key-paths (langs "fi")]
(doseq [[lang lang-key-paths] langs]
(doseq [key-path fi-key-paths]
(is (lang-key-paths key-path)
(str "Translation for " key-path " missing in language " lang)))
(doseq [key-path lang-key-paths]
(is (fi-key-paths key-path)
(str "Extra key " key-path " in language " lang)))))))
| null | https://raw.githubusercontent.com/tmfg/mmtis-national-access-point/a86cc890ffa1fe4f773083be5d2556e87a93d975/ote/test/clj/ote/localization_test.clj | clojure | TODO: Needs cljs testing support for more thorough testing of this feature. | (ns ote.localization-test
(:require [clojure.test :as t :refer [deftest is]]
[ote.localization :as localization]
[clojure.set :as set]
[clojure.java.io :as io]))
(def languages ["fi" "sv" "en"])
(defn- deep-key-paths [prefix-path m]
(reduce-kv (fn [key-paths key val]
if is map , recurse
(into key-paths
(if (map? val)
(deep-key-paths (conj prefix-path key) val)
[(conj prefix-path key)])))
#{} m))
(defn- load-test-edn []
(-> (slurp (str "test/resources/lang.edn"))
read-string))
(deftest handle-unsupported-tr-operation
(let [translations (load-test-edn)]
(is (= (#'localization/message (get-in translations [:unsupported-op]) {})
(str "{{unknown translation operation " :no-op "}}")))))
(deftest concatenate-tr-vec
(let [translations (load-test-edn)]
(is (= (#'localization/message (get-in translations [:vec]) {})
"This is a vector"))))
(deftest handle-tr-plurals
(let [translations (load-test-edn)]
(is (= (#'localization/message (get-in translations [:plural]) {:count 0})
"Got no results"))
(is (= (#'localization/message (get-in translations [:plural]) {:count 1})
"Got one result"))
(is (= (#'localization/message (get-in translations [:plural]) {:count 2})
"Got 2 results"))))
(deftest tr-markdown-cljs-only
(let [translations (load-test-edn)]
(is (thrown-with-msg?
clojure.lang.ExceptionInfo #"Markdown formatted translations not supported."
(#'localization/message (get-in translations [:markdown]) {})))))
(deftest all-languages-have-same-keys
(let [langs (atom {})]
(doseq [lang languages]
(localization/load-language! lang (fn [_ translation]
(swap! langs assoc lang (deep-key-paths [] translation)))))
(let [langs @langs
fi-key-paths (langs "fi")]
(doseq [[lang lang-key-paths] langs]
(doseq [key-path fi-key-paths]
(is (lang-key-paths key-path)
(str "Translation for " key-path " missing in language " lang)))
(doseq [key-path lang-key-paths]
(is (fi-key-paths key-path)
(str "Extra key " key-path " in language " lang)))))))
|
b8341cd252052eb595689443e45866d4f2acb6446db2f401ed5751a75618a419 | diagrams/diagrams-builder | diagrams-builder-ps.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE RecordWildCards #
module Main where
import System.Directory (copyFile,
createDirectoryIfMissing)
import qualified System.FilePath as FP
import System.Console.CmdArgs
import Diagrams.Backend.Postscript
import Diagrams.Builder
import Diagrams.Prelude hiding (height, width)
compileExample :: Build -> IO ()
compileExample (Build{..}) = do
f <- readFile srcFile
createDirectoryIfMissing True dir
let bopts = mkBuildOpts Postscript zero (PostscriptOptions outFile (mkSizeSpec2D width height) EPS)
& snippets .~ [f]
& imports .~ [ "Diagrams.Backend.Postscript" ]
& diaExpr .~ expr
& decideRegen .~
(hashedRegenerate
(\hash opts -> opts & psfileName .~ mkFile hash )
dir
)
res <- buildDiagram bopts
case res of
ParseErr err -> putStrLn ("Parse error in " ++ srcFile) >> putStrLn err
InterpErr ierr -> putStrLn ("Error while compiling " ++ srcFile) >>
putStrLn (ppInterpError ierr)
Skipped hash -> copyFile (mkFile (hashToHexStr hash)) outFile
OK hash act -> do act >> copyFile (mkFile (hashToHexStr hash)) outFile
where
mkFile base = dir FP.</> base FP.<.> "eps"
build :: Build
build =
defaultBuildOpts
{ outFile = "out.eps" &= typFile &= help "Output file (default: \"out.eps\")"
}
&= summary "The diagrams-builder-ps program, for dynamically rendering diagrams using the native postscript backend. Give it a source file and an expression to render (which may refer to things declared in the source file), and it outputs an image, using hashing to avoid rerendering images unnecessarily."
&= program "diagrams-builder-ps"
main :: IO ()
main = do
opts <- cmdArgs build
compileExample opts
| null | https://raw.githubusercontent.com/diagrams/diagrams-builder/fa73414a1de8e9d8ba1117aa5ae023633859c0db/src/tools/diagrams-builder-ps.hs | haskell | # LANGUAGE DeriveDataTypeable # | # LANGUAGE RecordWildCards #
module Main where
import System.Directory (copyFile,
createDirectoryIfMissing)
import qualified System.FilePath as FP
import System.Console.CmdArgs
import Diagrams.Backend.Postscript
import Diagrams.Builder
import Diagrams.Prelude hiding (height, width)
compileExample :: Build -> IO ()
compileExample (Build{..}) = do
f <- readFile srcFile
createDirectoryIfMissing True dir
let bopts = mkBuildOpts Postscript zero (PostscriptOptions outFile (mkSizeSpec2D width height) EPS)
& snippets .~ [f]
& imports .~ [ "Diagrams.Backend.Postscript" ]
& diaExpr .~ expr
& decideRegen .~
(hashedRegenerate
(\hash opts -> opts & psfileName .~ mkFile hash )
dir
)
res <- buildDiagram bopts
case res of
ParseErr err -> putStrLn ("Parse error in " ++ srcFile) >> putStrLn err
InterpErr ierr -> putStrLn ("Error while compiling " ++ srcFile) >>
putStrLn (ppInterpError ierr)
Skipped hash -> copyFile (mkFile (hashToHexStr hash)) outFile
OK hash act -> do act >> copyFile (mkFile (hashToHexStr hash)) outFile
where
mkFile base = dir FP.</> base FP.<.> "eps"
build :: Build
build =
defaultBuildOpts
{ outFile = "out.eps" &= typFile &= help "Output file (default: \"out.eps\")"
}
&= summary "The diagrams-builder-ps program, for dynamically rendering diagrams using the native postscript backend. Give it a source file and an expression to render (which may refer to things declared in the source file), and it outputs an image, using hashing to avoid rerendering images unnecessarily."
&= program "diagrams-builder-ps"
main :: IO ()
main = do
opts <- cmdArgs build
compileExample opts
|
05aaa4734b3abb4bda5c9e363d9fa55db17ef943b70232b55281bc50a6866b4a | heroku/logplex | logplex_tlssyslog_drain.erl | %%%-------------------------------------------------------------------
Geoff Ca nt
@author nt < >
%% @version {@vsn}, {@date} {@time}
@doc Syslog / TLS drain
%% See #section-4.1
%% -lear-ietf-syslog-uri-00
%% @end
%%%-------------------------------------------------------------------
-module(logplex_tlssyslog_drain).
-behaviour(gen_fsm).
-define(SERVER, ?MODULE).
-define(RECONNECT_MSG, reconnect).
-define(TARGET_SEND_SIZE, 4096).
-define(SEND_TIMEOUT_MSG, send_timeout).
-define(SEND_TIMEOUT, timer:seconds(4)).
-define(HIBERNATE_TIMEOUT, 5000).
-define(DEFAULT_SHRINK_TRIES, 10).
-define(SHRINK_BUF_SIZE, 10).
-define(CLOSE_TIMEOUT_MSG, close_timeout).
-define(SSL_SOCKET, {sslsocket,_,_}).
-include("logplex.hrl").
-include("logplex_error.hrl").
-include("logplex_logging.hrl").
-include_lib("ex_uri/include/ex_uri.hrl").
-type pstate() :: 'disconnected' | 'ready_to_send' | 'sending' | 'disconnecting'.
%% ------------------------------------------------------------------
%% API Function Exports
%% ------------------------------------------------------------------
-export([resize_msg_buffer/2
,set_target_send_size/2
]).
-export([valid_uri/1
,uri/2
,start_link/4
]).
%% ------------------------------------------------------------------
%% gen_fsm Function Exports
%% ------------------------------------------------------------------
-export([disconnected/2,
ready_to_send/2,
sending/2,
disconnecting/2
]).
-export([init/1, handle_event/3, handle_sync_event/4,
handle_info/3, terminate/3, code_change/4]).
-record(state, {drain_id :: logplex_drain:id(),
drain_tok :: logplex_drain:token(),
channel_id :: logplex_channel:id(),
uri :: #ex_uri{},
host :: string() | inet:ip_address() | binary(),
port :: inet:port_number(),
insecure :: boolean(),
sock = undefined :: 'undefined' | ssl:sslsocket(),
%% Buffer for messages while disconnected
buf = logplex_msg_buffer:new(default_buf_size()) :: logplex_msg_buffer:buf(),
%% Last time we connected or successfully sent data
last_good_time :: 'undefined' | erlang:timestamp(),
%% TCP failures since last_good_time
failures = 0 :: non_neg_integer(),
Reconnect timer reference
reconnect_tref = undefined :: 'undefined' | reference(),
%% Send timer reference
send_tref = undefined :: 'undefined' | reference(),
%% SSL Send connection monitor reference
send_mref = undefined :: 'undefined' | reference(),
%% Close timer reference
close_tref :: reference() | 'undefined',
Time of last successful connection
connect_time :: 'undefined' | erlang:timestamp()
}).
%% ------------------------------------------------------------------
%% API Function Definitions
%% ------------------------------------------------------------------
start_link(ChannelID, DrainID, DrainTok, Uri) ->
{Host, Port, Insecure} = logplex_drain:unpack_uri(Uri),
gen_fsm:start_link(?MODULE,
[#state{drain_id=DrainID,
drain_tok=DrainTok,
channel_id=ChannelID,
uri=Uri,
host=Host,
port=Port,
insecure=Insecure}],
[]).
valid_uri(#ex_uri{scheme="syslog+tls",
authority=#ex_uri_authority{host=Host, port=Port}} = Uri)
when is_list(Host), is_integer(Port),
0 < Port andalso Port =< 65535 ->
{valid, tlssyslog, Uri};
valid_uri(#ex_uri{scheme="syslog+tls",
authority=A=#ex_uri_authority{host=Host,
port=undefined}} = Uri)
when is_list(Host) ->
{valid, tlssyslog,
Uri#ex_uri{authority=A#ex_uri_authority{port=6514}}};
valid_uri(_) ->
{error, invalid_tlssyslog_uri}.
-spec uri(Host, Port) ->
#ex_uri{}
when Host :: iolist(),
Port :: 'undefined' | non_neg_integer().
uri(Host, undefined) ->
uri(Host, 514);
uri(Host, Port) when is_binary(Host), is_integer(Port) ->
uri(binary_to_list(Host), Port);
uri(Host, Port) when is_list(Host), is_integer(Port) ->
#ex_uri{scheme="syslog",
authority=#ex_uri_authority{host=Host, port=Port}}.
resize_msg_buffer(Pid, NewSize)
when is_integer(NewSize), NewSize > 0 ->
gen_fsm:sync_send_all_state_event(Pid, {resize_msg_buffer, NewSize}).
set_target_send_size(Pid, NewSize)
when is_integer(NewSize), NewSize > 0 ->
gen_fsm:sync_send_all_state_event(Pid, {set_target_send_size, NewSize}).
%% ------------------------------------------------------------------
%% gen_fsm Function Definitions
%% ------------------------------------------------------------------
@private
init([State0 = #state{sock = undefined, host=H, port=P,
drain_id=DrainId, channel_id=ChannelId}])
when H =/= undefined, is_integer(P) ->
try
random:seed(os:timestamp()),
logplex_drain:register(DrainId, ChannelId, tlssyslog,
{H,P}),
DrainSize = logplex_app:config(tcp_drain_buffer_size),
State = State0#state{buf = logplex_msg_buffer:new(DrainSize)},
?INFO("drain_id=~p channel_id=~s dest=~s insecure=~p at=spawn",
log_info(State, [State0#state.insecure])),
{ok, disconnected,
State, hibernate}
catch
error:badarg -> ignore
end.
%% @doc Disconnected state. We wait here for the reconnect timer to
%% fire before initiating the reconnect sequence.
disconnected({timeout, TRef, ?RECONNECT_MSG},
State = #state{reconnect_tref = TRef, sock = undefined}) ->
do_reconnect(State#state{reconnect_tref=undefined});
disconnected({timeout, Received, ?RECONNECT_MSG},
State = #state{reconnect_tref = Expected}) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_reconnect "
"expected=~p received=~p state=disconnected",
log_info(State, [Expected, Received])),
reconnect(State);
disconnected({post, Msg}, State) ->
reconnect(buffer(Msg, State));
disconnected({timeout, _Ref, ?CLOSE_TIMEOUT_MSG}, State) ->
%% Already disconnected; nothing to do here
{next_state, disconnected, State, hibernate};
disconnected(timeout, State) ->
Sleep when inactive , trigger fullsweep GC & Compact
{next_state, disconnected, State, hibernate};
disconnected(Msg, State) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~1000p state=disconnected",
log_info(State, [Msg])),
{next_state, disconnected, State, ?HIBERNATE_TIMEOUT}.
%% @doc We have a socket open and messages to send. Collect up an
%% appropriate amount and flush them to the socket.
ready_to_send({timeout, _Ref, ?SEND_TIMEOUT_MSG},
State = #state{sock = ?SSL_SOCKET}) ->
Stale message .
send(State);
ready_to_send({timeout, TRef, ?CLOSE_TIMEOUT_MSG},
State=#state{close_tref=TRef}) ->
case close_if_idle(State) of
{closed, ClosedState} ->
{next_state, disconnected, ClosedState, hibernate};
{not_closed, State} ->
case close_if_old(State) of
{closed, ClosedState} ->
{next_state, disconnected, ClosedState, hibernate};
{not_closed, ContinueState} ->
{next_state, ready_to_send, ContinueState}
end
end;
ready_to_send({post, Msg}, State = #state{sock = ?SSL_SOCKET}) ->
send(buffer(Msg, State));
ready_to_send({inet_reply, Sock, ok}, S = #state{sock = Sock})
when is_port(Sock) ->
%% Stale inet reply
send(S);
ready_to_send(timeout, S = #state{}) ->
Sleep when inactive , trigger fullsweep GC & Compact
{next_state, ready_to_send, S, hibernate};
ready_to_send(Msg, State = #state{sock = ?SSL_SOCKET}) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~p state=ready_to_send",
log_info(State, [Msg])),
{next_state, ready_to_send, State, ?HIBERNATE_TIMEOUT}.
%% @doc We sent some data to the socket and are waiting the result of
%% the send operation.
sending({timeout, Ref, ?SEND_TIMEOUT_MSG},
S = #state{send_tref=Ref}) ->
?INFO("drain_id=~p channel_id=~s dest=~s err=send_timeout "
"state=sending",
log_info(S, [])),
reconnect(tcp_bad(S#state{send_mref=undefined, send_tref=undefined}));
sending({post, Msg}, State) ->
{next_state, sending, buffer(Msg, State), ?HIBERNATE_TIMEOUT};
sending({MRef, ok}, S = #state{send_mref = MRef, send_tref = TRef}) ->
erlang:demonitor(MRef, [flush]),
send(tcp_good(S#state{send_mref=undefined,
send_tref=cancel_timeout(TRef, ?SEND_TIMEOUT_MSG)}));
sending({MRef, {error, Reason}}, S = #state{send_mref=MRef, sock=Sock}) ->
erlang:demonitor(MRef, [flush]),
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=ssl data=~p sock=~p duration=~s state=sending",
log_info(S, [sending, Reason, Sock, duration(S)])),
reconnect(tcp_bad(S#state{send_mref = undefined}));
sending({'DOWN', MRef, _, _, Reason}, S = #state{send_mref=MRef, sock=Sock}) ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=ssl data=~p sock=~p duration=~s state=sending",
log_info(S, [sending, Reason, Sock, duration(S)])),
reconnect(tcp_bad(S#state{send_mref=undefined}));
sending({timeout, _TRef, ?CLOSE_TIMEOUT_MSG}, State) ->
case connection_too_old(State) of
true ->
{next_state, disconnecting, State};
_ ->
{next_state, sending, start_close_timer(State)}
end;
sending(timeout, S = #state{}) ->
Sleep when inactive , trigger fullsweep GC & Compact
{next_state, sending, S, hibernate};
sending(Msg, State) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~p state=sending",
log_info(State, [Msg])),
{next_state, sending, State, ?HIBERNATE_TIMEOUT}.
%% @doc We got an close timeout while in the sending state but haven't
%% gotten an inet_reply yet.
disconnecting({timeout, _TRef, ?SEND_TIMEOUT_MSG}, S) ->
?INFO("drain_id=~p channel_id=~s dest=~s err=send_timeout "
"state=disconnecting", log_info(S, [])),
{next_state, disconnected,
tcp_bad(close(S#state{send_tref=undefined})), hibernate};
disconnecting({inet_reply, Sock, Status}, S = #state{sock = Sock,
send_tref = SendTRef}) ->
case Status of
{error, Reason} ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s state=disconnecting",
log_info(S, [disconnecting, Reason, Sock, duration(S)]));
_ -> ok
end,
cancel_timeout(SendTRef, ?SEND_TIMEOUT_MSG),
NewState = S#state{sock = undefined, send_tref = undefined},
{next_state, disconnected, close(NewState), hibernate};
disconnecting({post, Msg}, State) ->
{next_state, sending, buffer(Msg, State), ?HIBERNATE_TIMEOUT};
disconnecting({timeout, TRef, ?CLOSE_TIMEOUT_MSG}, State=#state{close_tref=TRef}) ->
%% Shouldn't see this since entering this state means the timer wasn't reset
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_close_timeout "
"state=disconnecting", log_info(State, [])),
{next_state, disconnecting, State};
disconnecting(timeout, S = #state{}) ->
Sleep when inactive , trigger fullsweep GC & Compact
{next_state, disconnecting, S, hibernate};
disconnecting(Msg, State) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~p state=disconnecting", log_info(State, [Msg])),
{next_state, disconnecting, State, ?HIBERNATE_TIMEOUT}.
@private
state_name(Event , _ From , State ) - >
? ~p ] Unexpected event ~p " ,
%% [state_name, Event]),
{ next_state , state_name , State } .
@private
handle_event(_Event, StateName, State) ->
{next_state, StateName, State, ?HIBERNATE_TIMEOUT}.
@private
handle_sync_event({set_target_send_size, Size}, _From, StateName,
State = #state{})
when is_integer(Size), Size > 0 ->
put(target_send_size, Size),
{reply, {ok, Size}, StateName, State, ?HIBERNATE_TIMEOUT};
handle_sync_event({resize_msg_buffer, NewSize}, _From, StateName,
State = #state{buf = Buf})
when is_integer(NewSize), NewSize > 0 ->
NewBuf = logplex_msg_buffer:resize(NewSize, Buf),
{reply, ok, StateName, State#state{buf = NewBuf}, ?HIBERNATE_TIMEOUT};
handle_sync_event(Event, _From, StateName, State) ->
?WARN("[state ~p] Unexpected event ~p",
[StateName, Event]),
{next_state, StateName, State, ?HIBERNATE_TIMEOUT}.
@private
handle_info({tcp, Sock, Data}, StateName,
State = #state{sock = Sock}) ->
?WARN("drain_id=~p channel_id=~s dest=~s state=~p "
"err=unexpected_peer_data data=~p",
log_info(State, [StateName, Data])),
{next_state, StateName, State, ?HIBERNATE_TIMEOUT};
handle_info({tcp_error, Sock, Reason}, StateName,
State = #state{sock = Sock}) ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [StateName, Reason, Sock, duration(State)])),
reconnect(tcp_bad(State));
handle_info({inet_reply, Sock, {error, Reason}}, StateName,
State = #state{sock = Sock}) ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [StateName, Reason, Sock, duration(State)])),
reconnect(tcp_bad(State));
handle_info({ssl_closed, Sock}, StateName,
State = #state{sock = Sock}) ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [StateName, closed, Sock, duration(State)])),
reconnect(tcp_bad(State));
handle_info(shutdown, StateName, State0 = #state{sock = ?SSL_SOCKET}) ->
case send(State0) of
{next_state, ready_to_send, State1} ->
catch ssl:close(State1#state.sock),
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State1, [StateName, shutdown, State1#state.sock, duration(State1)])),
{stop, {shutdown,call}, State1#state{sock = undefined}};
{next_state, sending, State1} ->
handle_info(shutdown, StateName, State1)
end;
handle_info(shutdown, StateName, State) ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p duration=~s",
log_info(State, [StateName, shutdown, duration(State)])),
{stop, {shutdown,call}, State};
%% close_timeout used to be called idle_timeout; remove once we are on v72+
%% this can be removed once we are on v72+
handle_info({timeout, TRef, idle_timeout}, StateName, State) ->
apply(?MODULE, StateName, [{timeout, TRef, ?CLOSE_TIMEOUT_MSG}, State]);
handle_info(timeout, StateName, State) ->
Sleep when inactive , trigger fullsweep GC & Compact
{next_state, StateName, State, hibernate};
handle_info(Info, StateName, State) ->
?MODULE:StateName(Info, State).
@private
terminate(Reason, StateName, State) ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"at=terminate reason=~p",
log_info(State, [StateName, Reason])),
ok.
@private
code_change(_OldVsn, StateName, State, _Extra) ->
{ok, StateName, State, ?HIBERNATE_TIMEOUT}.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
@private
@doc Time has finally come to reconnect . Attempt the reconnection ,
%% send buffered messages on success, schedule a delayed reconnect if
%% not.
-spec do_reconnect(#state{}) ->
{next_state, pstate(), #state{}}.
do_reconnect(State = #state{sock = undefined,
reconnect_tref = undefined,
buf=Buf,
failures = Failures}) ->
case connect(State) of
{ok, Sock} ->
?INFO("drain_id=~p channel_id=~s dest=~s "
"state=disconnected at=connect try=~p sock=~p",
log_info(State, [Failures + 1, Sock])),
NewState = State#state{sock=Sock,
reconnect_tref = undefined,
send_tref = undefined,
buf = maybe_resize(Buf),
connect_time=os:timestamp()},
send(start_close_timer(NewState));
{error, Reason} ->
NewState = tcp_bad(State),
case Failures of
0 ->
%% Reduce log volume by skipping logging on
first failure .
ok;
_ ->
handle_error(Reason, State),
?ERR("drain_id=~p channel_id=~s dest=~s at=connect "
"err=gen_tcp data=~p try=~p last_success=~s "
"state=disconnected",
log_info(State, [Reason, NewState#state.failures,
time_failed(NewState)]))
end,
reconnect(NewState)
end.
handle_error({tls_alert, Alert}, #state{ channel_id=ChannelID, uri=URI, drain_tok=DrainToken }) ->
logplex_message:process_error(ChannelID, DrainToken, ?L14, "error=\"~s\" uri=\"~s\"", [Alert, logplex_drain:uri_to_binary(URI)]);
handle_error(_, _) ->
ok.
@private
connect(#state{sock = undefined, channel_id=ChannelID, drain_id=DrainID, uri=Dest, host=Host, port=Port})
when is_integer(Port), 0 < Port, Port =< 65535 ->
SendTimeoutS = logplex_app:config(tcp_syslog_send_timeout_secs),
TLSOpts = logplex_tls:connect_opts(ChannelID, DrainID, Dest),
SocketOpts = socket_opts(),
ssl:connect(Host, Port, TLSOpts ++ SocketOpts,
timer:seconds(SendTimeoutS));
connect(#state{}) ->
{error, bogus_port_number}.
socket_opts() ->
[binary
%% We don't expect data, but why not.
,{active, true}
,{exit_on_close, true}
,{keepalive, true}
,{packet, raw}
,{reuseaddr, true}].
-spec reconnect(#state{}) -> {next_state, pstate(), #state{}}.
@private
reconnect(State = #state{reconnect_tref = Ref}) when is_reference(Ref) ->
Reconnect timer was set
case erlang:read_timer(Ref) of
false ->
%% and has expired
reconnect(State#state{reconnect_tref=undefined});
_ ->
%% and is still valid
{next_state, disconnected, State, ?HIBERNATE_TIMEOUT}
end;
reconnect(State = #state{failures = 0, last_good_time=undefined}) ->
First reconnect ever
%% Skip straight through to reconnection code.
do_reconnect(State);
reconnect(State = #state{failures = 0, last_good_time=T})
when is_tuple(T), tuple_size(T) =:= 3 ->
Min = logplex_app:config(tcp_syslog_reconnect_min, 30),
SecsSinceConnect = timer:now_diff(os:timestamp(), T) div 1000000,
case SecsSinceConnect of
TooFew when TooFew < Min ->
%% We hibernate only when we need to reconnect with a timer. The
%% timer acts as a rate limiter! If you remove the timer, you must
%% re-think the hibernation.
{next_state, disconnected,
reconnect_in(timer:seconds(Min), State), hibernate};
_EnoughTime ->
do_reconnect(State)
end;
reconnect(State = #state{failures = F}) ->
Max = logplex_app:config(tcp_syslog_backoff_max, 300),
BackOff = case length(integer_to_list(Max, 2)) of
MaxExp when F > MaxExp -> Max;
_ -> 1 bsl F
end,
NewBuf = maybe_shrink(State),
%% We hibernate only when we need to reconnect with a timer. The timer
%% acts as a rate limiter! If you remove the timer, you must re-think
%% the hibernation.
{next_state, disconnected,
reconnect_in(timer:seconds(BackOff), State#state{buf=NewBuf}),
hibernate}.
reconnect_in(MS, State = #state{}) ->
Ref = erlang:start_timer(MS, self(), ?RECONNECT_MSG),
State#state{reconnect_tref = Ref}.
@private
tcp_good(State = #state{}) ->
State#state{last_good_time = os:timestamp(),
failures = 0}.
@private
%% Caller must ensure sock is closed before calling this.
tcp_bad(State = #state{send_tref=TRef}) when is_reference(TRef) ->
%% After the socket is closed the send-timer is irrelevant
cancel_timeout(TRef, ?SEND_TIMEOUT_MSG),
tcp_bad(State#state{send_tref = undefined});
tcp_bad(State = #state{sock = Sock}) when Sock =/= undefined ->
catch ssl:close(Sock),
tcp_bad(State#state{sock = undefined});
tcp_bad(State = #state{sock = undefined,
failures = F}) ->
State#state{failures = F + 1}.
-spec time_failed(#state{}) -> iolist().
@private
time_failed(State = #state{}) ->
time_failed(os:timestamp(), State).
time_failed(Now, #state{last_good_time=T0})
when is_tuple(T0) ->
integer_to_list(timer:now_diff(Now, T0) div 1000000);
time_failed(_, #state{last_good_time=undefined}) ->
"".
@private
log_info(#state{drain_id=DrainId, channel_id=ChannelId, uri=Uri}, Rest)
when is_list(Rest) ->
[DrainId, ChannelId, logplex_drain:uri_to_binary(Uri) | Rest].
-spec msg_stat('drain_dropped' | 'drain_buffered' | 'drain_delivered' |
'requests_sent',
non_neg_integer(), #state{}) -> any().
msg_stat(Key, N,
#state{drain_id=DrainId, channel_id=ChannelId}) ->
logplex_stats:incr(#drain_stat{drain_id=DrainId,
drain_type=tlssyslog,
channel_id=ChannelId,
key=Key}, N).
-spec duration(#state{}) -> iolist().
duration(#state{connect_time=undefined}) ->
"undefined";
duration(#state{connect_time=T0}) ->
US = timer:now_diff(os:timestamp(), T0),
io_lib:format("~f", [US / 1000000]).
%% -spec buffer_status(#state{}) -> 'empty' | 'has_messages_to_send'.
%% %% @private
buffer_status(State = # state{buf = Buf } ) - >
%% case logplex_msg_buffer:len(Buf) of
%% 0 -> empty;
%% _ -> has_messages_to_send
%% end.
-spec buffer(any(), #state{}) -> #state{}.
@private
buffer(Msg, State = #state{buf = Buf}) ->
{Result, NewBuf} = logplex_msg_buffer:push_ext(Msg, Buf),
msg_stat(drain_buffered, 1, State),
case Result of
displace ->
msg_stat(drain_dropped, 1, State),
logplex_realtime:incr('drain.dropped');
insert -> ok
end,
State#state{buf=NewBuf}.
@private
%% @doc Send buffered messages.
-spec send(#state{}) -> {next_state, 'sending' | 'ready_to_send', #state{}}.
send(State = #state{buf = Buf, sock = Sock,
drain_tok = DrainTok}) ->
case logplex_msg_buffer:empty(Buf) of
empty ->
{next_state, ready_to_send, State};
not_empty ->
PktSize = target_send_size(),
{Data, N, NewBuf} =
buffer_to_pkts(Buf, PktSize, DrainTok),
try
ssl : send({sslsocket , _ , Pid } , Data )
%% ssl_connection(Pid, Data)
%% gen_fsm:sync_send_all_state_event(Pid, {application_data, iolist_to_binary(Data)})
%% Ref = erlang:monitor(Pid)
%% erlang:send(Pid, {'$gen_sync_all_state_event', {self(), Ref}, {application_data, iolist_to_binary(Data)}})
%% {Ref, Reply} ->
%% erlang:demonitor(Ref, [flush]),
%% {ok, Reply};
{ ' DOWN ' , Ref , _ , _ , noconnection } - >
%% Node = get_node(Process),
%% exit({nodedown, Node});
{ ' DOWN ' , Ref , _ , _ , Reason } - >
%% exit(Reason)
{sslsocket, _, Pid} = Sock,
MRef = erlang:monitor(process, Pid),
erlang:send(Pid,
{'$gen_sync_all_state_event',
{self(), MRef},
{application_data, iolist_to_binary(Data)}}),
TRef = erlang:start_timer(?SEND_TIMEOUT, self(),
?SEND_TIMEOUT_MSG),
msg_stat(drain_delivered, N, State),
logplex_realtime:incr('drain.delivered', N),
{next_state, sending,
State#state{buf=NewBuf, send_tref=TRef, send_mref=MRef}}
msg_stat(drain_dropped , N , State ) ,
%% logplex_realtime:incr('drain.dropped', N),
%% {next_state, ready_to_send, State#state{buf=NewBuf}};
%% _ ->
msg_stat(drain_delivered , N , State ) ,
%% logplex_realtime:incr('drain.delivered', N),
%% {next_state, sending,
%% State#state{buf = NewBuf, send_tref=Ref}}
%% end
catch
error:badarg ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [send, closed, Sock,
duration(State)])),
%% Re-use old state as we know the messages we
%% just de-buffered are lost to tcp.
reconnect(tcp_bad(State))
end
end.
cancel_timeout(undefined, _Msg) -> undefined;
cancel_timeout(Ref, Msg)
when is_reference(Ref) ->
case erlang:cancel_timer(Ref) of
false ->
%% Flush expired timer message
receive
{timeout, Ref, Msg} -> undefined
after 0 -> undefined
end;
_Time ->
Timer did n't fire , so no message to worry about
undefined
end.
start_close_timer(State=#state{close_tref = CloseTRef}) ->
cancel_timeout(CloseTRef, ?CLOSE_TIMEOUT_MSG),
MaxIdle = logplex_app:config(tcp_syslog_idle_timeout, timer:minutes(5)),
Fuzz = random:uniform(logplex_app:config(tcp_syslog_idle_fuzz, 15000)),
NewTimer = erlang:start_timer(MaxIdle + Fuzz, self(), ?CLOSE_TIMEOUT_MSG),
State#state{close_tref = NewTimer}.
compare_point(#state{last_good_time=undefined, connect_time=ConnectTime}) ->
ConnectTime;
compare_point(#state{last_good_time=LastGood}) ->
LastGood.
connection_idle(State) ->
MaxIdle = logplex_app:config(tcp_syslog_idle_timeout, timer:minutes(5)),
SinceLastGoodMicros = timer:now_diff(os:timestamp(), compare_point(State)),
SinceLastGoodMicros > (MaxIdle * 1000).
close_if_idle(State = #state{sock = Sock}) ->
case connection_idle(State) of
true ->
?INFO("drain_id=~p channel_id=~s dest=~s at=idle_timeout",
log_info(State, [])),
ssl:close(Sock),
{closed, State#state{sock=undefined}};
_ ->
{not_closed, State}
end.
connection_too_old(#state{connect_time = ConnectTime}) ->
MaxTotal = logplex_app:config(tcp_syslog_max_ttl, timer:hours(5)),
SinceConnectMicros = timer:now_diff(os:timestamp(), ConnectTime),
SinceConnectMicros > (MaxTotal * 1000).
close(State = #state{sock = undefined}) ->
State;
close(State = #state{sock = Sock}) ->
ssl:close(Sock),
State#state{sock=undefined}.
close_if_old(State) ->
case connection_too_old(State) of
true ->
?INFO("drain_id=~p channel_id=~s dest=~s at=max_ttl",
log_info(State, [])),
{closed, close(State)};
_ ->
{not_closed, start_close_timer(State)}
end.
buffer_to_pkts(Buf, BytesRemaining, DrainTok) ->
logplex_msg_buffer:to_pkts(Buf, BytesRemaining,
pkt_fmt(DrainTok)).
pkt_fmt(DrainTok) ->
Frame = fun (Msg) ->
SyslogMsg = logplex_syslog_utils:to_msg(Msg, DrainTok),
logplex_syslog_utils:frame(SyslogMsg)
end,
fun ({loss_indication, N, When}) ->
case logplex_app:config(tcp_syslog_send_loss_msg) of
dont_send ->
skip;
_ ->
{frame,
Frame(logplex_syslog_utils:overflow_msg(N, When))}
end;
({msg, MData}) ->
{frame, Frame(MData)}
end.
target_send_size() ->
case get(target_send_size) of
Size when is_integer(Size),
Size > 0 ->
Size;
_ ->
logplex_app:config(tcp_drain_target_bytes,
?TARGET_SEND_SIZE)
end.
maybe_resize(Buf) ->
Default = default_buf_size(),
case logplex_msg_buffer:max_size(Buf) < Default of
true -> logplex_msg_buffer:resize(Default, Buf);
false -> Buf
end.
maybe_shrink(#state{ failures=Tries, buf=Buf }=State) ->
Max = logplex_msg_buffer:max_size(Buf),
case Max =:= ?SHRINK_BUF_SIZE of
true ->
Buf;
false ->
%% Shrink if we have never connected before or the last update time
%% is more than ?SHRINK_TRIES old, and if the buffer is
%% currently full and dropping data
IsFull = full =:= logplex_msg_buffer:full(Buf),
NumLost = logplex_msg_buffer:lost(Buf),
ShrinkAfter = logplex_app:config(tcp_syslog_shrink_after, ?DEFAULT_SHRINK_TRIES),
?INFO("drain_id=~p channel_id=~s dest=~s at=maybe_shrink "
"is_full=~p num_lost=~p tries=~p shrink_after=~p",
log_info(State, [IsFull, NumLost, Tries, ShrinkAfter])),
case IsFull andalso NumLost > 0 andalso Tries > ShrinkAfter of
true ->
logplex_msg_buffer:resize(?SHRINK_BUF_SIZE, Buf);
false ->
Buf
end
end.
default_buf_size() -> logplex_app:config(tcp_drain_buffer_size, 1024).
| null | https://raw.githubusercontent.com/heroku/logplex/fc520c44cf4687726d5d51464d3264ddc6abb0ba/src/logplex_tlssyslog_drain.erl | erlang | -------------------------------------------------------------------
@version {@vsn}, {@date} {@time}
See #section-4.1
-lear-ietf-syslog-uri-00
@end
-------------------------------------------------------------------
------------------------------------------------------------------
API Function Exports
------------------------------------------------------------------
------------------------------------------------------------------
gen_fsm Function Exports
------------------------------------------------------------------
Buffer for messages while disconnected
Last time we connected or successfully sent data
TCP failures since last_good_time
Send timer reference
SSL Send connection monitor reference
Close timer reference
------------------------------------------------------------------
API Function Definitions
------------------------------------------------------------------
------------------------------------------------------------------
gen_fsm Function Definitions
------------------------------------------------------------------
@doc Disconnected state. We wait here for the reconnect timer to
fire before initiating the reconnect sequence.
Already disconnected; nothing to do here
@doc We have a socket open and messages to send. Collect up an
appropriate amount and flush them to the socket.
Stale inet reply
@doc We sent some data to the socket and are waiting the result of
the send operation.
@doc We got an close timeout while in the sending state but haven't
gotten an inet_reply yet.
Shouldn't see this since entering this state means the timer wasn't reset
[state_name, Event]),
close_timeout used to be called idle_timeout; remove once we are on v72+
this can be removed once we are on v72+
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------
send buffered messages on success, schedule a delayed reconnect if
not.
Reduce log volume by skipping logging on
We don't expect data, but why not.
and has expired
and is still valid
Skip straight through to reconnection code.
We hibernate only when we need to reconnect with a timer. The
timer acts as a rate limiter! If you remove the timer, you must
re-think the hibernation.
We hibernate only when we need to reconnect with a timer. The timer
acts as a rate limiter! If you remove the timer, you must re-think
the hibernation.
Caller must ensure sock is closed before calling this.
After the socket is closed the send-timer is irrelevant
-spec buffer_status(#state{}) -> 'empty' | 'has_messages_to_send'.
%% @private
case logplex_msg_buffer:len(Buf) of
0 -> empty;
_ -> has_messages_to_send
end.
@doc Send buffered messages.
ssl_connection(Pid, Data)
gen_fsm:sync_send_all_state_event(Pid, {application_data, iolist_to_binary(Data)})
Ref = erlang:monitor(Pid)
erlang:send(Pid, {'$gen_sync_all_state_event', {self(), Ref}, {application_data, iolist_to_binary(Data)}})
{Ref, Reply} ->
erlang:demonitor(Ref, [flush]),
{ok, Reply};
Node = get_node(Process),
exit({nodedown, Node});
exit(Reason)
logplex_realtime:incr('drain.dropped', N),
{next_state, ready_to_send, State#state{buf=NewBuf}};
_ ->
logplex_realtime:incr('drain.delivered', N),
{next_state, sending,
State#state{buf = NewBuf, send_tref=Ref}}
end
Re-use old state as we know the messages we
just de-buffered are lost to tcp.
Flush expired timer message
Shrink if we have never connected before or the last update time
is more than ?SHRINK_TRIES old, and if the buffer is
currently full and dropping data | Geoff Ca nt
@author nt < >
@doc Syslog / TLS drain
-module(logplex_tlssyslog_drain).
-behaviour(gen_fsm).
-define(SERVER, ?MODULE).
-define(RECONNECT_MSG, reconnect).
-define(TARGET_SEND_SIZE, 4096).
-define(SEND_TIMEOUT_MSG, send_timeout).
-define(SEND_TIMEOUT, timer:seconds(4)).
-define(HIBERNATE_TIMEOUT, 5000).
-define(DEFAULT_SHRINK_TRIES, 10).
-define(SHRINK_BUF_SIZE, 10).
-define(CLOSE_TIMEOUT_MSG, close_timeout).
-define(SSL_SOCKET, {sslsocket,_,_}).
-include("logplex.hrl").
-include("logplex_error.hrl").
-include("logplex_logging.hrl").
-include_lib("ex_uri/include/ex_uri.hrl").
-type pstate() :: 'disconnected' | 'ready_to_send' | 'sending' | 'disconnecting'.
-export([resize_msg_buffer/2
,set_target_send_size/2
]).
-export([valid_uri/1
,uri/2
,start_link/4
]).
-export([disconnected/2,
ready_to_send/2,
sending/2,
disconnecting/2
]).
-export([init/1, handle_event/3, handle_sync_event/4,
handle_info/3, terminate/3, code_change/4]).
-record(state, {drain_id :: logplex_drain:id(),
drain_tok :: logplex_drain:token(),
channel_id :: logplex_channel:id(),
uri :: #ex_uri{},
host :: string() | inet:ip_address() | binary(),
port :: inet:port_number(),
insecure :: boolean(),
sock = undefined :: 'undefined' | ssl:sslsocket(),
buf = logplex_msg_buffer:new(default_buf_size()) :: logplex_msg_buffer:buf(),
last_good_time :: 'undefined' | erlang:timestamp(),
failures = 0 :: non_neg_integer(),
                %% Reconnect timer reference
reconnect_tref = undefined :: 'undefined' | reference(),
send_tref = undefined :: 'undefined' | reference(),
send_mref = undefined :: 'undefined' | reference(),
close_tref :: reference() | 'undefined',
                %% Time of last successful connection
connect_time :: 'undefined' | erlang:timestamp()
}).
start_link(ChannelID, DrainID, DrainTok, Uri) ->
{Host, Port, Insecure} = logplex_drain:unpack_uri(Uri),
gen_fsm:start_link(?MODULE,
[#state{drain_id=DrainID,
drain_tok=DrainTok,
channel_id=ChannelID,
uri=Uri,
host=Host,
port=Port,
insecure=Insecure}],
[]).
%% @doc Validate a syslog+tls drain URI. Accepts an explicit port in
%% 1..65535, or fills in the syslog-over-TLS default port 6514 when no
%% port is present. Returns {valid, tlssyslog, Uri} (with the port
%% normalized) or {error, invalid_tlssyslog_uri}.
valid_uri(#ex_uri{scheme="syslog+tls",
                  authority=#ex_uri_authority{host=Host, port=Port}} = Uri)
  when is_list(Host), is_integer(Port),
       0 < Port andalso Port =< 65535 ->
    {valid, tlssyslog, Uri};
valid_uri(#ex_uri{scheme="syslog+tls",
                  authority=A=#ex_uri_authority{host=Host,
                                                port=undefined}} = Uri)
  when is_list(Host) ->
    {valid, tlssyslog,
     Uri#ex_uri{authority=A#ex_uri_authority{port=6514}}};
valid_uri(_) ->
    {error, invalid_tlssyslog_uri}.
-spec uri(Host, Port) ->
#ex_uri{}
when Host :: iolist(),
Port :: 'undefined' | non_neg_integer().
%% @doc Build a URI record for Host/Port; an undefined port defaults
%% to 514. Binary hosts are converted to strings.
%% NOTE(review): this builds scheme "syslog" with default port 514,
%% while valid_uri/1 handles "syslog+tls" defaulting to 6514 --
%% presumably inherited from the plain-TCP drain; confirm intent.
uri(Host, undefined) ->
    uri(Host, 514);
uri(Host, Port) when is_binary(Host), is_integer(Port) ->
    uri(binary_to_list(Host), Port);
uri(Host, Port) when is_list(Host), is_integer(Port) ->
    #ex_uri{scheme="syslog",
            authority=#ex_uri_authority{host=Host, port=Port}}.
%% @doc Synchronously resize the drain process' message buffer to
%% NewSize entries (must be a positive integer).
resize_msg_buffer(Pid, NewSize)
  when is_integer(NewSize), NewSize > 0 ->
    gen_fsm:sync_send_all_state_event(Pid, {resize_msg_buffer, NewSize}).
%% @doc Synchronously override the drain process' target send size
%% (bytes per socket write); see target_send_size/0 for how it is read.
set_target_send_size(Pid, NewSize)
  when is_integer(NewSize), NewSize > 0 ->
    gen_fsm:sync_send_all_state_event(Pid, {set_target_send_size, NewSize}).
@private
init([State0 = #state{sock = undefined, host=H, port=P,
drain_id=DrainId, channel_id=ChannelId}])
when H =/= undefined, is_integer(P) ->
try
random:seed(os:timestamp()),
logplex_drain:register(DrainId, ChannelId, tlssyslog,
{H,P}),
DrainSize = logplex_app:config(tcp_drain_buffer_size),
State = State0#state{buf = logplex_msg_buffer:new(DrainSize)},
?INFO("drain_id=~p channel_id=~s dest=~s insecure=~p at=spawn",
log_info(State, [State0#state.insecure])),
{ok, disconnected,
State, hibernate}
catch
error:badarg -> ignore
end.
disconnected({timeout, TRef, ?RECONNECT_MSG},
State = #state{reconnect_tref = TRef, sock = undefined}) ->
do_reconnect(State#state{reconnect_tref=undefined});
disconnected({timeout, Received, ?RECONNECT_MSG},
State = #state{reconnect_tref = Expected}) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_reconnect "
"expected=~p received=~p state=disconnected",
log_info(State, [Expected, Received])),
reconnect(State);
disconnected({post, Msg}, State) ->
reconnect(buffer(Msg, State));
disconnected({timeout, _Ref, ?CLOSE_TIMEOUT_MSG}, State) ->
{next_state, disconnected, State, hibernate};
disconnected(timeout, State) ->
    %% Sleep when inactive, trigger fullsweep GC & Compact
{next_state, disconnected, State, hibernate};
disconnected(Msg, State) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~1000p state=disconnected",
log_info(State, [Msg])),
{next_state, disconnected, State, ?HIBERNATE_TIMEOUT}.
ready_to_send({timeout, _Ref, ?SEND_TIMEOUT_MSG},
State = #state{sock = ?SSL_SOCKET}) ->
    %% Stale message.
send(State);
ready_to_send({timeout, TRef, ?CLOSE_TIMEOUT_MSG},
State=#state{close_tref=TRef}) ->
case close_if_idle(State) of
{closed, ClosedState} ->
{next_state, disconnected, ClosedState, hibernate};
{not_closed, State} ->
case close_if_old(State) of
{closed, ClosedState} ->
{next_state, disconnected, ClosedState, hibernate};
{not_closed, ContinueState} ->
{next_state, ready_to_send, ContinueState}
end
end;
ready_to_send({post, Msg}, State = #state{sock = ?SSL_SOCKET}) ->
send(buffer(Msg, State));
ready_to_send({inet_reply, Sock, ok}, S = #state{sock = Sock})
when is_port(Sock) ->
send(S);
ready_to_send(timeout, S = #state{}) ->
    %% Sleep when inactive, trigger fullsweep GC & Compact
{next_state, ready_to_send, S, hibernate};
ready_to_send(Msg, State = #state{sock = ?SSL_SOCKET}) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~p state=ready_to_send",
log_info(State, [Msg])),
{next_state, ready_to_send, State, ?HIBERNATE_TIMEOUT}.
sending({timeout, Ref, ?SEND_TIMEOUT_MSG},
S = #state{send_tref=Ref}) ->
?INFO("drain_id=~p channel_id=~s dest=~s err=send_timeout "
"state=sending",
log_info(S, [])),
reconnect(tcp_bad(S#state{send_mref=undefined, send_tref=undefined}));
sending({post, Msg}, State) ->
{next_state, sending, buffer(Msg, State), ?HIBERNATE_TIMEOUT};
sending({MRef, ok}, S = #state{send_mref = MRef, send_tref = TRef}) ->
erlang:demonitor(MRef, [flush]),
send(tcp_good(S#state{send_mref=undefined,
send_tref=cancel_timeout(TRef, ?SEND_TIMEOUT_MSG)}));
sending({MRef, {error, Reason}}, S = #state{send_mref=MRef, sock=Sock}) ->
erlang:demonitor(MRef, [flush]),
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=ssl data=~p sock=~p duration=~s state=sending",
log_info(S, [sending, Reason, Sock, duration(S)])),
reconnect(tcp_bad(S#state{send_mref = undefined}));
sending({'DOWN', MRef, _, _, Reason}, S = #state{send_mref=MRef, sock=Sock}) ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=ssl data=~p sock=~p duration=~s state=sending",
log_info(S, [sending, Reason, Sock, duration(S)])),
reconnect(tcp_bad(S#state{send_mref=undefined}));
sending({timeout, _TRef, ?CLOSE_TIMEOUT_MSG}, State) ->
case connection_too_old(State) of
true ->
{next_state, disconnecting, State};
_ ->
{next_state, sending, start_close_timer(State)}
end;
sending(timeout, S = #state{}) ->
    %% Sleep when inactive, trigger fullsweep GC & Compact
{next_state, sending, S, hibernate};
sending(Msg, State) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~p state=sending",
log_info(State, [Msg])),
{next_state, sending, State, ?HIBERNATE_TIMEOUT}.
disconnecting({timeout, _TRef, ?SEND_TIMEOUT_MSG}, S) ->
?INFO("drain_id=~p channel_id=~s dest=~s err=send_timeout "
"state=disconnecting", log_info(S, [])),
{next_state, disconnected,
tcp_bad(close(S#state{send_tref=undefined})), hibernate};
disconnecting({inet_reply, Sock, Status}, S = #state{sock = Sock,
send_tref = SendTRef}) ->
case Status of
{error, Reason} ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s state=disconnecting",
log_info(S, [disconnecting, Reason, Sock, duration(S)]));
_ -> ok
end,
cancel_timeout(SendTRef, ?SEND_TIMEOUT_MSG),
NewState = S#state{sock = undefined, send_tref = undefined},
{next_state, disconnected, close(NewState), hibernate};
disconnecting({post, Msg}, State) ->
{next_state, sending, buffer(Msg, State), ?HIBERNATE_TIMEOUT};
disconnecting({timeout, TRef, ?CLOSE_TIMEOUT_MSG}, State=#state{close_tref=TRef}) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_close_timeout "
"state=disconnecting", log_info(State, [])),
{next_state, disconnecting, State};
disconnecting(timeout, S = #state{}) ->
    %% Sleep when inactive, trigger fullsweep GC & Compact
{next_state, disconnecting, S, hibernate};
disconnecting(Msg, State) ->
?WARN("drain_id=~p channel_id=~s dest=~s err=unexpected_info "
"data=~p state=disconnecting", log_info(State, [Msg])),
{next_state, disconnecting, State, ?HIBERNATE_TIMEOUT}.
@private
state_name(Event , _ From , State ) - >
? ~p ] Unexpected event ~p " ,
{ next_state , state_name , State } .
@private
handle_event(_Event, StateName, State) ->
{next_state, StateName, State, ?HIBERNATE_TIMEOUT}.
@private
handle_sync_event({set_target_send_size, Size}, _From, StateName,
State = #state{})
when is_integer(Size), Size > 0 ->
put(target_send_size, Size),
{reply, {ok, Size}, StateName, State, ?HIBERNATE_TIMEOUT};
handle_sync_event({resize_msg_buffer, NewSize}, _From, StateName,
State = #state{buf = Buf})
when is_integer(NewSize), NewSize > 0 ->
NewBuf = logplex_msg_buffer:resize(NewSize, Buf),
{reply, ok, StateName, State#state{buf = NewBuf}, ?HIBERNATE_TIMEOUT};
handle_sync_event(Event, _From, StateName, State) ->
?WARN("[state ~p] Unexpected event ~p",
[StateName, Event]),
{next_state, StateName, State, ?HIBERNATE_TIMEOUT}.
@private
handle_info({tcp, Sock, Data}, StateName,
State = #state{sock = Sock}) ->
?WARN("drain_id=~p channel_id=~s dest=~s state=~p "
"err=unexpected_peer_data data=~p",
log_info(State, [StateName, Data])),
{next_state, StateName, State, ?HIBERNATE_TIMEOUT};
handle_info({tcp_error, Sock, Reason}, StateName,
State = #state{sock = Sock}) ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [StateName, Reason, Sock, duration(State)])),
reconnect(tcp_bad(State));
handle_info({inet_reply, Sock, {error, Reason}}, StateName,
State = #state{sock = Sock}) ->
?ERR("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [StateName, Reason, Sock, duration(State)])),
reconnect(tcp_bad(State));
handle_info({ssl_closed, Sock}, StateName,
State = #state{sock = Sock}) ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [StateName, closed, Sock, duration(State)])),
reconnect(tcp_bad(State));
handle_info(shutdown, StateName, State0 = #state{sock = ?SSL_SOCKET}) ->
case send(State0) of
{next_state, ready_to_send, State1} ->
catch ssl:close(State1#state.sock),
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State1, [StateName, shutdown, State1#state.sock, duration(State1)])),
{stop, {shutdown,call}, State1#state{sock = undefined}};
{next_state, sending, State1} ->
handle_info(shutdown, StateName, State1)
end;
handle_info(shutdown, StateName, State) ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p duration=~s",
log_info(State, [StateName, shutdown, duration(State)])),
{stop, {shutdown,call}, State};
handle_info({timeout, TRef, idle_timeout}, StateName, State) ->
apply(?MODULE, StateName, [{timeout, TRef, ?CLOSE_TIMEOUT_MSG}, State]);
handle_info(timeout, StateName, State) ->
    %% Sleep when inactive, trigger fullsweep GC & Compact
{next_state, StateName, State, hibernate};
handle_info(Info, StateName, State) ->
?MODULE:StateName(Info, State).
@private
terminate(Reason, StateName, State) ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"at=terminate reason=~p",
log_info(State, [StateName, Reason])),
ok.
@private
code_change(_OldVsn, StateName, State, _Extra) ->
{ok, StateName, State, ?HIBERNATE_TIMEOUT}.
@private
@doc Time has finally come to reconnect . Attempt the reconnection ,
-spec do_reconnect(#state{}) ->
{next_state, pstate(), #state{}}.
do_reconnect(State = #state{sock = undefined,
reconnect_tref = undefined,
buf=Buf,
failures = Failures}) ->
case connect(State) of
{ok, Sock} ->
?INFO("drain_id=~p channel_id=~s dest=~s "
"state=disconnected at=connect try=~p sock=~p",
log_info(State, [Failures + 1, Sock])),
NewState = State#state{sock=Sock,
reconnect_tref = undefined,
send_tref = undefined,
buf = maybe_resize(Buf),
connect_time=os:timestamp()},
send(start_close_timer(NewState));
{error, Reason} ->
NewState = tcp_bad(State),
case Failures of
0 ->
                    %% first failure.
ok;
_ ->
handle_error(Reason, State),
?ERR("drain_id=~p channel_id=~s dest=~s at=connect "
"err=gen_tcp data=~p try=~p last_success=~s "
"state=disconnected",
log_info(State, [Reason, NewState#state.failures,
time_failed(NewState)]))
end,
reconnect(NewState)
end.
%% @private
%% @doc Surface TLS handshake failures ({tls_alert, _}) to the
%% channel's users as an L14 error message; any other connect error is
%% ignored here (it is logged by the caller).
handle_error({tls_alert, Alert}, #state{ channel_id=ChannelID, uri=URI, drain_tok=DrainToken }) ->
    logplex_message:process_error(ChannelID, DrainToken, ?L14, "error=\"~s\" uri=\"~s\"", [Alert, logplex_drain:uri_to_binary(URI)]);
handle_error(_, _) ->
    ok.
@private
%% @private
%% @doc Open the TLS connection to the drain destination, combining
%% per-drain TLS options with the plain socket options and using the
%% configured send timeout (seconds) as the connect timeout. Rejects
%% out-of-range port numbers.
connect(#state{sock = undefined, channel_id=ChannelID, drain_id=DrainID, uri=Dest, host=Host, port=Port})
  when is_integer(Port), 0 < Port, Port =< 65535 ->
    SendTimeoutS = logplex_app:config(tcp_syslog_send_timeout_secs),
    TLSOpts = logplex_tls:connect_opts(ChannelID, DrainID, Dest),
    SocketOpts = socket_opts(),
    ssl:connect(Host, Port, TLSOpts ++ SocketOpts,
                timer:seconds(SendTimeoutS));
connect(#state{}) ->
    {error, bogus_port_number}.
%% @private
%% @doc Plain TCP options layered under the TLS options on connect:
%% raw binary packets, active delivery of inbound data, TCP keepalive,
%% address reuse, and closing the port when the peer closes.
socket_opts() ->
    Flags = [{active, true},
             {exit_on_close, true},
             {keepalive, true},
             {packet, raw},
             {reuseaddr, true}],
    [binary | Flags].
-spec reconnect(#state{}) -> {next_state, pstate(), #state{}}.
@private
reconnect(State = #state{reconnect_tref = Ref}) when is_reference(Ref) ->
    %% Reconnect timer was set
case erlang:read_timer(Ref) of
false ->
reconnect(State#state{reconnect_tref=undefined});
_ ->
{next_state, disconnected, State, ?HIBERNATE_TIMEOUT}
end;
reconnect(State = #state{failures = 0, last_good_time=undefined}) ->
    %% First reconnect ever
do_reconnect(State);
reconnect(State = #state{failures = 0, last_good_time=T})
when is_tuple(T), tuple_size(T) =:= 3 ->
Min = logplex_app:config(tcp_syslog_reconnect_min, 30),
SecsSinceConnect = timer:now_diff(os:timestamp(), T) div 1000000,
case SecsSinceConnect of
TooFew when TooFew < Min ->
{next_state, disconnected,
reconnect_in(timer:seconds(Min), State), hibernate};
_EnoughTime ->
do_reconnect(State)
end;
reconnect(State = #state{failures = F}) ->
Max = logplex_app:config(tcp_syslog_backoff_max, 300),
BackOff = case length(integer_to_list(Max, 2)) of
MaxExp when F > MaxExp -> Max;
_ -> 1 bsl F
end,
NewBuf = maybe_shrink(State),
{next_state, disconnected,
reconnect_in(timer:seconds(BackOff), State#state{buf=NewBuf}),
hibernate}.
reconnect_in(MS, State = #state{}) ->
Ref = erlang:start_timer(MS, self(), ?RECONNECT_MSG),
State#state{reconnect_tref = Ref}.
@private
tcp_good(State = #state{}) ->
State#state{last_good_time = os:timestamp(),
failures = 0}.
@private
%% @private
%% @doc Record a connection failure: cancel any pending send timer,
%% close the socket if still open, then bump the failure counter. The
%% clauses recurse until both timer and socket are cleared.
tcp_bad(State = #state{send_tref=TRef}) when is_reference(TRef) ->
    cancel_timeout(TRef, ?SEND_TIMEOUT_MSG),
    tcp_bad(State#state{send_tref = undefined});
tcp_bad(State = #state{sock = Sock}) when Sock =/= undefined ->
    catch ssl:close(Sock),
    tcp_bad(State#state{sock = undefined});
tcp_bad(State = #state{sock = undefined,
                       failures = F}) ->
    State#state{failures = F + 1}.
-spec time_failed(#state{}) -> iolist().
@private
time_failed(State = #state{}) ->
time_failed(os:timestamp(), State).
time_failed(Now, #state{last_good_time=T0})
when is_tuple(T0) ->
integer_to_list(timer:now_diff(Now, T0) div 1000000);
time_failed(_, #state{last_good_time=undefined}) ->
"".
@private
log_info(#state{drain_id=DrainId, channel_id=ChannelId, uri=Uri}, Rest)
when is_list(Rest) ->
[DrainId, ChannelId, logplex_drain:uri_to_binary(Uri) | Rest].
-spec msg_stat('drain_dropped' | 'drain_buffered' | 'drain_delivered' |
'requests_sent',
non_neg_integer(), #state{}) -> any().
msg_stat(Key, N,
#state{drain_id=DrainId, channel_id=ChannelId}) ->
logplex_stats:incr(#drain_stat{drain_id=DrainId,
drain_type=tlssyslog,
channel_id=ChannelId,
key=Key}, N).
-spec duration(#state{}) -> iolist().
duration(#state{connect_time=undefined}) ->
"undefined";
duration(#state{connect_time=T0}) ->
US = timer:now_diff(os:timestamp(), T0),
io_lib:format("~f", [US / 1000000]).
buffer_status(State = # state{buf = Buf } ) - >
-spec buffer(any(), #state{}) -> #state{}.
@private
%% @doc Push Msg onto the drain buffer and count it as buffered. If
%% the push displaced an older entry (buffer full), also count the
%% displaced message as dropped.
buffer(Msg, State = #state{buf = Buf}) ->
    {Result, NewBuf} = logplex_msg_buffer:push_ext(Msg, Buf),
    msg_stat(drain_buffered, 1, State),
    case Result of
        displace ->
            msg_stat(drain_dropped, 1, State),
            logplex_realtime:incr('drain.dropped');
        insert -> ok
    end,
    State#state{buf=NewBuf}.
@private
-spec send(#state{}) -> {next_state, 'sending' | 'ready_to_send', #state{}}.
send(State = #state{buf = Buf, sock = Sock,
drain_tok = DrainTok}) ->
case logplex_msg_buffer:empty(Buf) of
empty ->
{next_state, ready_to_send, State};
not_empty ->
PktSize = target_send_size(),
{Data, N, NewBuf} =
buffer_to_pkts(Buf, PktSize, DrainTok),
try
ssl : send({sslsocket , _ , Pid } , Data )
{ ' DOWN ' , Ref , _ , _ , noconnection } - >
{ ' DOWN ' , Ref , _ , _ , Reason } - >
{sslsocket, _, Pid} = Sock,
MRef = erlang:monitor(process, Pid),
erlang:send(Pid,
{'$gen_sync_all_state_event',
{self(), MRef},
{application_data, iolist_to_binary(Data)}}),
TRef = erlang:start_timer(?SEND_TIMEOUT, self(),
?SEND_TIMEOUT_MSG),
msg_stat(drain_delivered, N, State),
logplex_realtime:incr('drain.delivered', N),
{next_state, sending,
State#state{buf=NewBuf, send_tref=TRef, send_mref=MRef}}
msg_stat(drain_dropped , N , State ) ,
msg_stat(drain_delivered , N , State ) ,
catch
error:badarg ->
?INFO("drain_id=~p channel_id=~s dest=~s state=~p "
"err=gen_tcp data=~p sock=~p duration=~s",
log_info(State, [send, closed, Sock,
duration(State)])),
reconnect(tcp_bad(State))
end
end.
%% @private
%% @doc Cancel a pending timer and flush any already-delivered
%% {timeout, Ref, Msg} message from the mailbox, so a stale timeout is
%% never processed later. Always returns 'undefined' so callers can
%% store the result directly in their timer field.
%% (Also restores a comment line whose `%%' marker had been stripped,
%% which made this function unparseable.)
cancel_timeout(undefined, _Msg) -> undefined;
cancel_timeout(Ref, Msg)
  when is_reference(Ref) ->
    case erlang:cancel_timer(Ref) of
        false ->
            %% Timer already fired; flush the message if delivered.
            receive
                {timeout, Ref, Msg} -> undefined
            after 0 -> undefined
            end;
        _Time ->
            %% Timer didn't fire, so no message to worry about
            undefined
    end.
%% @private
%% @doc (Re)arm the periodic close timer: cancel any previous timer,
%% then start a new one at the configured idle timeout plus a random
%% fuzz so many drains don't close in lock-step.
start_close_timer(State=#state{close_tref = CloseTRef}) ->
    cancel_timeout(CloseTRef, ?CLOSE_TIMEOUT_MSG),
    MaxIdle = logplex_app:config(tcp_syslog_idle_timeout, timer:minutes(5)),
    Fuzz = random:uniform(logplex_app:config(tcp_syslog_idle_fuzz, 15000)),
    NewTimer = erlang:start_timer(MaxIdle + Fuzz, self(), ?CLOSE_TIMEOUT_MSG),
    State#state{close_tref = NewTimer}.
%% @private
%% @doc Reference timestamp for idle detection: the last successful
%% send, or the connect time if nothing was ever sent successfully.
compare_point(#state{last_good_time=undefined, connect_time=ConnectTime}) ->
    ConnectTime;
compare_point(#state{last_good_time=LastGood}) ->
    LastGood.
%% @private
%% @doc True when no successful activity has occurred within the
%% configured idle timeout (comparison done in microseconds).
connection_idle(State) ->
    MaxIdle = logplex_app:config(tcp_syslog_idle_timeout, timer:minutes(5)),
    SinceLastGoodMicros = timer:now_diff(os:timestamp(), compare_point(State)),
    SinceLastGoodMicros > (MaxIdle * 1000).
%% @private
%% @doc Close the socket if the connection has been idle too long.
%% Returns {closed, State'} with the socket cleared, or
%% {not_closed, State} unchanged.
close_if_idle(State = #state{sock = Sock}) ->
    case connection_idle(State) of
        true ->
            ?INFO("drain_id=~p channel_id=~s dest=~s at=idle_timeout",
                  log_info(State, [])),
            ssl:close(Sock),
            {closed, State#state{sock=undefined}};
        _ ->
            {not_closed, State}
    end.
%% @private
%% @doc True when the connection has exceeded the configured maximum
%% lifetime (tcp_syslog_max_ttl, defaulting to 5 hours).
connection_too_old(#state{connect_time = ConnectTime}) ->
    MaxTotal = logplex_app:config(tcp_syslog_max_ttl, timer:hours(5)),
    SinceConnectMicros = timer:now_diff(os:timestamp(), ConnectTime),
    SinceConnectMicros > (MaxTotal * 1000).
%% @private
%% @doc Close the drain's SSL socket, if any, and clear it from state.
close(#state{sock = undefined} = State) ->
    State;
close(#state{sock = Sock} = State) ->
    _ = ssl:close(Sock),
    State#state{sock = undefined}.
%% @private
%% @doc Close connections that have outlived their maximum TTL;
%% otherwise keep the connection and re-arm the close timer. Returns
%% {closed, State'} or {not_closed, State'}.
close_if_old(State) ->
    case connection_too_old(State) of
        true ->
            ?INFO("drain_id=~p channel_id=~s dest=~s at=max_ttl",
                  log_info(State, [])),
            {closed, close(State)};
        _ ->
            {not_closed, start_close_timer(State)}
    end.
buffer_to_pkts(Buf, BytesRemaining, DrainTok) ->
logplex_msg_buffer:to_pkts(Buf, BytesRemaining,
pkt_fmt(DrainTok)).
pkt_fmt(DrainTok) ->
Frame = fun (Msg) ->
SyslogMsg = logplex_syslog_utils:to_msg(Msg, DrainTok),
logplex_syslog_utils:frame(SyslogMsg)
end,
fun ({loss_indication, N, When}) ->
case logplex_app:config(tcp_syslog_send_loss_msg) of
dont_send ->
skip;
_ ->
{frame,
Frame(logplex_syslog_utils:overflow_msg(N, When))}
end;
({msg, MData}) ->
{frame, Frame(MData)}
end.
target_send_size() ->
case get(target_send_size) of
Size when is_integer(Size),
Size > 0 ->
Size;
_ ->
logplex_app:config(tcp_drain_target_bytes,
?TARGET_SEND_SIZE)
end.
maybe_resize(Buf) ->
Default = default_buf_size(),
case logplex_msg_buffer:max_size(Buf) < Default of
true -> logplex_msg_buffer:resize(Default, Buf);
false -> Buf
end.
maybe_shrink(#state{ failures=Tries, buf=Buf }=State) ->
Max = logplex_msg_buffer:max_size(Buf),
case Max =:= ?SHRINK_BUF_SIZE of
true ->
Buf;
false ->
IsFull = full =:= logplex_msg_buffer:full(Buf),
NumLost = logplex_msg_buffer:lost(Buf),
ShrinkAfter = logplex_app:config(tcp_syslog_shrink_after, ?DEFAULT_SHRINK_TRIES),
?INFO("drain_id=~p channel_id=~s dest=~s at=maybe_shrink "
"is_full=~p num_lost=~p tries=~p shrink_after=~p",
log_info(State, [IsFull, NumLost, Tries, ShrinkAfter])),
case IsFull andalso NumLost > 0 andalso Tries > ShrinkAfter of
true ->
logplex_msg_buffer:resize(?SHRINK_BUF_SIZE, Buf);
false ->
Buf
end
end.
default_buf_size() -> logplex_app:config(tcp_drain_buffer_size, 1024).
|
4cdd4d46751822500afecfb766831ebc999c1c9094d82ed8519929e34efcd5e8 | slipstream/SlipStreamServer | metering.clj | (ns sixsq.slipstream.metering.metering
"Core functions that copy a set of resource documents into 'metering'
documents."
(:require
[clj-time.core :as time]
[clojure.core.async :as async]
[clojure.string :as str]
[clojure.tools.logging :as log]
[qbits.spandex :as spandex]
[sixsq.slipstream.metering.utils :as utils]))
(def ^:const metering-resource-uri "")
;;
;; per Year = ANN, per Month = MON, per Week = WEE, per Day = DAY, per Hour = HUR, per Minute = MIN, per Second = SEC.
(def ^:const price-divisor {"SEC" (/ 1. 60), "MIN" 1, "HUR" 60, "GiBh" 60, "MiBh" 60, "DAY" (* 60 24), "WEE" (* 60 24 7)})
(def ^:const quantity-divisor {"GiBh" (* 1024 1024), "MiBh" 1024})
(def ^:const doc-type "_doc")
(defn es-hosts
  "Builds the spandex hosts vector for the Elasticsearch client:
   a single-element vector of the form [\"http://host:port\"].
   Fix: the previous format string (\":%s\") interpolated only the host
   and silently dropped the port (the URL scheme had been stripped from
   the source)."
  [host port]
  [(format "http://%s:%s" host port)])
(defn index-action
  "Bulk-API action header that indexes a document into the given
   index under the given mapping type."
  [index type]
  (let [target {:_index index, :_type type}]
    {:index target}))
(defn search-url
  "Relative URL of the _search endpoint for the given index and type."
  [index type]
  (str index "/" type "/_search"))
(defn search-urls
  "Pairwise maps search-url over the index and type sequences, yielding
   one _search URL per (index, type) pair."
  [indices types]
  (map #(search-url %1 %2) indices types))
(defn process-options
  "Normalize the service options map into the configuration the metering
   job uses: spandex :hosts, the :resource-search-urls of the indices to
   snapshot, the bulk :metering-action for the metering index, and the
   snapshot :metering-period-minutes. Missing keys fall back to local
   defaults (127.0.0.1:9200, the standard slipstream index names, 1 min)."
  [{:keys [es-host es-port
           vm-index
           bucky-index
           metering-index
           metering-period-minutes]
    :or {es-host "127.0.0.1"
         es-port 9200
         vm-index "slipstream-virtual-machine"
         bucky-index "slipstream-storage-bucket"
         metering-index "slipstream-metering"
         metering-period-minutes 1}}]
  {:hosts (es-hosts es-host es-port)
   :resource-search-urls (search-urls [vm-index bucky-index] [doc-type doc-type])
   :metering-action (index-action metering-index doc-type)
   :metering-period-minutes metering-period-minutes})
(defn assoc-snapshot-time
  "Tags document m with the metering snapshot timestamp."
  [timestamp m]
  (merge m {:snapshot-time timestamp}))
(defn quantity
  "Billable quantity for a resource document. Documents without
   :usageInKiB (VMs) count as 1. Storage documents convert :usageInKiB
   into the offer's billing unit via quantity-divisor, defaulting to
   GiB (1024*1024 KiB) for unknown units."
  [{:keys [usageInKiB] :as resource}]
  (let [billingUnit (when usageInKiB (-> resource
                                         :serviceOffer
                                         :price:billingUnit))]
    (if usageInKiB (/ usageInKiB (get quantity-divisor billingUnit (* 1024 1024))) 1)))
(defn add-unitCode
  "Ensure the service offer carries :price:unitCode. An existing value
   is kept; otherwise it is backfilled from :price:billingUnit or, as a
   last resort, :price:billingUnitCode."
  [serviceOffer]
  (if (:price:unitCode serviceOffer)
    serviceOffer
    (let [code (or (:price:billingUnit serviceOffer)
                   (:price:billingUnitCode serviceOffer))]
      (assoc serviceOffer :price:unitCode code))))
;; TODO: quantization for hour period, i.e. apply the full hour price to the first minute then zero for the rest of the hour
(defn assoc-price
  "Attach a per-minute :price to document m when its :serviceOffer
   carries a usable unit cost:
   price = unitCost / price-divisor(unitCode) * quantity.
   Documents without a map-valued offer or without :price:unitCost are
   returned unchanged (merge with nil is a no-op)."
  [{:keys [serviceOffer] :as m}]
  (let [so (when (and serviceOffer (map? serviceOffer)) (add-unitCode serviceOffer))
        price-map (when (:price:unitCost so)
                    (some->> so
                             :price:unitCode
                             (get price-divisor)
                             (/ (:price:unitCost serviceOffer))
                             (* (quantity m))
                             (assoc {} :price)))]
    (merge m price-map)))
(defn assoc-type
  "Copy the offer's :resource:type up to the top level of the document,
   when present; otherwise return the document unchanged."
  [m]
  (let [resource-type (get-in m [:serviceOffer :resource:type])]
    (cond-> m
      resource-type (assoc :resource:type resource-type))))
(defn update-id
  "Rewrite :id to a metering id of the form \"metering/<uuid>-<ts>\",
   where <uuid> is the uuid part of the existing CIMI id
   (\"<resource>/<uuid>\") or a freshly generated uuid when the document
   has no id, and <ts> is the timestamp with ':' and '.' replaced by '-'.
   Fix: a generated uuid contains no \"/\", so (second (str/split ...))
   previously returned nil, producing ids like \"metering/-<ts>\"."
  [timestamp {:keys [id] :as m}]
  (let [base (or id (utils/random-uuid))
        uuid (or (second (str/split base #"/")) base)
        ts (str/replace timestamp #"[:\.]" "-")]
    (assoc m :id (str "metering/" uuid "-" ts))))
(defn replace-resource-uri
  "Stamp the document with the metering :resourceURI so it is typed as
   a metering resource."
  [m]
  (assoc m :resourceURI metering-resource-uri))
(defn complete-index-action
  "Add the :_id key to the index action so that the Elasticsearch :_id
   key is consistent with the CIMI resource id. The :_type key should
   already be present in the index-action parameter. Returns the
   [action document] pair expected by the bulk channel."
  [index-action {:keys [id] :as v}]
  (let [[action args] (first index-action)
        uuid (second (str/split id #"/"))]
    [{action (assoc args :_id uuid)} v]))
(defn create-actions
  "work on a subset of documents returned by the global query search"
  ;; Transforms one scroll page into bulk [action doc] pairs: extract
  ;; the hit sources, stamp snapshot time / price / resource type,
  ;; rewrite the id, set the metering resourceURI, and pair each doc
  ;; with its (id-completed) index action.
  [timestamp index-action page]
  (->> page
       :body
       :hits
       :hits
       (map :_source)
       (map (partial assoc-snapshot-time timestamp))
       (map assoc-price)
       (map assoc-type)
       (map (partial update-id timestamp))
       (map replace-resource-uri)
       (map (partial complete-index-action index-action))))
(defn bulk-insert
  "Start the bulk insert for the provided actions/documents. A channel which
   will hold the results is returned."
  [client actions]
  (let [{:keys [input-ch output-ch]} (spandex/bulk-chan client {:flush-threshold 100
                                                                :flush-interval 1000
                                                                :max-concurrent-requests 3})]
    (when (pos? (count actions))
      (doseq [action actions]
        (async/put! input-ch action)))
    ;; closing input-ch lets spandex flush and eventually close output-ch
    (async/close! input-ch)
    output-ch))
(defn response-stats
  "Reduce one bulk-insert response to [n-submitted status-frequencies].
   A Throwable response is logged and counted as [0 {}]."
  [resp]
  (if (instance? Throwable resp)
    (do
      (log/error resp)
      [0 {}])
    (let [[job responses] resp
          n (count job)
          freq (frequencies (->> responses
                                 :body
                                 :items
                                 (map :index)
                                 (map :status)))]
      [n freq])))
(defn merge-stats
  "Combine [count freq-map] stat pairs: counts are summed and the
   status-frequency maps merged with +. With no input returns [0 {}]."
  [& stats]
  (let [counts (map first stats)
        freqs (map second stats)]
    [(apply + 0 counts)
     (or (apply merge-with + freqs) {})]))
(defn handle-results
  "Drain the bulk-insert result channel, folding each response into
   [total freq] stats via response-stats/merge-stats; returns the merged
   stats once the channel closes."
  [ch]
  (let [results (loop [stats [0 {}]]
                  (if-let [resp (async/<!! ch)]
                    (let [resp-stats (response-stats resp)]
                      (recur (merge-stats stats resp-stats)))
                    stats))]
    (log/debug "bulk insert stats:" results)
    results))
(defn- meter-resource
  "Take one metering snapshot of every document behind
   resource-search-url: scroll through the index, transform each page
   into metering documents, and bulk-insert them into the metering
   index. Returns a go-channel yielding [total treated created]."
  [hosts resource-search-url metering-action]
  (async/go
    (with-open [client (spandex/client {:hosts hosts})]
      (let [timestamp (str (time/now))
            ch (spandex/scroll-chan client
                                    {:url resource-search-url
                                     :body {:query {:match_all {}}}})]
        (log/info "start metering snapshot" timestamp "from" resource-search-url)
        (let [[total freq] (loop [stats [0 {}]]
                             (if-let [page (async/<! ch)]
                               (let [resp-stats (if (instance? Throwable page)
                                                  (do
                                                    (log/error "scroll result exception: " page)
                                                    [0 {}])
                                                  (->> page
                                                       (create-actions timestamp metering-action)
                                                       (bulk-insert client)
                                                       handle-results))]
                                 (recur (merge-stats stats resp-stats)))
                               stats))]
          (let [treated (reduce + (vals freq))
                created (get freq 201 0)
                stats [total treated created]
                msg (str "finish metering snapshot " timestamp
                         " from " resource-search-url
                         " - " stats)]
            ;; all three counts agree iff every doc was indexed cleanly
            (if (apply not= stats)
              (log/error msg)
              (log/info msg))
            stats))))))
(defn meter-resources
  "Kick off one metering snapshot per resource search URL; returns the
   realized sequence of result channels."
  [hosts resource-search-urls metering-action]
  (doall
    (for [url resource-search-urls]
      (meter-resource hosts url metering-action))))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/metering/src/sixsq/slipstream/metering/metering.clj | clojure | (ns sixsq.slipstream.metering.metering
"Core functions that copy a set of resource documents into 'metering'
documents."
(:require
[clj-time.core :as time]
[clojure.core.async :as async]
[clojure.string :as str]
[clojure.tools.logging :as log]
[qbits.spandex :as spandex]
[sixsq.slipstream.metering.utils :as utils]))
(def ^:const metering-resource-uri "")
per Year = ANN , per Month = MON , per Week = WEE , per Day = DAY , per Hour = HUR , per Minute = MIN , per Second = SEC .
(def ^:const price-divisor {"SEC" (/ 1. 60), "MIN" 1, "HUR" 60, "GiBh" 60, "MiBh" 60, "DAY" (* 60 24), "WEE" (* 60 24 7)})
(def ^:const quantity-divisor {"GiBh" (* 1024 1024), "MiBh" 1024})
(def ^:const doc-type "_doc")
(defn es-hosts
[host port]
[(format ":%s" host port)])
(defn index-action [index type]
{:index {:_index index, :_type type}})
(defn search-url [index type]
(str/join "/" [index type "_search"]))
(defn search-urls [indices types]
(map #(search-url %1 %2) indices types))
(defn process-options
[{:keys [es-host es-port
vm-index
bucky-index
metering-index
metering-period-minutes]
:or {es-host "127.0.0.1"
es-port 9200
vm-index "slipstream-virtual-machine"
bucky-index "slipstream-storage-bucket"
metering-index "slipstream-metering"
metering-period-minutes 1}}]
{:hosts (es-hosts es-host es-port)
:resource-search-urls (search-urls [vm-index bucky-index] [doc-type doc-type])
:metering-action (index-action metering-index doc-type)
:metering-period-minutes metering-period-minutes})
(defn assoc-snapshot-time
[timestamp m]
(assoc m :snapshot-time timestamp))
(defn quantity
[{:keys [usageInKiB] :as resource}]
(let [billingUnit (when usageInKiB (-> resource
:serviceOffer
:price:billingUnit))]
(if usageInKiB (/ usageInKiB (get quantity-divisor billingUnit (* 1024 1024))) 1)))
(defn add-unitCode
[{:keys [price:unitCode] :as serviceOffer}]
(if price:unitCode
serviceOffer
(assoc serviceOffer
:price:unitCode
(or (:price:billingUnit serviceOffer)
(:price:billingUnitCode serviceOffer)))))
TODO : quantization for hour period , i.e apply the full hour price to first minute then zero for the rest of the hour
(defn assoc-price
[{:keys [serviceOffer] :as m}]
(let [so (when (and serviceOffer (map? serviceOffer)) (add-unitCode serviceOffer))
price-map (when (:price:unitCost so)
(some->> so
:price:unitCode
(get price-divisor)
(/ (:price:unitCost serviceOffer))
(* (quantity m))
(assoc {} :price)))]
(merge m price-map)))
(defn assoc-type
[{{resource-type :resource:type} :serviceOffer :as m}]
(if resource-type (assoc m :resource:type resource-type) m))
(defn update-id
[timestamp {:keys [id] :as m}]
(let [uuid (second (str/split (or id (utils/random-uuid)) #"/"))
ts (str/replace timestamp #"[:\.]" "-")
new-id (str "metering/" uuid "-" ts)]
(assoc m :id new-id)))
(defn replace-resource-uri
[m]
(assoc m :resourceURI metering-resource-uri))
(defn complete-index-action
"Add the :_id key to the index action so that the Elasticsearch :_id key is
consistent with the CIMI resourceID. The :_type key should already be
present in the index-action parameter."
[index-action {:keys [id] :as v}]
(let [action (first (keys index-action))
args (first (vals index-action))
uuid (second (str/split id #"/"))]
[{action (assoc args :_id uuid)} v]))
(defn create-actions
"work on a subset of documents returned by the global query search"
[timestamp index-action page]
(->> page
:body
:hits
:hits
(map :_source)
(map (partial assoc-snapshot-time timestamp))
(map assoc-price)
(map assoc-type)
(map (partial update-id timestamp))
(map replace-resource-uri)
(map (partial complete-index-action index-action))))
(defn bulk-insert
"Start the bulk insert for the provided actions/documents. A channel which
will hold the results is returned."
[client actions]
(let [{:keys [input-ch output-ch]} (spandex/bulk-chan client {:flush-threshold 100
:flush-interval 1000
:max-concurrent-requests 3})]
(when (pos? (count actions))
(doseq [action actions]
(async/put! input-ch action)))
(async/close! input-ch)
output-ch))
(defn response-stats
[resp]
(if (instance? Throwable resp)
(do
(log/error resp)
[0 {}])
(let [[job responses] resp
n (count job)
freq (frequencies (->> responses
:body
:items
(map :index)
(map :status)))]
[n freq])))
(defn merge-stats
[& stats]
[(reduce + 0 (map first stats))
(or (apply merge-with + (map second stats)) {})])
(defn handle-results
[ch]
(let [results (loop [stats [0 {}]]
(if-let [resp (async/<!! ch)]
(let [resp-stats (response-stats resp)]
(recur (merge-stats stats resp-stats)))
stats))]
(log/debug "bulk insert stats:" results)
results))
(defn- meter-resource
[hosts resource-search-url metering-action]
(async/go
(with-open [client (spandex/client {:hosts hosts})]
(let [timestamp (str (time/now))
ch (spandex/scroll-chan client
{:url resource-search-url
:body {:query {:match_all {}}}})]
(log/info "start metering snapshot" timestamp "from" resource-search-url)
(let [[total freq] (loop [stats [0 {}]]
(if-let [page (async/<! ch)]
(let [resp-stats (if (instance? Throwable page)
(do
(log/error "scroll result exception: " page)
[0 {}])
(->> page
(create-actions timestamp metering-action)
(bulk-insert client)
handle-results))]
(recur (merge-stats stats resp-stats)))
stats))]
(let [treated (reduce + (vals freq))
created (get freq 201 0)
stats [total treated created]
msg (str "finish metering snapshot " timestamp
" from " resource-search-url
" - " stats)]
(if (apply not= stats)
(log/error msg)
(log/info msg))
stats))))))
(defn meter-resources
[hosts resource-search-urls metering-action]
(doall (map #(meter-resource hosts % metering-action) resource-search-urls)))
|
|
4ec4df4713dbb0cd3f9d2d58d656f52249e0c8548ab226a5aaf4c0f4c27d9d52 | coccinelle/coccinelle | bytearray.mli | (***************************************************************************)
Copyright 1999 - 2010 ,
(* *)
(* This library is free software: you can redistribute it and/or modify *)
(* it under the terms of the GNU Lesser General Public License as *)
published by the Free Software Foundation , either version 2 of the
(* License, or (at your option) any later version. A special linking *)
exception to the GNU Lesser General Public License applies to this
(* library, see the LICENSE file for more information. *)
(***************************************************************************)
type t =
(char, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t
type tf =
(float, Bigarray.float64_elt, Bigarray.c_layout) Bigarray.Array1.t
val create : int -> t
val length : t -> int
(*
val to_string : t -> string
*)
val of_string : string -> t
val mmap_of_string : Unix.file_descr -> string -> t
val to_floatarray : tf -> int -> float array
val to_this_floatarray : float array -> tf -> int -> float array
val of_floatarray : float array -> tf
(*
val sub : t -> int -> int -> string
*)
val blit_from_string : string -> int -> t -> int -> int -> unit
(*
val blit_to_bytes : t -> int -> bytes -> int -> int -> unit
*)
val prefix : t -> t -> int -> bool
val marshal : 'a -> Marshal.extern_flags list -> t
val unmarshal : t -> int -> 'a
val marshal_to_buffer : t -> int -> 'a -> Marshal.extern_flags list -> int
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/c0452be88e2670e82d8f69a345ae1f3dbabe048a/bundles/parmap/parmap/src/bytearray.mli | ocaml | *************************************************************************
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
License, or (at your option) any later version. A special linking
library, see the LICENSE file for more information.
*************************************************************************
val to_string : t -> string
val sub : t -> int -> int -> string
val blit_to_bytes : t -> int -> bytes -> int -> int -> unit
| Copyright 1999 - 2010 ,
published by the Free Software Foundation , either version 2 of the
exception to the GNU Lesser General Public License applies to this
type t =
(char, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t
type tf =
(float, Bigarray.float64_elt, Bigarray.c_layout) Bigarray.Array1.t
val create : int -> t
val length : t -> int
val of_string : string -> t
val mmap_of_string : Unix.file_descr -> string -> t
val to_floatarray : tf -> int -> float array
val to_this_floatarray : float array -> tf -> int -> float array
val of_floatarray : float array -> tf
val blit_from_string : string -> int -> t -> int -> int -> unit
val prefix : t -> t -> int -> bool
val marshal : 'a -> Marshal.extern_flags list -> t
val unmarshal : t -> int -> 'a
val marshal_to_buffer : t -> int -> 'a -> Marshal.extern_flags list -> int
|
fd420315c990265936d6a93f21c2cd4bd320dad08d5af32a13334f8866e45b7b | caiorss/Functional-Programming | HighestClose.hs |
import qualified Data.ByteString.Lazy.Char8 as L
closing = readPrice . (!!4) . L.split ','
readPrice :: L.ByteString -> Maybe Int
readPrice str =
case L.readInt str of
Nothing -> Nothing
Just (dollars,rest) ->
case L.readInt (L.tail rest) of
Nothing -> Nothing
Just (cents,more) ->
Just (dollars * 100 + cents)
highestClose = maximum . (Nothing:) . map closing . L.lines
highestCloseFrom path = do
contents <- L.readFile path
print (highestClose contents)
| null | https://raw.githubusercontent.com/caiorss/Functional-Programming/ef3526898e3014e9c99bf495033ff36a4530503d/haskell/rwh/ch08/HighestClose.hs | haskell |
import qualified Data.ByteString.Lazy.Char8 as L
closing = readPrice . (!!4) . L.split ','
readPrice :: L.ByteString -> Maybe Int
readPrice str =
case L.readInt str of
Nothing -> Nothing
Just (dollars,rest) ->
case L.readInt (L.tail rest) of
Nothing -> Nothing
Just (cents,more) ->
Just (dollars * 100 + cents)
highestClose = maximum . (Nothing:) . map closing . L.lines
highestCloseFrom path = do
contents <- L.readFile path
print (highestClose contents)
|
|
9ca20808b24c7df202a80e4ae77656fa3b3b02b9d8cfa0a9f7d6e971f6b2f27c | mirage/ocaml-fsevents | bindings.ml |
* Copyright ( c ) 2015 < >
* Copyright ( c ) 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2015 David Sheets <>
* Copyright (c) 2014 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
open Ctypes
let (|||) = Int32.logor
let (??>) flag int32 = if flag then int32 else 0_l
let (??<) field int32 = Int32.logand field int32 <> 0_l
module T = Types.C(Types_detected)
module C(F: Cstubs.FOREIGN) = struct
type t = unit ptr
(* typedef struct __FSEventStream* FSEventStreamRef; *)
let typ : t typ = typedef (ptr void) "FSEventStreamRef"
let const_typ : t typ = typedef typ "ConstFSEventStreamRef"
module CreateFlags = struct
open T.CreateFlags
type t = {
use_cf_types : bool;
no_defer : bool;
watch_root : bool;
ignore_self : bool;
file_events : bool;
mark_self : bool;
}
let empty = {
use_cf_types = false;
no_defer = false;
watch_root = false;
ignore_self = false;
file_events = false;
mark_self = false;
}
let detailed_interactive = {
use_cf_types = false;
no_defer = true;
watch_root = true;
ignore_self = false;
file_events = true;
mark_self = false;
}
let use_cf_types_i = Unsigned.UInt32.to_int32 use_cf_types
let no_defer_i = Unsigned.UInt32.to_int32 no_defer
let watch_root_i = Unsigned.UInt32.to_int32 watch_root
let ignore_self_i = Unsigned.UInt32.to_int32 ignore_self
let file_events_i = Unsigned.UInt32.to_int32 file_events
let mark_self_i = Unsigned.UInt32.to_int32 mark_self
let to_uint32 {
use_cf_types;
no_defer;
watch_root;
ignore_self;
file_events;
mark_self;
} = Unsigned.UInt32.of_int32 (
(??> use_cf_types use_cf_types_i) |||
(??> no_defer no_defer_i) |||
(??> watch_root watch_root_i) |||
(??> ignore_self ignore_self_i) |||
(??> file_events file_events_i) |||
(??> mark_self mark_self_i)
)
let of_uint32 i =
let i = Unsigned.UInt32.to_int32 i in
{
use_cf_types = ??< i use_cf_types_i;
no_defer = ??< i no_defer_i;
watch_root = ??< i watch_root_i;
ignore_self = ??< i ignore_self_i;
file_events = ??< i file_events_i;
mark_self = ??< i mark_self_i;
}
let typ = view ~read:of_uint32 ~write:to_uint32 t
end
module EventFlags = struct
open T.EventFlags
type dropping_party = {
user : bool;
kernel: bool;
}
type item_type = File | Symlink | Dir | Hardlink
type t = {
must_scan_subdirs : dropping_party option;
event_ids_wrapped : bool;
history_done : bool;
root_changed : bool;
mount : bool;
unmount : bool;
own_event : bool;
item_created : bool;
item_removed : bool;
item_inode_meta_mod : bool;
item_renamed : bool;
item_modified : bool;
item_finder_info_mod : bool;
item_change_owner : bool;
item_xattr_mod : bool;
item_type : item_type option;
item_is_last_hardlink: bool;
}
let must_scan_subdirs_i = Unsigned.UInt32.to_int32 must_scan_subdirs
let user_dropped_i = Unsigned.UInt32.to_int32 user_dropped
let kernel_dropped_i = Unsigned.UInt32.to_int32 kernel_dropped
let event_ids_wrapped_i = Unsigned.UInt32.to_int32 event_ids_wrapped
let history_done_i = Unsigned.UInt32.to_int32 history_done
let root_changed_i = Unsigned.UInt32.to_int32 root_changed
let mount_i = Unsigned.UInt32.to_int32 mount
let unmount_i = Unsigned.UInt32.to_int32 unmount
let own_event_i = Unsigned.UInt32.to_int32 own_event
let item_created_i = Unsigned.UInt32.to_int32 item_created
let item_removed_i = Unsigned.UInt32.to_int32 item_removed
let item_inode_meta_mod_i = Unsigned.UInt32.to_int32 item_inode_meta_mod
let item_renamed_i = Unsigned.UInt32.to_int32 item_renamed
let item_modified_i = Unsigned.UInt32.to_int32 item_modified
let item_finder_info_mod_i = Unsigned.UInt32.to_int32 item_finder_info_mod
let item_change_owner_i = Unsigned.UInt32.to_int32 item_change_owner
let item_xattr_mod_i = Unsigned.UInt32.to_int32 item_xattr_mod
let item_is_file_i = Unsigned.UInt32.to_int32 item_is_file
let item_is_dir_i = Unsigned.UInt32.to_int32 item_is_dir
let item_is_symlink_i = Unsigned.UInt32.to_int32 item_is_symlink
let item_is_hardlink_i = Unsigned.UInt32.to_int32 item_is_hardlink
let item_is_last_hardlink_i= Unsigned.UInt32.to_int32 item_is_last_hardlink
let to_uint32 {
must_scan_subdirs;
event_ids_wrapped;
history_done;
root_changed;
mount;
unmount;
own_event;
item_created;
item_removed;
item_inode_meta_mod;
item_renamed;
item_modified;
item_finder_info_mod;
item_change_owner;
item_xattr_mod;
item_type;
item_is_last_hardlink;
} = Unsigned.UInt32.of_int32 (
(match must_scan_subdirs with
| None -> 0_l
| Some { user; kernel } ->
must_scan_subdirs_i |||
(??> user user_dropped_i) |||
(??> kernel kernel_dropped_i)
) |||
(??> event_ids_wrapped event_ids_wrapped_i) |||
(??> history_done history_done_i) |||
(??> root_changed root_changed_i) |||
(??> mount mount_i) |||
(??> unmount unmount_i) |||
(??> own_event own_event_i) |||
(??> item_created item_created_i) |||
(??> item_removed item_removed_i) |||
(??> item_inode_meta_mod item_inode_meta_mod_i) |||
(??> item_renamed item_renamed_i) |||
(??> item_modified item_modified_i) |||
(??> item_finder_info_mod item_finder_info_mod_i) |||
(??> item_change_owner item_change_owner_i) |||
(??> item_xattr_mod item_xattr_mod_i) |||
(match item_type with
| None -> 0_l
| Some File -> item_is_file_i
| Some Dir -> item_is_dir_i
| Some Symlink -> item_is_symlink_i
| Some Hardlink-> item_is_hardlink_i
) |||
(??> item_is_last_hardlink item_is_last_hardlink_i)
)
let must_scan_subdirs_of_uint32 i =
if ??< i must_scan_subdirs_i
then Some {
user = ??< i user_dropped_i;
kernel = ??< i kernel_dropped_i;
} else None
let item_type_of_uint32 i =
if ??< i item_is_file_i
then Some File
else if ??< i item_is_dir_i
then Some Dir
else if ??< i item_is_symlink_i
then Some Symlink
else if ??< i item_is_hardlink_i
then Some Hardlink
else None
let of_uint32 i =
let i = Unsigned.UInt32.to_int32 i in
{
must_scan_subdirs = must_scan_subdirs_of_uint32 i;
event_ids_wrapped = ??< i event_ids_wrapped_i;
history_done = ??< i history_done_i;
root_changed = ??< i root_changed_i;
mount = ??< i mount_i;
unmount = ??< i unmount_i;
own_event = ??< i own_event_i;
item_created = ??< i item_created_i;
item_removed = ??< i item_removed_i;
item_inode_meta_mod = ??< i item_inode_meta_mod_i;
item_renamed = ??< i item_renamed_i;
item_modified = ??< i item_modified_i;
item_finder_info_mod = ??< i item_finder_info_mod_i;
item_change_owner = ??< i item_change_owner_i;
item_xattr_mod = ??< i item_xattr_mod_i;
item_type = item_type_of_uint32 i;
item_is_last_hardlink = ??< i item_is_last_hardlink_i;
}
let typ = view ~read:of_uint32 ~write:to_uint32 t
end
module EventId = struct
type t =
| Now
| Since of Unsigned.UInt64.t
let of_uint64 i =
if i = T.EventId.since_now
then Now
else Since i
let to_uint64 = function
| Now -> T.EventId.since_now
| Since i -> i
let typ = view ~read:of_uint64 ~write:to_uint64 T.EventId.t
end
module Callback = struct
type t = string -> EventFlags.t -> EventId.t -> unit
let void_string_typ = view
~read:(coerce (ptr void) (ptr string))
~write:(coerce (ptr string) (ptr void))
(ptr void)
typedef void ( * FSEventStreamCallback ) (
ConstFSEventStreamRef streamRef ,
void * clientCallBackInfo ,
size_t ,
void * eventPaths ,
const [ ] ,
const FSEventStreamEventId eventIds [ ] ) ;
ConstFSEventStreamRef streamRef,
void *clientCallBackInfo,
size_t numEvents,
void *eventPaths,
const FSEventStreamEventFlags eventFlags[],
const FSEventStreamEventId eventIds[]);
*)
let cstring_typ =
Foreign.funptr ~runtime_lock:true ~name:"FSEventStreamCallback" (
const_typ @->
ptr void @->
size_t @->
void_string_typ @->
typedef (ptr EventFlags.typ) "const FSEventStreamEventFlags *" @->
typedef (ptr T.EventId.t) "const FSEventStreamEventId *" @->
returning void
)
let to_cstring_typ fn _stream _info num_events paths flags ids =
let n = Unsigned.Size_t.to_int num_events in
let paths = CArray.from_ptr paths n in
let flags = CArray.from_ptr flags n in
let ids = CArray.from_ptr ids n in
for i = 0 to n - 1 do
let id = EventId.of_uint64 (CArray.get ids i) in
fn (CArray.get paths i) (CArray.get flags i) id
done
end
module Context = struct
type 'a t = {
version : int;
info : 'a;
retain : Cf.Allocator.retain_callback_t;
release : Cf.Allocator.release_callback_t;
copy_description : Cf.Allocator.copy_description_callback_t;
}
let typ = typedef (ptr void) "FSEventStreamContext"
end
module PathList = Cf.Array.List.Make(Cf.String.String)
extern FSEventStreamRef FSEventStreamCreate (
CFAllocatorRef allocator ,
FSEventStreamCallback callback ,
FSEventStreamContext * context ,
CFArrayRef pathsToWatch ,
FSEventStreamEventId sinceWhen ,
CFTimeInterval latency ,
flags
) ;
CFAllocatorRef allocator,
FSEventStreamCallback callback,
FSEventStreamContext *context,
CFArrayRef pathsToWatch,
FSEventStreamEventId sinceWhen,
CFTimeInterval latency,
FSEventStreamCreateFlags flags
); *)
let create = F.(foreign "FSEventStreamCreate" (
ptr_opt void @->
Callback.cstring_typ @->
ptr_opt Context.typ @->
PathList.typ @->
EventId.typ @->
Cf.TimeInterval.typ @->
CreateFlags.typ @->
returning typ
))
(* extern FSEventStreamEventId FSEventStreamGetLatestEventId(
ConstFSEventStreamRef streamRef
); *)
let get_latest_event_id = F.(foreign "FSEventStreamGetLatestEventId" (
typ @-> returning EventId.typ
))
extern void FSEventStreamScheduleWithRunLoop (
FSEventStreamRef streamRef ,
CFRunLoopRef runLoop ,
CFStringRef runLoopMode
) ;
FSEventStreamRef streamRef,
CFRunLoopRef runLoop,
CFStringRef runLoopMode
); *)
let schedule_with_run_loop = F.(foreign "FSEventStreamScheduleWithRunLoop" (
typ @->
Cf.RunLoop.typ @->
Cf.RunLoop.Mode.typ @->
returning void
))
extern Boolean FSEventStreamStart (
FSEventStreamRef streamRef
) ;
FSEventStreamRef streamRef
); *)
let start = F.(foreign "FSEventStreamStart" (
typ @-> returning bool
))
(* extern void FSEventStreamFlushSync(
FSEventStreamRef streamRef
); *)
let flush_sync = F.(foreign "FSEventStreamFlushSync" (
typ @-> returning void
))
(* extern void FSEventStreamStop(
FSEventStreamRef streamRef
); *)
let stop = F.(foreign "FSEventStreamStop" (
typ @-> returning void
))
(* extern void FSEventStreamInvalidate(
FSEventStreamRef streamRef
); *)
let invalidate = F.(foreign "FSEventStreamInvalidate" (
typ @-> returning void
))
extern void FSEventStreamRelease (
FSEventStreamRef streamRef
) ;
FSEventStreamRef streamRef
); *)
let release = F.(foreign "FSEventStreamRelease" (
typ @-> returning void
))
extern CF_RETURNS_RETAINED (
ConstFSEventStreamRef streamRef
) ;
ConstFSEventStreamRef streamRef
); *)
let copy_paths_being_watched =
F.(foreign "FSEventStreamCopyPathsBeingWatched" (
const_typ @-> returning PathList.typ
))
end
| null | https://raw.githubusercontent.com/mirage/ocaml-fsevents/5c6abdcc20ce284d88003ce3300e637f7514e5e7/lib_gen/bindings.ml | ocaml | typedef struct __FSEventStream* FSEventStreamRef;
extern FSEventStreamEventId FSEventStreamGetLatestEventId(
ConstFSEventStreamRef streamRef
);
extern void FSEventStreamFlushSync(
FSEventStreamRef streamRef
);
extern void FSEventStreamStop(
FSEventStreamRef streamRef
);
extern void FSEventStreamInvalidate(
FSEventStreamRef streamRef
); |
* Copyright ( c ) 2015 < >
* Copyright ( c ) 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2015 David Sheets <>
* Copyright (c) 2014 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
open Ctypes
let (|||) = Int32.logor
let (??>) flag int32 = if flag then int32 else 0_l
let (??<) field int32 = Int32.logand field int32 <> 0_l
module T = Types.C(Types_detected)
module C(F: Cstubs.FOREIGN) = struct
type t = unit ptr
let typ : t typ = typedef (ptr void) "FSEventStreamRef"
let const_typ : t typ = typedef typ "ConstFSEventStreamRef"
module CreateFlags = struct
open T.CreateFlags
type t = {
use_cf_types : bool;
no_defer : bool;
watch_root : bool;
ignore_self : bool;
file_events : bool;
mark_self : bool;
}
let empty = {
use_cf_types = false;
no_defer = false;
watch_root = false;
ignore_self = false;
file_events = false;
mark_self = false;
}
let detailed_interactive = {
use_cf_types = false;
no_defer = true;
watch_root = true;
ignore_self = false;
file_events = true;
mark_self = false;
}
let use_cf_types_i = Unsigned.UInt32.to_int32 use_cf_types
let no_defer_i = Unsigned.UInt32.to_int32 no_defer
let watch_root_i = Unsigned.UInt32.to_int32 watch_root
let ignore_self_i = Unsigned.UInt32.to_int32 ignore_self
let file_events_i = Unsigned.UInt32.to_int32 file_events
let mark_self_i = Unsigned.UInt32.to_int32 mark_self
let to_uint32 {
use_cf_types;
no_defer;
watch_root;
ignore_self;
file_events;
mark_self;
} = Unsigned.UInt32.of_int32 (
(??> use_cf_types use_cf_types_i) |||
(??> no_defer no_defer_i) |||
(??> watch_root watch_root_i) |||
(??> ignore_self ignore_self_i) |||
(??> file_events file_events_i) |||
(??> mark_self mark_self_i)
)
let of_uint32 i =
let i = Unsigned.UInt32.to_int32 i in
{
use_cf_types = ??< i use_cf_types_i;
no_defer = ??< i no_defer_i;
watch_root = ??< i watch_root_i;
ignore_self = ??< i ignore_self_i;
file_events = ??< i file_events_i;
mark_self = ??< i mark_self_i;
}
let typ = view ~read:of_uint32 ~write:to_uint32 t
end
module EventFlags = struct
open T.EventFlags
type dropping_party = {
user : bool;
kernel: bool;
}
type item_type = File | Symlink | Dir | Hardlink
type t = {
must_scan_subdirs : dropping_party option;
event_ids_wrapped : bool;
history_done : bool;
root_changed : bool;
mount : bool;
unmount : bool;
own_event : bool;
item_created : bool;
item_removed : bool;
item_inode_meta_mod : bool;
item_renamed : bool;
item_modified : bool;
item_finder_info_mod : bool;
item_change_owner : bool;
item_xattr_mod : bool;
item_type : item_type option;
item_is_last_hardlink: bool;
}
let must_scan_subdirs_i = Unsigned.UInt32.to_int32 must_scan_subdirs
let user_dropped_i = Unsigned.UInt32.to_int32 user_dropped
let kernel_dropped_i = Unsigned.UInt32.to_int32 kernel_dropped
let event_ids_wrapped_i = Unsigned.UInt32.to_int32 event_ids_wrapped
let history_done_i = Unsigned.UInt32.to_int32 history_done
let root_changed_i = Unsigned.UInt32.to_int32 root_changed
let mount_i = Unsigned.UInt32.to_int32 mount
let unmount_i = Unsigned.UInt32.to_int32 unmount
let own_event_i = Unsigned.UInt32.to_int32 own_event
let item_created_i = Unsigned.UInt32.to_int32 item_created
let item_removed_i = Unsigned.UInt32.to_int32 item_removed
let item_inode_meta_mod_i = Unsigned.UInt32.to_int32 item_inode_meta_mod
let item_renamed_i = Unsigned.UInt32.to_int32 item_renamed
let item_modified_i = Unsigned.UInt32.to_int32 item_modified
let item_finder_info_mod_i = Unsigned.UInt32.to_int32 item_finder_info_mod
let item_change_owner_i = Unsigned.UInt32.to_int32 item_change_owner
let item_xattr_mod_i = Unsigned.UInt32.to_int32 item_xattr_mod
let item_is_file_i = Unsigned.UInt32.to_int32 item_is_file
let item_is_dir_i = Unsigned.UInt32.to_int32 item_is_dir
let item_is_symlink_i = Unsigned.UInt32.to_int32 item_is_symlink
let item_is_hardlink_i = Unsigned.UInt32.to_int32 item_is_hardlink
let item_is_last_hardlink_i= Unsigned.UInt32.to_int32 item_is_last_hardlink
let to_uint32 {
must_scan_subdirs;
event_ids_wrapped;
history_done;
root_changed;
mount;
unmount;
own_event;
item_created;
item_removed;
item_inode_meta_mod;
item_renamed;
item_modified;
item_finder_info_mod;
item_change_owner;
item_xattr_mod;
item_type;
item_is_last_hardlink;
} = Unsigned.UInt32.of_int32 (
(match must_scan_subdirs with
| None -> 0_l
| Some { user; kernel } ->
must_scan_subdirs_i |||
(??> user user_dropped_i) |||
(??> kernel kernel_dropped_i)
) |||
(??> event_ids_wrapped event_ids_wrapped_i) |||
(??> history_done history_done_i) |||
(??> root_changed root_changed_i) |||
(??> mount mount_i) |||
(??> unmount unmount_i) |||
(??> own_event own_event_i) |||
(??> item_created item_created_i) |||
(??> item_removed item_removed_i) |||
(??> item_inode_meta_mod item_inode_meta_mod_i) |||
(??> item_renamed item_renamed_i) |||
(??> item_modified item_modified_i) |||
(??> item_finder_info_mod item_finder_info_mod_i) |||
(??> item_change_owner item_change_owner_i) |||
(??> item_xattr_mod item_xattr_mod_i) |||
(match item_type with
| None -> 0_l
| Some File -> item_is_file_i
| Some Dir -> item_is_dir_i
| Some Symlink -> item_is_symlink_i
| Some Hardlink-> item_is_hardlink_i
) |||
(??> item_is_last_hardlink item_is_last_hardlink_i)
)
let must_scan_subdirs_of_uint32 i =
if ??< i must_scan_subdirs_i
then Some {
user = ??< i user_dropped_i;
kernel = ??< i kernel_dropped_i;
} else None
let item_type_of_uint32 i =
if ??< i item_is_file_i
then Some File
else if ??< i item_is_dir_i
then Some Dir
else if ??< i item_is_symlink_i
then Some Symlink
else if ??< i item_is_hardlink_i
then Some Hardlink
else None
let of_uint32 i =
let i = Unsigned.UInt32.to_int32 i in
{
must_scan_subdirs = must_scan_subdirs_of_uint32 i;
event_ids_wrapped = ??< i event_ids_wrapped_i;
history_done = ??< i history_done_i;
root_changed = ??< i root_changed_i;
mount = ??< i mount_i;
unmount = ??< i unmount_i;
own_event = ??< i own_event_i;
item_created = ??< i item_created_i;
item_removed = ??< i item_removed_i;
item_inode_meta_mod = ??< i item_inode_meta_mod_i;
item_renamed = ??< i item_renamed_i;
item_modified = ??< i item_modified_i;
item_finder_info_mod = ??< i item_finder_info_mod_i;
item_change_owner = ??< i item_change_owner_i;
item_xattr_mod = ??< i item_xattr_mod_i;
item_type = item_type_of_uint32 i;
item_is_last_hardlink = ??< i item_is_last_hardlink_i;
}
let typ = view ~read:of_uint32 ~write:to_uint32 t
end
module EventId = struct
type t =
| Now
| Since of Unsigned.UInt64.t
let of_uint64 i =
if i = T.EventId.since_now
then Now
else Since i
let to_uint64 = function
| Now -> T.EventId.since_now
| Since i -> i
let typ = view ~read:of_uint64 ~write:to_uint64 T.EventId.t
end
module Callback = struct
type t = string -> EventFlags.t -> EventId.t -> unit
let void_string_typ = view
~read:(coerce (ptr void) (ptr string))
~write:(coerce (ptr string) (ptr void))
(ptr void)
typedef void ( * FSEventStreamCallback ) (
ConstFSEventStreamRef streamRef ,
void * clientCallBackInfo ,
size_t ,
void * eventPaths ,
const [ ] ,
const FSEventStreamEventId eventIds [ ] ) ;
ConstFSEventStreamRef streamRef,
void *clientCallBackInfo,
size_t numEvents,
void *eventPaths,
const FSEventStreamEventFlags eventFlags[],
const FSEventStreamEventId eventIds[]);
*)
let cstring_typ =
Foreign.funptr ~runtime_lock:true ~name:"FSEventStreamCallback" (
const_typ @->
ptr void @->
size_t @->
void_string_typ @->
typedef (ptr EventFlags.typ) "const FSEventStreamEventFlags *" @->
typedef (ptr T.EventId.t) "const FSEventStreamEventId *" @->
returning void
)
let to_cstring_typ fn _stream _info num_events paths flags ids =
let n = Unsigned.Size_t.to_int num_events in
let paths = CArray.from_ptr paths n in
let flags = CArray.from_ptr flags n in
let ids = CArray.from_ptr ids n in
for i = 0 to n - 1 do
let id = EventId.of_uint64 (CArray.get ids i) in
fn (CArray.get paths i) (CArray.get flags i) id
done
end
module Context = struct
type 'a t = {
version : int;
info : 'a;
retain : Cf.Allocator.retain_callback_t;
release : Cf.Allocator.release_callback_t;
copy_description : Cf.Allocator.copy_description_callback_t;
}
let typ = typedef (ptr void) "FSEventStreamContext"
end
module PathList = Cf.Array.List.Make(Cf.String.String)
extern FSEventStreamRef FSEventStreamCreate (
CFAllocatorRef allocator ,
FSEventStreamCallback callback ,
FSEventStreamContext * context ,
CFArrayRef pathsToWatch ,
FSEventStreamEventId sinceWhen ,
CFTimeInterval latency ,
flags
) ;
CFAllocatorRef allocator,
FSEventStreamCallback callback,
FSEventStreamContext *context,
CFArrayRef pathsToWatch,
FSEventStreamEventId sinceWhen,
CFTimeInterval latency,
FSEventStreamCreateFlags flags
); *)
let create = F.(foreign "FSEventStreamCreate" (
ptr_opt void @->
Callback.cstring_typ @->
ptr_opt Context.typ @->
PathList.typ @->
EventId.typ @->
Cf.TimeInterval.typ @->
CreateFlags.typ @->
returning typ
))
let get_latest_event_id = F.(foreign "FSEventStreamGetLatestEventId" (
typ @-> returning EventId.typ
))
extern void FSEventStreamScheduleWithRunLoop (
FSEventStreamRef streamRef ,
CFRunLoopRef runLoop ,
CFStringRef runLoopMode
) ;
FSEventStreamRef streamRef,
CFRunLoopRef runLoop,
CFStringRef runLoopMode
); *)
let schedule_with_run_loop = F.(foreign "FSEventStreamScheduleWithRunLoop" (
typ @->
Cf.RunLoop.typ @->
Cf.RunLoop.Mode.typ @->
returning void
))
extern Boolean FSEventStreamStart (
FSEventStreamRef streamRef
) ;
FSEventStreamRef streamRef
); *)
let start = F.(foreign "FSEventStreamStart" (
typ @-> returning bool
))
let flush_sync = F.(foreign "FSEventStreamFlushSync" (
typ @-> returning void
))
let stop = F.(foreign "FSEventStreamStop" (
typ @-> returning void
))
let invalidate = F.(foreign "FSEventStreamInvalidate" (
typ @-> returning void
))
extern void FSEventStreamRelease (
FSEventStreamRef streamRef
) ;
FSEventStreamRef streamRef
); *)
(* Binding for FSEventStreamRelease: drop a reference to the stream. *)
let release = F.(foreign "FSEventStreamRelease" (
typ @-> returning void
))
(* extern CF_RETURNS_RETAINED CFArrayRef FSEventStreamCopyPathsBeingWatched(
     ConstFSEventStreamRef streamRef
   ); *)
(* Binding for FSEventStreamCopyPathsBeingWatched: the paths this stream
   watches, as a CFArray of CFStrings surfaced through PathList.  Takes
   the const stream type. *)
let copy_paths_being_watched =
F.(foreign "FSEventStreamCopyPathsBeingWatched" (
const_typ @-> returning PathList.typ
))
end
|
c28cb2c26d5d15f42e16f360b60cfa91914c6d6a476b6536ddbd485d8935e2e1 | csabahruska/jhc-components | DataConstructors.hs | Generated by DrIFT ( Automatic class derivations for )
{-# LINE 1 "src/DataConstructors.hs" #-}
{-# LANGUAGE OverloadedStrings #-}
module DataConstructors(
AliasType(..),
boxPrimitive,
collectDeriving,
conSlots,
constructionExpression,
Constructor(..),
DataFamily(..),
DataTable(..),
DataTableMonad(..),
dataTablePrims,
deconstructionExpression,
deriveClasses,
extractIO,
extractIO',
extractPrimitive,
ExtTypeInfo(..),
extTypeInfoExtType,
followAlias,
followAliases,
getConstructor,
getConstructorArities,
getProduct,
getSiblings,
lookupExtTypeInfo,
mktBox,
modBox,
numberSiblings,
onlyChild,
pprintTypeOfCons,
primitiveAliases,
removeNewtypes,
samplePrimitiveDataTable,
showDataTable,
Slot(..),
slotTypes,
slotTypesHs,
tAbsurd,
toDataTable,
typesCompatable,
updateLit
) where
import Control.Monad.Identity
import Control.Monad.Writer(tell,execWriter)
import Data.Maybe
import Data.Monoid hiding(getProduct)
import List(sortBy)
import qualified Data.Map as Map hiding(map)
import qualified Data.Set as Set hiding(map)
import C.Prims
import Data.Binary
import Doc.DocLike as D
import Doc.PPrint
import Doc.Pretty
import E.Binary()
import E.E
import E.Show
import E.Subst
import E.Traverse
import E.TypeCheck
import E.Values
import FrontEnd.Class(instanceName)
import FrontEnd.HsSyn
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import FrontEnd.Tc.Type
import GenUtil
import Info.Types
import Name.Id
import Name.Name as Name
import Name.Names
import Name.VConsts
import PackedString
import Support.CanType
import Support.FreeVars
import Support.MapBinaryInstance
import Support.Unparse
import Util.HasSize
import Util.SameShape
import Util.SetLike as S
import Util.VarName
import qualified Cmm.Op as Op
import qualified Util.Graph as G
import qualified Util.Seq as Seq
-- | Convert a front-end 'Type' into the core type language 'E'.  Runs in
-- a VarName-style monad so type variables get stable fresh ids.
-- Existential quantifiers are erased: only the kinds of the bound
-- variables survive, packed into an unboxed-tuple type.
tipe' (TAp t1 t2) = liftM2 eAp (tipe' t1) (tipe' t2)
tipe' (TArrow t1 t2) =  do
    t1' <- tipe' t1
    t2' <- tipe' t2
    return $ EPi (tVr emptyId (t1')) t2'
tipe' (TCon (Tycon n k)) | Just n' <- Map.lookup n primitiveAliases = return $ ELit litCons { litName = n', litType = kind k }
tipe' (TCon (Tycon n k)) =  return $ ELit litCons { litName = n, litType = kind k }
tipe' (TVar tv@Tyvar { tyvarKind = k}) = do
    v <- lookupName tv
    return $ EVar $ tVr v (kind k)
tipe' (TForAll [] (_ :=> t)) = tipe' t
tipe' (TExists [] (_ :=> t)) = tipe' t
tipe' (TForAll xs (_ :=> t)) = do
    xs' <- flip mapM xs $ \tv -> do
        v <- newName (map anonymous [35 .. ]) () tv
        return $ tVr v (kind $ tyvarKind tv)
    t' <- tipe' t
    return $ foldr EPi t' xs' -- [ tVr n (kind k) | n <- [2,4..] | k <- xs ]
tipe' ~(TExists xs (_ :=> t)) = do
    xs' <- flip mapM xs $ \tv -> do
        --v <- newName [70,72..] () tv
        --return $ tVr v (kind $ tyvarKind tv)
        return $ (kind $ tyvarKind tv)
    t' <- tipe' t
    return $ ELit litCons { litName = name_UnboxedTupleConstructor typeLevel (length xs' + 1), litArgs = (t':xs'), litType = eHash }
-- | Convert a front-end 'Kind' to the corresponding core sort expression.
kind (KBase KUTuple) = eHash
kind (KBase KHash) = eHash
kind (KBase Star) = eStar
kind (KBase (KNamed t)) = ESort (ESortNamed t)
kind (Kfun k1 k2) = EPi (tVr emptyId (kind k1)) (kind k2)
kind k = error $ "DataConstructors.kind: cannot convert " ++ show k
-- | How a newtype alias is represented: erased entirely, or kept as a
-- coercion because the newtype is recursive.
data AliasType = ErasedAlias | RecursiveAlias
    deriving(Eq,Ord,Show)
    {-! derive: Binary !-}
-- these apply to types
-- | What the value-level inhabitants of a type constructor look like.
data DataFamily =
    DataAbstract                   -- abstract internal type, has children of representation unknown and irrelevant.
    | DataNone                     -- children don't apply. data constructor for instance
    | DataPrimitive                -- primitive type, children are all numbers.
    | DataEnum {-# UNPACK #-} !Int -- bounded integral type, argument is maximum number
    | DataNormal [Name]            -- child constructors
    | DataAlias !AliasType
    deriving(Eq,Ord,Show)
    {-! derive: Binary !-}
-- | Record describing a data type.
-- * is also a data type containing the type constructors, which are unlifted, yet boxed.
data Constructor = Constructor {
    conName      :: Name,         -- name of constructor
    conType      :: E,            -- type of constructor
    conExpr      :: E,            -- expression which constructs this value
    conOrigSlots :: [Slot],       -- original slots
    conInhabits  :: Name,         -- what constructor it inhabits, similar to conType, but not quite.
    conVirtual   :: Maybe [Name], -- whether this is a virtual constructor that translates into an enum and its siblings
    conChildren  :: DataFamily,
    conCTYPE     :: Maybe ExtType -- external type
    } deriving(Show)
    {-! derive: Binary !-}
-- | One argument slot of a data constructor: a plain type, a slot whose
-- contents were unpacked into several raw components, or an existential
-- type variable bound by the constructor.
data Slot =
    SlotNormal E
    | SlotUnpacked E !Name [E]
    | SlotExistential TVr
    deriving(Eq,Ord,Show)
    {-! derive: Binary !-}
-- | Apply a type transformation to every expression stored in a slot.
mapESlot g slot = case slot of
    SlotNormal e         -> SlotNormal (g e)
    SlotUnpacked e n es  -> SlotUnpacked (g e) n (map g es)
    SlotExistential t    -> SlotExistential t { tvrType = g (tvrType t) }
-- | The flattened expression-level slot types of a constructor.
conSlots s = getSlots $ conOrigSlots s
-- | Flatten a slot list to the expression-level types it stores;
-- unpacked slots contribute each of their component types.
getSlots ss = ss >>= contents where
    contents (SlotNormal e) = [e]
    contents (SlotUnpacked _ _ es) = es
    contents (SlotExistential e) = [tvrType e]
-- | The Haskell-visible type of each slot (unpacked slots report their
-- original, pre-unpacking type).
getHsSlots ss = map surface ss where
    surface (SlotNormal e) = e
    surface (SlotUnpacked e _ _) = e
    surface (SlotExistential e) = tvrType e
-- | The data table: a map from constructor and type-constructor names to
-- their 'Constructor' records.
newtype DataTable = DataTable (Map.Map Name Constructor)
    deriving(Monoid)

instance Binary DataTable where
    put (DataTable dt) = putMap dt
    get = fmap DataTable getMap

-- | A 'Constructor' with every field defaulted; fields with no sensible
-- default raise an error when forced.
emptyConstructor = Constructor {
    conName = error "emptyConstructor.conName",
    conType = Unknown,
    conOrigSlots = [],
    conExpr = Unknown,
    conInhabits = error "emptyConstructor.conInhabits",
    conVirtual = Nothing,
    conCTYPE = Nothing,
    conChildren = DataNone
    }

instance HasSize DataTable where
    size (DataTable d) = Map.size d
{-# NOINLINE getConstructor #-}
-- | Look up a constructor record by name.  Conjured names (Box/Absurd),
-- raw primitive types and unboxed tuples are synthesized on demand;
-- everything else comes from the table, and a miss calls 'fail'.
getConstructor :: Monad m => Name -> DataTable -> m Constructor
getConstructor n _ | isJust me = return (emptyConstructor {
    conName = n, conType = e,
    conExpr = foldr ELam (foldl eAp (mktBox e) (map EVar tvrs)) tvrs,
    conInhabits = s_Star, conOrigSlots = map SlotNormal sts }) where
        sts = map tvrType ss
        tvrs = [ tvr { tvrIdent = i , tvrType = t } | i <- anonymousIds | t <- sts ]
        (_,ss) = fromPi e
        me@(~(Just e)) = fromConjured modBox n `mplus` fromConjured modAbsurd n
getConstructor n _ | RawType <- nameType n = return $ primitiveConstructor n
getConstructor n _ | Just (level,arity) <- fromName_UnboxedTupleConstructor n = return $ if level == termLevel then snd $ tunboxedtuple arity else fst $ tunboxedtuple arity
--getConstructor n _ | Just v <- fromUnboxedNameTuple n, DataConstructor <- nameType n = return $ snd $ tunboxedtuple v
--getConstructor n _ | Just v <- fromUnboxedNameTuple n, TypeConstructor <- nameType n = return $ fst $ tunboxedtuple v
getConstructor n (DataTable map) = case Map.lookup n map of
    Just x -> return x
    Nothing -> fail $ "getConstructor: " ++ show (nameType n,n)
-- | return the single constructor of product types
getProduct :: Monad m => DataTable -> E -> m Constructor
getProduct dataTable e | (ELit LitCons { litName = cn }) <-
    followAliases dataTable e, Just c <- getConstructor cn dataTable = f c where
        f c | DataNormal [x] <- conChildren c = getConstructor x dataTable
            | otherwise = fail "Not Product type"
getProduct _ _ = fail "Not Product type"
-- | The (type constructor, data constructor) pair for the n-ary unboxed
-- tuple, built on demand rather than stored in the table.
tunboxedtuple :: Int -> (Constructor,Constructor)
tunboxedtuple n = (typeCons,dataCons) where
    dataCons = emptyConstructor {
        conName = dc,
        conType = dtipe,
        conOrigSlots = map (SlotNormal . EVar) typeVars,
        -- \v1 .. vn -> (# v1, .., vn #)
        conExpr = foldr ($) (ELit litCons
            { litName = dc
            , litArgs = map EVar vars
            , litType = ftipe
            }) (map ELam vars),
        conInhabits = tc
        }
    typeCons = emptyConstructor {
        conName = tc,
        conType = foldr EPi eHash (replicate n tvr { tvrType = eStar }),
        conOrigSlots = replicate n (SlotNormal eStar),
        conExpr = tipe,
        conInhabits = s_Hash,
        conChildren = DataNormal [dc]
        }
    dc = name_UnboxedTupleConstructor termLevel n
    tc = name_UnboxedTupleConstructor typeLevel n
    tipe = foldr ELam ftipe typeVars
    typeVars = take n [ tvr { tvrType = eStar, tvrIdent = v } | v <- anonymousIds ]
    vars = [ tvr { tvrType = EVar t, tvrIdent = v } | v <- map anonymous [ n + 8, n + 9 ..] | t <- typeVars ]
    ftipe = ELit (litCons { litName = tc, litArgs = map EVar typeVars, litType = eHash })
    dtipe = foldr EPi (foldr EPi ftipe [ v { tvrIdent = emptyId } | v <- vars]) typeVars
-- | conjured data types, these data types are created as needed and can be of any type, their
-- actual type is encoded in their names.
--
-- Absurd - this is a type that it used to default otherwise unconstrained
-- types, it is not special in any particular way but is just an arbitrary type
-- to give to things.
--
-- Box - this type can be used to represent any boxed values. It is considered
-- equivalent to all boxed values so is not a very precise type. It is used in
-- the final stages of compilation before core mangling so that optimizations
-- that were previously blocked by type variables can be carried out.
-- | The conjured Absurd type at kind @k@ (see the comment block above:
-- an arbitrary default type for otherwise unconstrained types).
tAbsurd k = ELit (litCons {
    litName = nameConjured modAbsurd k, litArgs = [], litType = k })
-- | The conjured Box type at kind @k@.  At function kinds it carries an
-- alias to a lambda producing boxes, so applications of it can be
-- expanded.
mktBox k = ELit (litCons {
    litName = nameConjured modBox k, litArgs = [],
    litType = k, litAliasFor = af }) where
        af = case k of
            EPi TVr { tvrType = t1 } t2 -> Just (ELam tvr { tvrType = t1 } (mktBox t2))
            _ -> Nothing
-- | The built-in function arrow type constructor, kind * -> * -> *,
-- treated as an abstract data family.
tarrow = emptyConstructor {
    conName = tc_Arrow,
    conType = EPi (tVr emptyId eStar) (EPi (tVr emptyId eStar) eStar),
    conOrigSlots = [SlotNormal eStar,SlotNormal eStar],
    conExpr = ELam (tVr va1 eStar) (ELam (tVr va2 eStar) (EPi (tVr emptyId (EVar $ tVr va1 eStar)) (EVar $ tVr va2 eStar))),
    conInhabits = s_Star,
    conChildren = DataAbstract
    }
-- | Constructor record for a raw primitive type: it lives in the unboxed
-- sort and its children are primitive values.
primitiveConstructor name = emptyConstructor {
    conName = name,
    conType = eHash,
    conExpr = ELit (litCons { litName = name, litArgs = [], litType = eHash }),
    conInhabits = s_Hash,
    conChildren = DataPrimitive
    }
-- | The name constant corresponding to each core sort.
sortName :: ESort -> Name
sortName srt = case srt of
    EStar        -> s_Star     -- the sort of boxed lazy types
    EBang        -> s_Bang     -- the sort of boxed strict types
    EHash        -> s_Hash     -- the sort of unboxed types
    ETuple       -> s_Tuple    -- the sort of unboxed tuples
    EHashHash    -> s_HashHash -- the supersort of unboxed types
    EStarStar    -> s_StarStar -- the supersort of boxed types
    ESortNamed n -> n          -- user defined sorts
-- | Constructor record for a named sort (used for user-defined kinds).
sortConstructor name ss = emptyConstructor {
    conName = name,
    conType = ESort ss,
    conExpr = ESort (ESortNamed name),
    conInhabits = sortName ss
    }
-- | Check that two core types are compatible, failing in the monad with
-- a description of the first mismatch.  Aliases are expanded before
-- structural comparison, binders are compared up to alpha-renaming using
-- a supply of ethereal ids, and anything is compatible with a conjured
-- Box of its kind.
typesCompatable :: forall m . Monad m => E -> E -> m ()
typesCompatable a b = f etherealIds a b where
    f :: [Id] -> E -> E -> m ()
    f _ (ESort a) (ESort b) = when (a /= b) $ fail $ "Sorts don't match: " ++ pprint (ESort a,ESort b)
    f _ (EVar a) (EVar b) = when (a /= b) $ fail $ "Vars don't match: " ++ pprint (a,b)
    -- we expand aliases first, because the newtype might have phantom types as arguments
    f c (ELit (LitCons { litAliasFor = Just af, litArgs = as })) b = do
        f c (foldl eAp af as) b
    f c a (ELit (LitCons { litAliasFor = Just af, litArgs = as })) = do
        f c a (foldl eAp af as)
    f c (ELit LitCons { litName = n, litArgs = xs, litType = t }) (ELit LitCons { litName = n', litArgs = xs', litType = t' }) | n == n' = do
        f c t t'
        when (not $ sameShape1 xs xs') $ fail "Arg lists don't match"
        zipWithM_ (f c) xs xs'
    f c (EAp a b) (EAp a' b') = do
        f c a a'
        f c b b'
    f c (ELam va ea) (ELam vb eb) = lam va ea vb eb c
    f c (EPi va ea) (EPi vb eb) = lam va ea vb eb c
    -- a non-dependent Pi matches a saturated arrow-constructor literal
    f c (EPi (TVr { tvrIdent = eid, tvrType = a}) b) (ELit (LitCons { litName = n, litArgs = [a',b'], litType = t })) | eid == emptyId, conName tarrow == n, t == eStar = do
        f c a a'
        f c b b'
    f c (ELit (LitCons { litName = n, litArgs = [a',b'], litType = t })) (EPi (TVr { tvrIdent = eid, tvrType = a}) b) | eid == emptyId, conName tarrow == n, t == eStar = do
        f c a a'
        f c b b'
    f _ a b | boxCompat a b || boxCompat b a = return ()
    f _ a b = fail $ "Types don't match:" ++ pprint (a,b)

    -- alpha-rename both binders to the same fresh ethereal id before
    -- comparing the bodies.
    lam :: TVr -> E -> TVr -> E -> [Id] -> m ()
    lam va ea vb eb ~(c:cs) = do
        f (c:cs) (tvrType va) (tvrType vb)
        f cs (subst va (EVar va { tvrIdent = c }) ea) (subst vb (EVar vb { tvrIdent = c }) eb)
    boxCompat (ELit (LitCons { litName = n })) t | Just e <- fromConjured modBox n =  e == getType t
    boxCompat _ _ = False
-- | Unwrap a boxed primitive value: yields an expression that
-- scrutinizes the single box constructor, together with the external
-- type name and the raw slot type.  Already-unboxed values pass through.
extractPrimitive :: Monad m => DataTable -> E -> m (E,(ExtType,E))
extractPrimitive dataTable e = case followAliases dataTable (getType e) of
    st@(ELit LitCons { litName = c, litArgs = [], litType = t })
        | t == eHash -> return (e,(ExtType (packString $show c),st))
        | otherwise -> do
            Constructor { conChildren = DataNormal [cn] } <- getConstructor c dataTable
            Constructor { conOrigSlots = [SlotNormal st] } <- getConstructor cn dataTable
            (ELit LitCons { litName = n, litArgs = []}) <- return $ followAliases dataTable st
            let tvra = tVr vn st
                (vn:_) = newIds (freeIds e)
            return (eCase e [Alt (litCons { litName = cn, litArgs = [tvra],
                litType = (getType e) }) (EVar tvra)] Unknown,(ExtType (packString $ show n),st))
    e' -> fail $ "extractPrimitive: " ++ show (e,e')
-- | Wrap a raw primitive value in the box constructor of the desired
-- boxed type.  Atomic values are wrapped directly; anything else goes
-- through a strict let so the boxed payload is evaluated.
boxPrimitive ::
    Monad m
    => DataTable
    -> E -- primitive to box
    -> E -- what type we want it to have
    -> m (E,(ExtType,E))
boxPrimitive dataTable e et = case followAliases dataTable et of
    st@(ELit LitCons { litName = c, litArgs = [], litType = t })
        | t == eHash -> return (e,(ExtType . packString $ show c,st))
        | otherwise -> do
            Constructor { conChildren = DataNormal [cn] } <- getConstructor c dataTable
            Constructor { conOrigSlots = [SlotNormal st] } <- getConstructor cn dataTable
            (ELit LitCons { litName = n, litArgs = []}) <- return $ followAliases dataTable st
            let tvra = tVr vn st
                (vn:_) = newIds (freeVars (e,et))
            if isManifestAtomic e then
                return $ (ELit litCons { litName = cn, litArgs = [e], litType = et },(ExtType . packString $ show n,st))
              else
                return $ (eStrictLet tvra e $ ELit litCons { litName = cn, litArgs = [EVar tvra], litType = et },(ExtType . packString $ show n,st))
    e' -> fail $ "boxPrimitive: " ++ show (e,e')
-- | Pull the result type out of an @IO a@ type, expanding newtype
-- aliases along the way; fails if the type is not IO.
extractIO :: Monad m => E -> m E
extractIO e = go e where
    go (ELit LitCons { litName = c, litArgs = [x] }) | c == tc_IO = return x
    go (ELit LitCons { litAliasFor = Just af, litArgs = as }) = go (foldl eAp af as)
    go _ = fail "extractIO: not an IO type"
-- extract IO or an unboxed version of it, (ST, World -> (World, a#))
-- | Like 'extractIO' but also recognizes ST and the unboxed
-- world-passing form.  Returns the argument types of any leading Pis,
-- whether an IO-like wrapper was found, and the result type.
extractIO' :: E -> ([E],Bool,E)
extractIO' e = f e [] where
    f (ELit LitCons { litName = c, litArgs = [x] }) rs | c == tc_IO = (reverse rs, True,x)
    f (ELit LitCons { litName = c, litArgs = [_,x] }) rs | c == tc_ST = (reverse rs, True,x)
    f (expandAlias -> Just t) rs = f t rs
    f (fromPi -> (fromUnboxedTuple -> Just [s',x],[getType -> s''])) rs
        | isState_ s' && isState_ s'' = (reverse rs, True,x)
    f (EPi v e) rs = f e (getType v:rs)
    f e rs = (reverse rs, False,e)
    -- f (fromPi -> (getType -> s',[getType -> s''])) | isState_ s' && isState_ s'' = (True,tUnit)
-- | How a type is communicated across the FFI boundary.
data ExtTypeInfo
    = ExtTypeVoid                 -- maps to 'void'
    | ExtTypeRaw ExtType          -- value is an unboxed type suitable for passing with the argument calling convention
    | ExtTypeBoxed Name E ExtType -- boxed type, name is constructor of box, E is type of the slice, and ExtType is the calling convention to use
-- | The external (C) calling-convention type of a value, ignoring boxing.
extTypeInfoExtType eti = case eti of
    ExtTypeRaw et       -> et
    ExtTypeBoxed _ _ et -> et
    ExtTypeVoid         -> "void"
-- | Determine how a core type crosses the FFI: void, raw, or boxed.
-- Follows aliases (the 'seen' set guards against cycles) and honors
-- per-constructor CTYPE annotations.
lookupExtTypeInfo :: Monad m => DataTable -> E -> m ExtTypeInfo
lookupExtTypeInfo dataTable oe = f Set.empty oe where
    f :: Monad m => Set.Set Name -> E -> m ExtTypeInfo
    -- handle the void context ones first
    f _ e@(ELit LitCons { litName = c }) | c == tc_Unit || c == tc_State_ = return ExtTypeVoid
    -- if the constructor is in the external type map, replace its external
    -- type with the one in the map
    f seen e@(ELit LitCons { litName = c, litArgs = [ta] }) | c == tc_Ptr = do
        -- we know a pointer is a boxed BitsPtr
        -- NOTE(review): 'b' and 't' below are not bound anywhere in this
        -- view of the file; confirm their definition against the full
        -- source before relying on this clause.
        case f seen ta of
            Just (ExtTypeBoxed _ _ (ExtType et)) -> return $ ExtTypeBoxed b t (ExtType $ et `mappend` "*")
            Just (ExtTypeRaw (ExtType et)) -> return $ ExtTypeBoxed b t (ExtType $ et `mappend` "*")
            _ -> return $ ExtTypeBoxed b t "HsPtr"
    f seen e@(ELit LitCons { litName = c, litArgs = [ta] }) | c == tc_Complex = do
        case f seen ta of
            Just (ExtTypeRaw (ExtType et)) -> return $ ExtTypeRaw (ExtType $ "_Complex " `mappend` et)
            _ -> fail "invalid _Complex type"
    f seen e@(ELit LitCons { litName = c }) | Just (conCTYPE -> Just et) <- getConstructor c dataTable = do
        return $ case g seen e of
            Just (ExtTypeBoxed b t _) -> ExtTypeBoxed b t et
            Just ExtTypeVoid -> ExtTypeVoid
            _ -> ExtTypeRaw et
    f seen e = g seen e
    -- if we are a raw type, we can be foreigned
    g _ (ELit LitCons { litName = c })
        | Just et <- Map.lookup c rawExtTypeMap = return (ExtTypeRaw et)
    -- if we are a single constructor data type with a single foreignable unboxed
    -- slot, we are foreiginable
    g _ (ELit LitCons { litName = c, litAliasFor = Nothing })
        | Just Constructor { conChildren = DataNormal [cn] } <- getConstructor c dataTable,
          Just Constructor { conOrigSlots = [SlotNormal st] } <- getConstructor cn dataTable,
          Just (ExtTypeRaw et) <- lookupExtTypeInfo dataTable st = return $ ExtTypeBoxed cn st et
    g seen e@(ELit LitCons { litName = n }) | Just e' <- followAlias dataTable e,
        n `Set.notMember` seen = f (Set.insert n seen) e'
    g _ e = fail $ "lookupExtTypeInfo: " ++ show (oe,e)
-- | One step of alias expansion; fails if the literal is not an alias.
expandAlias :: Monad m => E -> m E
expandAlias (ELit LitCons { litAliasFor = Just body, litArgs = args }) = return (foldl eAp body args)
expandAlias _ = fail "expandAlias: not alias"
-- | One step of alias expansion (the DataTable argument is unused).
followAlias :: Monad m => DataTable -> E -> m E
followAlias _ (ELit LitCons { litAliasFor = Just body, litArgs = args }) = return (foldl eAp body args)
followAlias _ _ = fail "followAlias: not alias"
-- | Expand alias literals repeatedly until a non-alias head is reached.
followAliases :: DataTable -> E -> E
followAliases _dataTable e0 = go e0 where
    go (ELit LitCons { litAliasFor = Just body, litArgs = args }) = go (foldl eAp body args)
    go other = other
-- | The minimal data table, containing only the built-in arrow constructor.
dataTablePrims = DataTable $ Map.fromList ([ (conName x,x) | x <- [tarrow] ])
-- | Generate method implementations for derived classes (Eq, Ord, Enum,
-- Ix) of virtual enumeration types: constructors are represented by an
-- underlying unboxed integer, so comparisons and enumeration reduce to
-- primitive integer operations on the unboxed representation.
deriveClasses :: IdMap Comb -> DataTable -> [(SrcLoc,Name,Name)] -> [(TVr,E)]
deriveClasses cmap dt@(DataTable mp) ctd = concatMap f ctd where
    f (_,cd,t) | Just c <- getConstructor t dt, TypeConstructor == nameType (conName c), Just is <- conVirtual c = g is c cd
    f _ = []
    g is c cl = h cl where
        -- look up the shared library implementation of a method by name
        lupvar v = EVar (combHead comb) where
            Just comb = mlookup (toId v) cmap
        typ = conExpr c
        DataNormal [con] = conChildren c
        Just conr = getConstructor con (DataTable mp)
        [it@(ELit LitCons { litName = it_name })] = conSlots conr
        Just itr = getConstructor it_name (DataTable mp)
        DataEnum mv = conChildren itr
        v1 = tvr { tvrIdent = anonymous 1, tvrType = typ }
        v2 = tvr { tvrIdent = anonymous 2, tvrType = typ }
        i1 = tvr { tvrIdent = anonymous 3, tvrType = it }
        i2 = tvr { tvrIdent = anonymous 4, tvrType = it }
        b3 = tvr { tvrIdent = anonymous 5, tvrType = tBoolzh }
        val1 = tvr { tvrIdent = anonymous 7, tvrType = typ }
        -- build \x y -> body, with both arguments case-opened to their
        -- underlying unboxed values i1 and i2
        unbox e = ELam v1 (ELam v2 (ec (EVar v1) i1 (ec (EVar v2) i2 e))) where
            ec v i e = eCase v [Alt (litCons { litName = con, litArgs = [i], litType = typ }) e] Unknown
        h cl | cl == class_Eq = [mkCmpFunc v_equals Op.Eq]
        h cl | cl == class_Ord = [
            mkCmpFunc v_geq Op.UGte,
            mkCmpFunc v_leq Op.ULte,
            mkCmpFunc v_lt Op.ULt,
            mkCmpFunc v_gt Op.UGt]
        h cl | Just ans <- lookup cl mthds = ans where
            mthds = [(class_Enum,[
                (iv_te,ib_te),
                (iv_fe,ib_fe),
                iv v_succ succ_body,
                iv v_pred pred_body,
                iv v_enumFrom from_body,
                iv v_enumFromTo fromTo_body,
                iv v_enumFromThen fromThen_body,
                iv v_enumFromThenTo fromThenTo_body
                ]),
                (class_Ix,[
                    iv v_range range_body,
                    -- iv v_inRange inRange_body,
                    iv v_index index_body
                    ])]
            iv_te = setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName v_toEnum (nameName $ conName c), tvrType = getType ib_te }
            iv_fe = setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName v_fromEnum (nameName $ conName c), tvrType = getType ib_fe }
            iv fname body = (setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName fname (nameName $ conName c), tvrType = getType body },body)
            succ_body = foldl EAp (lupvar v_enum_succ) [typ, box, debox, max]
            pred_body = foldl EAp (lupvar v_enum_pred) [typ, box, debox]
            from_body = foldl EAp (lupvar v_enum_from) [typ, box, debox, max]
            fromTo_body = foldl EAp (lupvar v_enum_fromTo) [typ, box, debox]
            fromThen_body = foldl EAp (lupvar v_enum_fromThen) [typ, box, debox, max]
            fromThenTo_body = foldl EAp (lupvar v_enum_fromThenTo) [typ, box, debox]
            range_body = foldl EAp (lupvar v_ix_range) [typ, box, debox]
            -- NOTE(review): the next binding is oddly spaced and matches the
            -- disabled "iv v_inRange inRange_body" entry above; in the
            -- original it may have been commented out ("-- inRange_body =
            -- ...") -- confirm against the upstream source.
            inRange_body = foldl EAp ( lupvar v_ix_inRange ) [ typ , box , debox ]
            index_body = foldl EAp (lupvar v_ix_index) [typ, box, debox]
            ib_te = foldl EAp (lupvar v_enum_toEnum) [typ, box, toEzh (mv - 1)]
            ib_fe = ELam val1 (create_uintegralCast_toInt con tEnumzh (EVar val1))
            max = ELit (LitInt (fromIntegral $ mv - 1) tEnumzh)
            box = ELam i1 (ELit (litCons { litName = con, litArgs = [EVar i1], litType = typ }))
            debox = ELam v1 (ec (EVar v1) i1 (EVar i1)) where
                ec v i e = eCase v [Alt (litCons { litName = con, litArgs = [i], litType = typ }) e] Unknown
        h _ = []
        -- comparison method: unbox both sides, apply the primitive op,
        -- and rebox the boolean result
        mkCmpFunc fname op = (iv_eq,ib_eq) where
            ib_eq = unbox (eStrictLet b3 (oper_IIB op (EVar i1) (EVar i2)) (ELit (litCons { litName = dc_Boolzh, litArgs = [EVar b3], litType = tBool })))
            iv_eq = setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName fname (nameName $ conName c), tvrType = getType ib_eq }
        oper_IIB op a b = EPrim (Op (Op.BinOp op Op.bits16 Op.bits16) Op.bits16) [a,b] tBoolzh
-- | Build a numeric conversion between two boxed integral types:
-- scrutinize the source box, apply a primitive conversion op when the
-- raw types differ, and rebox with the target constructor.
create_integralCast conv c1 t1 c2 t2 e t = eCase e [Alt (litCons { litName = c1, litArgs = [tvra], litType = te }) cc] Unknown where
    te = getType e
    ELit LitCons { litName = n1, litArgs = [] } = t1
    ELit LitCons { litName = n2, litArgs = [] } = t2
    Just n1' = nameToOpTy n1
    Just n2' = nameToOpTy n2
    tvra = tVr va2 t1
    tvrb = tVr va3 t2
    cc = if n1 == n2 then ELit (litCons { litName = c2, litArgs = [EVar tvra], litType = t }) else
        eStrictLet tvrb (EPrim (Op (Op.ConvOp conv n1') n2') [EVar tvra] t2) (ELit (litCons { litName = c2, litArgs = [EVar tvrb], litType = t }))

-- | Read a raw type name as a Cmm operation type.
nameToOpTy n = do RawType <- return $ nameType n; Op.readTy (show n)

-- | Unsigned conversion from an enum-like box to Int.
create_uintegralCast_toInt c1 t1 e = create_integralCast Op.U2U c1 t1 dc_Int tIntzh e tInt
-- | Fill in the 'litAliasFor' field of a constructor literal from the
-- data table, so later passes can expand erased newtypes in place.
updateLit :: DataTable -> Lit e t -> Lit e t
updateLit _ l@LitInt {} = l
updateLit dataTable lc@LitCons { litAliasFor = Just {} } = lc
updateLit dataTable lc@LitCons { litName = n } = lc { litAliasFor = af } where
    af = do
        Constructor { conChildren = DataNormal [x], conOrigSlots = cs } <- getConstructor n dataTable
        Constructor { conChildren = DataAlias ErasedAlias, conOrigSlots = [SlotNormal sl] } <- getConstructor x dataTable
        return (foldr ELam sl [ tVr i s | s <- getSlots cs | i <- anonymousIds])
-- | Annotate every constructor literal in an expression with its alias
-- expansion (recursively through case alternatives and literal bodies),
-- effectively erasing newtypes.
removeNewtypes :: DataTable -> E -> E
removeNewtypes dataTable e = runIdentity (f e) where
    f ec@ECase {} = emapEGH f f return ec { eCaseAlts = map g (eCaseAlts ec) } where
        g (Alt l e) = Alt (gl $ updateLit dataTable l) e
    f (ELit l) = emapEGH f f return (ELit (gl $ updateLit dataTable l))
    f e = emapEGH f f return e
    gl lc@LitCons { litAliasFor = Just e } = lc { litAliasFor = Just $ removeNewtypes dataTable e }
    gl l = l
-- | Collect all deriving requests: from @deriving@ clauses on data
-- declarations and from standalone deriving declarations.
collectDeriving :: [HsDecl] -> [(SrcLoc,Name,Name)]
collectDeriving ds = concatMap f ds where
    f decl@HsDataDecl {} = g decl
    f decl@HsDeclDeriving {} = h decl
    f _ = []
    g decl = [(hsDeclSrcLoc decl, toName ClassName c,
        toName TypeConstructor (hsDeclName decl)) | c <- hsDeclDerives decl ]
    -- standalone deriving: dig the head type constructor out of the
    -- class-head argument
    h decl@(hsDeclClassHead -> ch) | [(ltc -> Just t)] <- hsClassHeadArgs ch = [(hsDeclSrcLoc decl,toName ClassName (hsClassHead ch), t)] where
        ltc (HsTyApp t1 _) = ltc t1
        ltc (HsTyCon n) = Just (toName TypeConstructor n)
        ltc x = Nothing
    h _ = []
{-# NOINLINE toDataTable #-}
-- | Build the data table for a module from kind and type environments
-- and the source declarations.  Newtypes become aliases (loop breakers
-- stay as recursive aliases), enumeration-shaped data types get a
-- virtual/unboxed-enum encoding, and user kind declarations produce sort
-- constructors.
toDataTable :: (Map.Map Name Kind) -> (Map.Map Name Type) -> [HsDecl] -> DataTable -> DataTable
toDataTable km cm ds currentDataTable = newDataTable where
    newDataTable = DataTable (Map.mapWithKey fixupMap $
        Map.fromList [ (conName x,procNewTypes x) | x <- ds', conName x `notElem` keys primitiveAliases ])
    fullDataTable = (newDataTable `mappend` currentDataTable)
    -- erase newtypes inside every freshly built constructor record
    procNewTypes c = c { conExpr = f (conExpr c), conType = f (conType c), conOrigSlots = map (mapESlot f) (conOrigSlots c) } where
        f = removeNewtypes fullDataTable
    -- built-in constructors (the arrow) win over user definitions
    fixupMap k _ | Just n <- getConstructor k dataTablePrims = n
    fixupMap _ n = n
    ds' = Seq.toList $ execWriter (mapM_ f ds)
    -- pick a minimal set of newtypes to keep as recursive aliases so the
    -- remaining ones can be fully erased
    newtypeLoopBreakers = map fst $ fst $ G.findLoopBreakers (const 0) (const True) (G.newGraph newtypeDeps fst snd) where
        newtypeDeps = [ (n,concatMap (fm . hsBangType) $ hsConDeclArgs c) |
            HsDataDecl { hsDeclDeclType = DeclTypeNewtype, hsDeclName = n, hsDeclCons = (head -> c) } <- ds ]
        fm t = execWriter $ f t
        f HsTyCon { hsTypeName = n } = tell [n]
        f t = traverseHsType_ f t
    f decl@HsDataDecl { hsDeclDeclType = DeclTypeNewtype, hsDeclName = nn, hsDeclCons = cs } =
        dt decl (if nn `elem` newtypeLoopBreakers then DataAlias RecursiveAlias else DataAlias ErasedAlias) cs
    f decl@HsDataDecl { hsDeclDeclType = DeclTypeKind } = dkind decl
    f decl@HsDataDecl { hsDeclCons = cs } = dt decl DataNone cs
    f _ = return ()
    -- enumeration case: several nullary constructors become "virtual"
    -- constructors over a shared unboxed enum representation type
    dt decl DataNone cs@(_:_:_) | all null (map hsConDeclArgs cs) = do
        let virtualCons'@(fc:_) = map (makeData DataNone typeInfo) cs
            typeInfo@(theType,_,_) = makeType decl (hsDeclCTYPE decl)
            virt = Just (map conName virtualCons')
            f (n,vc) = vc { conExpr = ELit (litCons { litName = consName, litArgs = [ELit (LitInt (fromIntegral n) rtype)], litType = conType vc }), conVirtual = virt }
            virtualCons = map f (zip [(0 :: Int) ..] virtualCons')
            consName = mapName (id,(++ "#")) $ toName DataConstructor (nameName (conName theType))
            rtypeName = mapName (id,(++ "#")) $ toName TypeConstructor (nameName (conName theType))
            rtype = ELit litCons { litName = rtypeName, litType = eHash, litAliasFor = Just tEnumzh }
            dataCons = fc { conName = consName, conType = getType (conExpr dataCons), conOrigSlots = [SlotNormal rtype], conExpr = ELam (tVr (anonymous 3) rtype) (ELit (litCons { litName = consName, litArgs = [EVar (tVr (anonymous 6) rtype)], litType = conExpr theType })) }
            rtypeCons = emptyConstructor {
                conName = rtypeName,
                conType = eHash,
                conExpr = rtype,
                conInhabits = s_Hash,
                conChildren = DataEnum (length virtualCons)
                }
        tell (Seq.fromList virtualCons)
        tell (Seq.singleton dataCons)
        tell (Seq.singleton rtypeCons)
        tell $ Seq.singleton theType { conChildren = DataNormal [consName], conVirtual = virt }
        return ()
    -- general case: one constructor record per data constructor
    dt decl alias cs = do
        let dataCons = map (makeData alias typeInfo) cs
            typeInfo@(theType,_,_) = makeType decl (hsDeclCTYPE decl)
        tell (Seq.fromList dataCons)
        tell $ Seq.singleton theType { conChildren = DataNormal (map conName dataCons) }
    -- user-declared kinds become sort constructors plus one type
    -- constructor per kind inhabitant
    dkind HsDataDecl { .. } = do
        tell $ Seq.singleton $ (sortConstructor hsDeclName EHashHash) {
            conChildren = DataNormal (map hsConDeclName hsDeclCons) }
        flip mapM_ hsDeclCons $ \ HsConDecl { .. } -> do
            let Just theKind = kind `fmap` (Map.lookup hsConDeclName km)
                (theTypeFKind,theTypeKArgs') = fromPi theKind
                theTypeArgs = [ tvr { tvrIdent = x } | tvr <- theTypeKArgs' | x <- anonymousIds ]
                theTypeExpr = ELit litCons {
                    litName = hsConDeclName,
                    litArgs = map EVar theTypeArgs,
                    litType = theTypeFKind }
            tell $ Seq.singleton emptyConstructor {
                conName = hsConDeclName,
                conType = theKind,
                conOrigSlots = map (SlotNormal . tvrType) theTypeArgs,
                conExpr = foldr ($) theTypeExpr (map ELam theTypeArgs),
                conInhabits = hsDeclName
                }
    dkind _ = error "dkind passed bad decl"
    -- build the Constructor record for a single data constructor,
    -- handling strictness annotations, unpacking and existentials
    makeData alias (theType,theTypeArgs,theTypeExpr) x = theData where
        theData = emptyConstructor {
            conName = dataConsName,
            conType =foldr ($) (getType theExpr) (map EPi theTypeArgs),
            conOrigSlots = origSlots,
            conExpr = theExpr,
            conInhabits = conName theType,
            conChildren = alias
            }
        dataConsName = toName Name.DataConstructor (hsConDeclName x)
        theExpr = foldr ELam (strictize tslots $ ELit litCons { litName = dataConsName, litArgs = map EVar dvars, litType = theTypeExpr }) hsvars
        -- wrap the literal in strict lets / unpacking cases for banged slots
        strictize tslots con = E.Subst.subst tvr { tvrIdent = sillyId } Unknown $ f tslots con where
            f (Left (v,False):rs) con = f rs con
            f (Left (v,True):rs) con = eStrictLet v (EVar v) (f rs con)
            f (Right (v,dc,rcs):rs) con = eCase (EVar v) [Alt pat (f rs con)] Unknown where
                pat = litCons { litName = dc, litArgs = rcs, litType = (getType v) }
            f [] con = con
        -- substitution is only about substituting type variables
        (ELit LitCons { litArgs = thisTypeArgs }, origArgs) = fromPi $ runVarName $ do
            let (vs,ty) = case Map.lookup dataConsName cm of Just (TForAll vs (_ :=> ty)) -> (vs,ty); ~(Just ty) -> ([],ty)
            mapM_ (newName anonymousIds ()) vs
            tipe' ty
        subst = substMap $ fromList [ (tvrIdent tv ,EVar $ tv { tvrIdent = p }) | EVar tv <- thisTypeArgs | p <- anonymousIds ]
        origSlots = map SlotExistential existentials ++ map f tslots where
            f (Left (e,_)) = SlotNormal (getType e)
            f (Right (e,n,es)) = SlotUnpacked (getType e) n (map getType es)
        hsvars = existentials ++ map f tslots where
            f (Left (e,_)) = e
            f (Right (e,_,_)) = e
        dvars = existentials ++ concatMap f tslots where
            f (Left (e,_)) = [e]
            f (Right (_,_,es)) = es
        -- classify each argument: Left (var, strict?) for normal slots,
        -- Right (var, unbox constructor, components) for unpackable ones
        tslots = f (newIds fvset) (map isHsBangedTy (hsConDeclArgs x)) origArgs where
            f (i:is) (False:bs) (e:es) = Left (e { tvrIdent = i, tvrType = subst (tvrType e) },False):f is bs es
            f (i:j:is) (True:bs) (e:es) = maybe (Left (e { tvrIdent = i, tvrType = subst (tvrType e) },True):f is bs es) id $ g e (tvrType e) where
                g e te = do
                    ELit LitCons { litName = n } <- return $ followAliases fullDataTable te
                    Constructor { conChildren = DataNormal [dc] } <- getConstructor n fullDataTable
                    con <- getConstructor dc fullDataTable
                    case (conChildren con,slotTypes fullDataTable dc te) of
                        (DataAlias ErasedAlias,[nt]) -> g e nt
                        (_,[st]) -> do
                            let nv = tvr { tvrIdent = j, tvrType = st }
                            return $ Right (e { tvrIdent = i, tvrType = subst (tvrType e)},dc,[nv]):f is bs es
                        _ -> fail "not unboxable"
            f _ [] [] = []
            f _ _ _ = error "DataConstructors.tslots"
            fvset = freeVars (thisTypeArgs,origArgs) `mappend` fromList (take (length theTypeArgs + 2) anonymousIds)
        -- existentials are free variables in the arguments, that arn't bound in the type
        existentials = values $ freeVars (map getType origArgs) S.\\ (freeVars thisTypeArgs :: IdMap TVr)
        -- arguments that the front end passes or pulls out of this constructor
        -- NOTE(review): the next binding is unused and oddly spaced; it
        -- appears to have been a comment ("-- hsArgs = ...") in the
        -- original source -- confirm before relying on it.
        hsArgs = existentials + + [ tvr { tvrIdent = x } | tvr < - origArgs | x < - drop ( 5 + length theTypeArgs ) [ 2,4 .. ] ]
    -- build the type constructor record and its saturated literal
    makeType decl ct = (theType,theTypeArgs,theTypeExpr) where
        theTypeName = toName Name.TypeConstructor (hsDeclName decl)
        Just theKind = kind `fmap` (Map.lookup theTypeName km)
        (theTypeFKind,theTypeKArgs') = fromPi theKind
        theTypeArgs = [ tvr { tvrIdent = x } | tvr <- theTypeKArgs' | x <- anonymousIds ]
        theTypeExpr = ELit litCons { litName = theTypeName, litArgs = map EVar theTypeArgs, litType = theTypeFKind }
        theType = emptyConstructor {
            conCTYPE = fmap (ExtType . packString) ct,
            conExpr = foldr ($) theTypeExpr (map ELam theTypeArgs),
            conInhabits = if theTypeFKind == eStar then s_Star else s_Hash,
            conName = theTypeName,
            conOrigSlots = map (SlotNormal . tvrType) theTypeArgs,
            conType = theKind,
            conVirtual = Nothing
            }
    isHsBangedTy HsBangedTy {} = True
    isHsBangedTy _ = False
-- | The arity (flattened slot count) of every constructor in the table.
getConstructorArities :: DataTable -> [(Name,Int)]
getConstructorArities (DataTable dt) = [ (n,length $ conSlots c) | (n,c) <- Map.toList dt]
-- | Build the lambda term that constructs a value with the named
-- constructor at the given (applied) type.  Erased aliases become the
-- identity, recursive aliases an unsafeCoerce, and ordinary constructors
-- get the scrutinee's type arguments substituted into their stored
-- expression.
constructionExpression ::
    DataTable -- ^ table of data constructors
    -> Name   -- ^ name of said constructor
    -> E      -- ^ type of eventual constructor
    -> E      -- ^ saturated lambda calculus term
constructionExpression dataTable n typ@(ELit LitCons { litName = pn, litArgs = xs })
    | DataAlias ErasedAlias <- conChildren mc = ELam var (EVar var)
    | DataAlias RecursiveAlias <- conChildren mc = let var' = var { tvrType = st } in ELam var' (prim_unsafeCoerce (EVar var') typ)
    | pn == conName pc = sub (conExpr mc) where
    ~[st] = slotTypes dataTable n typ
    var = tvr { tvrIdent = vid, tvrType = typ }
    (vid:_) = newIds (freeVars typ)
    Just mc = getConstructor n dataTable
    Just pc = getConstructor (conInhabits mc) dataTable
    sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
constructionExpression wdt n e | Just fa <- followAlias wdt e = constructionExpression wdt n fa
constructionExpression _ n e = error $ "constructionExpression: error in " ++ show n ++ ": " ++ show e
-- | Build the case alternative that matches the named constructor at the
-- given type and binds the supplied variables.  Virtual (enum)
-- constructors match their underlying integer literal; unpacked slots
-- are re-packed via let bindings around the body.
deconstructionExpression ::
    UniqueProducer m
    => DataTable -- ^ table of data constructors
    -> Name      -- ^ name of said constructor
    -> E         -- ^ type of pattern
    -> [TVr]     -- ^ variables to be bound
    -> E         -- ^ body of alt
    -> m (Alt E) -- ^ resulting alternative
deconstructionExpression dataTable name typ@(ELit LitCons { litName = pn, litArgs = xs }) vs e | pn == conName pc = ans where
    Just mc = getConstructor name dataTable
    Just pc = getConstructor (conInhabits mc) dataTable
    sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
    ans = case conVirtual mc of
        Just _ -> return $ let ELit LitCons { litArgs = [ELit (LitInt n t)] } = conExpr mc in Alt (LitInt n t) e
        Nothing -> do
            let f vs (SlotExistential t:ss) rs ls = f vs ss (t:rs) ls
                f (v:vs) (SlotNormal _:ss) rs ls = f vs ss (v:rs) ls
                f (v:vs) (SlotUnpacked e n es:ss) rs ls = do
                    let g t = do
                            s <- newUniq
                            return $ tVr (anonymous s) t
                    as <- mapM g (map sub es)
                    f vs ss (reverse as ++ rs) ((v,ELit litCons { litName = n, litArgs = map EVar as, litType = sub e }):ls)
                f [] [] rs ls = return $ Alt (litCons { litName = name, litArgs = reverse rs, litType = typ }) (eLetRec ls e)
                f _ _ _ _ = error "DataConstructors.deconstructuonExpression.f"
            f vs (conOrigSlots mc) [] []
deconstructionExpression wdt n ty vs e | Just fa <- followAlias wdt ty = deconstructionExpression wdt n fa vs e
deconstructionExpression _ n e _ _ = error $ "deconstructionExpression: error in " ++ show n ++ ": " ++ show e
-- | Compute the type of every (flattened) value slot of a constructor when
-- used at the given fully-applied type, substituting the actual type
-- arguments for the constructor's parameters.
slotTypes ::
    DataTable -- ^ table of data constructors
    -> Name -- ^ name of constructor
    -> E -- ^ type of value
    -> [E] -- ^ type of each slot
slotTypes wdt n (ELit LitCons { litName = pn, litArgs = xs, litType = _ })
    | pn == conName pc = [sub x | x <- conSlots mc ]
    where
    Identity mc = getConstructor n wdt
    Identity pc = getConstructor (conInhabits mc) wdt
    -- map anonymous type parameters to the actual type arguments
    sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
slotTypes wdt n kind
    -- partially applied type constructor: drop the slots already consumed
    | sortKindLike kind, (e,ts) <- fromPi kind = take (length (conSlots mc) - length ts) (conSlots mc)
    -- | sortKindLike kind , ( e , ts ) < - fromPi kind = ( )
    where Identity mc = getConstructor n wdt
slotTypes wdt n e | Just fa <- followAlias wdt e = slotTypes wdt n fa
slotTypes _ n e = error $ "slotTypes: error in " ++ show n ++ ": " ++ show e
-- | Like 'slotTypes', but returns the slot types as the front end declared
-- them: one entry per original argument, with unpacked slots kept as their
-- declared type rather than flattened into components.
slotTypesHs ::
    DataTable -- ^ table of data constructors
    -> Name -- ^ name of constructor
    -> E -- ^ type of value
    -> [E] -- ^ type of each slot
slotTypesHs wdt n (ELit LitCons { litName = pn, litArgs = xs, litType = _ })
    | pn == conName pc = [sub x | x <- getHsSlots $ conOrigSlots mc ]
    where
    Identity mc = getConstructor n wdt
    Identity pc = getConstructor (conInhabits mc) wdt
    -- map anonymous type parameters to the actual type arguments
    sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
slotTypesHs wdt n kind
    -- partially applied type constructor: drop the slots already consumed
    | sortKindLike kind, (e,ts) <- fromPi kind = take (length (conSlots mc) - length ts) (conSlots mc)
    where Identity mc = getConstructor n wdt
-- BUG FIX: this clause previously recursed into 'slotTypes', silently
-- switching from the front-end slot view to the flattened view whenever an
-- alias was followed; recurse into 'slotTypesHs' to stay consistent.
slotTypesHs wdt n e | Just fa <- followAlias wdt e = slotTypesHs wdt n fa
slotTypesHs _ n e = error $ "slotTypesHs: error in " ++ show n ++ ": " ++ show e
# NOINLINE showDataTable #
-- Render the entire data table for debugging: one entry per constructor,
-- sorted by printed name, each with its labelled fields on separate lines.
showDataTable (DataTable mp) = vcat xs where
    -- pretty-print one constructor record, one labelled line per field
    c con = vcat [t,e,cs,vt,ih,ch,mc] where
        t = text "::" <+> ePretty conType
        e = text "=" <+> ePretty conExpr
        cs = text "slots:" <+> tupled (map ePretty (conSlots con))
        vt = text "virtual:" <+> tshow conVirtual
        ih = text "inhabits:" <+> tshow conInhabits
        ch = text "children:" <+> tshow conChildren
        mc = text "CTYPE:" <+> tshow conCTYPE
        -- record wildcard: brings conType, conExpr, conVirtual, ... into scope
        Constructor { .. } = con
    xs = [text x <+> hang 0 (c y) | (x,y) <- ds ]
    -- entries ordered by the show-rendered constructor name
    ds = sortBy (\(x,_) (y,_) -> compare x y) [(show x,y) | (x,y) <- Map.toList mp]
# NOINLINE samplePrimitiveDataTable #
-- | A small data table holding only built-in constructors (unboxed tuples,
-- conjured Box/Absurd types at a few kinds, and two raw types).  Relies on
-- 'getConstructor' synthesizing these from an empty table.
samplePrimitiveDataTable :: DataTable
samplePrimitiveDataTable = DataTable $ Map.fromList [ (x,c) | x <- xs, c <- getConstructor x mempty] where
    -- unboxed tuple constructor names of arity v, at both term and type level
    nt v = map (flip name_UnboxedTupleConstructor (v::Int)) [termLevel, typeLevel]
    xs = nt 0 ++ nt 3 ++ [nameConjured modAbsurd eStar,nameConjured modBox hs, nameConjured modAbsurd hs', nameConjured modBox hs',rt_bits16,rt_bits_ptr_]
    -- sample kinds at which to conjure Box/Absurd constructors
    hs = EPi (tVr emptyId eHash) eStar
    hs' = tFunc eStar (tFunc (tFunc eStar eHash) eStar)
-- | All constructor names belonging to the same data type as the given
-- constructor, or Nothing when it does not resolve to a normal data type.
getSiblings :: DataTable -> Name -> Maybe [Name]
getSiblings dt n = do
    c <- getConstructor n dt
    parent <- getConstructor (conInhabits c) dt
    case conChildren parent of
        DataNormal cs -> Just cs
        _ -> Nothing
-- | Number of constructors in the data type the given constructor belongs
-- to; Nothing when the name or its family cannot be resolved.
numberSiblings :: DataTable -> Name -> Maybe Int
numberSiblings dt n = do
    c <- getConstructor n dt
    parent <- getConstructor (conInhabits c) dt
    case conChildren parent of
        DataNormal ds -> Just (length ds)
        DataEnum m -> Just m
        _ -> Nothing
-- True when the constructor's data type has exactly one constructor,
-- checking first the name's own children and then its parent's.
-- (Original comment said "single slot"; the code inspects conChildren.)
onlyChild :: DataTable -> Name -> Bool
onlyChild dt n = case getConstructor n dt of
    Nothing -> False
    Just c -> single (conChildren c) || parentSingle c
    where
    single (DataNormal [_]) = True
    single _ = False
    parentSingle c = case getConstructor (conInhabits c) dt of
        Just p -> single (conChildren p)
        Nothing -> False
-- | Look up a constructor and render its type in Haskell-style syntax.
pprintTypeOfCons :: (Monad m,DocLike a) => DataTable -> Name -> m a
pprintTypeOfCons dataTable name =
    liftM (pprintTypeAsHs . conType) (getConstructor name dataTable)
-- | Pretty-print an internal type expression using Haskell source syntax,
-- inventing single-letter names for type variables as needed.
pprintTypeAsHs :: DocLike a => E -> a
pprintTypeAsHs e = unparse $ runVarName (f e) where
    f e | e == eStar = return $ atom $ text "*"
        | e == eHash = return $ atom $ text "#"
    -- a Pi with an unused binder is an ordinary function arrow
    f (EPi (TVr { tvrIdent = eid, tvrType = t1 }) t2) | eid == emptyId = do
        t1 <- f t1
        t2 <- f t2
        return $ t1 `arr` t2
    -- peel constructor arguments off one at a time as applications
    f (ELit LitCons { litName = n, litArgs = as }) | (a:as') <- reverse as = f $ EAp (ELit litCons { litName = n, litArgs = reverse as' }) a
    f (ELit LitCons { litName = n, litArgs = [] }) = return $ atom $ text $ show n
    f (EAp a b) = do
        a <- f a
        b <- f b
        return $ a `app` b
    f (EVar v) = do
        -- allocate (or reuse) a fresh single-letter name for this variable
        vo <- newLookupName ['a' .. ] () (tvrIdent v)
        return $ atom $ char vo
    -- remaining Pi types print as an explicit forall
    f v | (e,ts@(_:_)) <- fromPi v = do
        ts' <- mapM (newLookupName ['a'..] () . tvrIdent) ts
        r <- f e
        return $ fixitize (N,-3) $ pop (text "forall" <+> hsep (map char ts') <+> text ". ") (atomize r)
    f e = error $ "printTypeAsHs: " ++ show e
    -- operators with Haskell-like fixities for unparse
    arr = bop (R,0) (space D.<> text "->" D.<> space)
    app = bop (L,100) (text " ")
-- | Monads that can supply a data table; the default is the empty table,
-- which Identity uses unchanged.
class Monad m => DataTableMonad m where
    getDataTable :: m DataTable
    getDataTable = return mempty

instance DataTableMonad Identity
-- | list of declared data types that map
-- directly to primitive real types
-- (the front-end Bits/Float names are replaced by their raw counterparts
-- when types are converted; see the TCon case of tipe')
primitiveAliases :: Map.Map Name Name
primitiveAliases = Map.fromList [
    (tc_Bits1, rt_bool),
    (tc_Bits8, rt_bits8),
    (tc_Bits16, rt_bits16),
    (tc_Bits32, rt_bits32),
    (tc_Bits64, rt_bits64),
    (tc_Bits128, rt_bits128),
    (tc_BitsPtr, rt_bits_ptr_),
    (tc_BitsMax, rt_bits_max_),
    (tc_Float32, rt_float32),
    (tc_Float64, rt_float64),
    (tc_Float80, rt_float80),
    (tc_Float128, rt_float128)
    ]
-- mapping of primitive types to the C calling convention used
-- when passing to/from foreign functions
rawExtTypeMap :: Map.Map Name ExtType
rawExtTypeMap = Map.fromList [
    (rt_bool, "bool"),
    (rt_bits8, "uint8_t"),
    (rt_bits16, "uint16_t"),
    (rt_bits32, "uint32_t"),
    (rt_bits64, "uint64_t"),
    (rt_bits128, "uint128_t"),
    (rt_bits_ptr_, "uintptr_t" ),
    (rt_bits_max_, "uintmax_t"),
    (rt_float32, "float"),
    (rt_float64, "double"),
    (rt_float80, "long double"),
    (rt_float128, "__float128")
    ]
{-* Generated by DrIFT : Look, but Don't Touch. *-}
-- Binary serialization for AliasType (originally DrIFT generated):
-- a single byte tag selects the constructor.
instance Data.Binary.Binary AliasType where
    put ErasedAlias = Data.Binary.putWord8 0
    put RecursiveAlias = Data.Binary.putWord8 1
    get = do
        tag <- Data.Binary.getWord8
        case tag of
            0 -> return ErasedAlias
            1 -> return RecursiveAlias
            _ -> fail "invalid binary data found"
-- Binary serialization for DataFamily (originally DrIFT generated):
-- one tag byte followed by the constructor's payload, if any.
instance Data.Binary.Binary DataFamily where
    put DataAbstract = Data.Binary.putWord8 0
    put DataNone = Data.Binary.putWord8 1
    put DataPrimitive = Data.Binary.putWord8 2
    put (DataEnum aa) = Data.Binary.putWord8 3 >> Data.Binary.put aa
    put (DataNormal ab) = Data.Binary.putWord8 4 >> Data.Binary.put ab
    put (DataAlias ac) = Data.Binary.putWord8 5 >> Data.Binary.put ac
    get = do
        tag <- Data.Binary.getWord8
        case tag of
            0 -> return DataAbstract
            1 -> return DataNone
            2 -> return DataPrimitive
            3 -> liftM DataEnum Data.Binary.get
            4 -> liftM DataNormal Data.Binary.get
            5 -> liftM DataAlias Data.Binary.get
            _ -> fail "invalid binary data found"
-- Binary serialization for Constructor (originally DrIFT generated):
-- the eight fields are written and read positionally in declaration order.
instance Data.Binary.Binary Constructor where
    put (Constructor aa ab ac ad ae af ag ah) =
        Data.Binary.put aa >> Data.Binary.put ab >> Data.Binary.put ac >>
        Data.Binary.put ad >> Data.Binary.put ae >> Data.Binary.put af >>
        Data.Binary.put ag >> Data.Binary.put ah
    get = return Constructor `ap` get `ap` get `ap` get `ap` get
                             `ap` get `ap` get `ap` get `ap` get
-- Binary serialization for Slot (originally DrIFT generated):
-- one tag byte selects the constructor, followed by its fields.
instance Data.Binary.Binary Slot where
    put (SlotNormal aa) = Data.Binary.putWord8 0 >> Data.Binary.put aa
    put (SlotUnpacked ab ac ad) = do
        Data.Binary.putWord8 1
        Data.Binary.put ab
        Data.Binary.put ac
        Data.Binary.put ad
    put (SlotExistential ae) = Data.Binary.putWord8 2 >> Data.Binary.put ae
    get = do
        tag <- Data.Binary.getWord8
        case tag of
            0 -> liftM SlotNormal Data.Binary.get
            1 -> liftM3 SlotUnpacked Data.Binary.get Data.Binary.get Data.Binary.get
            2 -> liftM SlotExistential Data.Binary.get
            _ -> fail "invalid binary data found"
-- Imported from other files :-
| null | https://raw.githubusercontent.com/csabahruska/jhc-components/a7dace481d017f5a83fbfc062bdd2d099133adf1/jhc-core/src/DataConstructors.hs | haskell | # LANGUAGE OverloadedStrings #
[ tVr n (kind k) | n <- [2,4..] | k <- xs ]
return $ tVr v (kind $ tyvarKind tv)
! derive: Binary !
these apply to types
abstract internal type, has children of representation unknown and irrelevant.
children don't apply. data constructor for instance
primitive type, children are all numbers.
# UNPACK #
bounded integral type, argument is maximum number
child constructors
! derive: Binary !
| Record describing a data type.
* is also a data type containing the type constructors, which are unlifted, yet boxed.
name of constructor
type of constructor
expression which constructs this value
original slots
whether this is a virtual constructor that translates into an enum and its siblings
external type
! derive: Binary !
! derive: Binary !
# NOINLINE getConstructor #
| return the single constructor of product types
| conjured data types, these data types are created as needed and can be of any type, their
actual type is encoded in their names.
Absurd - this is a type that it used to default otherwise unconstrained
types, it is not special in any particular way but is just an arbitrary type
to give to things.
Box - this type can be used to represent any boxed values. It is considered
equivalent to all boxed values so is not a very precise type. It is used in
the final stages of compilation before core mangling so that optimizations
that were previously blocked by type variables can be carried out.
^ the sort of boxed lazy types
^ the sort of boxed strict types
^ the sort of unboxed types
^ the sort of unboxed tuples
^ the supersort of unboxed types
^ the supersort of boxed types
^ user defined sorts
primitive to box
what type we want it to have
f (fromPi -> (getType -> s',[getType -> s''])) | isState_ s' && isState_ s'' = (True,tUnit)
maps to 'void'
value is an unboxed type suitable for passing with the argument calling convention
if the constructor is in the external type map, replace its external
type with the one in the map
if we are a raw type, we can be foreigned
if we are a single constructor data type with a single foreignable unboxed
slot, we are foreiginable
iv v_inRange inRange_body,
substitution is only about substituting type variables
existentials are free variables in the arguments, that arn't bound in the type
arguments that the front end passes or pulls out of this constructor
^ table of data constructors
^ name of said constructor
^ type of eventual constructor
^ saturated lambda calculus term
^ table of data constructors
^ name of said constructor
^ type of pattern
^ variables to be bound
^ body of alt
^ resulting alternative
^ table of data constructors
^ name of constructor
^ type of value
^ type of each slot
^ table of data constructors
^ name of constructor
^ type of value
^ type of each slot
whether the type has a single slot
| list of declared data types that map
directly to primitive real types
mapping of primitive types to the C calling convention used
when passing to/from foreign functions
* Generated by DrIFT : Look, but Don't Touch. *
Imported from other files :- | Generated by DrIFT ( Automatic class derivations for )
# LINE 1 " src / DataConstructors.hs " #
module DataConstructors(
AliasType(..),
boxPrimitive,
collectDeriving,
conSlots,
constructionExpression,
Constructor(..),
DataFamily(..),
DataTable(..),
DataTableMonad(..),
dataTablePrims,
deconstructionExpression,
deriveClasses,
extractIO,
extractIO',
extractPrimitive,
ExtTypeInfo(..),
extTypeInfoExtType,
followAlias,
followAliases,
getConstructor,
getConstructorArities,
getProduct,
getSiblings,
lookupExtTypeInfo,
mktBox,
modBox,
numberSiblings,
onlyChild,
pprintTypeOfCons,
primitiveAliases,
removeNewtypes,
samplePrimitiveDataTable,
showDataTable,
Slot(..),
slotTypes,
slotTypesHs,
tAbsurd,
toDataTable,
typesCompatable,
updateLit
) where
import Control.Monad.Identity
import Control.Monad.Writer(tell,execWriter)
import Data.Maybe
import Data.Monoid hiding(getProduct)
import List(sortBy)
import qualified Data.Map as Map hiding(map)
import qualified Data.Set as Set hiding(map)
import C.Prims
import Data.Binary
import Doc.DocLike as D
import Doc.PPrint
import Doc.Pretty
import E.Binary()
import E.E
import E.Show
import E.Subst
import E.Traverse
import E.TypeCheck
import E.Values
import FrontEnd.Class(instanceName)
import FrontEnd.HsSyn
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import FrontEnd.Tc.Type
import GenUtil
import Info.Types
import Name.Id
import Name.Name as Name
import Name.Names
import Name.VConsts
import PackedString
import Support.CanType
import Support.FreeVars
import Support.MapBinaryInstance
import Support.Unparse
import Util.HasSize
import Util.SameShape
import Util.SetLike as S
import Util.VarName
import qualified Cmm.Op as Op
import qualified Util.Graph as G
import qualified Util.Seq as Seq
tipe' (TAp t1 t2) = liftM2 eAp (tipe' t1) (tipe' t2)
tipe' (TArrow t1 t2) = do
t1' <- tipe' t1
t2' <- tipe' t2
return $ EPi (tVr emptyId (t1')) t2'
tipe' (TCon (Tycon n k)) | Just n' <- Map.lookup n primitiveAliases = return $ ELit litCons { litName = n', litType = kind k }
tipe' (TCon (Tycon n k)) = return $ ELit litCons { litName = n, litType = kind k }
tipe' (TVar tv@Tyvar { tyvarKind = k}) = do
v <- lookupName tv
return $ EVar $ tVr v (kind k)
tipe' (TForAll [] (_ :=> t)) = tipe' t
tipe' (TExists [] (_ :=> t)) = tipe' t
tipe' (TForAll xs (_ :=> t)) = do
xs' <- flip mapM xs $ \tv -> do
v <- newName (map anonymous [35 .. ]) () tv
return $ tVr v (kind $ tyvarKind tv)
t' <- tipe' t
tipe' ~(TExists xs (_ :=> t)) = do
xs' <- flip mapM xs $ \tv -> do
v < - newName [ 70,72 .. ] ( ) tv
return $ (kind $ tyvarKind tv)
t' <- tipe' t
return $ ELit litCons { litName = name_UnboxedTupleConstructor typeLevel (length xs' + 1), litArgs = (t':xs'), litType = eHash }
kind (KBase KUTuple) = eHash
kind (KBase KHash) = eHash
kind (KBase Star) = eStar
kind (KBase (KNamed t)) = ESort (ESortNamed t)
kind (Kfun k1 k2) = EPi (tVr emptyId (kind k1)) (kind k2)
kind k = error $ "DataConstructors.kind: cannot convert " ++ show k
data AliasType = ErasedAlias | RecursiveAlias
deriving(Eq,Ord,Show)
data DataFamily =
| DataAlias !AliasType
deriving(Eq,Ord,Show)
data Constructor = Constructor {
what constructor it inhabits , similar to , but not quite .
conChildren :: DataFamily,
} deriving(Show)
data Slot =
SlotNormal E
| SlotUnpacked E !Name [E]
| SlotExistential TVr
deriving(Eq,Ord,Show)
mapESlot f (SlotExistential t) = SlotExistential t { tvrType = f (tvrType t) }
mapESlot f (SlotNormal e) = SlotNormal $ f e
mapESlot f (SlotUnpacked e n es) = SlotUnpacked (f e) n (map f es)
conSlots s = getSlots $ conOrigSlots s
getSlots ss = concatMap f ss where
f (SlotNormal e) = [e]
f (SlotUnpacked _ _ es) = es
f (SlotExistential e) = [tvrType e]
getHsSlots ss = map f ss where
f (SlotNormal e) = e
f (SlotUnpacked e _ es) = e
f (SlotExistential e) = tvrType e
newtype DataTable = DataTable (Map.Map Name Constructor)
deriving(Monoid)
instance Binary DataTable where
put (DataTable dt) = putMap dt
get = fmap DataTable getMap
emptyConstructor = Constructor {
conName = error "emptyConstructor.conName",
conType = Unknown,
conOrigSlots = [],
conExpr = Unknown,
conInhabits = error "emptyConstructor.conInhabits",
conVirtual = Nothing,
conCTYPE = Nothing,
conChildren = DataNone
}
instance HasSize DataTable where
size (DataTable d) = Map.size d
getConstructor :: Monad m => Name -> DataTable -> m Constructor
getConstructor n _ | isJust me = return (emptyConstructor {
conName = n, conType = e,
conExpr = foldr ELam (foldl eAp (mktBox e) (map EVar tvrs)) tvrs,
conInhabits = s_Star, conOrigSlots = map SlotNormal sts }) where
sts = map tvrType ss
tvrs = [ tvr { tvrIdent = i , tvrType = t } | i <- anonymousIds | t <- sts ]
(_,ss) = fromPi e
me@(~(Just e)) = fromConjured modBox n `mplus` fromConjured modAbsurd n
getConstructor n _ | RawType <- nameType n = return $ primitiveConstructor n
getConstructor n _ | Just (level,arity) <- fromName_UnboxedTupleConstructor n = return $ if level == termLevel then snd $ tunboxedtuple arity else fst $ tunboxedtuple arity
n _ | Just v < - fromUnboxedNameTuple n , DataConstructor < - nameType n = return $ snd $ tunboxedtuple v
n _ | Just v < - fromUnboxedNameTuple n , TypeConstructor < - nameType n = return $ fst $ tunboxedtuple v
getConstructor n (DataTable map) = case Map.lookup n map of
Just x -> return x
Nothing -> fail $ "getConstructor: " ++ show (nameType n,n)
getProduct :: Monad m => DataTable -> E -> m Constructor
getProduct dataTable e | (ELit LitCons { litName = cn }) <-
followAliases dataTable e, Just c <- getConstructor cn dataTable = f c where
f c | DataNormal [x] <- conChildren c = getConstructor x dataTable
| otherwise = fail "Not Product type"
getProduct _ _ = fail "Not Product type"
tunboxedtuple :: Int -> (Constructor,Constructor)
tunboxedtuple n = (typeCons,dataCons) where
dataCons = emptyConstructor {
conName = dc,
conType = dtipe,
conOrigSlots = map (SlotNormal . EVar) typeVars,
conExpr = foldr ($) (ELit litCons
{ litName = dc
, litArgs = map EVar vars
, litType = ftipe
}) (map ELam vars),
conInhabits = tc
}
typeCons = emptyConstructor {
conName = tc,
conType = foldr EPi eHash (replicate n tvr { tvrType = eStar }),
conOrigSlots = replicate n (SlotNormal eStar),
conExpr = tipe,
conInhabits = s_Hash,
conChildren = DataNormal [dc]
}
dc = name_UnboxedTupleConstructor termLevel n
tc = name_UnboxedTupleConstructor typeLevel n
tipe = foldr ELam ftipe typeVars
typeVars = take n [ tvr { tvrType = eStar, tvrIdent = v } | v <- anonymousIds ]
vars = [ tvr { tvrType = EVar t, tvrIdent = v } | v <- map anonymous [ n + 8, n + 9 ..] | t <- typeVars ]
ftipe = ELit (litCons { litName = tc, litArgs = map EVar typeVars, litType = eHash })
dtipe = foldr EPi (foldr EPi ftipe [ v { tvrIdent = emptyId } | v <- vars]) typeVars
tAbsurd k = ELit (litCons {
litName = nameConjured modAbsurd k, litArgs = [], litType = k })
mktBox k = ELit (litCons {
litName = nameConjured modBox k, litArgs = [],
litType = k, litAliasFor = af }) where
af = case k of
EPi TVr { tvrType = t1 } t2 -> Just (ELam tvr { tvrType = t1 } (mktBox t2))
_ -> Nothing
tarrow = emptyConstructor {
conName = tc_Arrow,
conType = EPi (tVr emptyId eStar) (EPi (tVr emptyId eStar) eStar),
conOrigSlots = [SlotNormal eStar,SlotNormal eStar],
conExpr = ELam (tVr va1 eStar) (ELam (tVr va2 eStar) (EPi (tVr emptyId (EVar $ tVr va1 eStar)) (EVar $ tVr va2 eStar))),
conInhabits = s_Star,
conChildren = DataAbstract
}
primitiveConstructor name = emptyConstructor {
conName = name,
conType = eHash,
conExpr = ELit (litCons { litName = name, litArgs = [], litType = eHash }),
conInhabits = s_Hash,
conChildren = DataPrimitive
}
sortName :: ESort -> Name
sortName s = f s where
sortConstructor name ss = emptyConstructor {
conName = name,
conType = ESort ss,
conExpr = ESort (ESortNamed name),
conInhabits = sortName ss
}
typesCompatable :: forall m . Monad m => E -> E -> m ()
typesCompatable a b = f etherealIds a b where
f :: [Id] -> E -> E -> m ()
f _ (ESort a) (ESort b) = when (a /= b) $ fail $ "Sorts don't match: " ++ pprint (ESort a,ESort b)
f _ (EVar a) (EVar b) = when (a /= b) $ fail $ "Vars don't match: " ++ pprint (a,b)
we expand aliases first , because the newtype might have phantom types as arguments
f c (ELit (LitCons { litAliasFor = Just af, litArgs = as })) b = do
f c (foldl eAp af as) b
f c a (ELit (LitCons { litAliasFor = Just af, litArgs = as })) = do
f c a (foldl eAp af as)
f c (ELit LitCons { litName = n, litArgs = xs, litType = t }) (ELit LitCons { litName = n', litArgs = xs', litType = t' }) | n == n' = do
f c t t'
when (not $ sameShape1 xs xs') $ fail "Arg lists don't match"
zipWithM_ (f c) xs xs'
f c (EAp a b) (EAp a' b') = do
f c a a'
f c b b'
f c (ELam va ea) (ELam vb eb) = lam va ea vb eb c
f c (EPi va ea) (EPi vb eb) = lam va ea vb eb c
f c (EPi (TVr { tvrIdent = eid, tvrType = a}) b) (ELit (LitCons { litName = n, litArgs = [a',b'], litType = t })) | eid == emptyId, conName tarrow == n, t == eStar = do
f c a a'
f c b b'
f c (ELit (LitCons { litName = n, litArgs = [a',b'], litType = t })) (EPi (TVr { tvrIdent = eid, tvrType = a}) b) | eid == emptyId, conName tarrow == n, t == eStar = do
f c a a'
f c b b'
f _ a b | boxCompat a b || boxCompat b a = return ()
f _ a b = fail $ "Types don't match:" ++ pprint (a,b)
lam :: TVr -> E -> TVr -> E -> [Id] -> m ()
lam va ea vb eb ~(c:cs) = do
f (c:cs) (tvrType va) (tvrType vb)
f cs (subst va (EVar va { tvrIdent = c }) ea) (subst vb (EVar vb { tvrIdent = c }) eb)
boxCompat (ELit (LitCons { litName = n })) t | Just e <- fromConjured modBox n = e == getType t
boxCompat _ _ = False
extractPrimitive :: Monad m => DataTable -> E -> m (E,(ExtType,E))
extractPrimitive dataTable e = case followAliases dataTable (getType e) of
st@(ELit LitCons { litName = c, litArgs = [], litType = t })
| t == eHash -> return (e,(ExtType (packString $show c),st))
| otherwise -> do
Constructor { conChildren = DataNormal [cn] } <- getConstructor c dataTable
Constructor { conOrigSlots = [SlotNormal st] } <- getConstructor cn dataTable
(ELit LitCons { litName = n, litArgs = []}) <- return $ followAliases dataTable st
let tvra = tVr vn st
(vn:_) = newIds (freeIds e)
return (eCase e [Alt (litCons { litName = cn, litArgs = [tvra],
litType = (getType e) }) (EVar tvra)] Unknown,(ExtType (packString $ show n),st))
e' -> fail $ "extractPrimitive: " ++ show (e,e')
boxPrimitive ::
Monad m
=> DataTable
-> m (E,(ExtType,E))
boxPrimitive dataTable e et = case followAliases dataTable et of
st@(ELit LitCons { litName = c, litArgs = [], litType = t })
| t == eHash -> return (e,(ExtType . packString $ show c,st))
| otherwise -> do
Constructor { conChildren = DataNormal [cn] } <- getConstructor c dataTable
Constructor { conOrigSlots = [SlotNormal st] } <- getConstructor cn dataTable
(ELit LitCons { litName = n, litArgs = []}) <- return $ followAliases dataTable st
let tvra = tVr vn st
(vn:_) = newIds (freeVars (e,et))
if isManifestAtomic e then
return $ (ELit litCons { litName = cn, litArgs = [e], litType = et },(ExtType . packString $ show n,st))
else
return $ (eStrictLet tvra e $ ELit litCons { litName = cn, litArgs = [EVar tvra], litType = et },(ExtType . packString $ show n,st))
e' -> fail $ "boxPrimitive: " ++ show (e,e')
extractIO :: Monad m => E -> m E
extractIO e = f e where
f (ELit LitCons { litName = c, litArgs = [x] }) | c == tc_IO = return x
f (ELit LitCons { litAliasFor = Just af, litArgs = as }) = f (foldl eAp af as)
f _ = fail "extractIO: not an IO type"
extract IO or an unboxed version of it , ( ST , World - > ( , a # ) )
extractIO' :: E -> ([E],Bool,E)
extractIO' e = f e [] where
f (ELit LitCons { litName = c, litArgs = [x] }) rs | c == tc_IO = (reverse rs, True,x)
f (ELit LitCons { litName = c, litArgs = [_,x] }) rs | c == tc_ST = (reverse rs, True,x)
f (expandAlias -> Just t) rs = f t rs
f (fromPi -> (fromUnboxedTuple -> Just [s',x],[getType -> s''])) rs
| isState_ s' && isState_ s'' = (reverse rs, True,x)
f (EPi v e) rs = f e (getType v:rs)
f e rs = (reverse rs, False,e)
data ExtTypeInfo
boxed type , name is constructor of box , E is type of the slice , and ExtType is the calling convention to use
extTypeInfoExtType (ExtTypeRaw et) = et
extTypeInfoExtType (ExtTypeBoxed _ _ et) = et
extTypeInfoExtType ExtTypeVoid = "void"
lookupExtTypeInfo :: Monad m => DataTable -> E -> m ExtTypeInfo
lookupExtTypeInfo dataTable oe = f Set.empty oe where
f :: Monad m => Set.Set Name -> E -> m ExtTypeInfo
handle the void context ones first
f _ e@(ELit LitCons { litName = c }) | c == tc_Unit || c == tc_State_ = return ExtTypeVoid
f seen e@(ELit LitCons { litName = c, litArgs = [ta] }) | c == tc_Ptr = do
we know a pointer is a boxed BitsPtr
case f seen ta of
Just (ExtTypeBoxed _ _ (ExtType et)) -> return $ ExtTypeBoxed b t (ExtType $ et `mappend` "*")
Just (ExtTypeRaw (ExtType et)) -> return $ ExtTypeBoxed b t (ExtType $ et `mappend` "*")
_ -> return $ ExtTypeBoxed b t "HsPtr"
f seen e@(ELit LitCons { litName = c, litArgs = [ta] }) | c == tc_Complex = do
case f seen ta of
Just (ExtTypeRaw (ExtType et)) -> return $ ExtTypeRaw (ExtType $ "_Complex " `mappend` et)
_ -> fail "invalid _Complex type"
f seen e@(ELit LitCons { litName = c }) | Just (conCTYPE -> Just et) <- getConstructor c dataTable = do
return $ case g seen e of
Just (ExtTypeBoxed b t _) -> ExtTypeBoxed b t et
Just ExtTypeVoid -> ExtTypeVoid
_ -> ExtTypeRaw et
f seen e = g seen e
g _ (ELit LitCons { litName = c })
| Just et <- Map.lookup c rawExtTypeMap = return (ExtTypeRaw et)
g _ (ELit LitCons { litName = c, litAliasFor = Nothing })
| Just Constructor { conChildren = DataNormal [cn] } <- getConstructor c dataTable,
Just Constructor { conOrigSlots = [SlotNormal st] } <- getConstructor cn dataTable,
Just (ExtTypeRaw et) <- lookupExtTypeInfo dataTable st = return $ ExtTypeBoxed cn st et
g seen e@(ELit LitCons { litName = n }) | Just e' <- followAlias dataTable e,
n `Set.notMember` seen = f (Set.insert n seen) e'
g _ e = fail $ "lookupExtTypeInfo: " ++ show (oe,e)
expandAlias :: Monad m => E -> m E
expandAlias (ELit LitCons { litAliasFor = Just af, litArgs = as }) = return (foldl eAp af as)
expandAlias _ = fail "expandAlias: not alias"
followAlias :: Monad m => DataTable -> E -> m E
followAlias _ (ELit LitCons { litAliasFor = Just af, litArgs = as }) = return (foldl eAp af as)
followAlias _ _ = fail "followAlias: not alias"
followAliases :: DataTable -> E -> E
followAliases _dataTable e = f e where
f (ELit LitCons { litAliasFor = Just af, litArgs = as }) = f (foldl eAp af as)
f e = e
dataTablePrims = DataTable $ Map.fromList ([ (conName x,x) | x <- [tarrow] ])
deriveClasses :: IdMap Comb -> DataTable -> [(SrcLoc,Name,Name)] -> [(TVr,E)]
deriveClasses cmap dt@(DataTable mp) ctd = concatMap f ctd where
f (_,cd,t) | Just c <- getConstructor t dt, TypeConstructor == nameType (conName c), Just is <- conVirtual c = g is c cd
f _ = []
g is c cl = h cl where
lupvar v = EVar (combHead comb) where
Just comb = mlookup (toId v) cmap
typ = conExpr c
DataNormal [con] = conChildren c
Just conr = getConstructor con (DataTable mp)
[it@(ELit LitCons { litName = it_name })] = conSlots conr
Just itr = getConstructor it_name (DataTable mp)
DataEnum mv = conChildren itr
v1 = tvr { tvrIdent = anonymous 1, tvrType = typ }
v2 = tvr { tvrIdent = anonymous 2, tvrType = typ }
i1 = tvr { tvrIdent = anonymous 3, tvrType = it }
i2 = tvr { tvrIdent = anonymous 4, tvrType = it }
b3 = tvr { tvrIdent = anonymous 5, tvrType = tBoolzh }
val1 = tvr { tvrIdent = anonymous 7, tvrType = typ }
unbox e = ELam v1 (ELam v2 (ec (EVar v1) i1 (ec (EVar v2) i2 e))) where
ec v i e = eCase v [Alt (litCons { litName = con, litArgs = [i], litType = typ }) e] Unknown
h cl | cl == class_Eq = [mkCmpFunc v_equals Op.Eq]
h cl | cl == class_Ord = [
mkCmpFunc v_geq Op.UGte,
mkCmpFunc v_leq Op.ULte,
mkCmpFunc v_lt Op.ULt,
mkCmpFunc v_gt Op.UGt]
h cl | Just ans <- lookup cl mthds = ans where
mthds = [(class_Enum,[
(iv_te,ib_te),
(iv_fe,ib_fe),
iv v_succ succ_body,
iv v_pred pred_body,
iv v_enumFrom from_body,
iv v_enumFromTo fromTo_body,
iv v_enumFromThen fromThen_body,
iv v_enumFromThenTo fromThenTo_body
]),
(class_Ix,[
iv v_range range_body,
iv v_index index_body
])]
iv_te = setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName v_toEnum (nameName $ conName c), tvrType = getType ib_te }
iv_fe = setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName v_fromEnum (nameName $ conName c), tvrType = getType ib_fe }
iv fname body = (setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName fname (nameName $ conName c), tvrType = getType body },body)
succ_body = foldl EAp (lupvar v_enum_succ) [typ, box, debox, max]
pred_body = foldl EAp (lupvar v_enum_pred) [typ, box, debox]
from_body = foldl EAp (lupvar v_enum_from) [typ, box, debox, max]
fromTo_body = foldl EAp (lupvar v_enum_fromTo) [typ, box, debox]
fromThen_body = foldl EAp (lupvar v_enum_fromThen) [typ, box, debox, max]
fromThenTo_body = foldl EAp (lupvar v_enum_fromThenTo) [typ, box, debox]
range_body = foldl EAp (lupvar v_ix_range) [typ, box, debox]
inRange_body = foldl EAp ( lupvar v_ix_inRange ) [ typ , box , debox ]
index_body = foldl EAp (lupvar v_ix_index) [typ, box, debox]
ib_te = foldl EAp (lupvar v_enum_toEnum) [typ, box, toEzh (mv - 1)]
ib_fe = ELam val1 (create_uintegralCast_toInt con tEnumzh (EVar val1))
max = ELit (LitInt (fromIntegral $ mv - 1) tEnumzh)
box = ELam i1 (ELit (litCons { litName = con, litArgs = [EVar i1], litType = typ }))
debox = ELam v1 (ec (EVar v1) i1 (EVar i1)) where
ec v i e = eCase v [Alt (litCons { litName = con, litArgs = [i], litType = typ }) e] Unknown
h _ = []
mkCmpFunc fname op = (iv_eq,ib_eq) where
ib_eq = unbox (eStrictLet b3 (oper_IIB op (EVar i1) (EVar i2)) (ELit (litCons { litName = dc_Boolzh, litArgs = [EVar b3], litType = tBool })))
iv_eq = setProperty prop_INSTANCE tvr { tvrIdent = toId $ instanceName fname (nameName $ conName c), tvrType = getType ib_eq }
oper_IIB op a b = EPrim (Op (Op.BinOp op Op.bits16 Op.bits16) Op.bits16) [a,b] tBoolzh
create_integralCast conv c1 t1 c2 t2 e t = eCase e [Alt (litCons { litName = c1, litArgs = [tvra], litType = te }) cc] Unknown where
te = getType e
ELit LitCons { litName = n1, litArgs = [] } = t1
ELit LitCons { litName = n2, litArgs = [] } = t2
Just n1' = nameToOpTy n1
Just n2' = nameToOpTy n2
tvra = tVr va2 t1
tvrb = tVr va3 t2
cc = if n1 == n2 then ELit (litCons { litName = c2, litArgs = [EVar tvra], litType = t }) else
eStrictLet tvrb (EPrim (Op (Op.ConvOp conv n1') n2') [EVar tvra] t2) (ELit (litCons { litName = c2, litArgs = [EVar tvrb], litType = t }))
nameToOpTy n = do RawType <- return $ nameType n; Op.readTy (show n)
create_uintegralCast_toInt c1 t1 e = create_integralCast Op.U2U c1 t1 dc_Int tIntzh e tInt
updateLit :: DataTable -> Lit e t -> Lit e t
updateLit _ l@LitInt {} = l
updateLit dataTable lc@LitCons { litAliasFor = Just {} } = lc
updateLit dataTable lc@LitCons { litName = n } = lc { litAliasFor = af } where
af = do
Constructor { conChildren = DataNormal [x], conOrigSlots = cs } <- getConstructor n dataTable
Constructor { conChildren = DataAlias ErasedAlias, conOrigSlots = [SlotNormal sl] } <- getConstructor x dataTable
return (foldr ELam sl [ tVr i s | s <- getSlots cs | i <- anonymousIds])
removeNewtypes :: DataTable -> E -> E
removeNewtypes dataTable e = runIdentity (f e) where
f ec@ECase {} = emapEGH f f return ec { eCaseAlts = map g (eCaseAlts ec) } where
g (Alt l e) = Alt (gl $ updateLit dataTable l) e
f (ELit l) = emapEGH f f return (ELit (gl $ updateLit dataTable l))
f e = emapEGH f f return e
gl lc@LitCons { litAliasFor = Just e } = lc { litAliasFor = Just $ removeNewtypes dataTable e }
gl l = l
collectDeriving :: [HsDecl] -> [(SrcLoc,Name,Name)]
collectDeriving ds = concatMap f ds where
f decl@HsDataDecl {} = g decl
f decl@HsDeclDeriving {} = h decl
f _ = []
g decl = [(hsDeclSrcLoc decl, toName ClassName c,
toName TypeConstructor (hsDeclName decl)) | c <- hsDeclDerives decl ]
h decl@(hsDeclClassHead -> ch) | [(ltc -> Just t)] <- hsClassHeadArgs ch = [(hsDeclSrcLoc decl,toName ClassName (hsClassHead ch), t)] where
ltc (HsTyApp t1 _) = ltc t1
ltc (HsTyCon n) = Just (toName TypeConstructor n)
ltc x = Nothing
h _ = []
# NOINLINE toDataTable #
toDataTable :: (Map.Map Name Kind) -> (Map.Map Name Type) -> [HsDecl] -> DataTable -> DataTable
toDataTable km cm ds currentDataTable = newDataTable where
newDataTable = DataTable (Map.mapWithKey fixupMap $
Map.fromList [ (conName x,procNewTypes x) | x <- ds', conName x `notElem` keys primitiveAliases ])
fullDataTable = (newDataTable `mappend` currentDataTable)
procNewTypes c = c { conExpr = f (conExpr c), conType = f (conType c), conOrigSlots = map (mapESlot f) (conOrigSlots c) } where
f = removeNewtypes fullDataTable
fixupMap k _ | Just n <- getConstructor k dataTablePrims = n
fixupMap _ n = n
ds' = Seq.toList $ execWriter (mapM_ f ds)
newtypeLoopBreakers = map fst $ fst $ G.findLoopBreakers (const 0) (const True) (G.newGraph newtypeDeps fst snd) where
newtypeDeps = [ (n,concatMap (fm . hsBangType) $ hsConDeclArgs c) |
HsDataDecl { hsDeclDeclType = DeclTypeNewtype, hsDeclName = n, hsDeclCons = (head -> c) } <- ds ]
fm t = execWriter $ f t
f HsTyCon { hsTypeName = n } = tell [n]
f t = traverseHsType_ f t
f decl@HsDataDecl { hsDeclDeclType = DeclTypeNewtype, hsDeclName = nn, hsDeclCons = cs } =
dt decl (if nn `elem` newtypeLoopBreakers then DataAlias RecursiveAlias else DataAlias ErasedAlias) cs
f decl@HsDataDecl { hsDeclDeclType = DeclTypeKind } = dkind decl
f decl@HsDataDecl { hsDeclCons = cs } = dt decl DataNone cs
f _ = return ()
dt decl DataNone cs@(_:_:_) | all null (map hsConDeclArgs cs) = do
let virtualCons'@(fc:_) = map (makeData DataNone typeInfo) cs
typeInfo@(theType,_,_) = makeType decl (hsDeclCTYPE decl)
virt = Just (map conName virtualCons')
f (n,vc) = vc { conExpr = ELit (litCons { litName = consName, litArgs = [ELit (LitInt (fromIntegral n) rtype)], litType = conType vc }), conVirtual = virt }
virtualCons = map f (zip [(0 :: Int) ..] virtualCons')
consName = mapName (id,(++ "#")) $ toName DataConstructor (nameName (conName theType))
rtypeName = mapName (id,(++ "#")) $ toName TypeConstructor (nameName (conName theType))
rtype = ELit litCons { litName = rtypeName, litType = eHash, litAliasFor = Just tEnumzh }
dataCons = fc { conName = consName, conType = getType (conExpr dataCons), conOrigSlots = [SlotNormal rtype], conExpr = ELam (tVr (anonymous 3) rtype) (ELit (litCons { litName = consName, litArgs = [EVar (tVr (anonymous 6) rtype)], litType = conExpr theType })) }
rtypeCons = emptyConstructor {
conName = rtypeName,
conType = eHash,
conExpr = rtype,
conInhabits = s_Hash,
conChildren = DataEnum (length virtualCons)
}
tell (Seq.fromList virtualCons)
tell (Seq.singleton dataCons)
tell (Seq.singleton rtypeCons)
tell $ Seq.singleton theType { conChildren = DataNormal [consName], conVirtual = virt }
return ()
dt decl alias cs = do
let dataCons = map (makeData alias typeInfo) cs
typeInfo@(theType,_,_) = makeType decl (hsDeclCTYPE decl)
tell (Seq.fromList dataCons)
tell $ Seq.singleton theType { conChildren = DataNormal (map conName dataCons) }
dkind HsDataDecl { .. } = do
tell $ Seq.singleton $ (sortConstructor hsDeclName EHashHash) {
conChildren = DataNormal (map hsConDeclName hsDeclCons) }
flip mapM_ hsDeclCons $ \ HsConDecl { .. } -> do
let Just theKind = kind `fmap` (Map.lookup hsConDeclName km)
(theTypeFKind,theTypeKArgs') = fromPi theKind
theTypeArgs = [ tvr { tvrIdent = x } | tvr <- theTypeKArgs' | x <- anonymousIds ]
theTypeExpr = ELit litCons {
litName = hsConDeclName,
litArgs = map EVar theTypeArgs,
litType = theTypeFKind }
tell $ Seq.singleton emptyConstructor {
conName = hsConDeclName,
conType = theKind,
conOrigSlots = map (SlotNormal . tvrType) theTypeArgs,
conExpr = foldr ($) theTypeExpr (map ELam theTypeArgs),
conInhabits = hsDeclName
}
dkind _ = error "dkind passed bad decl"
makeData alias (theType,theTypeArgs,theTypeExpr) x = theData where
theData = emptyConstructor {
conName = dataConsName,
conType =foldr ($) (getType theExpr) (map EPi theTypeArgs),
conOrigSlots = origSlots,
conExpr = theExpr,
conInhabits = conName theType,
conChildren = alias
}
dataConsName = toName Name.DataConstructor (hsConDeclName x)
theExpr = foldr ELam (strictize tslots $ ELit litCons { litName = dataConsName, litArgs = map EVar dvars, litType = theTypeExpr }) hsvars
strictize tslots con = E.Subst.subst tvr { tvrIdent = sillyId } Unknown $ f tslots con where
f (Left (v,False):rs) con = f rs con
f (Left (v,True):rs) con = eStrictLet v (EVar v) (f rs con)
f (Right (v,dc,rcs):rs) con = eCase (EVar v) [Alt pat (f rs con)] Unknown where
pat = litCons { litName = dc, litArgs = rcs, litType = (getType v) }
f [] con = con
(ELit LitCons { litArgs = thisTypeArgs }, origArgs) = fromPi $ runVarName $ do
let (vs,ty) = case Map.lookup dataConsName cm of Just (TForAll vs (_ :=> ty)) -> (vs,ty); ~(Just ty) -> ([],ty)
mapM_ (newName anonymousIds ()) vs
tipe' ty
subst = substMap $ fromList [ (tvrIdent tv ,EVar $ tv { tvrIdent = p }) | EVar tv <- thisTypeArgs | p <- anonymousIds ]
origSlots = map SlotExistential existentials ++ map f tslots where
f (Left (e,_)) = SlotNormal (getType e)
f (Right (e,n,es)) = SlotUnpacked (getType e) n (map getType es)
hsvars = existentials ++ map f tslots where
f (Left (e,_)) = e
f (Right (e,_,_)) = e
dvars = existentials ++ concatMap f tslots where
f (Left (e,_)) = [e]
f (Right (_,_,es)) = es
tslots = f (newIds fvset) (map isHsBangedTy (hsConDeclArgs x)) origArgs where
f (i:is) (False:bs) (e:es) = Left (e { tvrIdent = i, tvrType = subst (tvrType e) },False):f is bs es
f (i:j:is) (True:bs) (e:es) = maybe (Left (e { tvrIdent = i, tvrType = subst (tvrType e) },True):f is bs es) id $ g e (tvrType e) where
g e te = do
ELit LitCons { litName = n } <- return $ followAliases fullDataTable te
Constructor { conChildren = DataNormal [dc] } <- getConstructor n fullDataTable
con <- getConstructor dc fullDataTable
case (conChildren con,slotTypes fullDataTable dc te) of
(DataAlias ErasedAlias,[nt]) -> g e nt
(_,[st]) -> do
let nv = tvr { tvrIdent = j, tvrType = st }
return $ Right (e { tvrIdent = i, tvrType = subst (tvrType e)},dc,[nv]):f is bs es
_ -> fail "not unboxable"
f _ [] [] = []
f _ _ _ = error "DataConstructors.tslots"
fvset = freeVars (thisTypeArgs,origArgs) `mappend` fromList (take (length theTypeArgs + 2) anonymousIds)
existentials = values $ freeVars (map getType origArgs) S.\\ (freeVars thisTypeArgs :: IdMap TVr)
hsArgs = existentials + + [ tvr { tvrIdent = x } | tvr < - origArgs | x < - drop ( 5 + length theTypeArgs ) [ 2,4 .. ] ]
makeType decl ct = (theType,theTypeArgs,theTypeExpr) where
theTypeName = toName Name.TypeConstructor (hsDeclName decl)
Just theKind = kind `fmap` (Map.lookup theTypeName km)
(theTypeFKind,theTypeKArgs') = fromPi theKind
theTypeArgs = [ tvr { tvrIdent = x } | tvr <- theTypeKArgs' | x <- anonymousIds ]
theTypeExpr = ELit litCons { litName = theTypeName, litArgs = map EVar theTypeArgs, litType = theTypeFKind }
theType = emptyConstructor {
conCTYPE = fmap (ExtType . packString) ct,
conExpr = foldr ($) theTypeExpr (map ELam theTypeArgs),
conInhabits = if theTypeFKind == eStar then s_Star else s_Hash,
conName = theTypeName,
conOrigSlots = map (SlotNormal . tvrType) theTypeArgs,
conType = theKind,
conVirtual = Nothing
}
isHsBangedTy HsBangedTy {} = True
isHsBangedTy _ = False
getConstructorArities :: DataTable -> [(Name,Int)]
getConstructorArities (DataTable dt) = [ (n,length $ conSlots c) | (n,c) <- Map.toList dt]
constructionExpression ::
constructionExpression dataTable n typ@(ELit LitCons { litName = pn, litArgs = xs })
| DataAlias ErasedAlias <- conChildren mc = ELam var (EVar var)
| DataAlias RecursiveAlias <- conChildren mc = let var' = var { tvrType = st } in ELam var' (prim_unsafeCoerce (EVar var') typ)
| pn == conName pc = sub (conExpr mc) where
~[st] = slotTypes dataTable n typ
var = tvr { tvrIdent = vid, tvrType = typ }
(vid:_) = newIds (freeVars typ)
Just mc = getConstructor n dataTable
Just pc = getConstructor (conInhabits mc) dataTable
sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
constructionExpression wdt n e | Just fa <- followAlias wdt e = constructionExpression wdt n fa
constructionExpression _ n e = error $ "constructionExpression: error in " ++ show n ++ ": " ++ show e
deconstructionExpression ::
UniqueProducer m
deconstructionExpression dataTable name typ@(ELit LitCons { litName = pn, litArgs = xs }) vs e | pn == conName pc = ans where
Just mc = getConstructor name dataTable
Just pc = getConstructor (conInhabits mc) dataTable
sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
ans = case conVirtual mc of
Just _ -> return $ let ELit LitCons { litArgs = [ELit (LitInt n t)] } = conExpr mc in Alt (LitInt n t) e
Nothing -> do
let f vs (SlotExistential t:ss) rs ls = f vs ss (t:rs) ls
f (v:vs) (SlotNormal _:ss) rs ls = f vs ss (v:rs) ls
f (v:vs) (SlotUnpacked e n es:ss) rs ls = do
let g t = do
s <- newUniq
return $ tVr (anonymous s) t
as <- mapM g (map sub es)
f vs ss (reverse as ++ rs) ((v,ELit litCons { litName = n, litArgs = map EVar as, litType = sub e }):ls)
f [] [] rs ls = return $ Alt (litCons { litName = name, litArgs = reverse rs, litType = typ }) (eLetRec ls e)
f _ _ _ _ = error "DataConstructors.deconstructuonExpression.f"
f vs (conOrigSlots mc) [] []
deconstructionExpression wdt n ty vs e | Just fa <- followAlias wdt ty = deconstructionExpression wdt n fa vs e
deconstructionExpression _ n e _ _ = error $ "deconstructionExpression: error in " ++ show n ++ ": " ++ show e
slotTypes ::
slotTypes wdt n (ELit LitCons { litName = pn, litArgs = xs, litType = _ })
| pn == conName pc = [sub x | x <- conSlots mc ]
where
Identity mc = getConstructor n wdt
Identity pc = getConstructor (conInhabits mc) wdt
sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
slotTypes wdt n kind
| sortKindLike kind, (e,ts) <- fromPi kind = take (length (conSlots mc) - length ts) (conSlots mc)
| sortKindLike kind , ( e , ts ) < - fromPi kind = ( )
where Identity mc = getConstructor n wdt
slotTypes wdt n e | Just fa <- followAlias wdt e = slotTypes wdt n fa
slotTypes _ n e = error $ "slotTypes: error in " ++ show n ++ ": " ++ show e
slotTypesHs ::
slotTypesHs wdt n (ELit LitCons { litName = pn, litArgs = xs, litType = _ })
| pn == conName pc = [sub x | x <- getHsSlots $ conOrigSlots mc ]
where
Identity mc = getConstructor n wdt
Identity pc = getConstructor (conInhabits mc) wdt
sub = substMap $ fromDistinctAscList [ (i,sl) | sl <- xs | i <- anonymousIds ]
slotTypesHs wdt n kind
| sortKindLike kind, (e,ts) <- fromPi kind = take (length (conSlots mc) - length ts) (conSlots mc)
where Identity mc = getConstructor n wdt
slotTypesHs wdt n e | Just fa <- followAlias wdt e = slotTypes wdt n fa
slotTypesHs _ n e = error $ "slotTypesHs: error in " ++ show n ++ ": " ++ show e
# NOINLINE showDataTable #
showDataTable (DataTable mp) = vcat xs where
c con = vcat [t,e,cs,vt,ih,ch,mc] where
t = text "::" <+> ePretty conType
e = text "=" <+> ePretty conExpr
cs = text "slots:" <+> tupled (map ePretty (conSlots con))
vt = text "virtual:" <+> tshow conVirtual
ih = text "inhabits:" <+> tshow conInhabits
ch = text "children:" <+> tshow conChildren
mc = text "CTYPE:" <+> tshow conCTYPE
Constructor { .. } = con
xs = [text x <+> hang 0 (c y) | (x,y) <- ds ]
ds = sortBy (\(x,_) (y,_) -> compare x y) [(show x,y) | (x,y) <- Map.toList mp]
# NOINLINE samplePrimitiveDataTable #
samplePrimitiveDataTable :: DataTable
samplePrimitiveDataTable = DataTable $ Map.fromList [ (x,c) | x <- xs, c <- getConstructor x mempty] where
nt v = map (flip name_UnboxedTupleConstructor (v::Int)) [termLevel, typeLevel]
xs = nt 0 ++ nt 3 ++ [nameConjured modAbsurd eStar,nameConjured modBox hs, nameConjured modAbsurd hs', nameConjured modBox hs',rt_bits16,rt_bits_ptr_]
hs = EPi (tVr emptyId eHash) eStar
hs' = tFunc eStar (tFunc (tFunc eStar eHash) eStar)
getSiblings :: DataTable -> Name -> Maybe [Name]
getSiblings dt n
| Just c <- getConstructor n dt, Just Constructor { conChildren = DataNormal cs } <- getConstructor (conInhabits c) dt = Just cs
| otherwise = Nothing
numberSiblings :: DataTable -> Name -> Maybe Int
numberSiblings dt n
| Just c <- getConstructor n dt, Just Constructor { conChildren = cc } <- getConstructor (conInhabits c) dt = case cc of
DataNormal ds -> Just $ length ds
DataEnum n -> Just n
_ -> Nothing
| otherwise = Nothing
onlyChild :: DataTable -> Name -> Bool
onlyChild dt n = isJust ans where
ans = do
c <- getConstructor n dt
case conChildren c of
DataNormal [_] -> return ()
_ -> do
c <- getConstructor (conInhabits c) dt
case conChildren c of
DataNormal [_] -> return ()
_ -> fail "not cpr"
pprintTypeOfCons :: (Monad m,DocLike a) => DataTable -> Name -> m a
pprintTypeOfCons dataTable name = do
c <- getConstructor name dataTable
return $ pprintTypeAsHs (conType c)
pprintTypeAsHs :: DocLike a => E -> a
pprintTypeAsHs e = unparse $ runVarName (f e) where
f e | e == eStar = return $ atom $ text "*"
| e == eHash = return $ atom $ text "#"
f (EPi (TVr { tvrIdent = eid, tvrType = t1 }) t2) | eid == emptyId = do
t1 <- f t1
t2 <- f t2
return $ t1 `arr` t2
f (ELit LitCons { litName = n, litArgs = as }) | (a:as') <- reverse as = f $ EAp (ELit litCons { litName = n, litArgs = reverse as' }) a
f (ELit LitCons { litName = n, litArgs = [] }) = return $ atom $ text $ show n
f (EAp a b) = do
a <- f a
b <- f b
return $ a `app` b
f (EVar v) = do
vo <- newLookupName ['a' .. ] () (tvrIdent v)
return $ atom $ char vo
f v | (e,ts@(_:_)) <- fromPi v = do
ts' <- mapM (newLookupName ['a'..] () . tvrIdent) ts
r <- f e
return $ fixitize (N,-3) $ pop (text "forall" <+> hsep (map char ts') <+> text ". ") (atomize r)
f e = error $ "printTypeAsHs: " ++ show e
arr = bop (R,0) (space D.<> text "->" D.<> space)
app = bop (L,100) (text " ")
class Monad m => DataTableMonad m where
getDataTable :: m DataTable
getDataTable = return mempty
instance DataTableMonad Identity
primitiveAliases :: Map.Map Name Name
primitiveAliases = Map.fromList [
(tc_Bits1, rt_bool),
(tc_Bits8, rt_bits8),
(tc_Bits16, rt_bits16),
(tc_Bits32, rt_bits32),
(tc_Bits64, rt_bits64),
(tc_Bits128, rt_bits128),
(tc_BitsPtr, rt_bits_ptr_),
(tc_BitsMax, rt_bits_max_),
(tc_Float32, rt_float32),
(tc_Float64, rt_float64),
(tc_Float80, rt_float80),
(tc_Float128, rt_float128)
]
rawExtTypeMap :: Map.Map Name ExtType
rawExtTypeMap = Map.fromList [
(rt_bool, "bool"),
(rt_bits8, "uint8_t"),
(rt_bits16, "uint16_t"),
(rt_bits32, "uint32_t"),
(rt_bits64, "uint64_t"),
(rt_bits128, "uint128_t"),
(rt_bits_ptr_, "uintptr_t" ),
(rt_bits_max_, "uintmax_t"),
(rt_float32, "float"),
(rt_float64, "double"),
(rt_float80, "long double"),
(rt_float128, "__float128")
]
instance Data.Binary.Binary AliasType where
put ErasedAlias = do
Data.Binary.putWord8 0
put RecursiveAlias = do
Data.Binary.putWord8 1
get = do
h <- Data.Binary.getWord8
case h of
0 -> do
return ErasedAlias
1 -> do
return RecursiveAlias
_ -> fail "invalid binary data found"
instance Data.Binary.Binary DataFamily where
put DataAbstract = do
Data.Binary.putWord8 0
put DataNone = do
Data.Binary.putWord8 1
put DataPrimitive = do
Data.Binary.putWord8 2
put (DataEnum aa) = do
Data.Binary.putWord8 3
Data.Binary.put aa
put (DataNormal ab) = do
Data.Binary.putWord8 4
Data.Binary.put ab
put (DataAlias ac) = do
Data.Binary.putWord8 5
Data.Binary.put ac
get = do
h <- Data.Binary.getWord8
case h of
0 -> do
return DataAbstract
1 -> do
return DataNone
2 -> do
return DataPrimitive
3 -> do
aa <- Data.Binary.get
return (DataEnum aa)
4 -> do
ab <- Data.Binary.get
return (DataNormal ab)
5 -> do
ac <- Data.Binary.get
return (DataAlias ac)
_ -> fail "invalid binary data found"
instance Data.Binary.Binary Constructor where
put (Constructor aa ab ac ad ae af ag ah) = do
Data.Binary.put aa
Data.Binary.put ab
Data.Binary.put ac
Data.Binary.put ad
Data.Binary.put ae
Data.Binary.put af
Data.Binary.put ag
Data.Binary.put ah
get = do
aa <- get
ab <- get
ac <- get
ad <- get
ae <- get
af <- get
ag <- get
ah <- get
return (Constructor aa ab ac ad ae af ag ah)
instance Data.Binary.Binary Slot where
put (SlotNormal aa) = do
Data.Binary.putWord8 0
Data.Binary.put aa
put (SlotUnpacked ab ac ad) = do
Data.Binary.putWord8 1
Data.Binary.put ab
Data.Binary.put ac
Data.Binary.put ad
put (SlotExistential ae) = do
Data.Binary.putWord8 2
Data.Binary.put ae
get = do
h <- Data.Binary.getWord8
case h of
0 -> do
aa <- Data.Binary.get
return (SlotNormal aa)
1 -> do
ab <- Data.Binary.get
ac <- Data.Binary.get
ad <- Data.Binary.get
return (SlotUnpacked ab ac ad)
2 -> do
ae <- Data.Binary.get
return (SlotExistential ae)
_ -> fail "invalid binary data found"
|
94d99a2755633fa7d9da70cb1a9bd2c6921c8bad300f9d2947668246b9efd70f | rabbitmq/rabbitmq-prometheus | rabbit_prometheus_handler.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(rabbit_prometheus_handler).
-export([init/2]).
-export([generate_response/2, content_types_provided/2, is_authorized/2]).
-export([setup/0]).
-include_lib("amqp_client/include/amqp_client.hrl").
-define(SCRAPE_DURATION, telemetry_scrape_duration_seconds).
-define(SCRAPE_SIZE, telemetry_scrape_size_bytes).
-define(SCRAPE_ENCODED_SIZE, telemetry_scrape_encoded_size_bytes).
%% ===================================================================
%% Cowboy Handler Callbacks
%% ===================================================================
init(Req, _State) ->
{cowboy_rest, Req, #{}}.
content_types_provided(ReqData, Context) ->
Since Prometheus 2.0 Protobuf is no longer supported
{[{{<<"text">>, <<"plain">>, '*'}, generate_response}], ReqData, Context}.
is_authorized(ReqData, Context) ->
{true, ReqData, Context}.
setup() ->
TelemetryRegistry = telemetry_registry(),
ScrapeDuration = [{name, ?SCRAPE_DURATION},
{help, "Scrape duration"},
{labels, ["registry", "content_type"]},
{registry, TelemetryRegistry}],
ScrapeSize = [{name, ?SCRAPE_SIZE},
{help, "Scrape size, not encoded"},
{labels, ["registry", "content_type"]},
{registry, TelemetryRegistry}],
ScrapeEncodedSize = [{name, ?SCRAPE_ENCODED_SIZE},
{help, "Scrape size, encoded"},
{labels, ["registry", "content_type", "encoding"]},
{registry, TelemetryRegistry}],
prometheus_summary:declare(ScrapeDuration),
prometheus_summary:declare(ScrapeSize),
prometheus_summary:declare(ScrapeEncodedSize).
%% ===================================================================
%% Private functions
%% ===================================================================
generate_response(ReqData, Context) ->
Method = cowboy_req:method(ReqData),
Response = gen_response(Method, ReqData),
{stop, Response, Context}.
gen_response(<<"GET">>, Request) ->
Registry0 = cowboy_req:binding(registry, Request, <<"default">>),
case prometheus_registry:exists(Registry0) of
false ->
cowboy_req:reply(404, #{}, <<"Unknown Registry">>, Request);
Registry ->
gen_metrics_response(Registry, Request)
end;
gen_response(_, Request) ->
Request.
gen_metrics_response(Registry, Request) ->
{Code, RespHeaders, Body} = reply(Registry, Request),
Headers = to_cowboy_headers(RespHeaders),
cowboy_req:reply(Code, maps:from_list(Headers), Body, Request).
to_cowboy_headers(RespHeaders) ->
lists:map(fun to_cowboy_headers_/1, RespHeaders).
to_cowboy_headers_({Name, Value}) ->
{to_cowboy_name(Name), Value}.
to_cowboy_name(Name) ->
binary:replace(atom_to_binary(Name, utf8), <<"_">>, <<"-">>).
reply(Registry, Request) ->
case validate_registry(Registry, registry()) of
{true, RealRegistry} ->
format_metrics(Request, RealRegistry);
{registry_conflict, _ReqR, _ConfR} ->
{409, [], <<>>};
{registry_not_found, _ReqR} ->
{404, [], <<>>};
false ->
false
end.
format_metrics(Request, Registry) ->
AcceptEncoding = cowboy_req:header(<<"accept-encoding">>, Request, undefined),
ContentType = prometheus_text_format:content_type(),
Scrape = render_format(ContentType, Registry),
Encoding = accept_encoding_header:negotiate(AcceptEncoding, [<<"identity">>,
<<"gzip">>]),
encode_format(ContentType, binary_to_list(Encoding), Scrape, Registry).
render_format(ContentType, Registry) ->
TelemetryRegistry = telemetry_registry(),
Scrape = prometheus_summary:observe_duration(
TelemetryRegistry,
?SCRAPE_DURATION,
[Registry, ContentType],
fun () -> prometheus_text_format:format(Registry) end),
prometheus_summary:observe(TelemetryRegistry,
?SCRAPE_SIZE,
[Registry, ContentType],
iolist_size(Scrape)),
Scrape.
validate_registry(undefined, auto) ->
{true, default};
validate_registry(Registry, auto) ->
{true, Registry};
validate_registry(Registry, Registry) ->
{true, Registry};
validate_registry(Asked, Conf) ->
{registry_conflict, Asked, Conf}.
telemetry_registry() ->
application:get_env(rabbitmq_prometheus, telemetry_registry, default).
registry() ->
application:get_env(rabbitmq_prometheus, registry, auto).
encode_format(ContentType, Encoding, Scrape, Registry) ->
Encoded = encode_format_(Encoding, Scrape),
prometheus_summary:observe(telemetry_registry(),
?SCRAPE_ENCODED_SIZE,
[Registry, ContentType, Encoding],
iolist_size(Encoded)),
{200, [{content_type, binary_to_list(ContentType)},
{content_encoding, Encoding}], Encoded}.
encode_format_("gzip", Scrape) ->
zlib:gzip(Scrape);
encode_format_("identity", Scrape) ->
Scrape.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-prometheus/8a700b69d3c10b80a898eb92b5834a5a052af0a5/src/rabbit_prometheus_handler.erl | erlang |
===================================================================
Cowboy Handler Callbacks
===================================================================
===================================================================
Private functions
=================================================================== | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
-module(rabbit_prometheus_handler).
-export([init/2]).
-export([generate_response/2, content_types_provided/2, is_authorized/2]).
-export([setup/0]).
-include_lib("amqp_client/include/amqp_client.hrl").
-define(SCRAPE_DURATION, telemetry_scrape_duration_seconds).
-define(SCRAPE_SIZE, telemetry_scrape_size_bytes).
-define(SCRAPE_ENCODED_SIZE, telemetry_scrape_encoded_size_bytes).
init(Req, _State) ->
{cowboy_rest, Req, #{}}.
content_types_provided(ReqData, Context) ->
Since Prometheus 2.0 Protobuf is no longer supported
{[{{<<"text">>, <<"plain">>, '*'}, generate_response}], ReqData, Context}.
is_authorized(ReqData, Context) ->
{true, ReqData, Context}.
setup() ->
TelemetryRegistry = telemetry_registry(),
ScrapeDuration = [{name, ?SCRAPE_DURATION},
{help, "Scrape duration"},
{labels, ["registry", "content_type"]},
{registry, TelemetryRegistry}],
ScrapeSize = [{name, ?SCRAPE_SIZE},
{help, "Scrape size, not encoded"},
{labels, ["registry", "content_type"]},
{registry, TelemetryRegistry}],
ScrapeEncodedSize = [{name, ?SCRAPE_ENCODED_SIZE},
{help, "Scrape size, encoded"},
{labels, ["registry", "content_type", "encoding"]},
{registry, TelemetryRegistry}],
prometheus_summary:declare(ScrapeDuration),
prometheus_summary:declare(ScrapeSize),
prometheus_summary:declare(ScrapeEncodedSize).
generate_response(ReqData, Context) ->
Method = cowboy_req:method(ReqData),
Response = gen_response(Method, ReqData),
{stop, Response, Context}.
gen_response(<<"GET">>, Request) ->
Registry0 = cowboy_req:binding(registry, Request, <<"default">>),
case prometheus_registry:exists(Registry0) of
false ->
cowboy_req:reply(404, #{}, <<"Unknown Registry">>, Request);
Registry ->
gen_metrics_response(Registry, Request)
end;
gen_response(_, Request) ->
Request.
gen_metrics_response(Registry, Request) ->
{Code, RespHeaders, Body} = reply(Registry, Request),
Headers = to_cowboy_headers(RespHeaders),
cowboy_req:reply(Code, maps:from_list(Headers), Body, Request).
to_cowboy_headers(RespHeaders) ->
lists:map(fun to_cowboy_headers_/1, RespHeaders).
to_cowboy_headers_({Name, Value}) ->
{to_cowboy_name(Name), Value}.
to_cowboy_name(Name) ->
binary:replace(atom_to_binary(Name, utf8), <<"_">>, <<"-">>).
reply(Registry, Request) ->
case validate_registry(Registry, registry()) of
{true, RealRegistry} ->
format_metrics(Request, RealRegistry);
{registry_conflict, _ReqR, _ConfR} ->
{409, [], <<>>};
{registry_not_found, _ReqR} ->
{404, [], <<>>};
false ->
false
end.
format_metrics(Request, Registry) ->
AcceptEncoding = cowboy_req:header(<<"accept-encoding">>, Request, undefined),
ContentType = prometheus_text_format:content_type(),
Scrape = render_format(ContentType, Registry),
Encoding = accept_encoding_header:negotiate(AcceptEncoding, [<<"identity">>,
<<"gzip">>]),
encode_format(ContentType, binary_to_list(Encoding), Scrape, Registry).
render_format(ContentType, Registry) ->
TelemetryRegistry = telemetry_registry(),
Scrape = prometheus_summary:observe_duration(
TelemetryRegistry,
?SCRAPE_DURATION,
[Registry, ContentType],
fun () -> prometheus_text_format:format(Registry) end),
prometheus_summary:observe(TelemetryRegistry,
?SCRAPE_SIZE,
[Registry, ContentType],
iolist_size(Scrape)),
Scrape.
validate_registry(undefined, auto) ->
{true, default};
validate_registry(Registry, auto) ->
{true, Registry};
validate_registry(Registry, Registry) ->
{true, Registry};
validate_registry(Asked, Conf) ->
{registry_conflict, Asked, Conf}.
telemetry_registry() ->
application:get_env(rabbitmq_prometheus, telemetry_registry, default).
registry() ->
application:get_env(rabbitmq_prometheus, registry, auto).
encode_format(ContentType, Encoding, Scrape, Registry) ->
Encoded = encode_format_(Encoding, Scrape),
prometheus_summary:observe(telemetry_registry(),
?SCRAPE_ENCODED_SIZE,
[Registry, ContentType, Encoding],
iolist_size(Encoded)),
{200, [{content_type, binary_to_list(ContentType)},
{content_encoding, Encoding}], Encoded}.
encode_format_("gzip", Scrape) ->
zlib:gzip(Scrape);
encode_format_("identity", Scrape) ->
Scrape.
|
daf81ab08df5f682cd85a27bfa18df416a950b8d8d159fdd0b934ea1024b02e4 | rmloveland/scheme48-0.53 | package-defs.scm | Copyright ( c ) 1993 - 1999 by . See file COPYING .
; The intermediate language (node tree)
The structures VARIABLE and PRIMOP are contained in NODE . They are used
; in client language code where the NODE- names conflict.
(define-structures ((node node-interface)
(variable variable-interface)
(primop primop-interface))
(open scheme big-scheme comp-util arch parameters
defrecord)
(for-syntax (open scheme big-scheme let-nodes))
(begin
(define-syntax let-nodes
(lambda (form rename compare)
(expand-let-nodes form rename compare))))
(files (node node) ; variable and node data structures
primop data structure
(node node-util) ; various small utilities
(node node-equal))) ; node equality
;(define node
; (let ()
; (define-structure let-nodes (export expand-let-nodes)
; (open scheme big-scheme arch)
; (files (node let-nodes)))
; (define-structures ((node node-interface)
; (variable variable-interface)
( primop primop - interface ) )
; (open scheme big-scheme comp-util arch parameters)
; (for-syntax (open scheme big-scheme let-nodes))
; (begin
; (define-syntax let-nodes
; (lambda (form rename compare)
; (expand-let-nodes form rename compare))))
; (files (node node) ; variable and node data structures
( node primop ) ; primop data structure
; (node node-util) ; various small utilities
; (node node-equal) ; node equality
; (node leftovers))) ; more node utilities
; node))
; Pretty printer
(define-structure pp-cps (export pp-cps)
(open scheme big-scheme comp-util node structure-refs)
(access i/o) ; force-output
(files (node pp-cps)))
; Expander for LET-NODES, a macro for creating interconnected nodes.
(define-structure let-nodes (export expand-let-nodes)
(open scheme big-scheme arch)
(files (node let-nodes)))
; Compiler Parameters
; This allows client languages to supply parameters to the compiler
; without introducing circular module dependencies.
(define-structures ((parameters parameter-interface)
(set-parameters (export set-compiler-parameter!)))
(open scheme big-scheme)
(files param))
; An enumerated type defining the standard primops.
(define-structure arch (export (primop :syntax) primop-count)
(open scheme enumerated)
(files (node arch)))
; linearizing node trees for later reuse
(define-structure node-vector (export node->vector
vector->node
vector->leaf-node)
(open scheme big-scheme comp-util node parameters
defrecord)
(files (node vector)))
; Translating the input forms into simplified node trees
(define-structures ((front front-interface)
(front-debug front-debug-interface))
(open scheme big-scheme comp-util node simplify parameters jump
remove-cells flow-values)
(files (front top))) ; main entry points and debugging utilities
(define-structure cps-util (export cps-call cps-sequence)
(open scheme big-scheme comp-util node
define-record-types)
(files (front cps)))
; Converting tail-recursive calls to jumps
(define-structure jump (export integrate-jump-procs!
find-jump-procs
procs->jumps)
(open scheme big-scheme comp-util node parameters ssa
define-record-types)
(files (front jump)))
Program simplification and partial evaluation
(define-structures ((simplify (export simplify-node))
(simplify-internal simplify-internal-interface))
(open scheme big-scheme comp-util node parameters node-vector)
(for-syntax (open scheme big-scheme simp-patterns))
(begin
(define-syntax pattern-simplifier
(lambda (form rename compare)
from SIMP - PATTERNS
(files (simp simplify) ; main entry point and driver
simplifiers for some of the standard primops
; Simplifying calls to lambda nodes
(define-structure simplify-let (export simplify-let)
(open scheme big-scheme comp-util node parameters
simplify-join simplify-internal)
(files (simp let)))
; Substituting lambda nodes that are bound by calls to lambda nodes,
; trying to maximize the further simplification opportunites while
; minimizing code expansion.
(define-structure simplify-join (export substitute-join-arguments)
(open scheme big-scheme comp-util node)
(files (simp join)))
; The expander for PATTERN-SIMPLIFIER, a macro for writing algebraic
; transformations.
(define-structure simp-patterns (export make-pattern-simplifier)
(open scheme big-scheme defrecord)
(files (simp pattern)))
; Replacing cells with values passed as parameters, currently empty
; and unused (the code has not been made compatible with the current
; version of the compiler).
(define-structure remove-cells (export remove-cells-from-tree)
(open scheme big-scheme)
(begin
(define (remove-cells-from-tree . stuff)
(error "REMOVE-CELLS-FROM-TREE is undefined"))))
; Flow analysis, also currently empty and unused for the same reason.
(define-structure flow-values (export flow-values)
(open scheme big-scheme)
(begin
(define (flow-values . stuff)
(error "FLOW-VALUES is undefined"))))
; A random collection of utilities.
(define-structure comp-util utilities-interface
(open scheme big-scheme structure-refs expanding-vectors)
(for-syntax (open scheme big-scheme))
(access primitives features)
(files (util syntax) ; macro for defining subrecords
(util util))) ; random utilities
(define-structure expanding-vectors (export make-xvector
xvector-length
xvector-ref
xvector-set!
xvector-length
xvector->vector)
(open scheme define-record-types)
(files (util expand-vec)))
(define-interface transitive-interface
(export make-graph-from-predecessors
make-graph-from-successors
transitive-or! transitive-or-with-kill! transitive-or-with-pass!
transitive-and! transitive-and-with-kill! transitive-and-with-pass!))
(define-structure transitive transitive-interface
(open scheme big-scheme integer-sets defrecord)
(optimize auto-integrate)
(files (util transitive)))
(define-interface integer-set-interface
(export make-empty-integer-set
add-to-integer-set
integer-set-not
integer-set-ior
integer-set-and
integer-set-subtract
integer-set-equal?
map-over-integer-set))
(define-structure integer-sets integer-set-interface
(open scheme bitwise bigbit)
(optimize auto-integrate)
(files (util z-set)))
(define-structure strongly-connected (export strongly-connected-components)
(open scheme big-scheme defrecord)
(optimize auto-integrate)
(files (util strong)))
(define-structure dominators (export find-dominators!)
(open scheme big-scheme comp-util
define-record-types)
(optimize auto-integrate)
(files (util dominators)))
(define-structure ssa (export graph->ssa-graph! find-joins)
(open scheme big-scheme dominators
define-record-types)
(optimize auto-integrate)
(files (util ssa)))
; Vectors of bytes, a renaming of Scheme 48's code vectors.
(define-structure byte-vectors compiler-byte-vector-interface
(open scheme code-vectors bitwise signals)
(optimize auto-integrate)
(files (util byte-vector)))
; A version of READ that annotates pairs with source file, line, and
; column information.
(define-structure annotated-read annotated-read-interface
; this is correct for linking, but doesn't work when loading
( open defrecord extended - ports primitives scheme assembler )
(open scheme big-scheme primitives fluids assembler)
(files (prescheme track-read)))
| null | https://raw.githubusercontent.com/rmloveland/scheme48-0.53/1ae4531fac7150bd2af42d124da9b50dd1b89ec1/ps-compiler/package-defs.scm | scheme | The intermediate language (node tree)
in client language code where the NODE- names conflict.
variable and node data structures
various small utilities
node equality
(define node
(let ()
(define-structure let-nodes (export expand-let-nodes)
(open scheme big-scheme arch)
(files (node let-nodes)))
(define-structures ((node node-interface)
(variable variable-interface)
(open scheme big-scheme comp-util arch parameters)
(for-syntax (open scheme big-scheme let-nodes))
(begin
(define-syntax let-nodes
(lambda (form rename compare)
(expand-let-nodes form rename compare))))
(files (node node) ; variable and node data structures
primop data structure
(node node-util) ; various small utilities
(node node-equal) ; node equality
(node leftovers))) ; more node utilities
node))
Pretty printer
force-output
Expander for LET-NODES, a macro for creating interconnected nodes.
Compiler Parameters
This allows client languages to supply parameters to the compiler
without introducing circular module dependencies.
An enumerated type defining the standard primops.
linearizing node trees for later reuse
Translating the input forms into simplified node trees
main entry points and debugging utilities
Converting tail-recursive calls to jumps
main entry point and driver
Simplifying calls to lambda nodes
Substituting lambda nodes that are bound by calls to lambda nodes,
trying to maximize the further simplification opportunites while
minimizing code expansion.
The expander for PATTERN-SIMPLIFIER, a macro for writing algebraic
transformations.
Replacing cells with values passed as parameters, currently empty
and unused (the code has not been made compatible with the current
version of the compiler).
Flow analysis, also currently empty and unused for the same reason.
A random collection of utilities.
macro for defining subrecords
random utilities
Vectors of bytes, a renaming of Scheme 48's code vectors.
A version of READ that annotates pairs with source file, line, and
column information.
this is correct for linking, but doesn't work when loading | Copyright ( c ) 1993 - 1999 by . See file COPYING .
The structures VARIABLE and PRIMOP are contained in NODE . They are used
(define-structures ((node node-interface)
(variable variable-interface)
(primop primop-interface))
(open scheme big-scheme comp-util arch parameters
defrecord)
(for-syntax (open scheme big-scheme let-nodes))
(begin
(define-syntax let-nodes
(lambda (form rename compare)
(expand-let-nodes form rename compare))))
primop data structure
( primop primop - interface ) )
(define-structure pp-cps (export pp-cps)
(open scheme big-scheme comp-util node structure-refs)
(files (node pp-cps)))
(define-structure let-nodes (export expand-let-nodes)
(open scheme big-scheme arch)
(files (node let-nodes)))
(define-structures ((parameters parameter-interface)
(set-parameters (export set-compiler-parameter!)))
(open scheme big-scheme)
(files param))
(define-structure arch (export (primop :syntax) primop-count)
(open scheme enumerated)
(files (node arch)))
(define-structure node-vector (export node->vector
vector->node
vector->leaf-node)
(open scheme big-scheme comp-util node parameters
defrecord)
(files (node vector)))
(define-structures ((front front-interface)
(front-debug front-debug-interface))
(open scheme big-scheme comp-util node simplify parameters jump
remove-cells flow-values)
(define-structure cps-util (export cps-call cps-sequence)
(open scheme big-scheme comp-util node
define-record-types)
(files (front cps)))
(define-structure jump (export integrate-jump-procs!
find-jump-procs
procs->jumps)
(open scheme big-scheme comp-util node parameters ssa
define-record-types)
(files (front jump)))
Program simplification and partial evaluation
(define-structures ((simplify (export simplify-node))
(simplify-internal simplify-internal-interface))
(open scheme big-scheme comp-util node parameters node-vector)
(for-syntax (open scheme big-scheme simp-patterns))
(begin
(define-syntax pattern-simplifier
(lambda (form rename compare)
from SIMP - PATTERNS
simplifiers for some of the standard primops
(define-structure simplify-let (export simplify-let)
(open scheme big-scheme comp-util node parameters
simplify-join simplify-internal)
(files (simp let)))
(define-structure simplify-join (export substitute-join-arguments)
(open scheme big-scheme comp-util node)
(files (simp join)))
(define-structure simp-patterns (export make-pattern-simplifier)
(open scheme big-scheme defrecord)
(files (simp pattern)))
(define-structure remove-cells (export remove-cells-from-tree)
(open scheme big-scheme)
(begin
(define (remove-cells-from-tree . stuff)
(error "REMOVE-CELLS-FROM-TREE is undefined"))))
(define-structure flow-values (export flow-values)
(open scheme big-scheme)
(begin
(define (flow-values . stuff)
(error "FLOW-VALUES is undefined"))))
(define-structure comp-util utilities-interface
(open scheme big-scheme structure-refs expanding-vectors)
(for-syntax (open scheme big-scheme))
(access primitives features)
(define-structure expanding-vectors (export make-xvector
xvector-length
xvector-ref
xvector-set!
xvector-length
xvector->vector)
(open scheme define-record-types)
(files (util expand-vec)))
(define-interface transitive-interface
(export make-graph-from-predecessors
make-graph-from-successors
transitive-or! transitive-or-with-kill! transitive-or-with-pass!
transitive-and! transitive-and-with-kill! transitive-and-with-pass!))
(define-structure transitive transitive-interface
(open scheme big-scheme integer-sets defrecord)
(optimize auto-integrate)
(files (util transitive)))
(define-interface integer-set-interface
(export make-empty-integer-set
add-to-integer-set
integer-set-not
integer-set-ior
integer-set-and
integer-set-subtract
integer-set-equal?
map-over-integer-set))
(define-structure integer-sets integer-set-interface
(open scheme bitwise bigbit)
(optimize auto-integrate)
(files (util z-set)))
(define-structure strongly-connected (export strongly-connected-components)
(open scheme big-scheme defrecord)
(optimize auto-integrate)
(files (util strong)))
(define-structure dominators (export find-dominators!)
(open scheme big-scheme comp-util
define-record-types)
(optimize auto-integrate)
(files (util dominators)))
(define-structure ssa (export graph->ssa-graph! find-joins)
(open scheme big-scheme dominators
define-record-types)
(optimize auto-integrate)
(files (util ssa)))
(define-structure byte-vectors compiler-byte-vector-interface
(open scheme code-vectors bitwise signals)
(optimize auto-integrate)
(files (util byte-vector)))
(define-structure annotated-read annotated-read-interface
( open defrecord extended - ports primitives scheme assembler )
(open scheme big-scheme primitives fluids assembler)
(files (prescheme track-read)))
|
86e8de93ba6ed9bef2b4915c9041d68286938f6b8199b6d7027720ea912313a9 | janestreet/sexp_grammar | test_regression.ml | open! Core
open! Expect_test_helpers_base
module Traverse = Sexp_grammar.Fold_recursive (struct
type t = depth:int -> unit
type list_t = (depth:int -> unit) list
let atomic ~depth:_ = ()
let compound ts ~depth = List.iter ts ~f:(fun t -> t ~depth)
let any (_ : string) = atomic
let bool = atomic
let char = atomic
let integer = atomic
let float = atomic
let string = atomic
let option = Fn.id
let union = compound
let list = compound
let empty = []
let cons t ts = t :: ts
let many t = [ t ]
let record alist ~allow_extra_fields:_ =
List.concat_map alist ~f:(fun ((_ : string), (field, (_ : (string * Sexp.t) list))) ->
match (field : _ Sexp_grammar.Field.t) with
| Optional x -> x
| Required x -> x)
;;
let variant cases ~case_sensitivity:_ =
List.concat_map cases ~f:(fun ((_ : string), (case, (_ : (string * Sexp.t) list))) ->
Option.value case ~default:[])
|> compound
;;
let lazy_ lazy_t = force lazy_t
let tag t (_ : string) (_ : Sexp.t) = t
let of_lazy_recursive lazy_t ~depth = if depth > 0 then (force lazy_t) ~depth:(depth - 1)
end)
let test ?cr ?(depth = 1) (module M : Sexp_grammar_validation.With_grammar) =
require_does_not_raise ?cr [%here] (fun () ->
Traverse.of_typed_grammar_exn M.t_sexp_grammar ~depth)
;;
(* Grammar validation should not fail when a type variable appears inside
the body type expression of a recursion expression, e.g.,
... (Recursive (Tycon r ((Tyvar a))) ...) ... *)
let%expect_test "tyvar inside recursion body" =
test
(module struct
type 'a recursive = { self : 'a recursive } [@@deriving sexp_grammar]
type 'b recursive_with_reference =
{ this : 'b recursive_with_reference
; that : 'b recursive
}
[@@deriving sexp_grammar]
type t = int recursive_with_reference [@@deriving sexp_grammar]
end);
[%expect {| |}]
;;
(* Grammar validation should not fail when an earlier-defined type constructor
appears inside the body type expression of a recursion expression, e.g.,
... (Recursive (Tycon l ((Tycon t ()))) ...) ... *)
let%expect_test "tycon inside recursion body" =
test
(module struct
type 'a u = U of 'a u [@@deriving quickcheck, sexp, sexp_grammar]
type t = T of t u [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
(* This test shows a case where a type can refer to another type
of the same base name. *)
let%expect_test "tycon inside recursion body with same base name" =
test
(module struct
module T = struct
type 'a t = { this : 'a t } [@@deriving quickcheck, sexp, sexp_grammar]
end
type t = { that : t T.t } [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
(* This test shows a case where a recursive type can transitively depend on another type
of the same name where no explicit namespace qualification happens in the definition.
*)
let%expect_test "tycon inside recursion body with same explicitly qualified name" =
test
(module struct
module T = struct
type 'a t = { this : 'a t } [@@deriving quickcheck, sexp, sexp_grammar]
type 'a u = 'a t [@@deriving quickcheck, sexp, sexp_grammar]
end
open T
type t = { that : t u } [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
(* This test shows a case where a type can transitively depend on another type
which has the same name in (essentially) the same scope. *)
let%expect_test "tycon inside recursion body with same fully qualified name" =
test
(module struct
open struct
type 'a t = { this : 'a t } [@@deriving quickcheck, sexp, sexp_grammar]
type 'a u = 'a t [@@deriving quickcheck, sexp, sexp_grammar]
end
type t = { that : t u } [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
| null | https://raw.githubusercontent.com/janestreet/sexp_grammar/85812f8e8288836cd0c75597090495aad750c67c/test/test_regression.ml | ocaml | Grammar validation should not fail when a type variable appears inside
the body type expression of a recursion expression, e.g.,
... (Recursive (Tycon r ((Tyvar a))) ...) ...
Grammar validation should not fail when an earlier-defined type constructor
appears inside the body type expression of a recursion expression, e.g.,
... (Recursive (Tycon l ((Tycon t ()))) ...) ...
This test shows a case where a type can refer to another type
of the same base name.
This test shows a case where a recursive type can transitively depend on another type
of the same name where no explicit namespace qualification happens in the definition.
This test shows a case where a type can transitively depend on another type
which has the same name in (essentially) the same scope. | open! Core
open! Expect_test_helpers_base
module Traverse = Sexp_grammar.Fold_recursive (struct
type t = depth:int -> unit
type list_t = (depth:int -> unit) list
let atomic ~depth:_ = ()
let compound ts ~depth = List.iter ts ~f:(fun t -> t ~depth)
let any (_ : string) = atomic
let bool = atomic
let char = atomic
let integer = atomic
let float = atomic
let string = atomic
let option = Fn.id
let union = compound
let list = compound
let empty = []
let cons t ts = t :: ts
let many t = [ t ]
let record alist ~allow_extra_fields:_ =
List.concat_map alist ~f:(fun ((_ : string), (field, (_ : (string * Sexp.t) list))) ->
match (field : _ Sexp_grammar.Field.t) with
| Optional x -> x
| Required x -> x)
;;
let variant cases ~case_sensitivity:_ =
List.concat_map cases ~f:(fun ((_ : string), (case, (_ : (string * Sexp.t) list))) ->
Option.value case ~default:[])
|> compound
;;
let lazy_ lazy_t = force lazy_t
let tag t (_ : string) (_ : Sexp.t) = t
let of_lazy_recursive lazy_t ~depth = if depth > 0 then (force lazy_t) ~depth:(depth - 1)
end)
let test ?cr ?(depth = 1) (module M : Sexp_grammar_validation.With_grammar) =
require_does_not_raise ?cr [%here] (fun () ->
Traverse.of_typed_grammar_exn M.t_sexp_grammar ~depth)
;;
let%expect_test "tyvar inside recursion body" =
test
(module struct
type 'a recursive = { self : 'a recursive } [@@deriving sexp_grammar]
type 'b recursive_with_reference =
{ this : 'b recursive_with_reference
; that : 'b recursive
}
[@@deriving sexp_grammar]
type t = int recursive_with_reference [@@deriving sexp_grammar]
end);
[%expect {| |}]
;;
let%expect_test "tycon inside recursion body" =
test
(module struct
type 'a u = U of 'a u [@@deriving quickcheck, sexp, sexp_grammar]
type t = T of t u [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
let%expect_test "tycon inside recursion body with same base name" =
test
(module struct
module T = struct
type 'a t = { this : 'a t } [@@deriving quickcheck, sexp, sexp_grammar]
end
type t = { that : t T.t } [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
let%expect_test "tycon inside recursion body with same explicitly qualified name" =
test
(module struct
module T = struct
type 'a t = { this : 'a t } [@@deriving quickcheck, sexp, sexp_grammar]
type 'a u = 'a t [@@deriving quickcheck, sexp, sexp_grammar]
end
open T
type t = { that : t u } [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
let%expect_test "tycon inside recursion body with same fully qualified name" =
test
(module struct
open struct
type 'a t = { this : 'a t } [@@deriving quickcheck, sexp, sexp_grammar]
type 'a u = 'a t [@@deriving quickcheck, sexp, sexp_grammar]
end
type t = { that : t u } [@@deriving quickcheck, sexp, sexp_grammar]
end);
[%expect {| |}]
;;
|
5625cf51407729d81e60ac137131d3d51b6f4350754280dae50a4b1687ccac6b | schell/odin | Pane.hs | {-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TupleSections #
module Odin.Engine.New.UI.Pane
( pane
, PaneCfg (..)
) where
import Control.Monad (msum, guard)
import Control.Monad.Trans (lift)
import Data.Word (Word64)
import Gelatin.SDL2 hiding (rotate, scale)
import Reflex.SDL2 hiding (fan)
import Odin.Engine.New
import Odin.Engine.New.UI.Configs
import Odin.Engine.New.UI.Layer
import Odin.Engine.New.UI.Layout
import Odin.Engine.New.UI.Button
import Odin.Engine.New.UI.Painters (getBlankButtonPainter)
fint :: V2 Int -> V2 Float
fint = (fromIntegral <$>)
paneScrollbarColor :: V4 Float
paneScrollbarColor = V4 1 1 1 0.5
paneScrollbarExtent :: Float
paneScrollbarExtent = 16
paneVerticalScrollPic :: Float -> ColorPicture ()
paneVerticalScrollPic h = setGeometry $ fan $
mapVertices (, paneScrollbarColor) $ rectangle 0 (V2 paneScrollbarExtent h)
paneHorizontalScrollPic :: Float -> ColorPicture ()
paneHorizontalScrollPic w = setGeometry $ fan $
mapVertices (, paneScrollbarColor) $ rectangle 0 (V2 w 16)
-- | The minimum (but negative) offset the content should move in each dimension
-- of a window pane.
paneMaxContentOffset :: V2 Int -> V2 Int -> V2 Float
paneMaxContentOffset layerSize paneContentSize = V2 w h
where w = max 0 w0
h = max 0 h0
V2 w0 h0 = fint paneContentSize - fint layerSize
-- | The suggested size of the horizontal and vertical scroll bars.
paneScrollSize :: V2 Int -> V2 Int -> V2 Float
paneScrollSize layerSize paneContentSize = V2 clampw clamph
where clampw = max 0 w
clamph = max 0 h
V2 w h = pane * (min 1 <$> (pane / content))
pane = fromIntegral <$> layerSize
content = fromIntegral <$> paneContentSize
-- | The maximum distance the scrollbars should move in each dimension of a
-- window pane.
maxScrollBarPos :: V2 Int -> V2 Int -> V2 Float
maxScrollBarPos layerSize paneContentSize = fint layerSize - sbsize
where sbsize = paneScrollSize layerSize paneContentSize
-- | The suggested position of the horizontal and vertical scroll bars.
scrollBarPos :: V2 Int -> V2 Int -> V2 Int -> V2 Float
scrollBarPos layerSize paneContentSize paneContentOffset = maxpos * percnt
where maxpos = maxScrollBarPos layerSize paneContentSize
minoff = paneMaxContentOffset layerSize paneContentSize
offset = fint paneContentOffset
percnt = fnan <$> (offset / minoff)
fnan t = if isNaN t then 0 else t
mouseUnitsToContentOffset :: V2 Int -> V2 Int -> V2 Int -> V2 Int
mouseUnitsToContentOffset layerSize paneContentSize units =
floor <$> (maxoff * percnt)
where maxpos = maxScrollBarPos layerSize paneContentSize
maxoff = paneMaxContentOffset layerSize paneContentSize
percnt = fint units / maxpos
clampContentOffset :: V2 Int -> V2 Int -> V2 Int -> V2 Int
clampContentOffset layerSize paneContentSize (V2 x y) = newoffset
where V2 mxx mxy = floor <$> paneMaxContentOffset layerSize paneContentSize
newoffset = V2 (max 0 $ min mxx x) (max 0 $ min mxy y)
--------------------------------------------------------------------------------
data PaneState = PaneStatePassive
| PaneStateScrolling
| PaneStateScrolled
deriving (Show, Eq)
data PaneInternal = PaneInternal { piK :: Word64
, piWidgets :: [Widget]
, piContentOffset :: V2 Int
, piHorScroll :: Renderer2
, piVerScroll :: Renderer2
, piState :: PaneState
}
data PaneUpdate = PaneUpdateWidgets [Widget]
toWidgets
:: PaneInternal
-> [Widget]
toWidgets p = [Widget { widgetUid = piK p
, widgetTransform = []
, widgetBoundary = concatMap widgetBoundary (piWidgets p)
, widgetRenderer2 = mconcat $ map widgetRenderer2 $ piWidgets p
, widgetCursor = msum $ reverse $ map widgetCursor $ piWidgets p
}
]
pane
:: forall r t m a. OdinWidget r t m
=> Shape
-- ^ The initial shape of the layer's boundary.
-> V4 Float
-- ^ The initial background color.
-> V2 Float
-- ^ The initial content offset.
-> PaneCfg t
-- ^ Any event based updates.
-> DynamicWriterT t [Widget] m a
-- ^ The widgets to run within the pane.
-> m a
pane boundIni colorIni scrollIni paneCfg subwidgets = do
let layerCfg = def & setBoundaryEvent .~ (paneCfg ^. setBoundaryEvent)
dBound <- holdDyn boundIni $ paneCfg ^. setBoundaryEvent
dOffsetV2 <- holdDyn scrollIni $ paneCfg ^. setOffsetEvent
subwidgets layer
(a, dWidgetsAABB) <- layer boundIni colorIni layerCfg $ do
(a, dWidgets) <- captureW $
transformSubwidgets (pure . moveV2 <$> dOffsetV2) subwidgets
return (a, widgetsAABB <$> dWidgets)
btnPntr <- getBlankButtonPainter
-- vertical scroll bar
dVst <- transform [moveV2 2] (buttonWith btnPntr (V2 10 100) def)
dVIsScrolling <- (holdUniqDyn =<<) .
holdDyn False $ (== ButtonStateDown) <$> updated dVst
-- horizontal scroll bar
dHst <- transform [move 4 4] (buttonWith btnPntr (V2 100 10) def)
dHIsScrolling <- (holdUniqDyn =<<) .
holdDyn False $ (== ButtonStateDown) <$> updated dHst
putDebugLnE (updated dVIsScrolling) $ ("v:" ++) . show
putDebugLnE (updated dHIsScrolling) $ ("v:" ++) . show
putDebugLnE (updated dWidgetsAABB) $ ("aabb:" ++) . show
return a
| null | https://raw.githubusercontent.com/schell/odin/97ae1610a7abd19aa150bc7dfc132082d88ca9ea/odin-engine/src/Odin/Engine/New/UI/Pane.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE ScopedTypeVariables #
| The minimum (but negative) offset the content should move in each dimension
of a window pane.
| The suggested size of the horizontal and vertical scroll bars.
| The maximum distance the scrollbars should move in each dimension of a
window pane.
| The suggested position of the horizontal and vertical scroll bars.
------------------------------------------------------------------------------
^ The initial shape of the layer's boundary.
^ The initial background color.
^ The initial content offset.
^ Any event based updates.
^ The widgets to run within the pane.
vertical scroll bar
horizontal scroll bar | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE TupleSections #
module Odin.Engine.New.UI.Pane
( pane
, PaneCfg (..)
) where
import Control.Monad (msum, guard)
import Control.Monad.Trans (lift)
import Data.Word (Word64)
import Gelatin.SDL2 hiding (rotate, scale)
import Reflex.SDL2 hiding (fan)
import Odin.Engine.New
import Odin.Engine.New.UI.Configs
import Odin.Engine.New.UI.Layer
import Odin.Engine.New.UI.Layout
import Odin.Engine.New.UI.Button
import Odin.Engine.New.UI.Painters (getBlankButtonPainter)
fint :: V2 Int -> V2 Float
fint = (fromIntegral <$>)
paneScrollbarColor :: V4 Float
paneScrollbarColor = V4 1 1 1 0.5
paneScrollbarExtent :: Float
paneScrollbarExtent = 16
paneVerticalScrollPic :: Float -> ColorPicture ()
paneVerticalScrollPic h = setGeometry $ fan $
mapVertices (, paneScrollbarColor) $ rectangle 0 (V2 paneScrollbarExtent h)
paneHorizontalScrollPic :: Float -> ColorPicture ()
paneHorizontalScrollPic w = setGeometry $ fan $
mapVertices (, paneScrollbarColor) $ rectangle 0 (V2 w 16)
paneMaxContentOffset :: V2 Int -> V2 Int -> V2 Float
paneMaxContentOffset layerSize paneContentSize = V2 w h
where w = max 0 w0
h = max 0 h0
V2 w0 h0 = fint paneContentSize - fint layerSize
paneScrollSize :: V2 Int -> V2 Int -> V2 Float
paneScrollSize layerSize paneContentSize = V2 clampw clamph
where clampw = max 0 w
clamph = max 0 h
V2 w h = pane * (min 1 <$> (pane / content))
pane = fromIntegral <$> layerSize
content = fromIntegral <$> paneContentSize
maxScrollBarPos :: V2 Int -> V2 Int -> V2 Float
maxScrollBarPos layerSize paneContentSize = fint layerSize - sbsize
where sbsize = paneScrollSize layerSize paneContentSize
scrollBarPos :: V2 Int -> V2 Int -> V2 Int -> V2 Float
scrollBarPos layerSize paneContentSize paneContentOffset = maxpos * percnt
where maxpos = maxScrollBarPos layerSize paneContentSize
minoff = paneMaxContentOffset layerSize paneContentSize
offset = fint paneContentOffset
percnt = fnan <$> (offset / minoff)
fnan t = if isNaN t then 0 else t
mouseUnitsToContentOffset :: V2 Int -> V2 Int -> V2 Int -> V2 Int
mouseUnitsToContentOffset layerSize paneContentSize units =
floor <$> (maxoff * percnt)
where maxpos = maxScrollBarPos layerSize paneContentSize
maxoff = paneMaxContentOffset layerSize paneContentSize
percnt = fint units / maxpos
clampContentOffset :: V2 Int -> V2 Int -> V2 Int -> V2 Int
clampContentOffset layerSize paneContentSize (V2 x y) = newoffset
where V2 mxx mxy = floor <$> paneMaxContentOffset layerSize paneContentSize
newoffset = V2 (max 0 $ min mxx x) (max 0 $ min mxy y)
data PaneState = PaneStatePassive
| PaneStateScrolling
| PaneStateScrolled
deriving (Show, Eq)
data PaneInternal = PaneInternal { piK :: Word64
, piWidgets :: [Widget]
, piContentOffset :: V2 Int
, piHorScroll :: Renderer2
, piVerScroll :: Renderer2
, piState :: PaneState
}
data PaneUpdate = PaneUpdateWidgets [Widget]
toWidgets
:: PaneInternal
-> [Widget]
toWidgets p = [Widget { widgetUid = piK p
, widgetTransform = []
, widgetBoundary = concatMap widgetBoundary (piWidgets p)
, widgetRenderer2 = mconcat $ map widgetRenderer2 $ piWidgets p
, widgetCursor = msum $ reverse $ map widgetCursor $ piWidgets p
}
]
pane
:: forall r t m a. OdinWidget r t m
=> Shape
-> V4 Float
-> V2 Float
-> PaneCfg t
-> DynamicWriterT t [Widget] m a
-> m a
pane boundIni colorIni scrollIni paneCfg subwidgets = do
let layerCfg = def & setBoundaryEvent .~ (paneCfg ^. setBoundaryEvent)
dBound <- holdDyn boundIni $ paneCfg ^. setBoundaryEvent
dOffsetV2 <- holdDyn scrollIni $ paneCfg ^. setOffsetEvent
subwidgets layer
(a, dWidgetsAABB) <- layer boundIni colorIni layerCfg $ do
(a, dWidgets) <- captureW $
transformSubwidgets (pure . moveV2 <$> dOffsetV2) subwidgets
return (a, widgetsAABB <$> dWidgets)
btnPntr <- getBlankButtonPainter
dVst <- transform [moveV2 2] (buttonWith btnPntr (V2 10 100) def)
dVIsScrolling <- (holdUniqDyn =<<) .
holdDyn False $ (== ButtonStateDown) <$> updated dVst
dHst <- transform [move 4 4] (buttonWith btnPntr (V2 100 10) def)
dHIsScrolling <- (holdUniqDyn =<<) .
holdDyn False $ (== ButtonStateDown) <$> updated dHst
putDebugLnE (updated dVIsScrolling) $ ("v:" ++) . show
putDebugLnE (updated dHIsScrolling) $ ("v:" ++) . show
putDebugLnE (updated dWidgetsAABB) $ ("aabb:" ++) . show
return a
|
08e64c95706d6cc89af8e8f8a0e9b23e8113af5403d624c0be976f176c564e70 | softwarelanguageslab/maf | R5RS_WeiChenRompf2019_fermat-3.scm | ; Changes:
* removed : 1
* added : 0
* swaps : 0
; * negated predicates: 0
* swapped branches : 2
* calls to i d fun : 3
(letrec ((square (lambda (x)
(* x x)))
(modulo-power (lambda (base exp n)
(if (= exp 0)
1
(if (odd? exp)
(modulo (* base (modulo-power base (- exp 1) n)) n)
(modulo (square (modulo-power base (/ exp 2) n)) n)))))
(is-trivial-composite? (lambda (n)
(let ((__or_res (= (modulo n 2) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 3) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 5) 0)))
(<change>
(if __or_res
__or_res
(let ((__or_res (= (modulo n 7) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 11) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 13) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 17) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 19) 0)))
(if __or_res __or_res (= (modulo n 23) 0))))))))))))
((lambda (x) x)
(if __or_res
__or_res
(let ((__or_res (= (modulo n 7) 0)))
(<change>
(if __or_res
__or_res
(let ((__or_res (= (modulo n 11) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 13) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 17) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 19) 0)))
(if __or_res __or_res (= (modulo n 23) 0))))))))))
((lambda (x) x)
(if __or_res
__or_res
(let ((__or_res (= (modulo n 11) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 13) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 17) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 19) 0)))
(if __or_res
(<change>
__or_res
(= (modulo n 23) 0))
(<change>
(= (modulo n 23) 0)
__or_res)))))))))))))))))))))))
(is-fermat-prime? (lambda (n iterations)
(let ((__or_res (<= iterations 0)))
(if __or_res
__or_res
(let* ((byte-size (ceiling (/ (log n) (log 2))))
(a (random byte-size)))
(if (= (modulo-power a (- n 1) n) 1)
(<change>
(is-fermat-prime? n (- iterations 1))
#f)
(<change>
#f
(is-fermat-prime? n (- iterations 1)))))))))
(generate-fermat-prime (lambda (byte-size iterations)
(<change>
(let ((n (random byte-size)))
(if (if (not (is-trivial-composite? n)) (is-fermat-prime? n iterations) #f)
n
(generate-fermat-prime byte-size iterations)))
((lambda (x) x)
(let ((n (random byte-size)))
(if (if (not (is-trivial-composite? n)) (is-fermat-prime? n iterations) #f)
n
(generate-fermat-prime byte-size iterations)))))))
(iterations 10)
(byte-size 15))
(display "Generating prime...")
(newline)
(<change>
(display (generate-fermat-prime byte-size iterations))
())
(display " is prime with at least probability 1 - 1/2^")
(display iterations)
(newline)
(display " if it is not a Carmichael number.")
(newline)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_WeiChenRompf2019_fermat-3.scm | scheme | Changes:
* negated predicates: 0 | * removed : 1
* added : 0
* swaps : 0
* swapped branches : 2
* calls to i d fun : 3
(letrec ((square (lambda (x)
(* x x)))
(modulo-power (lambda (base exp n)
(if (= exp 0)
1
(if (odd? exp)
(modulo (* base (modulo-power base (- exp 1) n)) n)
(modulo (square (modulo-power base (/ exp 2) n)) n)))))
(is-trivial-composite? (lambda (n)
(let ((__or_res (= (modulo n 2) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 3) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 5) 0)))
(<change>
(if __or_res
__or_res
(let ((__or_res (= (modulo n 7) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 11) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 13) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 17) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 19) 0)))
(if __or_res __or_res (= (modulo n 23) 0))))))))))))
((lambda (x) x)
(if __or_res
__or_res
(let ((__or_res (= (modulo n 7) 0)))
(<change>
(if __or_res
__or_res
(let ((__or_res (= (modulo n 11) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 13) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 17) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 19) 0)))
(if __or_res __or_res (= (modulo n 23) 0))))))))))
((lambda (x) x)
(if __or_res
__or_res
(let ((__or_res (= (modulo n 11) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 13) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 17) 0)))
(if __or_res
__or_res
(let ((__or_res (= (modulo n 19) 0)))
(if __or_res
(<change>
__or_res
(= (modulo n 23) 0))
(<change>
(= (modulo n 23) 0)
__or_res)))))))))))))))))))))))
(is-fermat-prime? (lambda (n iterations)
(let ((__or_res (<= iterations 0)))
(if __or_res
__or_res
(let* ((byte-size (ceiling (/ (log n) (log 2))))
(a (random byte-size)))
(if (= (modulo-power a (- n 1) n) 1)
(<change>
(is-fermat-prime? n (- iterations 1))
#f)
(<change>
#f
(is-fermat-prime? n (- iterations 1)))))))))
(generate-fermat-prime (lambda (byte-size iterations)
(<change>
(let ((n (random byte-size)))
(if (if (not (is-trivial-composite? n)) (is-fermat-prime? n iterations) #f)
n
(generate-fermat-prime byte-size iterations)))
((lambda (x) x)
(let ((n (random byte-size)))
(if (if (not (is-trivial-composite? n)) (is-fermat-prime? n iterations) #f)
n
(generate-fermat-prime byte-size iterations)))))))
(iterations 10)
(byte-size 15))
(display "Generating prime...")
(newline)
(<change>
(display (generate-fermat-prime byte-size iterations))
())
(display " is prime with at least probability 1 - 1/2^")
(display iterations)
(newline)
(display " if it is not a Carmichael number.")
(newline)) |
114f7f5176a10e8468a25a37fdbbb006eb3c5e2f26d7fb5bd90568caae4484f9 | swannodette/mies-om | core.cljs | (ns {{name}}.core
(:require [om.core :as om]
[om.dom :as dom]))
(enable-console-print!)
(def app-state (atom {:text "Hello world!"}))
(om/root
(fn [app owner]
(reify om/IRender
(render [_]
(dom/h1 nil (:text app)))))
app-state
{:target (. js/document (getElementById "app"))})
| null | https://raw.githubusercontent.com/swannodette/mies-om/4a9fde582b083f59dcb59761391a8a66713a8380/src/leiningen/new/mies_om/core.cljs | clojure | (ns {{name}}.core
(:require [om.core :as om]
[om.dom :as dom]))
(enable-console-print!)
(def app-state (atom {:text "Hello world!"}))
(om/root
(fn [app owner]
(reify om/IRender
(render [_]
(dom/h1 nil (:text app)))))
app-state
{:target (. js/document (getElementById "app"))})
|
|
c59697f5731613a086ec19a2f5bc55cae37233207b67f0f59f8f1bb219d95202 | CryptoKami/cryptokami-core | Event.hs | # LANGUAGE TypeFamilies #
module Test.Pos.Block.Logic.Event
(
-- * Running events and scenarios
runBlockEvent
, runBlockScenario
, BlockScenarioResult(..)
-- * Exceptions
, SnapshotMissingEx(..)
, DbNotEquivalentToSnapshot(..)
) where
import Universum
import Control.Exception.Safe (fromException)
import qualified Data.Map as Map
import qualified Data.Text as T
import Pos.Block.Logic.VAR (BlockLrcMode, rollbackBlocks, verifyAndApplyBlocks)
import Pos.Block.Types (Blund)
import Pos.Core (HasConfiguration, HeaderHash)
import Pos.DB.Pure (DBPureDiff, MonadPureDB, dbPureDiff, dbPureDump, dbPureReset)
import Pos.Exception (CryptokamiFatalError (..))
import Pos.Generator.BlockEvent (BlockApplyResult (..), BlockEvent, BlockEvent' (..),
BlockRollbackFailure (..), BlockRollbackResult (..),
BlockScenario, BlockScenario' (..), SnapshotId,
SnapshotOperation (..), beaInput, beaOutValid, berInput,
berOutValid)
import Pos.Ssc.Configuration (HasSscConfiguration)
import Pos.Txp (MonadTxpLocal)
import Pos.Util.Chrono (NE, OldestFirst)
import Pos.Util.Util (eitherToThrow, lensOf)
import Test.Pos.Block.Logic.Mode (BlockTestContext, PureDBSnapshotsVar (..))
import Test.Pos.Block.Logic.Util (satisfySlotCheck)
data SnapshotMissingEx = SnapshotMissingEx SnapshotId
deriving (Show)
instance Exception SnapshotMissingEx
data DbNotEquivalentToSnapshot = DbNotEquivalentToSnapshot SnapshotId DBPureDiff
deriving (Show)
instance Exception DbNotEquivalentToSnapshot
newtype IsExpected = IsExpected Bool
data BlockEventResult
= BlockEventSuccess IsExpected
| BlockEventFailure IsExpected SomeException
| BlockEventDbChanged DbNotEquivalentToSnapshot
-- | Verify and apply the given blocks, discarding the resulting tip
-- 'HeaderHash'.  Verification failures are thrown via 'eitherToThrow'.
-- The blocks' slots are made acceptable by wrapping the action in
-- 'satisfySlotCheck'.
verifyAndApplyBlocks' ::
       ( HasSscConfiguration
       , HasConfiguration
       , BlockLrcMode BlockTestContext m
       , MonadTxpLocal m
       )
    => OldestFirst NE Blund
    -> m ()
verifyAndApplyBlocks' blunds = do
    satisfySlotCheck blocks $ do
        (_ :: HeaderHash) <- eitherToThrow =<<
            verifyAndApplyBlocks True blocks
        return ()
  where
    -- Keep only the first component of each Blund pair.
    blocks = fst <$> blunds
-- | Execute a single block event.
runBlockEvent ::
( HasSscConfiguration
, HasConfiguration
, BlockLrcMode BlockTestContext m
, MonadTxpLocal m
)
=> BlockEvent
-> m BlockEventResult
runBlockEvent (BlkEvApply ev) =
(onSuccess <$ verifyAndApplyBlocks' (ev ^. beaInput))
`catch` (return . onFailure)
where
onSuccess = case ev ^. beaOutValid of
BlockApplySuccess -> BlockEventSuccess (IsExpected True)
BlockApplyFailure -> BlockEventSuccess (IsExpected False)
onFailure (e :: SomeException) = case ev ^. beaOutValid of
BlockApplySuccess -> BlockEventFailure (IsExpected False) e
BlockApplyFailure -> BlockEventFailure (IsExpected True) e
runBlockEvent (BlkEvRollback ev) =
(onSuccess <$ rollbackBlocks (ev ^. berInput))
`catch` (return . onFailure)
where
onSuccess = case ev ^. berOutValid of
BlockRollbackSuccess -> BlockEventSuccess (IsExpected True)
BlockRollbackFailure _ -> BlockEventSuccess (IsExpected False)
onFailure (e :: SomeException) = case ev ^. berOutValid of
BlockRollbackSuccess -> BlockEventFailure (IsExpected False) e
BlockRollbackFailure brf ->
let
isExpected = case brf of
BlkRbSecurityLimitExceeded
| Just cfe <- fromException e
, CryptokamiFatalError msg <- cfe
, "security risk" `T.isInfixOf` msg ->
True
| otherwise ->
False
in
BlockEventFailure (IsExpected isExpected) e
runBlockEvent (BlkEvSnap ev) =
(onSuccess <$ runSnapshotOperation ev)
`catch` (return . onFailure)
where
onSuccess = BlockEventSuccess (IsExpected True)
onFailure = BlockEventDbChanged
-- | Execute a snapshot operation.
runSnapshotOperation ::
       MonadPureDB BlockTestContext m
    => SnapshotOperation
    -> m ()
runSnapshotOperation snapOp = do
    -- The snapshot store is an IORef (from the reader context) holding
    -- a map from SnapshotId to pure-DB dumps.
    PureDBSnapshotsVar snapsRef <- view (lensOf @PureDBSnapshotsVar)
    case snapOp of
        -- Save: dump the current pure DB under the given id,
        -- overwriting any previous snapshot with that id.
        SnapshotSave snapId -> do
            currentDbState <- dbPureDump
            modifyIORef snapsRef $ Map.insert snapId currentDbState
        -- Load: reset the pure DB to a previously saved snapshot.
        SnapshotLoad snapId -> do
            snap <- getSnap snapsRef snapId
            dbPureReset snap
        -- Eq: compare the current DB against a snapshot; throw
        -- 'DbNotEquivalentToSnapshot' (carrying the diff) on mismatch.
        SnapshotEq snapId -> do
            currentDbState <- dbPureDump
            snap <- getSnap snapsRef snapId
            whenJust (dbPureDiff snap currentDbState) $ \dbDiff ->
                throwM $ DbNotEquivalentToSnapshot snapId dbDiff
  where
    -- Look up a snapshot by id, throwing 'SnapshotMissingEx' if absent.
    getSnap snapsRef snapId = do
        mSnap <- Map.lookup snapId <$> readIORef snapsRef
        maybe (throwM $ SnapshotMissingEx snapId) return mSnap
data BlockScenarioResult
= BlockScenarioFinishedOk
| BlockScenarioUnexpectedSuccess
| BlockScenarioUnexpectedFailure SomeException
| BlockScenarioDbChanged DbNotEquivalentToSnapshot
-- | Execute a block scenario: a sequence of block events that either ends with
-- an expected failure or with a rollback to the initial state.
runBlockScenario ::
       ( MonadPureDB ctx m
       , ctx ~ BlockTestContext
       , HasSscConfiguration
       , HasConfiguration
       , BlockLrcMode BlockTestContext m
       , MonadTxpLocal m
       )
    => BlockScenario
    -> m BlockScenarioResult
-- An empty scenario trivially succeeds.
runBlockScenario (BlockScenario []) =
    return BlockScenarioFinishedOk
runBlockScenario (BlockScenario (ev:evs)) = do
    runBlockEvent ev >>= \case
        -- The event succeeded: continue only if success was expected;
        -- otherwise the scenario succeeded where it should have failed.
        BlockEventSuccess (IsExpected isExp) ->
            if isExp
                then runBlockScenario (BlockScenario evs)
                else return BlockScenarioUnexpectedSuccess
        -- An expected failure ends the scenario successfully (remaining
        -- events are not run); an unexpected one aborts with the exception.
        BlockEventFailure (IsExpected isExp) e ->
            return $ if isExp
                then BlockScenarioFinishedOk
                else BlockScenarioUnexpectedFailure e
        -- A snapshot-equality violation always aborts the scenario.
        BlockEventDbChanged d ->
            return $ BlockScenarioDbChanged d
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/generator/src/Test/Pos/Block/Logic/Event.hs | haskell | * Running events and scenarios
* Exceptions
| Execute a single block event.
| Execute a snapshot operation.
| Execute a block scenario: a sequence of block events that either ends with
an expected failure or with a rollback to the initial state. | # LANGUAGE TypeFamilies #
module Test.Pos.Block.Logic.Event
(
runBlockEvent
, runBlockScenario
, BlockScenarioResult(..)
, SnapshotMissingEx(..)
, DbNotEquivalentToSnapshot(..)
) where
import Universum
import Control.Exception.Safe (fromException)
import qualified Data.Map as Map
import qualified Data.Text as T
import Pos.Block.Logic.VAR (BlockLrcMode, rollbackBlocks, verifyAndApplyBlocks)
import Pos.Block.Types (Blund)
import Pos.Core (HasConfiguration, HeaderHash)
import Pos.DB.Pure (DBPureDiff, MonadPureDB, dbPureDiff, dbPureDump, dbPureReset)
import Pos.Exception (CryptokamiFatalError (..))
import Pos.Generator.BlockEvent (BlockApplyResult (..), BlockEvent, BlockEvent' (..),
BlockRollbackFailure (..), BlockRollbackResult (..),
BlockScenario, BlockScenario' (..), SnapshotId,
SnapshotOperation (..), beaInput, beaOutValid, berInput,
berOutValid)
import Pos.Ssc.Configuration (HasSscConfiguration)
import Pos.Txp (MonadTxpLocal)
import Pos.Util.Chrono (NE, OldestFirst)
import Pos.Util.Util (eitherToThrow, lensOf)
import Test.Pos.Block.Logic.Mode (BlockTestContext, PureDBSnapshotsVar (..))
import Test.Pos.Block.Logic.Util (satisfySlotCheck)
data SnapshotMissingEx = SnapshotMissingEx SnapshotId
deriving (Show)
instance Exception SnapshotMissingEx
data DbNotEquivalentToSnapshot = DbNotEquivalentToSnapshot SnapshotId DBPureDiff
deriving (Show)
instance Exception DbNotEquivalentToSnapshot
newtype IsExpected = IsExpected Bool
data BlockEventResult
= BlockEventSuccess IsExpected
| BlockEventFailure IsExpected SomeException
| BlockEventDbChanged DbNotEquivalentToSnapshot
verifyAndApplyBlocks' ::
( HasSscConfiguration
, HasConfiguration
, BlockLrcMode BlockTestContext m
, MonadTxpLocal m
)
=> OldestFirst NE Blund
-> m ()
verifyAndApplyBlocks' blunds = do
satisfySlotCheck blocks $ do
(_ :: HeaderHash) <- eitherToThrow =<<
verifyAndApplyBlocks True blocks
return ()
where
blocks = fst <$> blunds
runBlockEvent ::
( HasSscConfiguration
, HasConfiguration
, BlockLrcMode BlockTestContext m
, MonadTxpLocal m
)
=> BlockEvent
-> m BlockEventResult
runBlockEvent (BlkEvApply ev) =
(onSuccess <$ verifyAndApplyBlocks' (ev ^. beaInput))
`catch` (return . onFailure)
where
onSuccess = case ev ^. beaOutValid of
BlockApplySuccess -> BlockEventSuccess (IsExpected True)
BlockApplyFailure -> BlockEventSuccess (IsExpected False)
onFailure (e :: SomeException) = case ev ^. beaOutValid of
BlockApplySuccess -> BlockEventFailure (IsExpected False) e
BlockApplyFailure -> BlockEventFailure (IsExpected True) e
runBlockEvent (BlkEvRollback ev) =
(onSuccess <$ rollbackBlocks (ev ^. berInput))
`catch` (return . onFailure)
where
onSuccess = case ev ^. berOutValid of
BlockRollbackSuccess -> BlockEventSuccess (IsExpected True)
BlockRollbackFailure _ -> BlockEventSuccess (IsExpected False)
onFailure (e :: SomeException) = case ev ^. berOutValid of
BlockRollbackSuccess -> BlockEventFailure (IsExpected False) e
BlockRollbackFailure brf ->
let
isExpected = case brf of
BlkRbSecurityLimitExceeded
| Just cfe <- fromException e
, CryptokamiFatalError msg <- cfe
, "security risk" `T.isInfixOf` msg ->
True
| otherwise ->
False
in
BlockEventFailure (IsExpected isExpected) e
runBlockEvent (BlkEvSnap ev) =
(onSuccess <$ runSnapshotOperation ev)
`catch` (return . onFailure)
where
onSuccess = BlockEventSuccess (IsExpected True)
onFailure = BlockEventDbChanged
runSnapshotOperation ::
MonadPureDB BlockTestContext m
=> SnapshotOperation
-> m ()
runSnapshotOperation snapOp = do
PureDBSnapshotsVar snapsRef <- view (lensOf @PureDBSnapshotsVar)
case snapOp of
SnapshotSave snapId -> do
currentDbState <- dbPureDump
modifyIORef snapsRef $ Map.insert snapId currentDbState
SnapshotLoad snapId -> do
snap <- getSnap snapsRef snapId
dbPureReset snap
SnapshotEq snapId -> do
currentDbState <- dbPureDump
snap <- getSnap snapsRef snapId
whenJust (dbPureDiff snap currentDbState) $ \dbDiff ->
throwM $ DbNotEquivalentToSnapshot snapId dbDiff
where
getSnap snapsRef snapId = do
mSnap <- Map.lookup snapId <$> readIORef snapsRef
maybe (throwM $ SnapshotMissingEx snapId) return mSnap
data BlockScenarioResult
= BlockScenarioFinishedOk
| BlockScenarioUnexpectedSuccess
| BlockScenarioUnexpectedFailure SomeException
| BlockScenarioDbChanged DbNotEquivalentToSnapshot
runBlockScenario ::
( MonadPureDB ctx m
, ctx ~ BlockTestContext
, HasSscConfiguration
, HasConfiguration
, BlockLrcMode BlockTestContext m
, MonadTxpLocal m
)
=> BlockScenario
-> m BlockScenarioResult
runBlockScenario (BlockScenario []) =
return BlockScenarioFinishedOk
runBlockScenario (BlockScenario (ev:evs)) = do
runBlockEvent ev >>= \case
BlockEventSuccess (IsExpected isExp) ->
if isExp
then runBlockScenario (BlockScenario evs)
else return BlockScenarioUnexpectedSuccess
BlockEventFailure (IsExpected isExp) e ->
return $ if isExp
then BlockScenarioFinishedOk
else BlockScenarioUnexpectedFailure e
BlockEventDbChanged d ->
return $ BlockScenarioDbChanged d
|
cdf68b3f13ee7d8496b36a532d572b61441cea05ceeee9d5be641c69ab74ae4d | bos/stanford-cs240h | St.hs | import Control.Monad.ST
import Data.STRef
whee :: Int -> ST s Int
whee z = do
r <- newSTRef z
modifySTRef r (+1)
readSTRef r
| null | https://raw.githubusercontent.com/bos/stanford-cs240h/ef304e15ae74bb13bdcbb432b18519b9b24a1a14/notes/l7/St.hs | haskell | import Control.Monad.ST
import Data.STRef
whee :: Int -> ST s Int
whee z = do
r <- newSTRef z
modifySTRef r (+1)
readSTRef r
|
|
a94284f0c442701727a3a7a502a87e1967306135781f833b82a16e075a51e055 | johnlawrenceaspden/hobby-code | deterministicrandom.clj | There should be 64 bits in a random long . Each random number in 0 .. 7 is three bits
So we can get 21 random numbers in 0 .. 7 out of a random long , with a bit left
(def doom (java.util.Random. 0)) ;-> #'user/doom
(unpack (repeat 21 8) (.nextLong doom)) ;-> (0 7 4 0 5 2 6 6 4 2 7 7 5 0 5 5 0 4 4 5 3)
;; Attempts to get more are somewhat unsuccessful
- > ( 6 7 2 0 7 2 6 1 3 3 6 4 7 6 2 6 3 2 6 6 3 0 )
- > ( 2 7 0 0 7 3 7 0 7 1 1 3 7 3 2 6 5 5 4 1 2 7 7 )
- > ( 3 0 4 2 2 0 5 7 5 3 4 3 3 1 4 3 1 5 3 6 0 7 7 )
- > ( 5 5 5 3 2 5 4 0 0 1 7 6 3 4 6 5 0 7 3 4 1 7 7 )
- > ( 3 1 1 0 7 3 5 5 0 0 5 5 2 2 3 6 5 1 5 2 5 0 0 )
- > ( 5 1 5 4 6 7 3 7 4 3 2 1 2 4 0 6 3 3 2 1 6 0 0 )
(unpack (repeat 23 8) (.nextLong doom)) ;-> (0 3 5 7 0 4 6 4 3 7 3 7 2 6 4 4 6 3 0 6 7 7 7)
- > ( 7 0 4 0 3 1 2 7 5 0 0 4 0 6 6 0 2 2 4 0 6 7 7 )
(unpack (repeat 23 8) (.nextLong doom)) ;-> (1 6 3 0 5 1 3 0 5 5 0 1 7 1 5 5 5 6 3 0 7 7 7)
Linear Congruential Random Number Generators
(defn iterator
  "Build one step of a linear-congruential generator:
   x -> (a*x + b) mod 2^31."
  [a b]
  (let [modulus (bit-shift-left 1 31)]
    (fn [x] (mod (+ (* a x) b) modulus))))
(def bsd (drop 1 (iterate (iterator 1103515245 12345) 0)))
(def ms (drop 1 (for [x (iterate (iterator 214013 2531011) 0)] (bit-shift-right x 16))))
- > ( 12345 1406932606 654583775 1449466924 229283573 1109335178 1051550459 1293799192 794471793 551188310 )
- > ( 38 7719 21238 2437 8855 11797 8365 32285 10450 30612 )
(time (nth (drop 1 (iterate (iterator 1103515245 12345) 0)) 1000000))
"Elapsed time: 2587.596789 msecs"
1905486841
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/deterministicrandom.clj | clojure | -> #'user/doom
-> (0 7 4 0 5 2 6 6 4 2 7 7 5 0 5 5 0 4 4 5 3)
Attempts to get more are somewhat unsuccessful
-> (0 3 5 7 0 4 6 4 3 7 3 7 2 6 4 4 6 3 0 6 7 7 7)
-> (1 6 3 0 5 1 3 0 5 5 0 1 7 1 5 5 5 6 3 0 7 7 7) | There should be 64 bits in a random long . Each random number in 0 .. 7 is three bits
So we can get 21 random numbers in 0 .. 7 out of a random long , with a bit left
- > ( 6 7 2 0 7 2 6 1 3 3 6 4 7 6 2 6 3 2 6 6 3 0 )
- > ( 2 7 0 0 7 3 7 0 7 1 1 3 7 3 2 6 5 5 4 1 2 7 7 )
- > ( 3 0 4 2 2 0 5 7 5 3 4 3 3 1 4 3 1 5 3 6 0 7 7 )
- > ( 5 5 5 3 2 5 4 0 0 1 7 6 3 4 6 5 0 7 3 4 1 7 7 )
- > ( 3 1 1 0 7 3 5 5 0 0 5 5 2 2 3 6 5 1 5 2 5 0 0 )
- > ( 5 1 5 4 6 7 3 7 4 3 2 1 2 4 0 6 3 3 2 1 6 0 0 )
- > ( 7 0 4 0 3 1 2 7 5 0 0 4 0 6 6 0 2 2 4 0 6 7 7 )
Linear Congruential Random Number Generators
(defn iterator [a b]
(fn[x] (mod (+ (* a x) b) (bit-shift-left 1 31))))
(def bsd (drop 1 (iterate (iterator 1103515245 12345) 0)))
(def ms (drop 1 (for [x (iterate (iterator 214013 2531011) 0)] (bit-shift-right x 16))))
- > ( 12345 1406932606 654583775 1449466924 229283573 1109335178 1051550459 1293799192 794471793 551188310 )
- > ( 38 7719 21238 2437 8855 11797 8365 32285 10450 30612 )
(time (nth (drop 1 (iterate (iterator 1103515245 12345) 0)) 1000000))
"Elapsed time: 2587.596789 msecs"
1905486841
|
5c6f7d4467ffc7697feeb94fc96e8e2ed849c9a24ff34a0f138be98d6ef56515 | thephoeron/quipper-language | Circuit.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
{-# LANGUAGE BangPatterns #-}
# LANGUAGE ExistentialQuantification #
{-# LANGUAGE DeriveDataTypeable #-}
-- | Low-level quantum circuit implementation. This is our backend
-- implementation of quantum circuits. Note: there is no run-time
-- error checking at the moment.
--
-- At its heart, a circuit is a list of gates. All well-definedness
-- checking (e.g. input arity, output arity, and checking that the
-- intermediate gates are connected to legitimate wires) is done
-- dynamically, at circuit generation time, and is not stored within
-- the circuit itself. This allows circuits to be produced and
-- consumed lazily.
--
-- Implementation note: this file is in the intermediate stage of a
-- code refactoring, and should be considered \"under renovation\".
module Quipper.Circuit where
import other Quipper stuff
import Libraries.Auxiliary
-- import other stuff
import Data.List
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Typeable
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
-- ----------------------------------------------------------------------
* Quantum circuit data type
-- | Wire identifier. Wires are currently identified by an integer,
-- but the users of this interface should be oblivious to this.
type Wire = Int
-- | Wire type. A wire is either quantum or classical.
-- (Reconstructed: the @data Wiretype = Qbit@ header of this
-- declaration was garbled in this copy of the file.)
data Wiretype = Qbit -- ^ Quantum wire.
              | Cbit -- ^ Classical wire.
              deriving (Show, Eq, Typeable)
-- | An arity, also known as a typing context, is a map from a finite
-- set of wires to wire types.
type Arity = IntMap Wiretype
| A signed item of type /a/. ' Signed ' /x/ ' True ' represents a
-- positive item, and 'Signed' /x/ 'False' represents a negative item.
--
-- When used with wires in a circuit, a positive sign is used to
-- represent a positive control, i.e., a filled dot, and a negative
-- sign is used to represent a negative control, i.e., an empty dot.
data Signed a = Signed a Bool
deriving (Show, Typeable)
-- | Project the payload out of a 'Signed' value, discarding its sign.
from_signed :: Signed a -> a
from_signed (Signed x _) = x
-- | Project the sign out of a 'Signed' value: 'True' means positive
-- (a filled control dot), 'False' means negative (an empty control dot).
get_sign :: Signed a -> Bool
get_sign (Signed _ sign) = sign
-- | A list of controlled wires, possibly empty.
type Controls = [Signed Wire]
-- | A time step is a small floating point number used as a
-- parameter to certain gates, such as rotation gates or the
-- [exp −/iZt/] gate.
type Timestep = Double
-- | A flag that, if 'True', indicates that the gate is inverted.
type InverseFlag = Bool
-- | A flag that, if 'True', indicates that the gate is controllable,
-- but any further controls on the gate should be ignored. This is
-- used, e.g., for circuits consisting of a basis change, some
-- operation, and the inverse basis change. When controlling such a
-- circuit, it is sufficient to control the middle operation, so the
-- gates belonging to the basis change and its inverse will have the
NoControlFlag set .
type NoControlFlag = Bool
-- | A flag, to specify if the corresponding subroutine can be controlled.
-- Either no control allowed, or all controls, or only classical.
data ControllableFlag = NoCtl | AllCtl | OnlyClassicalCtl
deriving (Eq, Ord, Show)
-- | An identifier for a subroutine. A boxed subroutine is currently
-- identified by a pair of: the user-defined name of the subroutine;
-- and a value uniquely identifying the type and shape of the argument.
--
-- For now, we represent the shape as a string, because this gives an
easy total ' ' instance , needed for " Data . Map " . However , in
-- principle, one could also use a pair of a type representation and a
-- shape term. The implementation of this may change later.
data BoxId = BoxId String String
deriving (Eq, Ord, Show)
-- | A flag that indicates how many times a particular subroutine
should be repeated . If non - zero , it implies some constraints on
-- the type of the subroutine.
data RepeatFlag = RepeatFlag Integer
deriving (Eq,Ord)
instance Show RepeatFlag where
show (RepeatFlag n) = show n
-- When changing the 'Gate' datatype, also remember to update
' gate_arity ' , ' gate_controls ' , and ' ' below .
-- | The low-level representation of gates.
data Gate =
-- Named reversible quantum gates.
QGate String InverseFlag [Wire] [Wire] Controls NoControlFlag
-- ^ A named reversible quantum gate: @'Qbit'^(m+n) ->
' Qbit'^(m+n)@. The second @['Wire']@ argument should be
-- \"generalized controls\", i.e. wires not modified by the
-- gate. The gate type is uniquely determined by: the name, the
-- number of inputs, and the number of generalized controls. Gates
-- that differ in one of these respects should be regarded as
-- different gates.
| QRot String InverseFlag Timestep [Wire] [Wire] Controls NoControlFlag
-- ^ A named reversible quantum gate that also depends on a real
-- parameter. This is typically used for phase and rotation
-- gates. The gate name can contain \'%\' as a place holder for
-- the parameter, e.g., @\"exp(-i%Z)\"@. The remaining arguments
are as for ' QGate ' .
-- A nullary quantum gate.
| GPhase Timestep [Wire] Controls NoControlFlag
^ Global phase gate : @'1 ' - > ' 1'@. The list of wires is just a hint for graphical rendering .
-- Some classical gates.
| CNot Wire Controls NoControlFlag
-- ^ Classical not: @'Cbit' -> 'Cbit'@.
| CGate String Wire [Wire] NoControlFlag
-- ^ Generic classical gate @1 -> 'Cbit'@.
| CGateInv String Wire [Wire] NoControlFlag
-- ^ Uncompute classical gate @'Cbit' -> 1@, asserting that the
-- classical bit is in the state specified by the corresponding
-- 'CGate'.
| CSwap Wire Wire Controls NoControlFlag
-- ^ Classical swap gate: @'Cbit' * 'Cbit' -> 'Cbit' * 'Cbit'@.
-- Initialization and assertive termination.
| QPrep Wire NoControlFlag
-- ^ Initialization: @'Cbit' -> 'Qbit'@.
| QUnprep Wire NoControlFlag
-- ^ Measurement @'Qbit' -> 'Cbit'@ with an assertion that the
-- qubit is already in a computational basis state. This kind of
-- measurement loses no information, and is formally the inverse
of ' QPrep ' .
| QInit Bool Wire NoControlFlag
-- ^ Initialization: @'Bool' -> 'Qbit'@.
| CInit Bool Wire NoControlFlag
-- ^ Initialization: @'Bool' -> 'Cbit'@.
| QTerm Bool Wire NoControlFlag
-- ^ Termination of a 'Qbit' wire with assertion
-- that the qubit is in the specified state:
-- @'Qbit' * 'Bool' -> 1@.
| CTerm Bool Wire NoControlFlag
-- ^ Termination of a 'Cbit' wire with assertion
-- that the bit is in the specified state:
-- @'Cbit' * 'Bool' -> 1@.
-- Measurement.
| QMeas Wire
-- ^ Measurement: @'Qbit' -> 'Cbit'@.
| QDiscard Wire
-- ^ Termination of a 'Qbit' wire without
assertion : ' - > 1@
| CDiscard Wire
-- ^ Termination of a 'Cbit' wire without
-- assertion: @'Cbit' -> 1@
-- Dynamic termination.
| DTerm Bool Wire
-- ^ Termination of a 'Cbit' wire, with a comment indicating what
-- the observed state of that wire was. This is typically inserted
-- in a circuit after a dynamic lifting is performed. Unlike
-- 'CTerm', this is in no way an assertion, but simply a record of
-- observed behavior during a particular run of the algorithm.
-- Subroutines.
| Subroutine BoxId InverseFlag [Wire] Arity [Wire] Arity Controls NoControlFlag ControllableFlag RepeatFlag
-- ^ Reference to a subroutine, assumed to be bound to another
-- circuit. Arbitrary input and output arities. The domain of /a1/
-- must include the range of /ws1/, and similarly for /a2/ and /ws2/.
-- Comments.
| Comment String InverseFlag [(Wire,String)]
-- ^ A comment. Does nothing, but can be useful for marking a
-- location or some wires in a circuit.
deriving Show
-- ----------------------------------------------------------------------
-- * Basic information about gates
-- The following functions must be updated each time the 'Gate' data
-- type is changed.
-- | Compute the incoming and outgoing wires of a given gate
-- (excluding controls, comments, and anchors). This essentially
-- encodes the type information of the basic gates. If a wire is used
-- multiple times as an input or output, then 'gate_arity' also
-- returns it multiple times; this enables run-time type checking.
--
-- Note that 'gate_arity' returns the /logical/ wires, and therefore
-- excludes things like labels, comments, and graphical anchors. This
-- is in contrast to 'wires_of_gate', which returns the /syntactic/
-- set of wires used by the gate.
gate_arity :: Gate -> ([(Wire, Wiretype)], [(Wire, Wiretype)])
-- Named quantum gates touch all of their wires, including the
-- "generalized controls" ws2, and leave every wire of type Qbit.
gate_arity (QGate n inv ws1 ws2 c ncf) = (map (\w -> (w,Qbit)) (ws1 ++ ws2) ,map (\w -> (w,Qbit)) (ws1 ++ ws2))
gate_arity (QRot n inv t ws1 ws2 c ncf) = (map (\w -> (w,Qbit)) (ws1 ++ ws2) ,map (\w -> (w,Qbit)) (ws1 ++ ws2))
-- GPhase is logically nullary: its wire list is only a rendering hint.
gate_arity (GPhase t w c ncf) = ([], [])
gate_arity (CNot w c ncf) = ([(w, Cbit)], [(w, Cbit)])
-- CGate creates wire w (appears only in the outputs); the argument
-- wires ws pass through unchanged.
gate_arity (CGate n w ws ncf) = (cs, (w, Cbit) : cs)
  where cs = map (\x -> (x, Cbit)) ws
-- CGateInv consumes wire w (appears only in the inputs); inverse of CGate.
gate_arity (CGateInv n w ws ncf) = ((w, Cbit) : cs, cs)
  where cs = map (\x -> (x, Cbit)) ws
gate_arity (CSwap w1 w2 c ncf) = ([(w1, Cbit), (w2, Cbit)], [(w1, Cbit), (w2, Cbit)])
-- Preparation/unpreparation convert between classical and quantum wires.
gate_arity (QPrep w ncf) = ([(w, Cbit)], [(w, Qbit)])
gate_arity (QUnprep w ncf) = ([(w, Qbit)], [(w, Cbit)])
-- Initializations create a wire; terminations and discards consume one.
gate_arity (QInit b w ncf) = ([], [(w, Qbit)])
gate_arity (CInit b w ncf) = ([], [(w, Cbit)])
gate_arity (QTerm b w ncf) = ([(w, Qbit)], [])
gate_arity (CTerm b w ncf) = ([(w, Cbit)], [])
gate_arity (QMeas w) = ([(w, Qbit)], [(w, Cbit)])
gate_arity (QDiscard w) = ([(w, Qbit)], [])
gate_arity (CDiscard w) = ([(w, Cbit)], [])
gate_arity (DTerm b w) = ([(w, Cbit)], [])
-- Subroutine wire types are looked up in the recorded arity maps.
-- NOTE(review): the fromJust is partial; it presumably relies on the
-- documented invariant that a1/a2 cover ws1/ws2 -- confirm at call sites.
gate_arity (Subroutine n inv ws1 a1 ws2 a2 c ncf ctrble _) = (getTypes ws1 a1, getTypes ws2 a2)
  where getTypes ws a = map (\n -> (n, fromJust (IntMap.lookup n a))) ws
-- Comments are not logical gates: no inputs, no outputs.
gate_arity (Comment s inv ws) = ([], [])
-- | Return the controls of a gate (or an empty list if the gate has
-- no controls).
gate_controls :: Gate -> Controls
gate_controls (QGate n inv ws1 ws2 c ncf) = c
gate_controls (QRot n inv t ws1 ws2 c ncf) = c
gate_controls (GPhase t w c ncf) = c
gate_controls (CNot w c ncf) = c
gate_controls (CGate n w ws ncf) = []
gate_controls (CGateInv n w ws ncf) = []
gate_controls (CSwap w1 w2 c ncf) = c
gate_controls (QPrep w ncf) = []
gate_controls (QUnprep w ncf) = []
gate_controls (QInit b w ncf) = []
gate_controls (CInit b w ncf) = []
gate_controls (QTerm b w ncf) = []
gate_controls (CTerm b w ncf) = []
gate_controls (QMeas w) = []
gate_controls (QDiscard w) = []
gate_controls (CDiscard w) = []
gate_controls (DTerm b w) = []
gate_controls (Subroutine n inv ws1 a1 ws2 a2 c ncf ctrble _) = c
gate_controls (Comment s inv ws) = []
-- | Return the 'NoControlFlag' of a gate, or 'False' if it doesn't have one.
gate_ncflag :: Gate -> NoControlFlag
gate_ncflag (QGate n inv ws1 ws2 c ncf) = ncf
gate_ncflag (QRot n inv t ws1 ws2 c ncf) = ncf
gate_ncflag (GPhase t w c ncf) = ncf
gate_ncflag (CNot w c ncf) = ncf
gate_ncflag (CGate n w ws ncf) = ncf
gate_ncflag (CGateInv n w ws ncf) = ncf
gate_ncflag (CSwap w1 w2 c ncf) = ncf
gate_ncflag (QPrep w ncf) = ncf
gate_ncflag (QUnprep w ncf) = ncf
gate_ncflag (QInit b w ncf) = ncf
gate_ncflag (CInit b w ncf) = ncf
gate_ncflag (QTerm b w ncf) = ncf
gate_ncflag (CTerm b w ncf) = ncf
gate_ncflag (Subroutine n inv ws1 a1 ws2 a2 c ncf ctrble _) = ncf
-- The remaining gates don't have a 'NoControlFlag'. We list them
-- explicitly, so that the typechecker can warn us about new gates
-- that must be added here.
gate_ncflag (QMeas _) = False
gate_ncflag (QDiscard _) = False
gate_ncflag (CDiscard _) = False
gate_ncflag (DTerm _ _) = False
gate_ncflag (Comment _ _ _) = False
-- | Apply the given 'NoControlFlag' to the given 'Gate'. This means,
if the first parameter is ' True ' , set the gate 's ' NoControlFlag ' ,
-- otherwise do nothing. Throw an error if attempting to set the
-- 'NoControlFlag' on a gate that can't support this flag.
gate_with_ncflag :: NoControlFlag -> Gate -> Gate
gate_with_ncflag False gate = gate
gate_with_ncflag True (QGate n inv ws1 ws2 c _) = (QGate n inv ws1 ws2 c True)
gate_with_ncflag True (QRot n inv t ws1 ws2 c _) = (QRot n inv t ws1 ws2 c True)
gate_with_ncflag True (GPhase t w c _) = (GPhase t w c True)
gate_with_ncflag True (CNot w c _) = (CNot w c True)
gate_with_ncflag True (CGate n w ws _) = (CGate n w ws True)
gate_with_ncflag True (CGateInv n w ws _) = (CGateInv n w ws True)
gate_with_ncflag True (CSwap w1 w2 c _) = (CSwap w1 w2 c True)
gate_with_ncflag True (QPrep w _) = (QPrep w True)
gate_with_ncflag True (QUnprep w _) = (QUnprep w True)
gate_with_ncflag True (QInit b w _) = (QInit b w True)
gate_with_ncflag True (CInit b w _) = (CInit b w True)
gate_with_ncflag True (QTerm b w _) = (QTerm b w True)
gate_with_ncflag True (CTerm b w _) = (CTerm b w True)
gate_with_ncflag True (Subroutine n inv ws1 a1 ws2 a2 c _ ctrble repeat) = (Subroutine n inv ws1 a1 ws2 a2 c True ctrble repeat)
gate_with_ncflag True (Comment s inv ws) = (Comment s inv ws)
-- The remaining gates can't have their 'NoControlFlag' set. We list
-- them explicitly, so that the typechecker can warn us about new
-- gates that must be added here.
gate_with_ncflag True g@(QMeas _) =
error ("gate " ++ show g ++ " can't be used in a without_controls context")
gate_with_ncflag True g@(QDiscard _) =
error ("gate " ++ show g ++ " can't be used in a without_controls context")
gate_with_ncflag True g@(CDiscard _) =
error ("gate " ++ show g ++ " can't be used in a without_controls context")
gate_with_ncflag True g@(DTerm _ _) =
error ("gate " ++ show g ++ " can't be used in a without_controls context")
-- | Reverse a gate. Throw an error if the gate is not reversible.
gate_reverse :: Gate -> Gate
-- Named gates are reversed by flipping their InverseFlag.
gate_reverse (QGate n inv ws1 ws2 c ncf) = QGate n (not inv) ws1 ws2 c ncf
gate_reverse (QRot n inv t ws1 ws2 c ncf) = QRot n (not inv) t ws1 ws2 c ncf
-- A global phase is inverted by negating the angle.
gate_reverse (GPhase t w c ncf) = GPhase (-t) w c ncf
-- Classical not and swap are their own inverses.
gate_reverse (CNot w c ncf) = CNot w c ncf
-- Compute/uncompute pairs are inverses of each other:
-- CGate <-> CGateInv, QPrep <-> QUnprep, QInit <-> QTerm, CInit <-> CTerm.
gate_reverse (CGate n w ws ncf) = CGateInv n w ws ncf
gate_reverse (CGateInv n w ws ncf) = CGate n w ws ncf
gate_reverse (CSwap w1 w2 c ncf) = CSwap w1 w2 c ncf
gate_reverse (QPrep w ncf) = QUnprep w ncf
gate_reverse (QUnprep w ncf) = QPrep w ncf
gate_reverse (QInit b w ncf) = QTerm b w ncf
gate_reverse (CInit b w ncf) = CTerm b w ncf
gate_reverse (QTerm b w ncf) = QInit b w ncf
gate_reverse (CTerm b w ncf) = CInit b w ncf
-- A subroutine is reversed by flipping its InverseFlag and swapping
-- the input/output wire lists and arities.
gate_reverse (Subroutine name inv ws1 a1 ws2 a2 c ncf ctrble repeat) = Subroutine name (not inv) ws2 a2 ws1 a1 c ncf ctrble repeat
gate_reverse (Comment s inv ws) = Comment s (not inv) ws
-- The remaining gates are not reversible. We list them explicitly, so
-- that the typechecker can warn us about new gates that must be added
-- here.
gate_reverse g@(QMeas _) = error ("gate_reverse: gate not reversible: " ++ show g)
gate_reverse g@(QDiscard _) = error ("gate_reverse: gate not reversible: " ++ show g)
gate_reverse g@(CDiscard _) = error ("gate_reverse: gate not reversible: " ++ show g)
gate_reverse g@(DTerm _ _) = error ("gate_reverse: gate not reversible: " ++ show g)
-- ----------------------------------------------------------------------
-- * Auxiliary functions on gates and wires
-- | The set of wires mentioned by a list of controls (signs are ignored).
wires_of_controls :: Controls -> IntSet
wires_of_controls cs = IntSet.fromList [ w | Signed w _ <- cs ]
-- | Return the set of wires used by a gate (including controls,
-- labels, and anchors).
--
-- Unlike 'gate_arity', the function 'wires_of_gate' is used for
-- printing, and therefore returns all wires that are syntactically
-- used by the gate, irrespective of whether they have a logical
-- meaning.
wires_of_gate :: Gate -> IntSet
wires_of_gate (Comment s inv ws) =
intset_inserts (map fst ws) (IntSet.empty)
wires_of_gate (GPhase t w c ncf) =
intset_inserts w (wires_of_controls c)
wires_of_gate g = intset_inserts w1 (intset_inserts w2 (wires_of_controls c))
where
(a1, a2) = gate_arity g
c = gate_controls g
w1 = map fst a1
w2 = map fst a2
-- | Like 'wires_of_gate', except return the wires as a list
-- (ascending order, as produced by 'IntSet.toList').
wirelist_of_gate :: Gate -> [Wire]
wirelist_of_gate = IntSet.toList . wires_of_gate
-- ----------------------------------------------------------------------
-- * Dynamic arities
-- | Recall that an 'Arity' is a set of typed wires, and it determines
-- the external interfaces at which circuits and gates can be
connected . The type ' ExtArity ' stores the same information as the
-- type 'Arity', but in a format that is more optimized for efficient
-- updating. Additionally, it also stores the set of wires ever used.
type ExtArity = XIntMap Wiretype
-- | Check whether the given gate is well-formed and can be legally
-- applied in the context of the given arity. If successful, return
-- the updated arity resulting from the gate application. If
-- unsuccessful, raise an error. Properties checked are:
--
-- * that each gate has non-overlapping inputs, including controls;
--
-- * that each gate has non-overlapping outputs, including controls;
--
-- * that the inputs of the gate (including controls) are actually
-- present in the current arity;
--
-- * that the types of the inputs (excluding controls) match those of
-- the current arity;
--
-- * that the outputs of the gate (excluding controls) don't conflict
-- with any wires already existing in the current arity.
arity_append_safe :: Gate -> ExtArity -> ExtArity
arity_append_safe gate a0 =
  case (err0, err1, err2, err3, err4) of
    (True, _, _, _, _) ->
      error $ "Gate error: duplicate inputs in " ++ show gate
    (_, True, _, _, _) ->
      error $ "Gate error: duplicate outputs in " ++ show gate
    (_, _, Just w, _, _) ->
      error $ "Gate application error: no such wire " ++ show w ++ ": " ++ show gate
    (_, _, _, Just (w,t), _) ->
      -- Fixed: a 'where' clause cannot be attached to a case alternative
      -- in Haskell; bind the type actually found in the arity with 'let'.
      let Just t' = xintmap_lookup w a0 in
      error $ "Gate application error: wire " ++ show w ++ ":" ++ show t ++ " has wrong type " ++ show t' ++ ": " ++ show gate
    (_, _, _, _, Just w) ->
      error $ "Gate application error: wire " ++ show w ++ " already exists: " ++ show gate
    _ -> a2
  where
    -- Logical inputs and outputs of the gate (excluding controls).
    (win, wout) = gate_arity gate
    -- Wires used as controls.
    c_ids = map from_signed (gate_controls gate)
    win_ids = map fst win
    wout_ids = map fst wout
    -- err0/err1: overlapping inputs resp. outputs, including controls.
    err0 = has_duplicates (win_ids ++ c_ids)
    err1 = has_duplicates (wout_ids ++ c_ids)
    -- err2: some input or control is absent from the current arity.
    err2 = find (\w -> not $ xintmap_member w a0) (win_ids ++ c_ids)
    -- err3: some input's recorded type differs from the gate's type.
    err3 = find (\(w,t) -> not $ xintmap_lookup w a0 == Just t) win
    -- err4: some output clashes with a wire still present after the
    -- inputs have been removed.
    err4 = find (\w -> xintmap_member w a1) wout_ids
    a1 = xintmap_deletes win_ids a0
    a2 = xintmap_inserts wout a1
-- | Like 'arity_append', but without type checking. This is
-- potentially faster, but should only used in applications that have
-- already been thoroughly tested or type-checked.
arity_append_unsafe :: Gate -> ExtArity -> ExtArity
arity_append_unsafe gate arity =
  let (inputs, outputs) = gate_arity gate
      arity' = xintmap_deletes (map fst inputs) arity
  in
    xintmap_inserts outputs arity'
-- | For now, we disable run-time type checking, because we have not
-- yet implemented run-time types properly. Therefore, we define
-- 'arity_append' to be a synonym for 'arity_append_unsafe'.
arity_append :: Gate -> ExtArity -> ExtArity
arity_append = arity_append_unsafe  -- run-time type checking currently disabled
-- | Return an empty arity.
arity_empty :: ExtArity
arity_empty = xintmap_empty

-- | Return a wire unused in the current arity.
arity_unused_wire :: ExtArity -> Wire
arity_unused_wire = xintmap_freshkey

-- | Return the next /k/ wires unused in the current arity.
arity_unused_wires :: Int -> ExtArity -> [Wire]
arity_unused_wires = xintmap_freshkeys
-- | Add a new typed wire to the current arity. This returns a new
-- wire and the updated arity.
arity_alloc :: Wiretype -> ExtArity -> (Wire, ExtArity)
arity_alloc wtype arity =
  let fresh = xintmap_freshkey arity
  in (fresh, xintmap_insert fresh wtype arity)
-- | Convert an extended arity to an ordinary arity.
arity_of_extarity :: ExtArity -> Arity
arity_of_extarity = xintmap_to_intmap

-- | Return the smallest wire id nowhere used in the circuit.
n_of_extarity :: ExtArity -> Int
n_of_extarity = xintmap_size
-- ----------------------------------------------------------------------
-- * Circuit abstraction
-- | A completed circuit /(a1,gs,a2,n)/ has an input arity /a1/, a
-- list of gates /gs/, and an output arity /a2/. We also record /n/,
-- the total number of wires used by the circuit. Because wires are
-- allocated consecutively, this means that the wire id's used are
-- [0../n/-1].
type Circuit = (Arity, [Gate], Arity, Int)
-- | Return the set of all the wires in a circuit.
wirelist_of_circuit :: Circuit -> [Wire]
wirelist_of_circuit (_, _, _, n) = take n [0 ..]
-- ----------------------------------------------------------------------
-- ** Reversing low-level circuits
-- | Reverse a gate list.
reverse_gatelist :: [Gate] -> [Gate]
reverse_gatelist = reverse . map gate_reverse
-- | Reverse a circuit. Throw an error if the circuit is not reversible.
reverse_circuit :: Circuit -> Circuit
reverse_circuit (a_in, gates, a_out, n) =
  (a_out, reverse_gatelist gates, a_in, n)
-- ----------------------------------------------------------------------
-- ** NoControlFlag on low-level circuits
-- | Set the 'NoControlFlag' on all gates of a circuit.
circuit_to_nocontrol :: Circuit -> Circuit
circuit_to_nocontrol (a1, gates, a2, n) =
  (a1, [ gate_with_ncflag True g | g <- gates ], a2, n)
-- ----------------------------------------------------------------------
-- ** Ordered circuits
-- | An ordered circuit is a 'Circuit' together with an ordering on
-- (usually all, but potentially a subset of) the input and output
-- endpoints.
--
-- This extra information is required when a circuit is used within a
-- larger circuit (e.g. via a 'Subroutine' gate), to identify which wires
-- of the sub-circuit should be bound to which wires of the surrounding
-- circuit.
newtype OCircuit = OCircuit ([Wire], Circuit, [Wire])
-- | Reverse an 'OCircuit'. Throw an error if the circuit is not reversible.
reverse_ocircuit :: OCircuit -> OCircuit
reverse_ocircuit (OCircuit (ws_in, circ, ws_out)) =
  -- Bug fix: the reversed circuit takes its inputs in the old output
  -- order /ws_out/ and yields its outputs in the old input order
  -- /ws_in/. The original returned /ws_out/ in both positions, leaving
  -- /ws_in/ unused.
  OCircuit (ws_out, reverse_circuit circ, ws_in)
-- ----------------------------------------------------------------------
-- ** Annotated circuits
-- | One often wants to consider the inputs and outputs of a circuit as
-- more structured/typed than just lists of bits/qubits; for instance,
-- a list of six qubits could be structured as a pair of triples, or a
-- triple of pairs, or a six-bit 'QDInt'.
--
-- While for the most part this typing information is not included in
-- low-level circuits, we need to consider it in hierarchical circuits,
-- so that the information stored in a subroutine is sufficient to call
-- the subroutine in a typed context.
--
-- Specifically, the extra information needed consists of functions to
-- destructure the input/output data as a list of typed wires, and
-- restructure such a list of wires into a piece of data of the appropriate
-- type.
data CircuitTypeStructure a =
  CircuitTypeStructure
    (a -> ([Wire],Arity))   -- destructure: data to a list of typed wires
    (([Wire],Arity) -> a)   -- restructure: typed wires back to data
  deriving (Typeable)
-- | The trivial 'CircuitTypeStructure' on @(['Wire'],'Arity')@.
id_CircuitTypeStructure :: CircuitTypeStructure ([Wire],Arity)
id_CircuitTypeStructure = CircuitTypeStructure (\x -> x) (\x -> x)
-- | Use a 'CircuitTypeStructure' to destructure a piece of (suitably
-- typed) data into a list of typed wires.
destructure_with :: CircuitTypeStructure a -> a -> ([Wire],Arity)
destructure_with (CircuitTypeStructure destructure _) x = destructure x
-- | Use a 'CircuitTypeStructure' to structure a list of typed wires
-- (of the appropriate length/arity) into a piece of structured data.
structure_with :: CircuitTypeStructure a -> ([Wire],Arity) -> a
structure_with (CircuitTypeStructure _ restructure) ws = restructure ws
-- ======================================================================
-- * Boxed circuits
-- | A typed subroutine consists of:
--
-- * a low-level circuit, ordered to allow binding of incoming and outgoing wires;
--
-- * functions for structuring/destructuring the inputs and outputs to and
-- from lists of wires (these functions being dynamically typed, since the
-- input/output type may vary between subroutines);
--
-- * a 'ControllableFlag', recording whether the circuit is controllable.
data TypedSubroutine = forall a b. (Typeable a, Typeable b) =>
  -- ordered circuit, input type structure, output type structure, controllability:
  TypedSubroutine OCircuit (CircuitTypeStructure a) (CircuitTypeStructure b) ControllableFlag
-- | Extract just the 'Circuit' from a 'TypedSubroutine'.
circuit_of_typedsubroutine :: TypedSubroutine -> Circuit
circuit_of_typedsubroutine (TypedSubroutine ocirc _ _ _) = circ
  where
    OCircuit (_, circ, _) = ocirc
-- | A name space is a map from names to subroutine bindings. These
-- subroutines can reference each other; it is the programmer’s
-- responsibility to ensure there is no circular dependency, and no
-- clash of names.
type Namespace = Map BoxId TypedSubroutine

-- | The empty namespace.
namespace_empty :: Namespace
namespace_empty = Map.empty
-- | A function to display the names of all the subroutines in a 'Namespace'.
showNames :: Namespace -> String
showNames ns = show (Map.keys ns)
-- | A boxed circuit is a distinguished simple circuit (analogous to a “main” function) together with a namespace.
type BCircuit = (Circuit,Namespace)
-- ----------------------------------------------------------------------
-- ** Ordered circuits
-- | An ordered boxed circuit is a 'BCircuit' together with an
-- ordering on the input and output endpoints, or equivalently, an
-- 'OCircuit' together with a namespace.
type OBCircuit = (OCircuit,Namespace)
-- | Construct an 'OBCircuit' from a 'BCircuit' and an ordering on the
-- input and output endpoints.
ob_circuit :: [Wire] -> BCircuit -> [Wire] -> OBCircuit
ob_circuit w_in (circ, ns) w_out =
  let ocirc = OCircuit (w_in, circ, w_out) in
  (ocirc, ns)
-- ======================================================================
-- ** Basic functions lifted to boxed circuits
-- All the basic functions defined on simple circuits now lift
-- trivially to boxed circuits:
-- | Reverse a simple boxed circuit, or throw an error if not reversible.
reverse_bcircuit :: BCircuit -> BCircuit
reverse_bcircuit (circ, ns) = (reverse_circuit circ, ns)
-- ----------------------------------------------------------------------
-- * The ReadWrite monad
-- $ The 'ReadWrite' monad encapsulates the interaction with a (real
-- or simulated) low-level quantum device.
-- | The 'ReadWrite' monad describes a standard read-write computation,
-- here specialized to the case where writes are 'Gate's, prompts are
-- 'Bit's, and reads are 'Bool's. Thus, a read-write computation can
-- do three things:
--
-- * terminate with a result. This is the case 'RW_Return'.
--
-- * write a single 'Gate' and continue. This is the case 'RW_Write'.
--
-- * issue a prompt, which is a 'Wire', then read a 'Bool', then
-- continue. This is the case 'RW_Read'.
data ReadWrite a = RW_Return a                            -- ^ Terminate with result /a/.
     | RW_Write !Gate (ReadWrite a)                       -- ^ Write one gate, then continue.
     | RW_Read !Wire (Bool -> ReadWrite a)                -- ^ Prompt on a wire, read a 'Bool', then continue.
     | RW_Subroutine BoxId TypedSubroutine (ReadWrite a)  -- ^ Record a subroutine binding, then continue.
instance Monad ReadWrite where
  return a = RW_Return a
  -- Bind pushes the continuation /g/ underneath each constructor, so
  -- gates are emitted in their original order and reads remain
  -- interactive.
  f >>= g = case f of
    RW_Return a -> g a
    RW_Write gate f' -> RW_Write gate (f' >>= g)
    RW_Read bit cont -> RW_Read bit (\bool -> cont bool >>= g)
    RW_Subroutine name subroutine f' -> RW_Subroutine name subroutine (f' >>= g)

-- NOTE(review): this pre-AMP style ('pure = return', 'fmap = liftM')
-- compiles, but modern convention defines 'return' in terms of 'pure'.
instance Applicative ReadWrite where
  pure = return
  (<*>) = ap

instance Functor ReadWrite where
  fmap = liftM
-- | Transforms a read-write computation into one that behaves identically,
-- but also returns the list of gates generated.
--
-- This is used as a building block, for example to allow a read-write
-- computation to be run in a simulator while simultaneously using a
-- static backend to print the list of generated gates.
readwrite_wrap :: ReadWrite a -> ReadWrite ([Gate], a)
readwrite_wrap (RW_Return a) = do
  RW_Return ([], a)
readwrite_wrap (RW_Write gate comp) = do
  -- The lazy pattern (~) is essential: it allows the gate list to be
  -- produced incrementally, before the rest of the computation has run.
  -- NOTE(review): as written, the 'RW_Write gate' is sequenced after the
  -- writes of /comp/, although /gate/ heads the returned list — confirm
  -- the intended emission order against the upstream source.
  ~(gates, a) <- readwrite_wrap comp
  RW_Write gate (return (gate:gates, a))
readwrite_wrap (RW_Read bit cont) = do
  RW_Read bit (\bool -> readwrite_wrap (cont bool))
readwrite_wrap (RW_Subroutine name subroutine comp) =
  RW_Subroutine name subroutine (readwrite_wrap comp)
-- | Extract the contents of a static 'ReadWrite' computation. A
-- 'ReadWrite' computation is said to be static if it contains no
-- 'RW_Read' instructions, or in other words, no dynamic lifting. If
-- an 'RW_Read' instruction is encountered, issue an error message
-- using the given stub.
readwrite_unwind_static :: ErrMsg -> ReadWrite a -> a
readwrite_unwind_static _ (RW_Return a) = a
readwrite_unwind_static e (RW_Write _ comp) = readwrite_unwind_static e comp
readwrite_unwind_static e (RW_Read _ _) = error (e "dynamic lifting")
readwrite_unwind_static e (RW_Subroutine _ _ comp) = readwrite_unwind_static e comp
-- | Turn a static read-write computation into a list of gates, while
-- also updating a namespace. \"Static\" means that the computation
-- may not contain any 'RW_Read' operations. If it does, the message
-- \"dynamic lifting\" is passed to the given error handler.
--
-- Important usage note: This function returns a triple (/gates/,
-- /ns/, /x/). The list of gates is generated lazily, and can be
-- consumed one gate at a time. However, the values /ns/ and /x/ are
-- only computed at the end of the computation. Any function using
-- them should not apply a strict pattern match to /ns/ or /x/, or
-- else the whole list of gates will be generated in memory. For
-- example, the following will blow up the memory:
--
-- > (gates, ns, (a, n, x)) = gatelist_of_readwrite errmsg comp
--
-- whereas the following will work as intended:
--
-- > (gates, ns, ~(a, n, x)) = gatelist_of_readwrite errmsg comp
gatelist_of_readwrite :: ErrMsg -> ReadWrite a -> Namespace -> ([Gate], Namespace, a)
gatelist_of_readwrite e (RW_Return a) ns = ([], ns, a)
-- The gate is consed lazily onto the recursive result, so callers can
-- consume the gate list one gate at a time (see usage note above).
gatelist_of_readwrite e (RW_Write gate comp) ns = (gate : gates, ns', a) where
  (gates, ns', a) = gatelist_of_readwrite e comp ns
gatelist_of_readwrite e (RW_Read bit cont) ns = error (e "dynamic lifting")
gatelist_of_readwrite e (RW_Subroutine name subroutine comp) ns =
  let ns' = map_provide name subroutine ns in
  gatelist_of_readwrite e comp ns'
{-
-- This version is inefficient. Why?
gatelist_of_readwrite_xxx :: ErrMsg -> ReadWrite a -> ([Gate], a)
gatelist_of_readwrite_xxx e comp =
  readwrite_unwind_static e (readwrite_wrap comp)
-}
-- ----------------------------------------------------------------------
-- * Dynamic boxed circuits
-- | The type of dynamic boxed circuits. The type 'DBCircuit' /a/ is
-- the appropriate generalization of ('BCircuit', /a/), in a setting
-- that is dynamic rather than static (i.e., with dynamic lifting or
-- \"interactive measurement\").
type DBCircuit a = (Arity, ReadWrite (Arity, Int, a))
-- | Convert a dynamic boxed circuit to a static boxed circuit. The
-- dynamic boxed circuit may not contain any dynamic liftings, since
-- these cannot be performed in a static setting. In case any output
-- liftings are encountered, try to issue a meaningful error via the
-- given stub error message.
bcircuit_of_static_dbcircuit :: ErrMsg -> DBCircuit a -> (BCircuit, a)
bcircuit_of_static_dbcircuit e dbcirc = (bcirc, x) where
  (a0, comp) = dbcirc
  bcirc = (circ, ns)
  circ = (a0, gates, a1, n)
  -- The lazy pattern (~) is essential: /gates/ can be consumed lazily,
  -- while /a1/, /n/ and /x/ only become available at the end.
  (gates, ns, ~(a1, n, x)) = gatelist_of_readwrite e comp namespace_empty
-- | Convert a boxed circuit to a dynamic boxed circuit. The latter,
-- of course, contains no 'RW_Read' instructions.
dbcircuit_of_bcircuit :: BCircuit -> a -> DBCircuit a
dbcircuit_of_bcircuit bcircuit x = (a0, comp (Map.toList ns) gates) where
  (circuit, ns) = bcircuit
  (a0, gates, a1, n) = circuit
  -- Replay all subroutine bindings first, then the gates, then return.
  comp ((boxid,subroutine):ns) gs = RW_Subroutine boxid subroutine (comp ns gs)
  comp [] [] = RW_Return (a1, n, x)
  comp [] (g:gs) = RW_Write g (comp [] gs)
| null | https://raw.githubusercontent.com/thephoeron/quipper-language/15e555343a15c07b9aa97aced1ada22414f04af6/quipper/Quipper/Circuit.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
# LANGUAGE BangPatterns #
# LANGUAGE DeriveDataTypeable #
| Low-level quantum circuit implementation. This is our backend
implementation of quantum circuits. Note: there is no run-time
error checking at the moment.
At its heart, a circuit is a list of gates. All well-definedness
checking (e.g. input arity, output arity, and checking that the
intermediate gates are connected to legitimate wires) is done
dynamically, at circuit generation time, and is not stored within
the circuit itself. This allows circuits to be produced and
consumed lazily.
Implementation note: this file is in the intermediate stage of a
code refactoring, and should be considered \"under renovation\".
import other stuff
----------------------------------------------------------------------
| Wire identifier. Wires are currently identified by an integer,
but the users of this interface should be oblivious to this.
| Wire type. A wire is either quantum or classical.
^ Classical wire.
| An arity, also known as a typing context, is a map from a finite
set of wires to wire types.
positive item, and 'Signed' /x/ 'False' represents a negative item.
When used with wires in a circuit, a positive sign is used to
represent a positive control, i.e., a filled dot, and a negative
sign is used to represent a negative control, i.e., an empty dot.
| Extract the underlying item of a signed item.
| Extract the sign of a signed item: 'True' is positive, and
'False' is negative.
| A list of controlled wires, possibly empty.
| A time step is a small floating point number used as a
parameter to certain gates, such as rotation gates or the
[exp −/iZt/] gate.
| A flag that, if 'True', indicates that the gate is inverted.
| A flag that, if 'True', indicates that the gate is controllable,
but any further controls on the gate should be ignored. This is
used, e.g., for circuits consisting of a basis change, some
operation, and the inverse basis change. When controlling such a
circuit, it is sufficient to control the middle operation, so the
gates belonging to the basis change and its inverse will have the
| A flag, to specify if the corresponding subroutine can be controlled.
Either no control allowed, or all controls, or only classical.
| An identifier for a subroutine. A boxed subroutine is currently
identified by a pair of: the user-defined name of the subroutine;
and a value uniquely identifying the type and shape of the argument.
For now, we represent the shape as a string, because this gives an
principle, one could also use a pair of a type representation and a
shape term. The implementation of this may change later.
| A flag that indicates how many times a particular subroutine
the type of the subroutine.
When changing the 'Gate' datatype, also remember to update
| The low-level representation of gates.
Named reversible quantum gates.
^ A named reversible quantum gate: @'Qbit'^(m+n) ->
\"generalized controls\", i.e. wires not modified by the
gate. The gate type is uniquely determined by: the name, the
number of inputs, and the number of generalized controls. Gates
that differ in one of these respects should be regarded as
different gates.
^ A named reversible quantum gate that also depends on a real
parameter. This is typically used for phase and rotation
gates. The gate name can contain \'%\' as a place holder for
the parameter, e.g., @\"exp(-i%Z)\"@. The remaining arguments
A nullary quantum gate.
Some classical gates.
^ Classical not: @'Cbit' -> 'Cbit'@.
^ Generic classical gate @1 -> 'Cbit'@.
^ Uncompute classical gate @'Cbit' -> 1@, asserting that the
classical bit is in the state specified by the corresponding
'CGate'.
^ Classical swap gate: @'Cbit' * 'Cbit' -> 'Cbit' * 'Cbit'@.
Initialization and assertive termination.
^ Initialization: @'Cbit' -> 'Qbit'@.
^ Measurement @'Qbit' -> 'Cbit'@ with an assertion that the
qubit is already in a computational basis state. This kind of
measurement loses no information, and is formally the inverse
^ Initialization: @'Bool' -> 'Qbit'@.
^ Initialization: @'Bool' -> 'Cbit'@.
^ Termination of a 'Qbit' wire with assertion
that the qubit is in the specified state:
@'Qbit' * 'Bool' -> 1@.
^ Termination of a 'Cbit' wire with assertion
that the bit is in the specified state:
@'Cbit' * 'Bool' -> 1@.
Measurement.
^ Measurement: @'Qbit' -> 'Cbit'@.
^ Termination of a 'Qbit' wire without
^ Termination of a 'Cbit' wire without
assertion: @'Cbit' -> 1@
Dynamic termination.
^ Termination of a 'Cbit' wire, with a comment indicating what
the observed state of that wire was. This is typically inserted
in a circuit after a dynamic lifting is performed. Unlike
'CTerm', this is in no way an assertion, but simply a record of
observed behavior during a particular run of the algorithm.
Subroutines.
^ Reference to a subroutine, assumed to be bound to another
circuit. Arbitrary input and output arities. The domain of /a1/
must include the range of /ws1/, and similarly for /a2/ and /ws2/.
Comments.
^ A comment. Does nothing, but can be useful for marking a
location or some wires in a circuit.
----------------------------------------------------------------------
* Basic information about gates
The following functions must be updated each time the 'Gate' data
type is changed.
| Compute the incoming and outgoing wires of a given gate
(excluding controls, comments, and anchors). This essentially
encodes the type information of the basic gates. If a wire is used
multiple times as an input or output, then 'gate_arity' also
returns it multiple times; this enables run-time type checking.
Note that 'gate_arity' returns the /logical/ wires, and therefore
excludes things like labels, comments, and graphical anchors. This
is in contrast to 'wires_of_gate', which returns the /syntactic/
set of wires used by the gate.
| Return the controls of a gate (or an empty list if the gate has
no controls).
| Return the 'NoControlFlag' of a gate, or 'False' if it doesn't have one.
The remaining gates don't have a 'NoControlFlag'. We list them
explicitly, so that the typechecker can warn us about new gates
that must be added here.
| Apply the given 'NoControlFlag' to the given 'Gate'. This means,
otherwise do nothing. Throw an error if attempting to set the
'NoControlFlag' on a gate that can't support this flag.
The remaining gates can't have their 'NoControlFlag' set. We list
them explicitly, so that the typechecker can warn us about new
gates that must be added here.
| Reverse a gate. Throw an error if the gate is not reversible.
The remaining gates are not reversible. We list them explicitly, so
that the typechecker can warn us about new gates that must be added
here.
----------------------------------------------------------------------
* Auxiliary functions on gates and wires
| Return the set of wires used by a list of controls.
| Return the set of wires used by a gate (including controls,
labels, and anchors).
Unlike 'gate_arity', the function 'wires_of_gate' is used for
printing, and therefore returns all wires that are syntactically
used by the gate, irrespective of whether they have a logical
meaning.
| Like 'wires_of_gate', except return a list of wires.
----------------------------------------------------------------------
* Dynamic arities
| Recall that an 'Arity' is a set of typed wires, and it determines
the external interfaces at which circuits and gates can be
type 'Arity', but in a format that is more optimized for efficient
updating. Additionally, it also stores the set of wires ever used.
| Check whether the given gate is well-formed and can be legally
applied in the context of the given arity. If successful, return
the updated arity resulting from the gate application. If
unsuccessful, raise an error. Properties checked are:
* that each gate has non-overlapping inputs, including controls;
* that each gate has non-overlapping outputs, including controls;
* that the inputs of the gate (including controls) are actually
present in the current arity;
* that the types of the inputs (excluding controls) match those of
the current arity;
* that the outputs of the gate (excluding controls) don't conflict
with any wires already existing in the current arity.
| Like 'arity_append', but without type checking. This is
potentially faster, but should only used in applications that have
already been thoroughly tested or type-checked.
| For now, we disable run-time type checking, because we have not
yet implemented run-time types properly. Therefore, we define
'arity_append' to be a synonym for 'arity_append_unsafe'.
| Return an empty arity.
| Return a wire unused in the current arity.
| Return the next /k/ wires unused in the current arity.
| Add a new typed wire to the current arity. This returns a new
wire and the updated arity.
| Convert an extended arity to an ordinary arity.
| Return the smallest wire id nowhere used in the circuit.
----------------------------------------------------------------------
* Circuit abstraction
| A completed circuit /(a1,gs,a2,n)/ has an input arity /a1/, a
list of gates /gs/, and an output arity /a2/. We also record /n/,
the total number of wires used by the circuit. Because wires are
allocated consecutively, this means that the wire id's used are
[0../n/-1].
| Return the set of all the wires in a circuit.
----------------------------------------------------------------------
** Reversing low-level circuits
| Reverse a gate list.
| Reverse a circuit. Throw an error if the circuit is not reversible.
----------------------------------------------------------------------
** NoControlFlag on low-level circuits
| Set the 'NoControlFlag' on all gates of a circuit.
----------------------------------------------------------------------
** Ordered circuits
| An ordered circuit is a 'Circuit' together with an ordering on
(usually all, but potentially a subset of) the input and output
endpoints.
This extra information is required when a circuit is used within a
larger circuit (e.g. via a 'Subroutine' gate), to identify which wires
of the sub-circuit should be bound to which wires of the surrounding
circuit.
----------------------------------------------------------------------
** Annotated circuits
| One often wants to consider the inputs and outputs of a circuit as
more structured/typed than just lists of bits/qubits; for instance,
While for the most part this typing information is not included in
low-level circuits, we need to consider it in hierarchical circuits,
so that the information stored in a subroutine is sufficient to call
the subroutine in a typed context.
Specifically, the extra information needed consists of functions to
destructure the input/output data as a list of typed wires, and
restructure such a list of wires into a piece of data of the appropriate
type.
| The trivial 'CircuitTypeStructure' on @(['Wire'],'Arity')@.
| Use a 'CircuitTypeStructure' to destructure a piece of (suitably
typed) data into a list of typed wires.
| Use a 'CircuitTypeStructure' to structure a list of typed wires
(of the appropriate length/arity) into a piece of structured data.
======================================================================
* Boxed circuits
| A typed subroutine consists of:
* a low-level circuit, ordered to allow binding of incoming and outgoing wires;
* functions for structuring/destructuring the inputs and outputs to and
from lists of wires (these functions being dynamically typed, since the
input/output type may vary between subroutines);
| Extract just the 'Circuit' from a 'TypedSubroutine'.
| A name space is a map from names to subroutine bindings. These
subroutines can reference each other; it is the programmer’s
responsibility to ensure there is no circular dependency, and no
clash of names.
| The empty namespace.
| A boxed circuit is a distinguished simple circuit (analogous to a “main” function) together with a namespace.
----------------------------------------------------------------------
** Ordered circuits
| An ordered boxed circuit is a 'BCircuit' together with an
ordering on the input and output endpoints, or equivalently, an
input and output endpoints.
======================================================================
** Basic functions lifted to boxed circuits
All the basic functions defined on simple circuits now lift
trivially to boxed circuits:
| Reverse a simple boxed circuit, or throw an error if not reversible.
----------------------------------------------------------------------
$ The 'ReadWrite' monad encapsulates the interaction with a (real
or simulated) low-level quantum device.
| The 'ReadWrite' monad describes a standard read-write computation,
here specialized to the case where writes are 'Gate's, prompts are
* terminate with a result. This is the case 'RW_Return'.
* write a single 'Gate' and continue. This is the case 'RW_Write'.
* issue a prompt, which is a 'Wire', then read a 'Bool', then
| Transforms a read-write computation into one that behaves identically,
but also returns the list of gates generated.
This is used as a building block, for example to allow a read-write
computation to be run in a simulator while simultaneously using a
static backend to print the list of generated gates.
using the given stub.
| Turn a static read-write computation into a list of gates, while
also updating a namespace. \"Static\" means that the computation
\"dynamic lifting\" is passed to the given error handler.
Important usage note: This function returns a triple (/gates/,
/ns/, /x/). The list of gates is generated lazily, and can be
only computed at the end of the computation. Any function using
them should not apply a strict pattern match to /ns/ or /x/, or
else the whole list of gates will be generated in memory. For
example, the following will blow up the memory:
whereas the following will work as intended:
This version is inefficient . Why ?
This version is inefficient. Why?
----------------------------------------------------------------------
* Dynamic boxed circuits
the appropriate generalization of ('BCircuit', /a/), in a setting
that is dynamic rather than static (i.e., with dynamic lifting or
| Convert a dynamic boxed circuit to a static boxed circuit. The
dynamic boxed circuit may not contain any dynamic liftings, since
these cannot be performed in a static setting. In case any output
liftings are encountered, try to issue a meaningful error via the
given stub error message.
| Convert a boxed circuit to a dynamic boxed circuit. The latter, | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
{-# LANGUAGE ExistentialQuantification #-}
module Quipper.Circuit where
-- import other Quipper stuff
import Libraries.Auxiliary
import Data.List
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Typeable
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
-- * Quantum circuit data type
type Wire = Int
-- | Wire type. A wire is either quantum or classical.
data Wiretype = Qbit    -- ^ Quantum wire.
              | Cbit    -- ^ Classical wire.
  deriving (Show, Eq, Typeable)
type Arity = IntMap Wiretype
-- | A signed item of type /a/. 'Signed' /x/ 'True' represents a positive item.
data Signed a = Signed a Bool
  deriving (Show, Typeable)

-- | Extract the underlying item of a signed item.
from_signed :: Signed a -> a
from_signed (Signed item _) = item

-- | Extract the sign of a signed item: 'True' is positive, 'False' is
-- negative.
get_sign :: Signed a -> Bool
get_sign (Signed _ sign) = sign
-- | A list of controlled wires, possibly empty.
type Controls = [Signed Wire]

-- | A time step: a small floating point number used as a parameter to
-- certain gates, such as rotation gates.
type Timestep = Double

-- | A flag that, if 'True', indicates that the gate is inverted.
type InverseFlag = Bool
-- 'NoControlFlag' set.
type NoControlFlag = Bool
data ControllableFlag = NoCtl | AllCtl | OnlyClassicalCtl
deriving (Eq, Ord, Show)
easy total ' ' instance , needed for " Data . Map " . However , in
data BoxId = BoxId String String
deriving (Eq, Ord, Show)
should be repeated . If non - zero , it implies some constraints on
data RepeatFlag = RepeatFlag Integer
deriving (Eq,Ord)
instance Show RepeatFlag where
show (RepeatFlag n) = show n
' gate_arity ' , ' gate_controls ' , and ' ' below .
data Gate =
QGate String InverseFlag [Wire] [Wire] Controls NoControlFlag
' Qbit'^(m+n)@. The second @['Wire']@ argument should be
| QRot String InverseFlag Timestep [Wire] [Wire] Controls NoControlFlag
are as for ' QGate ' .
| GPhase Timestep [Wire] Controls NoControlFlag
^ Global phase gate : @'1 ' - > ' 1'@. The list of wires is just a hint for graphical rendering .
| CNot Wire Controls NoControlFlag
| CGate String Wire [Wire] NoControlFlag
| CGateInv String Wire [Wire] NoControlFlag
| CSwap Wire Wire Controls NoControlFlag
| QPrep Wire NoControlFlag
| QUnprep Wire NoControlFlag
of ' QPrep ' .
| QInit Bool Wire NoControlFlag
| CInit Bool Wire NoControlFlag
| QTerm Bool Wire NoControlFlag
| CTerm Bool Wire NoControlFlag
| QMeas Wire
| QDiscard Wire
assertion : ' - > 1@
| CDiscard Wire
| DTerm Bool Wire
| Subroutine BoxId InverseFlag [Wire] Arity [Wire] Arity Controls NoControlFlag ControllableFlag RepeatFlag
| Comment String InverseFlag [(Wire,String)]
deriving Show
-- | Return the arity of a gate: the list of typed wires it consumes,
-- and the list of typed wires it produces. Control wires are not
-- included (see 'gate_controls'). Initializations have empty inputs,
-- terminations have empty outputs.
gate_arity :: Gate -> ([(Wire, Wiretype)], [(Wire, Wiretype)])
gate_arity (QGate n inv ws1 ws2 c ncf) = (map (\w -> (w,Qbit)) (ws1 ++ ws2), map (\w -> (w,Qbit)) (ws1 ++ ws2))
gate_arity (QRot n inv t ws1 ws2 c ncf) = (map (\w -> (w,Qbit)) (ws1 ++ ws2), map (\w -> (w,Qbit)) (ws1 ++ ws2))
-- GPhase acts on no wires directly; its wire list is only a rendering hint.
gate_arity (GPhase t w c ncf) = ([], [])
gate_arity (CNot w c ncf) = ([(w, Cbit)], [(w, Cbit)])
gate_arity (CGate n w ws ncf) = (cs, (w, Cbit) : cs)
  where cs = map (\x -> (x, Cbit)) ws
gate_arity (CGateInv n w ws ncf) = ((w, Cbit) : cs, cs)
  where cs = map (\x -> (x, Cbit)) ws
gate_arity (CSwap w1 w2 c ncf) = ([(w1, Cbit), (w2, Cbit)], [(w1, Cbit), (w2, Cbit)])
gate_arity (QPrep w ncf) = ([(w, Cbit)], [(w, Qbit)])
gate_arity (QUnprep w ncf) = ([(w, Qbit)], [(w, Cbit)])
gate_arity (QInit b w ncf) = ([], [(w, Qbit)])
gate_arity (CInit b w ncf) = ([], [(w, Cbit)])
gate_arity (QTerm b w ncf) = ([(w, Qbit)], [])
gate_arity (CTerm b w ncf) = ([(w, Cbit)], [])
gate_arity (QMeas w) = ([(w, Qbit)], [(w, Cbit)])
gate_arity (QDiscard w) = ([(w, Qbit)], [])
gate_arity (CDiscard w) = ([(w, Cbit)], [])
gate_arity (DTerm b w) = ([(w, Cbit)], [])
-- NOTE(review): 'fromJust' will crash if a subroutine wire is missing
-- from its arity map — presumably an invariant of Subroutine; confirm.
gate_arity (Subroutine n inv ws1 a1 ws2 a2 c ncf ctrble _) = (getTypes ws1 a1, getTypes ws2 a2)
  where getTypes ws a = map (\n -> (n, fromJust (IntMap.lookup n a))) ws
gate_arity (Comment s inv ws) = ([], [])
-- | Return the controls of a gate. Gates that cannot carry controls
-- yield the empty list.
gate_controls :: Gate -> Controls
gate_controls (QGate n inv ws1 ws2 c ncf) = c
gate_controls (QRot n inv t ws1 ws2 c ncf) = c
gate_controls (GPhase t w c ncf) = c
gate_controls (CNot w c ncf) = c
gate_controls (CGate n w ws ncf) = []
gate_controls (CGateInv n w ws ncf) = []
gate_controls (CSwap w1 w2 c ncf) = c
gate_controls (QPrep w ncf) = []
gate_controls (QUnprep w ncf) = []
gate_controls (QInit b w ncf) = []
gate_controls (CInit b w ncf) = []
gate_controls (QTerm b w ncf) = []
gate_controls (CTerm b w ncf) = []
gate_controls (QMeas w) = []
gate_controls (QDiscard w) = []
gate_controls (CDiscard w) = []
gate_controls (DTerm b w) = []
gate_controls (Subroutine n inv ws1 a1 ws2 a2 c ncf ctrble _) = c
gate_controls (Comment s inv ws) = []
-- | Return the 'NoControlFlag' of a gate. Measurements, discards,
-- dynamic terminations and comments never carry the flag, so they
-- yield 'False'.
gate_ncflag :: Gate -> NoControlFlag
gate_ncflag (QGate n inv ws1 ws2 c ncf) = ncf
gate_ncflag (QRot n inv t ws1 ws2 c ncf) = ncf
gate_ncflag (GPhase t w c ncf) = ncf
gate_ncflag (CNot w c ncf) = ncf
gate_ncflag (CGate n w ws ncf) = ncf
gate_ncflag (CGateInv n w ws ncf) = ncf
gate_ncflag (CSwap w1 w2 c ncf) = ncf
gate_ncflag (QPrep w ncf) = ncf
gate_ncflag (QUnprep w ncf) = ncf
gate_ncflag (QInit b w ncf) = ncf
gate_ncflag (CInit b w ncf) = ncf
gate_ncflag (QTerm b w ncf) = ncf
gate_ncflag (CTerm b w ncf) = ncf
gate_ncflag (Subroutine n inv ws1 a1 ws2 a2 c ncf ctrble _) = ncf
gate_ncflag (QMeas _) = False
gate_ncflag (QDiscard _) = False
gate_ncflag (CDiscard _) = False
gate_ncflag (DTerm _ _) = False
gate_ncflag (Comment _ _ _) = False
if the first parameter is ' True ' , set the gate 's ' NoControlFlag ' ,
-- | If the first argument is 'True', set the gate's 'NoControlFlag';
-- otherwise return the gate unchanged. Gates that cannot occur in a
-- without_controls context (measurement, discard, dynamic termination)
-- raise an error when the flag is requested.
gate_with_ncflag :: NoControlFlag -> Gate -> Gate
gate_with_ncflag False gate = gate
gate_with_ncflag True (QGate n inv ws1 ws2 c _) = (QGate n inv ws1 ws2 c True)
gate_with_ncflag True (QRot n inv t ws1 ws2 c _) = (QRot n inv t ws1 ws2 c True)
gate_with_ncflag True (GPhase t w c _) = (GPhase t w c True)
gate_with_ncflag True (CNot w c _) = (CNot w c True)
gate_with_ncflag True (CGate n w ws _) = (CGate n w ws True)
gate_with_ncflag True (CGateInv n w ws _) = (CGateInv n w ws True)
gate_with_ncflag True (CSwap w1 w2 c _) = (CSwap w1 w2 c True)
gate_with_ncflag True (QPrep w _) = (QPrep w True)
gate_with_ncflag True (QUnprep w _) = (QUnprep w True)
gate_with_ncflag True (QInit b w _) = (QInit b w True)
gate_with_ncflag True (CInit b w _) = (CInit b w True)
gate_with_ncflag True (QTerm b w _) = (QTerm b w True)
gate_with_ncflag True (CTerm b w _) = (CTerm b w True)
gate_with_ncflag True (Subroutine n inv ws1 a1 ws2 a2 c _ ctrble repeat) = (Subroutine n inv ws1 a1 ws2 a2 c True ctrble repeat)
-- Comments carry no NoControlFlag; pass them through unchanged.
gate_with_ncflag True (Comment s inv ws) = (Comment s inv ws)
gate_with_ncflag True g@(QMeas _) =
  error ("gate " ++ show g ++ " can't be used in a without_controls context")
gate_with_ncflag True g@(QDiscard _) =
  error ("gate " ++ show g ++ " can't be used in a without_controls context")
gate_with_ncflag True g@(CDiscard _) =
  error ("gate " ++ show g ++ " can't be used in a without_controls context")
gate_with_ncflag True g@(DTerm _ _) =
  error ("gate " ++ show g ++ " can't be used in a without_controls context")
-- | Return the inverse of a gate. Raises an error for gates that are
-- not reversible (measurement, discard, dynamic termination).
gate_reverse :: Gate -> Gate
gate_reverse (QGate n inv ws1 ws2 c ncf) = QGate n (not inv) ws1 ws2 c ncf
gate_reverse (QRot n inv t ws1 ws2 c ncf) = QRot n (not inv) t ws1 ws2 c ncf
gate_reverse (GPhase t w c ncf) = GPhase (-t) w c ncf
-- CNot and CSwap are self-inverse.
gate_reverse (CNot w c ncf) = CNot w c ncf
gate_reverse (CGate n w ws ncf) = CGateInv n w ws ncf
gate_reverse (CGateInv n w ws ncf) = CGate n w ws ncf
gate_reverse (CSwap w1 w2 c ncf) = CSwap w1 w2 c ncf
gate_reverse (QPrep w ncf) = QUnprep w ncf
gate_reverse (QUnprep w ncf) = QPrep w ncf
gate_reverse (QInit b w ncf) = QTerm b w ncf
gate_reverse (CInit b w ncf) = CTerm b w ncf
gate_reverse (QTerm b w ncf) = QInit b w ncf
gate_reverse (CTerm b w ncf) = CInit b w ncf
-- A reversed subroutine swaps its input/output wire lists and arities.
gate_reverse (Subroutine name inv ws1 a1 ws2 a2 c ncf ctrble repeat) = Subroutine name (not inv) ws2 a2 ws1 a1 c ncf ctrble repeat
gate_reverse (Comment s inv ws) = Comment s (not inv) ws
gate_reverse g@(QMeas _) = error ("gate_reverse: gate not reversible: " ++ show g)
gate_reverse g@(QDiscard _) = error ("gate_reverse: gate not reversible: " ++ show g)
gate_reverse g@(CDiscard _) = error ("gate_reverse: gate not reversible: " ++ show g)
gate_reverse g@(DTerm _ _) = error ("gate_reverse: gate not reversible: " ++ show g)
-- | The set of wires appearing in a list of controls.
wires_of_controls :: Controls -> IntSet
wires_of_controls controls =
  IntSet.fromList [ w | Signed w _ <- controls ]
-- | Return the set of all wires used by a gate: inputs, outputs and
-- controls. The two special cases must come first: comments have empty
-- arity but carry annotated wires, and GPhase has empty arity but a
-- list of hint wires.
wires_of_gate :: Gate -> IntSet
wires_of_gate (Comment s inv ws) =
  intset_inserts (map fst ws) (IntSet.empty)
wires_of_gate (GPhase t w c ncf) =
  intset_inserts w (wires_of_controls c)
wires_of_gate g = intset_inserts w1 (intset_inserts w2 (wires_of_controls c))
  where
    (a1, a2) = gate_arity g
    c = gate_controls g
    w1 = map fst a1
    w2 = map fst a2
-- | The list of all wires used by a gate.
wirelist_of_gate :: Gate -> [Wire]
wirelist_of_gate = IntSet.toList . wires_of_gate
connected . The type ' ExtArity ' stores the same information as the
type ExtArity = XIntMap Wiretype
-- | Append a gate to an extended arity, first checking that the gate is
-- well-formed with respect to it: duplicated wires, dangling wires,
-- type mismatches and clashes with existing wires are all reported as
-- errors.
arity_append_safe :: Gate -> ExtArity -> ExtArity
arity_append_safe gate a0 =
  case (err0, err1, err2, err3, err4) of
    (True, _, _, _, _) ->
      error $ "Gate error: duplicate inputs in " ++ show gate
    (_, True, _, _, _) ->
      error $ "Gate error: duplicate outputs in " ++ show gate
    (_, _, Just w, _, _) ->
      error $ "Gate application error: no such wire " ++ show w ++ ": " ++ show gate
    (_, _, _, Just (w,t), _) ->
      error $ "Gate application error: wire " ++ show w ++ ":" ++ show t ++ " has wrong type " ++ show t' ++ ": " ++ show gate
      where
        Just t' = xintmap_lookup w a0
    (_, _, _, _, Just w) ->
      error $ "Gate application error: wire " ++ show w ++ " already exists: " ++ show gate
    _ -> a2
  where
    (win, wout) = gate_arity gate
    c_ids = map from_signed (gate_controls gate)
    win_ids = map fst win
    wout_ids = map fst wout
    -- err0/err1: duplicated input/output wires (controls included).
    err0 = has_duplicates (win_ids ++ c_ids)
    err1 = has_duplicates (wout_ids ++ c_ids)
    -- err2: some input or control wire does not exist in the arity.
    err2 = find (\w -> not $ xintmap_member w a0) (win_ids ++ c_ids)
    -- err3: an input wire exists but with the wrong type.
    err3 = find (\(w,t) -> not $ xintmap_lookup w a0 == Just t) win
    -- err4: an output wire clashes with a wire still in use after the
    -- inputs have been removed.
    err4 = find (\w -> xintmap_member w a1) wout_ids
    a1 = xintmap_deletes win_ids a0
    a2 = xintmap_inserts wout a1
-- | Append a gate to an extended arity without any well-formedness
-- checking: the gate's input wires are removed, then its output wires
-- are inserted.
arity_append_unsafe :: Gate -> ExtArity -> ExtArity
arity_append_unsafe gate arity =
  let (inputs, outputs) = gate_arity gate
      without_inputs = xintmap_deletes (map fst inputs) arity
  in xintmap_inserts outputs without_inputs
-- | Append a gate to an extended arity (currently without checking).
arity_append :: Gate -> ExtArity -> ExtArity
arity_append = arity_append_unsafe

-- | The empty extended arity.
arity_empty :: ExtArity
arity_empty = xintmap_empty

-- | A fresh (unused) wire, as chosen by 'xintmap_freshkey'.
arity_unused_wire :: ExtArity -> Wire
arity_unused_wire = xintmap_freshkey

-- | A list of fresh (unused) wires of the requested length.
arity_unused_wires :: Int -> ExtArity -> [Wire]
arity_unused_wires = xintmap_freshkeys

-- | Allocate a fresh wire of the given type, returning the wire and
-- the updated arity.
arity_alloc :: Wiretype -> ExtArity -> (Wire, ExtArity)
arity_alloc t arity = (w, arity') where
  w = xintmap_freshkey arity
  arity' = xintmap_insert w t arity

-- | Forget the extra bookkeeping of an 'ExtArity', yielding an 'Arity'.
arity_of_extarity :: ExtArity -> Arity
arity_of_extarity = xintmap_to_intmap

-- | The size of the extended arity (as reported by 'xintmap_size').
n_of_extarity :: ExtArity -> Int
n_of_extarity = xintmap_size
type Circuit = (Arity, [Gate], Arity, Int)
-- | The list of all wires of a circuit, namely 0 through n-1.
wirelist_of_circuit :: Circuit -> [Wire]
wirelist_of_circuit (_, _, _, n) = take n [0 ..]
-- | Reverse a list of gates, inverting each gate. Raises an error if
-- some gate is not reversible.
reverse_gatelist :: [Gate] -> [Gate]
reverse_gatelist = foldl (\acc gate -> gate_reverse gate : acc) []
-- | Reverse a circuit: swap its input and output arities and invert
-- its gate list. Raises an error if some gate is not reversible.
reverse_circuit :: Circuit -> Circuit
reverse_circuit (arity_in, gates, arity_out, n) =
  (arity_out, reverse_gatelist gates, arity_in, n)
-- | Set the 'NoControlFlag' on every gate of a circuit.
circuit_to_nocontrol :: Circuit -> Circuit
circuit_to_nocontrol (a_in, gates, a_out, n) =
  (a_in, [ gate_with_ncflag True g | g <- gates ], a_out, n)
newtype OCircuit = OCircuit ([Wire], Circuit, [Wire])
| Reverse an ' OCircuit ' . Throw an error if the circuit is not reversible .
-- | Reverse an 'OCircuit': the output wires become the inputs and the
-- inputs become the outputs. Raises an error if the underlying circuit
-- is not reversible.
reverse_ocircuit :: OCircuit -> OCircuit
-- Bug fix: the third component was previously ws_out, so the reversed
-- circuit lost its original input ordering; it must be ws_in.
reverse_ocircuit (OCircuit (ws_in, circ, ws_out)) = OCircuit (ws_out, reverse_circuit circ, ws_in)
a list of six qubits could be structured as a pair of triples , or a
triple of pairs , or a six - bit ' QDInt ' .
data CircuitTypeStructure a = CircuitTypeStructure (a -> ([Wire],Arity)) (([Wire],Arity) -> a)
deriving (Typeable)
-- | The identity structure on @([Wire], Arity)@.
id_CircuitTypeStructure :: CircuitTypeStructure ([Wire],Arity)
id_CircuitTypeStructure = CircuitTypeStructure id id

-- | Apply the destructuring half of a 'CircuitTypeStructure'.
destructure_with :: CircuitTypeStructure a -> a -> ([Wire],Arity)
destructure_with (CircuitTypeStructure f _) = f

-- | Apply the structuring half of a 'CircuitTypeStructure'.
structure_with :: CircuitTypeStructure a -> ([Wire],Arity) -> a
structure_with (CircuitTypeStructure _ g) = g
* a ' ControllableFlag ' , recording whether the circuit is controllable .
data TypedSubroutine = forall a b. (Typeable a, Typeable b) =>
TypedSubroutine OCircuit (CircuitTypeStructure a) (CircuitTypeStructure b) ControllableFlag
-- | Extract the low-level 'Circuit' stored in a 'TypedSubroutine',
-- discarding wire ordering, type structures and controllability.
circuit_of_typedsubroutine :: TypedSubroutine -> Circuit
circuit_of_typedsubroutine (TypedSubroutine (OCircuit (_,circ,_)) _ _ _) = circ
type Namespace = Map BoxId TypedSubroutine
-- | The empty namespace: no subroutines defined.
namespace_empty :: Namespace
namespace_empty = Map.empty
| A function to display the names of all the subroutines in a ' ' .
-- | Display the names of all the subroutines in a 'Namespace'.
showNames :: Namespace -> String
showNames ns = show (Map.keys ns)
type BCircuit = (Circuit,Namespace)
' OCircuit ' together with a namespace .
type OBCircuit = (OCircuit,Namespace)
| Construct an ' OBCircuit ' from a ' BCircuit ' and an ordering on the
-- | Construct an 'OBCircuit' from a 'BCircuit' together with orderings
-- on its input and output endpoints.
ob_circuit :: [Wire] -> BCircuit -> [Wire] -> OBCircuit
ob_circuit w_in (circ, ns) w_out =
  let ocirc = OCircuit (w_in, circ, w_out)
  in (ocirc, ns)
-- | Reverse a boxed circuit, leaving its namespace untouched.
reverse_bcircuit :: BCircuit -> BCircuit
reverse_bcircuit (circ, namespace) = (reverse_circuit circ, namespace)
* The ReadWrite monad
' Bit 's , and reads are ' 's . Thus , a read - write computation can
do three things :
continue . This is the case ' RW_Read ' .
data ReadWrite a = RW_Return a
| RW_Write !Gate (ReadWrite a)
| RW_Read !Wire (Bool -> ReadWrite a)
| RW_Subroutine BoxId TypedSubroutine (ReadWrite a)
-- The Monad instance threads the continuation through writes, reads
-- and subroutine declarations, so that sequencing read-write
-- computations concatenates their gate streams.
instance Monad ReadWrite where
  return a = RW_Return a
  f >>= g = case f of
    RW_Return a -> g a
    RW_Write gate f' -> RW_Write gate (f' >>= g)
    RW_Read bit cont -> RW_Read bit (\bool -> cont bool >>= g)
    RW_Subroutine name subroutine f' -> RW_Subroutine name subroutine (f' >>= g)

-- Applicative and Functor are derived from the Monad instance.
instance Applicative ReadWrite where
  pure = return
  (<*>) = ap

instance Functor ReadWrite where
  fmap = liftM
-- | Wrap a 'ReadWrite' computation so that it also returns the list of
-- gates it writes.
--
-- NOTE(review): the RW_Write case binds the recursively wrapped
-- computation (with a lazy pattern) before re-emitting the write;
-- presumably laziness of the bind preserves gate ordering — confirm
-- against callers before relying on this.
readwrite_wrap :: ReadWrite a -> ReadWrite ([Gate], a)
readwrite_wrap (RW_Return a) = do
  RW_Return ([], a)
readwrite_wrap (RW_Write gate comp) = do
  -- Lazy pattern: do not force the gate list here.
  ~(gates, a) <- readwrite_wrap comp
  RW_Write gate (return (gate:gates, a))
readwrite_wrap (RW_Read bit cont) = do
  RW_Read bit (\bool -> readwrite_wrap (cont bool))
readwrite_wrap (RW_Subroutine name subroutine comp) =
  RW_Subroutine name subroutine (readwrite_wrap comp)
| Extract the contents of a static ' ReadWrite ' computation . A
' ReadWrite ' computation is said to be static if it contains no
' RW_Read ' instructions , or in other words , no dynamic lifting . If
an ' RW_Read ' instruction is encountered , issue an error message
-- | Extract the result of a static 'ReadWrite' computation, i.e. one
-- containing no 'RW_Read' (no dynamic lifting). Writes and subroutine
-- declarations are skipped; if an 'RW_Read' is encountered, an error
-- is raised via the given error-message stub.
readwrite_unwind_static :: ErrMsg -> ReadWrite a -> a
readwrite_unwind_static e (RW_Return a) = a
readwrite_unwind_static e (RW_Write gate comp) = readwrite_unwind_static e comp
readwrite_unwind_static e (RW_Read bit cont) = error $ e "dynamic lifting"
readwrite_unwind_static e (RW_Subroutine name subroutine comp) = readwrite_unwind_static e comp
may not contain any ' RW_Read ' operations . If it does , the message
consumed one gate at a time . However , the values /ns/ and /x/ are
> ( gates , ns , ( a , n , x ) ) = gatelist_of_readwrite errmsg comp
> ( gates , ns , ~(a , n , x ) ) = gatelist_of_readwrite errmsg comp
-- | Run a static 'ReadWrite' computation, returning its gate list, the
-- final namespace, and its result. The gate list is produced lazily,
-- one gate at a time; the namespace and result only become available
-- at the end, so callers must not pattern-match them strictly (see the
-- lazy-pattern example in the surrounding documentation). Raises an
-- error via the given stub on any 'RW_Read' (dynamic lifting).
gatelist_of_readwrite :: ErrMsg -> ReadWrite a -> Namespace -> ([Gate], Namespace, a)
gatelist_of_readwrite e (RW_Return a) ns = ([], ns, a)
gatelist_of_readwrite e (RW_Write gate comp) ns = (gate : gates, ns', a) where
  (gates, ns', a) = gatelist_of_readwrite e comp ns
gatelist_of_readwrite e (RW_Read bit cont) ns = error (e "dynamic lifting")
gatelist_of_readwrite e (RW_Subroutine name subroutine comp) ns =
  let ns' = map_provide name subroutine ns in
  gatelist_of_readwrite e comp ns'
gatelist_of_readwrite_xxx : : ErrMsg - > ReadWrite a - > ( [ Gate ] , a )
gatelist_of_readwrite_xxx e comp =
readwrite_unwind_static e ( readwrite_wrap comp )
gatelist_of_readwrite_xxx :: ErrMsg -> ReadWrite a -> ([Gate], a)
gatelist_of_readwrite_xxx e comp =
readwrite_unwind_static e (readwrite_wrap comp)
-}
| The type of dynamic boxed circuits . The type ' DBCircuit ' /a/ is
\"interactive measurement\ " ) .
type DBCircuit a = (Arity, ReadWrite (Arity, Int, a))
-- | Convert a dynamic boxed circuit to a static boxed circuit plus its
-- result. The dynamic boxed circuit may not contain any dynamic
-- liftings; if one is encountered, an error is raised via the given
-- stub message.
bcircuit_of_static_dbcircuit :: ErrMsg -> DBCircuit a -> (BCircuit, a)
bcircuit_of_static_dbcircuit e dbcirc = (bcirc, x) where
  (a0, comp) = dbcirc
  bcirc = (circ, ns)
  circ = (a0, gates, a1, n)
  -- The lazy pattern on (a1, n, x) is essential: it lets the gate list
  -- stream without forcing the end of the computation first.
  (gates, ns, ~(a1, n, x)) = gatelist_of_readwrite e comp namespace_empty
of course , contains no ' RW_Read ' instructions .
-- | Convert a boxed circuit (plus a result value) to a dynamic boxed
-- circuit. The resulting computation contains no 'RW_Read': it first
-- declares every subroutine of the namespace, then writes each gate in
-- order, and finally returns the output arity, wire count and result.
dbcircuit_of_bcircuit :: BCircuit -> a -> DBCircuit a
dbcircuit_of_bcircuit bcircuit x = (a0, comp (Map.toList ns) gates) where
  (circuit, ns) = bcircuit
  (a0, gates, a1, n) = circuit
  comp ((boxid,subroutine):ns) gs = RW_Subroutine boxid subroutine (comp ns gs)
  comp [] [] = RW_Return (a1, n, x)
  comp [] (g:gs) = RW_Write g (comp [] gs)
|
d4e7732c0813b3231c48cc4e0c50ba754e0dc3cab3438321e4a426e681a57b75 | softlab-ntua/bencherl | class_ResultProducer.erl | Copyright ( C ) 2010 - 2014 EDF R&D
This file is part of Sim - Diasca .
Sim - Diasca is free software : you can redistribute it and/or modify
% it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
Sim - Diasca is distributed in the hope that it will be useful ,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with .
% If not, see <>.
Author : ( )
% Base class for all result producers.
%
% It allows them to be declared automatically to the result manager, and
% provides the basic behaviour so that they can interact with it.
%
-module(class_ResultProducer).
% Determines what are the mother classes of this class (if any):
-define( wooper_superclasses, [ class_TraceEmitter ] ).
% Parameters taken by the constructor ('construct').
-define( wooper_construct_parameters, ProducerName ).
Declaring all variations of WOOPER standard life - cycle operations :
( template pasted , two replacements performed to update arities )
-define( wooper_construct_export, new/1, new_link/1,
synchronous_new/1, synchronous_new_link/1,
synchronous_timed_new/1, synchronous_timed_new_link/1,
remote_new/2, remote_new_link/2, remote_synchronous_new/2,
remote_synchronous_new_link/2, remote_synchronisable_new_link/2,
remote_synchronous_timed_new/2, remote_synchronous_timed_new_link/2,
construct/2, destruct/1 ).
% Member method declarations.
-define( wooper_method_export, setEnableStatus/2, getEnableStatus/1,
sendResults/2, setResultProducedStatus/2, setResultCollectedStatus/2 ).
% Static method declarations.
-define( wooper_static_method_export, get_producer_options/0 ).
% Type section.
-type producer_name() :: string().
% Describes the types of output expected from a producer:
-type producer_option() :: 'data_only' | 'plot_only' | 'data_and_plot'.
-type producer_options() :: producer_option() | [ producer_option() ].
% Describes the precise nature of a producer:
-type producer_nature() :: 'basic_probe' | 'virtual_probe' | 'undefined'.
-type producer_result() :: { pid(), 'archive', binary() }
| { pid(), 'raw', { file_utils:bin_file_name(), binary() } }.
-export_type([ producer_name/0, producer_option/0, producer_options/0,
producer_nature/0, producer_result/0 ]).
Allows to define WOOPER base variables and methods for that class :
-include("wooper.hrl").
% Must be included before class_TraceEmitter header:
-define(TraceEmitterCategorization,"Core.ResultManagement.ResultProducer").
% Allows to use macros for trace sending:
-include("class_TraceEmitter.hrl").
% For result_manager_name:
-include("class_ResultManager.hrl").
% The class-specific attributes of a result producer are:
%
% - result_manager_pid :: pid() is the PID of the result manager, necessary for
% a producer to declare its outputs before being fed with data
%
% - enabled_producer :: boolean() is a boolean telling whether the outputs of
% this producer have a chance of being of interest for the simulation; if false,
% then the producer may simply drop incoming samples, if not being asked by the
% data source(s) that is using it about its enable status: it is still better to
% have data sources stop sending samples instead of having the producer drop
% them on receiving
%
% - result_produced :: boolean() tells whether this producer already fully
% generated its expected results
%
% - result_collected :: boolean() tells whether the results generated by this
% producer have been already collected (by the result manager)
Constructs a new result producer .
%
% ProducerName is the name of this producer, specified as a plain string.
%
-spec construct( wooper:state(), string() ) -> wooper:state().
construct( State, ProducerName ) ->

    % First the direct mother classes:
    TraceState = class_TraceEmitter:construct( State, ProducerName ),

    % Registers this producer to the local instance tracker:
    TrackerPid = class_InstanceTracker:get_local_tracker(),

    { _SameState, BinName } = executeRequest( TraceState, getName ),

    TrackerPid ! { registerResultProducer, BinName, self() },

    % Then look-up the result manager, so that the actual producer child class
    % can send a notification to it later.
    %
    % As the deployment is synchronous, the manager must already be available
    % (no specific waiting to perform):
    %
    ResultManagerPid = basic_utils:get_registered_pid_for( ?result_manager_name,
                                                           global ),

    %?send_debug_fmt( TraceState, "Creating result producer '~s'.",
    %  [ ProducerName ] ),

    % Deferred reception for registerResultProducer:
    receive

        { wooper_result, result_producer_registered } ->
            ok

    end,

    setAttributes( TraceState, [

        { result_manager_pid, ResultManagerPid },

        % By default:
        { enabled_producer, true },

        { result_produced, false },

        { result_collected, false }

    ] ).
destructor .
%
-spec destruct( wooper:state() ) -> wooper:state().
destruct( State ) ->

    % Class-specific actions:
    %?trace( "Deleting result producer." ),

    % Sanity check: results must have been produced and then collected
    % before this producer is deleted, otherwise they would be lost.
    case ?getAttr(result_produced) of

        true ->
            case ?getAttr(result_collected) of

                true ->
                    ok;

                false ->
                    throw( result_produced_yet_not_collected )

            end;

        false ->
            throw( result_not_produced )

    end,

    TrackerPid = class_InstanceTracker:get_local_tracker(),
    TrackerPid ! { unregisterResultProducer, self() },

    % Then call the direct mother class counterparts and allow chaining:
    State.
% Methods section.
% Sets the enable status for this producer.
%
% (oneway)
%
-spec setEnableStatus( wooper:state(), boolean() ) -> oneway_return().
setEnableStatus( State, NewStatus ) ->
    UpdatedState = setAttribute( State, enabled_producer, NewStatus ),
    ?wooper_return_state_only( UpdatedState ).
% Returns true iff the outputs of that producer are enabled.
%
% (const request)
%
-spec getEnableStatus( wooper:state() ) -> request_return( boolean() ).
getEnableStatus( State ) ->
    Status = ?getAttr(enabled_producer),
    ?wooper_return_state_result( State, Status ).
% Sends the specified results to the caller (generally the result manager).
%
% Note: must be overridden by the actual result producer.
%
% It expected to return either:
%
- { self ( ) , archive , BinArchive } where BinArchive is a binary corresponding
% to a ZIP archive of a set of files (ex: data and command file)
%
- { self ( ) , raw , { BinFilename , } } where BinFilename is the
filename ( as a binary ) of the transferred file , and BinContent is a binary of
% its content (ex: a PNG file, which should better not be transferred as an
% archive)
%
% The PID of the producer is sent, so that the caller is able to discriminate
% between multiple parallel calls.
%
% (const request, for synchronous yet concurrent operations)
%
-spec sendResults( wooper:state(), producer_options() ) ->
                        request_return( producer_result() ).
sendResults( _State, _Options ) ->
    % Abstract method: meant to be overridden by each actual result
    % producer child class (see the contract described above).
    throw( result_sending_not_implemented ).
% Forces the status of this producer regarding its results being produced or
% not.
%
-spec setResultProducedStatus( wooper:state(), boolean() ) ->
                        oneway_return().
setResultProducedStatus( State, AreProduced ) ->
    ProducedState = setAttribute( State, result_produced, AreProduced ),
    ?wooper_return_state_only( ProducedState ).
% Forces the status of this producer regarding its results being collected or
% not.
%
-spec setResultCollectedStatus( wooper:state(), boolean() ) ->
                        oneway_return().
setResultCollectedStatus( State, AreCollected ) ->
    CollectedState = setAttribute( State, result_collected, AreCollected ),
    ?wooper_return_state_only( CollectedState ).
% Static section.
% Returns a list of all possible generation-time options for result producers.
%
% (static)
%
-spec get_producer_options() -> [ producer_option() ].
get_producer_options() ->
    % See the producer_option() type for the meaning of each option.
    [ data_only, plot_only, data_and_plot ].
| null | https://raw.githubusercontent.com/softlab-ntua/bencherl/317bdbf348def0b2f9ed32cb6621e21083b7e0ca/app/sim-diasca/sim-diasca/src/core/src/data-management/result-management/class_ResultProducer.erl | erlang | it under the terms of the GNU Lesser General Public License as
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
If not, see </>.
Base class for all result producers.
It allows them to be declared automatically to the result manager, and
provides the basic behaviour so that they can interact with it.
Determines what are the mother classes of this class (if any):
Parameters taken by the constructor ('construct').
Member method declarations.
Static method declarations.
Type section.
Describes the types of output expected from a producer:
Describes the precise nature of a producer:
Must be included before class_TraceEmitter header:
Allows to use macros for trace sending:
For result_manager_name:
The class-specific attributes of a result producer are:
- result_manager_pid :: pid() is the PID of the result manager, necessary for
a producer to declare its outputs before being fed with data
- enabled_producer :: boolean() is a boolean telling whether the outputs of
this producer have a chance of being of interest for the simulation; if false,
then the producer may simply drop incoming samples, if not being asked by the
data source(s) that is using it about its enable status: it is still better to
have data sources stop sending samples instead of having the producer drop
them on receiving
- result_produced :: boolean() tells whether this producer already fully
generated its expected results
- result_collected :: boolean() tells wheter the results generated by this
producer have been already collected (by the result manager)
ProducerName is the name of this producer, specified as a plain string.
Then look-up the result manager, so that the actual producer child class
can send a notification to it later.
As the deployment is synchronous, the manager must already be available
(no specific waiting to perform):
[ ProducerName ] ),
Deferred reception for registerResultProducer:
By default:
Class-specific actions:
?trace( "Deleting result producer." ),
Then call the direct mother class counterparts and allow chaining:
Methods section.
Sets the enable status for this producer.
(oneway)
Returns true iff the outputs of that producer are enabled.
(const request)
Sends the specified results to the caller (generally the result manager).
Note: must be overridden by the actual result producer.
It expected to return either:
to a ZIP archive of a set of files (ex: data and command file)
its content (ex: a PNG file, which should better not be transferred as an
archive)
The PID of the producer is sent, so that the caller is able to discriminate
between multiple parallel calls.
(const request, for synchronous yet concurrent operations)
Forces the status of this producer regarding its results being produced or
not.
Forces the status of this producer regarding its results being collected or
not.
Static section.
Returns a list of all possible generation-time options for result producers.
(static)
| Copyright ( C ) 2010 - 2014 EDF R&D
This file is part of Sim - Diasca .
Sim - Diasca is free software : you can redistribute it and/or modify
published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
Sim - Diasca is distributed in the hope that it will be useful ,
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with .
Author : ( )
-module(class_ResultProducer).
-define( wooper_superclasses, [ class_TraceEmitter ] ).
-define( wooper_construct_parameters, ProducerName ).
Declaring all variations of WOOPER standard life - cycle operations :
( template pasted , two replacements performed to update arities )
-define( wooper_construct_export, new/1, new_link/1,
synchronous_new/1, synchronous_new_link/1,
synchronous_timed_new/1, synchronous_timed_new_link/1,
remote_new/2, remote_new_link/2, remote_synchronous_new/2,
remote_synchronous_new_link/2, remote_synchronisable_new_link/2,
remote_synchronous_timed_new/2, remote_synchronous_timed_new_link/2,
construct/2, destruct/1 ).
-define( wooper_method_export, setEnableStatus/2, getEnableStatus/1,
sendResults/2, setResultProducedStatus/2, setResultCollectedStatus/2 ).
-define( wooper_static_method_export, get_producer_options/0 ).
-type producer_name() :: string().
-type producer_option() :: 'data_only' | 'plot_only' | 'data_and_plot'.
-type producer_options() :: producer_option() | [ producer_option() ].
-type producer_nature() :: 'basic_probe' | 'virtual_probe' | 'undefined'.
-type producer_result() :: { pid(), 'archive', binary() }
| { pid(), 'raw', { file_utils:bin_file_name(), binary() } }.
-export_type([ producer_name/0, producer_option/0, producer_options/0,
producer_nature/0, producer_result/0 ]).
Allows to define WOOPER base variables and methods for that class :
-include("wooper.hrl").
-define(TraceEmitterCategorization,"Core.ResultManagement.ResultProducer").
-include("class_TraceEmitter.hrl").
-include("class_ResultManager.hrl").
Constructs a new result producer .
-spec construct( wooper:state(), string() ) -> wooper:state().
construct( State, ProducerName ) ->
First the direct mother classes :
TraceState = class_TraceEmitter:construct( State, ProducerName ),
TrackerPid = class_InstanceTracker:get_local_tracker(),
{ _SameState, BinName } = executeRequest( TraceState, getName ),
TrackerPid ! { registerResultProducer, BinName, self() },
ResultManagerPid = basic_utils:get_registered_pid_for( ?result_manager_name,
global ),
? send_debug_fmt ( TraceState , " Creating result producer ' ~s ' . " ,
receive
{ wooper_result, result_producer_registered } ->
ok
end,
setAttributes( TraceState, [
{ result_manager_pid, ResultManagerPid },
{ enabled_producer, true },
{ result_produced, false },
{ result_collected, false }
] ).
destructor .
-spec destruct( wooper:state() ) -> wooper:state().
destruct( State ) ->
case ?getAttr(result_produced) of
true ->
case ?getAttr(result_collected) of
true ->
ok;
false ->
throw( result_produced_yet_not_collected )
end;
false ->
throw( result_not_produced )
end,
TrackerPid = class_InstanceTracker:get_local_tracker(),
TrackerPid ! { unregisterResultProducer, self() },
State.
-spec setEnableStatus( wooper:state(), boolean() ) -> oneway_return().
setEnableStatus( State, NewStatus ) ->
?wooper_return_state_only(
setAttribute( State, enabled_producer, NewStatus ) ).
-spec getEnableStatus( wooper:state() ) -> request_return( boolean() ).
getEnableStatus( State ) ->
?wooper_return_state_result( State, ?getAttr(enabled_producer) ).
- { self ( ) , archive , BinArchive } where BinArchive is a binary corresponding
- { self ( ) , raw , { BinFilename , } } where BinFilename is the
filename ( as a binary ) of the transferred file , and BinContent is a binary of
-spec sendResults( wooper:state(), producer_options() ) ->
request_return( producer_result() ).
sendResults( _State, _Options ) ->
throw( result_sending_not_implemented ).
-spec setResultProducedStatus( wooper:state(), boolean() ) ->
oneway_return().
setResultProducedStatus( State, AreProduced ) ->
?wooper_return_state_only(
setAttribute( State, result_produced, AreProduced ) ).
-spec setResultCollectedStatus( wooper:state(), boolean() ) ->
oneway_return().
setResultCollectedStatus( State, AreCollected ) ->
?wooper_return_state_only(
setAttribute( State, result_collected, AreCollected ) ).
-spec get_producer_options() -> [ producer_option() ].
get_producer_options() ->
[ data_only, plot_only, data_and_plot ].
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.