_id stringlengths 64-64 | repository stringlengths 6-84 | name stringlengths 4-110 | content stringlengths 0-248k | license null | download_url stringlengths 89-454 | language stringclasses 7 values | comments stringlengths 0-74.6k | code stringlengths 0-248k |
---|---|---|---|---|---|---|---|---|
699f18571b5929971ca7799fc7a05c27a5049809a32445b3c3ab809fc78b49dc | input-output-hk/ouroboros-network | Main.hs | module Main (main) where
import Test.Tasty
import qualified Test.Ouroboros.Network.Testing.Data.AbsBearerInfo as AbsBearerInfo
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests =
testGroup "ouroboros-network-testing"
[ AbsBearerInfo.tests
]
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/7f51891ae0af376b3e912d098e4082d32df30ffe/ouroboros-network-testing/test/Main.hs | haskell | module Main (main) where
import Test.Tasty
import qualified Test.Ouroboros.Network.Testing.Data.AbsBearerInfo as AbsBearerInfo
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests =
testGroup "ouroboros-network-testing"
[ AbsBearerInfo.tests
]
|
|
f1c32b49102bf2a50dfe0209eb0fe0d1f898488344a018ecf26a7facda9cfd28 | camfort/fortran-src | Fortran2008Spec.hs | module Language.Fortran.Parser.Free.Fortran2008Spec ( spec ) where
import Test.Hspec
spec :: Spec
spec =
describe "Fortran 2008 Parser" $
it "TODO" pending
| null | https://raw.githubusercontent.com/camfort/fortran-src/a78ce84d4b0ce198ae765511b6c604a0663f8480/test/Language/Fortran/Parser/Free/Fortran2008Spec.hs | haskell | module Language.Fortran.Parser.Free.Fortran2008Spec ( spec ) where
import Test.Hspec
spec :: Spec
spec =
describe "Fortran 2008 Parser" $
it "TODO" pending
|
|
abff8ed4e9ab60cb3dba6387dc1e90e76f05638953cc4742a8c5af59dbd6f5a1 | nushio3/learn-haskell | greet-using-string.hs | main :: IO ()
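-- Read a name from standard input and print a greeting, using plain String I/O.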
main = do
name <- getLine
putStrLn $ "Hello, " ++ name
| null | https://raw.githubusercontent.com/nushio3/learn-haskell/eda0fd0b33e9c4b7552afd24c6a25a105cca5f94/greet-using-string.hs | haskell | main :: IO ()
main = do
name <- getLine
putStrLn $ "Hello, " ++ name
|
|
07e5f483ef249edd0408e1654f5490f9a8bd6172892c74924ff66188b2633f6d | SahilKang/cl-rdkafka | toppar.lisp | ;;; Copyright (C) 2018-2020 < >
;;;
;;; This file is part of cl-rdkafka.
;;;
;;; cl-rdkafka is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation, either version 3 of the License, or
;;; (at your option) any later version.
;;;
;;; cl-rdkafka is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with cl-rdkafka. If not, see </>.
(in-package #:cl-rdkafka)
(defmacro foreach-toppar (toppar-list (&rest fields) &body body)
"For each element in TOPPAR-LIST, BODY is evaluated under FIELDS bindings.
The symbols in FIELDS are bound to the corresponding fields of each
TOPPAR-LIST element.
TOPPAR-LIST should be a pointer to a
cl-rdkafka/ll:rd-kafka-topic-partition-list."
(let* ((*toppar-list (gensym))
(elems (gensym))
(elem (gensym))
(count (gensym))
(i (gensym))
(field-bindings (mapcar
(lambda (symbol)
(let ((field (find-symbol (string symbol)
'cl-rdkafka/ll)))
(unless field
(error "~&Could not find symbol for ~S" symbol))
`(,symbol (getf ,elem ',field))))
fields)))
`(loop
with ,*toppar-list = (cffi:mem-ref
,toppar-list
'(:struct cl-rdkafka/ll:rd-kafka-topic-partition-list))
with ,elems = (getf ,*toppar-list 'cl-rdkafka/ll:elems)
with ,count = (getf ,*toppar-list 'cl-rdkafka/ll:cnt)
for ,i below ,count
for ,elem = (cffi:mem-aref
,elems
'(:struct cl-rdkafka/ll:rd-kafka-topic-partition)
,i)
do (let ,field-bindings
,@body))))
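;; Editor's illustrative sketch (not in the original source): iterate over a
;; toppar-list pointer TPL, binding each element's TOPIC and OFFSET fields by
;; name. Field symbols are resolved in the CL-RDKAFKA/LL package, so this
;; assumes the rd-kafka-topic-partition struct exposes TOPIC and OFFSET slots.
;;
;; (foreach-toppar tpl (topic offset)
;;   (format t "~A@~A~%" topic offset))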
(defun add-toppar (toppar-list topic partition offset metadata)
(let ((toppar (cl-rdkafka/ll:rd-kafka-topic-partition-list-add
toppar-list
topic
partition)))
(flet ((set-field (field value)
(setf (cffi:foreign-slot-value
toppar
'(:struct cl-rdkafka/ll:rd-kafka-topic-partition)
field)
value)))
(set-field 'cl-rdkafka/ll:offset offset)
(when metadata
(set-field 'cl-rdkafka/ll:metadata (bytes->pointer metadata))
(set-field 'cl-rdkafka/ll:metadata-size (length metadata))))
toppar))
(defun alloc-toppar-list
(seq
&key
(topic #'identity)
(partition (lambda (x)
(declare (ignore x))
-1))
(offset (lambda (x)
(declare (ignore x))
cl-rdkafka/ll:rd-kafka-offset-invalid))
(metadata (lambda (x)
(declare (ignore x))
nil)))
"Returns a newly allocated
cl-rdkafka/ll:rd-kafka-topic-partition-list initialized with the
elements in SEQ.
The keyword args denote functions which will be applied to each
element of SEQ to extract the corresponding
cl-rdkafka/ll:rd-kafka-topic-partition struct field."
(let ((toppar-list (cl-rdkafka/ll:rd-kafka-topic-partition-list-new
(length seq))))
(when (cffi:null-pointer-p toppar-list)
(error 'allocation-error :name "rd-kafka-topic-partition-list"))
(handler-case
(flet ((add-toppar (x)
(add-toppar toppar-list
(funcall topic x)
(funcall partition x)
(funcall offset x)
(funcall metadata x))))
(map nil #'add-toppar seq)
toppar-list)
(condition (c)
(cl-rdkafka/ll:rd-kafka-topic-partition-list-destroy toppar-list)
(error c)))))
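;; Editor's illustrative sketch (not in the original source): allocate a list
;; from plain topic-name strings, keeping the default partition/offset/metadata
;; extractors. The returned pointer must eventually be destroyed (see
;; WITH-TOPPAR-LIST below).
;;
;; (alloc-toppar-list '("topic-a" "topic-b"))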
(defun alloc-toppar-list-from-alist (seq)
"Convenience wrapper for ALLOC-TOPPAR-LIST.
The elements of SEQ should look like either:
* ((topic . partition) . (offset . metadata))
* ((topic . partition) . offset)"
(alloc-toppar-list seq
:topic #'caar
:partition #'cdar
:offset (lambda (pair)
(if (consp (cdr pair))
(cadr pair)
(cdr pair)))
:metadata (lambda (pair)
(when (consp (cdr pair))
(cddr pair)))))
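;; Editor's illustrative sketch (not in the original source): one element per
;; docstring form -- the first with an offset only, the second with an offset
;; plus metadata bytes.
;;
;; (alloc-toppar-list-from-alist
;;  '((("topic-a" . 0) . 42)
;;    (("topic-b" . 1) . (7 . #(1 2 3)))))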
(defmacro with-toppar-list (symbol alloc-form &body body)
`(let ((,symbol ,alloc-form))
(unwind-protect
(progn
,@body)
(unless (or (null ,symbol) (cffi:null-pointer-p ,symbol))
(cl-rdkafka/ll:rd-kafka-topic-partition-list-destroy ,symbol)))))
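;; Editor's illustrative sketch (not in the original source): combining the
;; pieces above; the foreign list is destroyed even if the body unwinds.
;;
;; (with-toppar-list tpl
;;     (alloc-toppar-list '("topic-a"))
;;   (foreach-toppar tpl (topic partition)
;;     (format t "~A [~A]~%" topic partition)))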
| null | https://raw.githubusercontent.com/SahilKang/cl-rdkafka/4d0b6f7f9b102769cb91a020e4e192a2ea066e0b/src/high-level/toppar.lisp | lisp |
This file is part of cl-rdkafka.
cl-rdkafka is free software: you can redistribute it and/or modify
(at your option) any later version.
cl-rdkafka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with cl-rdkafka. If not, see </>. | Copyright ( C ) 2018 - 2020 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package #:cl-rdkafka)
(defmacro foreach-toppar (toppar-list (&rest fields) &body body)
"For each element in TOPPAR-LIST, BODY is evaluated under FIELDS bindings.
The symbols in FIELDS are bound to the corresponding fields of each
TOPPAR-LIST element.
TOPPAR-LIST should be a pointer to a
cl-rdkafka/ll:rd-kafka-topic-partition-list."
(let* ((*toppar-list (gensym))
(elems (gensym))
(elem (gensym))
(count (gensym))
(i (gensym))
(field-bindings (mapcar
(lambda (symbol)
(let ((field (find-symbol (string symbol)
'cl-rdkafka/ll)))
(unless field
(error "~&Could not find symbol for ~S" symbol))
`(,symbol (getf ,elem ',field))))
fields)))
`(loop
with ,*toppar-list = (cffi:mem-ref
,toppar-list
'(:struct cl-rdkafka/ll:rd-kafka-topic-partition-list))
with ,elems = (getf ,*toppar-list 'cl-rdkafka/ll:elems)
with ,count = (getf ,*toppar-list 'cl-rdkafka/ll:cnt)
for ,i below ,count
for ,elem = (cffi:mem-aref
,elems
'(:struct cl-rdkafka/ll:rd-kafka-topic-partition)
,i)
do (let ,field-bindings
,@body))))
(defun add-toppar (toppar-list topic partition offset metadata)
(let ((toppar (cl-rdkafka/ll:rd-kafka-topic-partition-list-add
toppar-list
topic
partition)))
(flet ((set-field (field value)
(setf (cffi:foreign-slot-value
toppar
'(:struct cl-rdkafka/ll:rd-kafka-topic-partition)
field)
value)))
(set-field 'cl-rdkafka/ll:offset offset)
(when metadata
(set-field 'cl-rdkafka/ll:metadata (bytes->pointer metadata))
(set-field 'cl-rdkafka/ll:metadata-size (length metadata))))
toppar))
(defun alloc-toppar-list
(seq
&key
(topic #'identity)
(partition (lambda (x)
(declare (ignore x))
-1))
(offset (lambda (x)
(declare (ignore x))
cl-rdkafka/ll:rd-kafka-offset-invalid))
(metadata (lambda (x)
(declare (ignore x))
nil)))
"Returns a newly allocated
cl-rdkafka/ll:rd-kafka-topic-partition-list initialized with the
elements in SEQ.
The keyword args denote functions which will be applied to each
element of SEQ to extract the corresponding
cl-rdkafka/ll:rd-kafka-topic-partition struct field."
(let ((toppar-list (cl-rdkafka/ll:rd-kafka-topic-partition-list-new
(length seq))))
(when (cffi:null-pointer-p toppar-list)
(error 'allocation-error :name "rd-kafka-topic-partition-list"))
(handler-case
(flet ((add-toppar (x)
(add-toppar toppar-list
(funcall topic x)
(funcall partition x)
(funcall offset x)
(funcall metadata x))))
(map nil #'add-toppar seq)
toppar-list)
(condition (c)
(cl-rdkafka/ll:rd-kafka-topic-partition-list-destroy toppar-list)
(error c)))))
(defun alloc-toppar-list-from-alist (seq)
"Convenience wrapper for ALLOC-TOPPAR-LIST.
The elements of SEQ should look like either:
* ((topic . partition) . (offset . metadata))
* ((topic . partition) . offset)"
(alloc-toppar-list seq
:topic #'caar
:partition #'cdar
:offset (lambda (pair)
(if (consp (cdr pair))
(cadr pair)
(cdr pair)))
:metadata (lambda (pair)
(when (consp (cdr pair))
(cddr pair)))))
(defmacro with-toppar-list (symbol alloc-form &body body)
`(let ((,symbol ,alloc-form))
(unwind-protect
(progn
,@body)
(unless (or (null ,symbol) (cffi:null-pointer-p ,symbol))
(cl-rdkafka/ll:rd-kafka-topic-partition-list-destroy ,symbol)))))
|
611ac001fb63afcdfbd97f2a3689368a8b8ea0a087ad3bea5906e46fc6c37898 | alpaca-lang/alpaca | stacktrace_tests.erl | -*- mode : erlang;erlang - indent - level : 4;indent - tabs - mode : nil -*-
ex : ts=4 sw=4 et
Copyright 2018
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%% Some basic tests to check that source (line and file) annotations show up in
%% stack traces. Far from exhaustive, just the beginning of making sure we can
%% get decent runtime feedback on failures.
-module(stacktrace_tests).
-include_lib("eunit/include/eunit.hrl").
-include("alpaca.hrl").
simple_badarith_test() ->
Mod =
"module arith_err \n"
"export main/1 \n"
"let main x = 1 + x",
{error, error, badarith, Trace} =
run_for_trace(
[{"arith_err.alp", Mod}],
fun() -> alpaca_arith_err:main(atom) end),
Expected = {alpaca_arith_err, main, 1, [{file, "arith_err.alp"}, {line, 3}]},
?assertMatch([Expected | _], Trace).
indirect_badarith_test() ->
Mod =
"module indirect_arith \n"
"export foo/1 \n"
"let bar x = x + 1 \n"
"let foo y = bar y",
{error, error, badarith, Trace} =
run_for_trace(
[{"indirect_arith.alp", Mod}],
fun() -> alpaca_indirect_arith:foo(atom_again) end),
Expected1 = {alpaca_indirect_arith, bar, 1, [{file, "indirect_arith.alp"}, {line, 3}]},
?assertMatch([Expected1 | _], Trace).
fun_pattern_test() ->
Mod =
"module fun_pattern \n"
"export f/1 \n"
"let f 0 = :zero \n"
"let f 1 = :one \n",
{error, error, if_clause, Trace} =
run_for_trace(
[{"fun_pattern.alp", Mod}],
fun() -> alpaca_fun_pattern:f(2) end),
Incorrect line number , see the following issue :
%% -lang/alpaca/issues/263
Expected = {alpaca_fun_pattern, f, 1, [{file, "fun_pattern.alp"}, {line, 4}]},
?assertMatch([Expected | _], Trace).
throw_test() ->
Mod =
"module t \n"
"export f/1 \n"
"let f () = throw :wat",
{error, throw, wat, Trace} = run_for_trace(
[{"t.alp", Mod}],
fun() -> alpaca_t:f({}) end),
?assertMatch([{alpaca_t, f, 1, [{file, "t.alp"}, {line, 3}]} | _], Trace).
multi_module_test() ->
Mod1 =
"module a \n"
"export f/1 \n"
"let f x = x + 1",
Mod2 =
"module b \n"
"export g/1 \n"
"let g x = a.f x",
{error, error, badarith, Trace} = run_for_trace(
[{"a.alp", Mod1}, {"b.alp", Mod2}],
fun() -> alpaca_b:g(an_atom) end),
%% Somewhat surprising, I thought I might get the full trace through module
%% b as well.
?assertMatch([{alpaca_a, f, 1, [{file, "a.alp"}, {line, 3}]} | _], Trace).
A wrapper that compiles the provided code for one or more modules and
%% executes the provided operation. Captures any resulting stack trace so that
%% the caller can check correctness.
run_for_trace(ModulesWithFilenames, Expr) ->
Temporary , callers should change :
ToCompile = [{FN, Code} || {FN, Code} <- ModulesWithFilenames],
{ok, Compiled} = alpaca:compile({text_set, ToCompile}),
Ms = lists:map(
fun(#compiled_module{name=M, filename=F, bytes=B}) -> {M, F, B} end,
Compiled
),
[code:load_binary(M, F, B) || {M, F, B} <- Ms],
Ret = try Expr() of
Res -> {ok, Res}
catch Type:Detail ->
Trace = erlang:get_stacktrace(),
{error, Type, Detail, Trace}
end,
[pd(M) || {M, _, _} <- Ms],
Ret.
%% Purge and delete the given module from the VM.
pd(Module) ->
code:purge(Module),
code:delete(Module).
| null | https://raw.githubusercontent.com/alpaca-lang/alpaca/aa2bb5594dda8292ca0bffb8e8a6ebc0f60e8dbc/test/stacktrace_tests.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Some basic tests to check that source (line and file) annotations show up in
stack traces. Far from exhaustive, just the beginning of making sure we can
get decent runtime feedback on failures.
-lang/alpaca/issues/263
Somewhat surprising, I thought I might get the full trace through module
b as well.
executes the provided operation. Captures any resulting stack trace so that
the caller can check correctness.
Purge and delete the given module from the VM. | -*- mode : erlang;erlang - indent - level : 4;indent - tabs - mode : nil -*-
ex : ts=4 sw=4 et
Copyright 2018
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(stacktrace_tests).
-include_lib("eunit/include/eunit.hrl").
-include("alpaca.hrl").
simple_badarith_test() ->
Mod =
"module arith_err \n"
"export main/1 \n"
"let main x = 1 + x",
{error, error, badarith, Trace} =
run_for_trace(
[{"arith_err.alp", Mod}],
fun() -> alpaca_arith_err:main(atom) end),
Expected = {alpaca_arith_err, main, 1, [{file, "arith_err.alp"}, {line, 3}]},
?assertMatch([Expected | _], Trace).
indirect_badarith_test() ->
Mod =
"module indirect_arith \n"
"export foo/1 \n"
"let bar x = x + 1 \n"
"let foo y = bar y",
{error, error, badarith, Trace} =
run_for_trace(
[{"indirect_arith.alp", Mod}],
fun() -> alpaca_indirect_arith:foo(atom_again) end),
Expected1 = {alpaca_indirect_arith, bar, 1, [{file, "indirect_arith.alp"}, {line, 3}]},
?assertMatch([Expected1 | _], Trace).
fun_pattern_test() ->
Mod =
"module fun_pattern \n"
"export f/1 \n"
"let f 0 = :zero \n"
"let f 1 = :one \n",
{error, error, if_clause, Trace} =
run_for_trace(
[{"fun_pattern.alp", Mod}],
fun() -> alpaca_fun_pattern:f(2) end),
Incorrect line number , see the following issue :
Expected = {alpaca_fun_pattern, f, 1, [{file, "fun_pattern.alp"}, {line, 4}]},
?assertMatch([Expected | _], Trace).
throw_test() ->
Mod =
"module t \n"
"export f/1 \n"
"let f () = throw :wat",
{error, throw, wat, Trace} = run_for_trace(
[{"t.alp", Mod}],
fun() -> alpaca_t:f({}) end),
?assertMatch([{alpaca_t, f, 1, [{file, "t.alp"}, {line, 3}]} | _], Trace).
multi_module_test() ->
Mod1 =
"module a \n"
"export f/1 \n"
"let f x = x + 1",
Mod2 =
"module b \n"
"export g/1 \n"
"let g x = a.f x",
{error, error, badarith, Trace} = run_for_trace(
[{"a.alp", Mod1}, {"b.alp", Mod2}],
fun() -> alpaca_b:g(an_atom) end),
?assertMatch([{alpaca_a, f, 1, [{file, "a.alp"}, {line, 3}]} | _], Trace).
A wrapper that compiles the provided code for one or more modules and
run_for_trace(ModulesWithFilenames, Expr) ->
Temporary , callers should change :
ToCompile = [{FN, Code} || {FN, Code} <- ModulesWithFilenames],
{ok, Compiled} = alpaca:compile({text_set, ToCompile}),
Ms = lists:map(
fun(#compiled_module{name=M, filename=F, bytes=B}) -> {M, F, B} end,
Compiled
),
[code:load_binary(M, F, B) || {M, F, B} <- Ms],
Ret = try Expr() of
Res -> {ok, Res}
catch Type:Detail ->
Trace = erlang:get_stacktrace(),
{error, Type, Detail, Trace}
end,
[pd(M) || {M, _, _} <- Ms],
Ret.
pd(Module) ->
code:purge(Module),
code:delete(Module).
|
27fe928bff917512f5b89357f69170b9cab1630b856f7a9f4ecc6e30cd2ccbf8 | finkel-lang/finkel | import.hs | foreign import ccall safe "string.h strlen" -- Haskell
cstrlen :: Ptr CChar -> IO CSize
| null | https://raw.githubusercontent.com/finkel-lang/finkel/c3c7729d5228bd7e0cf76e8ff05fe2f79a0ec0a2/doc/include/language-syntax/ffi/import.hs | haskell | Haskell | cstrlen :: Ptr CChar -> IO CSize
|
7a667dd8a413dc7bb1ded3bf4659c79ed7c0b56e8897ee2f567559d67de2320e | andorp/mini-grin | Definitional.hs | # LANGUAGE LambdaCase , GeneralizedNewtypeDeriving , InstanceSigs , TypeFamilies , TemplateHaskell , ScopedTypeVariables #
module Grin.Interpreter.Definitional where
import Control.Monad (forM_, when)
import Control.Monad.Fail
import Control.Monad.Reader (MonadReader(..))
import Control.Monad.State (MonadState(..))
import Control.Monad.Trans (MonadIO(liftIO), lift)
import Control.Monad.Trans.Reader hiding (ask, local)
import Control.Monad.Trans.State hiding (state, get)
import Data.Int
import Data.Maybe (fromJust, fromMaybe, isNothing)
import Data.Word
import Grin.Exp
import Grin.Interpreter.Base
import Grin.Value (Name, Tag)
import Lens.Micro.Platform
import Prelude hiding (fail)
import Grin.GExpToExp (gexpToExp)
import Grin.Interpreter.Store (Store(..))
import qualified Grin.Interpreter.Store as Store
import Grin.Interpreter.Env (Env)
import qualified Grin.Interpreter.Env as Env
import qualified Data.Map.Strict as Map
import qualified Grin.Value as Grin
import qualified Grin.Examples as Examples
-- * Definitional Interpreter
data SVal
= SInt64 Int64
| SWord64 Word64
| SFloat Float
| SBool Bool
| SChar Char
| SLoc Loc
deriving (Eq, Ord, Show)
simpleValue :: Grin.SimpleValue -> SVal
simpleValue = \case
Grin.SInt64 i -> SInt64 i
Grin.SWord64 w -> SWord64 w
Grin.SFloat f -> SFloat f
Grin.SBool b -> SBool b
Grin.SChar c -> SChar c
data Node = Node Tag [SVal]
deriving (Eq, Ord, Show)
newtype Loc = Loc Int
deriving (Eq, Ord, Show)
data DVal
= DNode Node
| DVal SVal
| DUnit
deriving (Eq, Ord, Show)
data DefEnv m v = DefEnv
{ _defFuns :: Map.Map Name Exp
, _defOps :: Map.Map Name ([v] -> m v)
, _defEnv :: Env v
}
makeLenses ''DefEnv
newtype DefinitionalT m a = DefinitionalT
{ definitionalT :: StateT (Store Loc Node) (ReaderT (DefEnv m DVal) m) a
}
deriving (Functor, Applicative, Monad, MonadFail, MonadIO, MonadReader (DefEnv m DVal), MonadState (Store Loc Node))
runDefinitionalT :: (Monad m) => Exp -> [(Name, [DVal] -> m DVal)] -> DefinitionalT m a -> m a
runDefinitionalT prog ops n = runReaderT (evalStateT (definitionalT n) Store.empty) definitional
where
definitional =
DefEnv
(programToDefs prog)
(Map.fromList ops)
Env.empty
instance (Applicative m, Monad m, MonadFail m) => Interpreter (DefinitionalT m) where
type Val (DefinitionalT m) = DVal
type HeapVal (DefinitionalT m) = Node
type Addr (DefinitionalT m) = Loc
value :: Grin.Value -> DefinitionalT m DVal
value = \case
(Grin.VNode (Grin.Node t0 ps)) -> do
p <- askEnv
vs <- pure $ map (Env.lookup p) ps
pure $ DNode $ Node t0 $ map (\case
DVal v -> v
other -> error $ "value " ++ show other
) vs
(Grin.VPrim sv) -> pure $ DVal $ simpleValue sv
val2addr :: DVal -> DefinitionalT m Loc
val2addr = \case
(DVal (SLoc l)) -> pure l
other -> error $ "val2addr" ++ show other
addr2val :: Loc -> DefinitionalT m DVal
addr2val = pure . DVal . SLoc
heapVal2val :: Node -> DefinitionalT m DVal
heapVal2val = pure . DNode
val2heapVal :: DVal -> DefinitionalT m Node
val2heapVal = \case
DNode n -> pure n
other -> error $ "val2heapVal: " ++ show other
unit :: DefinitionalT m DVal
unit = pure DUnit
bindPattern :: DVal -> (Tag, [Name]) -> DefinitionalT m [(Name, DVal)]
bindPattern (DNode (Node t0 vs)) (t1, ps)
| t0 == t1 = pure (ps `zip` (DVal <$> vs))
bindPattern pattern match = error $ "bindPattern: " ++ show (pattern, match)
askEnv :: (DefinitionalT m) (Env DVal)
askEnv = _defEnv <$> ask
localEnv :: Env DVal -> (DefinitionalT m) DVal -> (DefinitionalT m) DVal
localEnv e = local (defEnv .~ e)
lookupFun :: Name -> (DefinitionalT m) Exp
lookupFun funName = (fromMaybe (error $ "Missing:" ++ show funName) . Map.lookup funName . _defFuns) <$> ask
isExternal :: Name -> (DefinitionalT m) Bool
isExternal funName = (Map.member funName . _defOps) <$> ask
external :: Name -> [DVal] -> (DefinitionalT m) DVal
external funName params = DefinitionalT $ do
op <- lift ((fromJust . Map.lookup funName . _defOps) <$> ask)
lift (lift (op params))
evalCase :: (Exp -> (DefinitionalT m) DVal) -> DVal -> [Alt] -> (DefinitionalT m) DVal
evalCase ev0 v alts = evalBranch v $ head $ filter (\(Alt n p _b) -> match v p) alts
where
match :: DVal -> CPat -> Bool
match DUnit p = error $ "matching failure:" ++ show (DUnit, p)
match (DVal (SLoc l)) p = error $ "matching failure:" ++ show (l, p)
match (DNode (Node t0 _p)) (NodePat t1 _v) = t0 == t1
match (DVal l0) (LitPat l1) = l0 == (simpleValue l1)
match (DNode{}) DefaultPat = True
match (DVal{}) DefaultPat = True
match _ _ = False
evalBranch :: DVal -> Alt -> (DefinitionalT m) DVal
evalBranch (DNode (Node t0 vs)) (Alt n (NodePat t1 nps) body)
| t0 == t1 = do
p0 <- askEnv
let p1 = Env.insert n v p0
let p2 = Env.inserts (nps `zip` (DVal <$> vs)) p1
localEnv p2 (ev0 body)
evalBranch _ (Alt n _ body) = do
p <- askEnv
localEnv (Env.insert n v p) $ ev0 body
evalBranch pat alt = error $ "evalBranch: " ++ show (pat, alt)
funCall :: (Exp -> DefinitionalT m DVal) -> Name -> [DVal] -> DefinitionalT m DVal
funCall ev0 fn vs = do
(Def _ fps body) <- lookupFun fn
let p' = Env.inserts (fps `zip` vs) Env.empty
localEnv p' (ev0 body)
allocStore :: Name -> DefinitionalT m DVal
allocStore _ = do
(Store s) <- get
let a = Loc $ Map.size s
addr2val a
fetchStore :: DVal -> DefinitionalT m DVal
fetchStore l = do
s <- get
a <- val2addr l
heapVal2val $ Store.lookup a s
extStore :: DVal -> DVal -> DefinitionalT m ()
extStore l n = do
a <- val2addr l
v <- val2heapVal n
DefinitionalT $ modify (Store.insert a v)
evalDefinitional :: (Monad m, MonadFail m, MonadIO m) => Program -> m DVal
evalDefinitional prog = do
let ops = [ ("prim_int_add", prim_int_add)
, ("prim_int_sub", prim_int_sub)
, ("prim_int_mul", prim_int_mul)
, ("prim_int_print", prim_int_print)
, ("prim_int_eq", prim_int_eq)
, ("prim_int_gt", prim_int_gt)
]
let opsMap = Map.fromList ops
forM_ exts $ \ext -> do
when (isNothing (Map.lookup (eName ext) opsMap)) $
fail $ "Missing external: " ++ show (eName ext)
runDefinitionalT prog ops (eval (SApp "main" []))
where
exts = externals prog
prim_int_add [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SInt64 (a + b)))
prim_int_add ps = error $ "prim_int_add " ++ show ps
prim_int_sub [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SInt64 (a - b)))
prim_int_sub ps = error $ "prim_int_sub " ++ show ps
prim_int_mul [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SInt64 (a * b)))
prim_int_mul ps = error $ "prim_int_mul " ++ show ps
prim_int_eq [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SBool (a == b)))
prim_int_eq ps = error $ "prim_int_eq " ++ show ps
prim_int_gt [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SBool (a > b)))
prim_int_gt ps = error $ "prim_int_gt " ++ show ps
prim_int_print [(DVal (SInt64 i))] = liftIO $ print i >> pure DUnit
prim_int_print ps = error $ "prim_int_print " ++ show ps
-- * Test runs
tests :: IO ()
tests = do
print =<< (evalDefinitional $ gexpToExp $ Examples.add)
print =<< (evalDefinitional $ gexpToExp $ Examples.fact)
print =<< (evalDefinitional $ gexpToExp $ Examples.sumSimple)
| null | https://raw.githubusercontent.com/andorp/mini-grin/99913efa0f81cb2a76893d3e48c6d025df9c40c9/grin/src/Grin/Interpreter/Definitional.hs | haskell | * Definitional Interpreter
* Test runs | # LANGUAGE LambdaCase , GeneralizedNewtypeDeriving , InstanceSigs , TypeFamilies , TemplateHaskell , ScopedTypeVariables #
module Grin.Interpreter.Definitional where
import Control.Monad (forM_, when)
import Control.Monad.Fail
import Control.Monad.Reader (MonadReader(..))
import Control.Monad.State (MonadState(..))
import Control.Monad.Trans (MonadIO(liftIO), lift)
import Control.Monad.Trans.Reader hiding (ask, local)
import Control.Monad.Trans.State hiding (state, get)
import Data.Int
import Data.Maybe (fromJust, fromMaybe, isNothing)
import Data.Word
import Grin.Exp
import Grin.Interpreter.Base
import Grin.Value (Name, Tag)
import Lens.Micro.Platform
import Prelude hiding (fail)
import Grin.GExpToExp (gexpToExp)
import Grin.Interpreter.Store (Store(..))
import qualified Grin.Interpreter.Store as Store
import Grin.Interpreter.Env (Env)
import qualified Grin.Interpreter.Env as Env
import qualified Data.Map.Strict as Map
import qualified Grin.Value as Grin
import qualified Grin.Examples as Examples
data SVal
= SInt64 Int64
| SWord64 Word64
| SFloat Float
| SBool Bool
| SChar Char
| SLoc Loc
deriving (Eq, Ord, Show)
simpleValue :: Grin.SimpleValue -> SVal
simpleValue = \case
Grin.SInt64 i -> SInt64 i
Grin.SWord64 w -> SWord64 w
Grin.SFloat f -> SFloat f
Grin.SBool b -> SBool b
Grin.SChar c -> SChar c
data Node = Node Tag [SVal]
deriving (Eq, Ord, Show)
newtype Loc = Loc Int
deriving (Eq, Ord, Show)
data DVal
= DNode Node
| DVal SVal
| DUnit
deriving (Eq, Ord, Show)
data DefEnv m v = DefEnv
{ _defFuns :: Map.Map Name Exp
, _defOps :: Map.Map Name ([v] -> m v)
, _defEnv :: Env v
}
makeLenses ''DefEnv
newtype DefinitionalT m a = DefinitionalT
{ definitionalT :: StateT (Store Loc Node) (ReaderT (DefEnv m DVal) m) a
}
deriving (Functor, Applicative, Monad, MonadFail, MonadIO, MonadReader (DefEnv m DVal), MonadState (Store Loc Node))
runDefinitionalT :: (Monad m) => Exp -> [(Name, [DVal] -> m DVal)] -> DefinitionalT m a -> m a
runDefinitionalT prog ops n = runReaderT (evalStateT (definitionalT n) Store.empty) definitional
where
definitional =
DefEnv
(programToDefs prog)
(Map.fromList ops)
Env.empty
instance (Applicative m, Monad m, MonadFail m) => Interpreter (DefinitionalT m) where
type Val (DefinitionalT m) = DVal
type HeapVal (DefinitionalT m) = Node
type Addr (DefinitionalT m) = Loc
value :: Grin.Value -> DefinitionalT m DVal
value = \case
(Grin.VNode (Grin.Node t0 ps)) -> do
p <- askEnv
vs <- pure $ map (Env.lookup p) ps
pure $ DNode $ Node t0 $ map (\case
DVal v -> v
other -> error $ "value " ++ show other
) vs
(Grin.VPrim sv) -> pure $ DVal $ simpleValue sv
val2addr :: DVal -> DefinitionalT m Loc
val2addr = \case
(DVal (SLoc l)) -> pure l
other -> error $ "val2addr" ++ show other
addr2val :: Loc -> DefinitionalT m DVal
addr2val = pure . DVal . SLoc
heapVal2val :: Node -> DefinitionalT m DVal
heapVal2val = pure . DNode
val2heapVal :: DVal -> DefinitionalT m Node
val2heapVal = \case
DNode n -> pure n
other -> error $ "val2heapVal: " ++ show other
unit :: DefinitionalT m DVal
unit = pure DUnit
bindPattern :: DVal -> (Tag, [Name]) -> DefinitionalT m [(Name, DVal)]
bindPattern (DNode (Node t0 vs)) (t1, ps)
| t0 == t1 = pure (ps `zip` (DVal <$> vs))
bindPattern pattern match = error $ "bindPattern: " ++ show (pattern, match)
askEnv :: (DefinitionalT m) (Env DVal)
askEnv = _defEnv <$> ask
localEnv :: Env DVal -> (DefinitionalT m) DVal -> (DefinitionalT m) DVal
localEnv e = local (defEnv .~ e)
lookupFun :: Name -> (DefinitionalT m) Exp
lookupFun funName = (fromMaybe (error $ "Missing:" ++ show funName) . Map.lookup funName . _defFuns) <$> ask
isExternal :: Name -> (DefinitionalT m) Bool
isExternal funName = (Map.member funName . _defOps) <$> ask
external :: Name -> [DVal] -> (DefinitionalT m) DVal
external funName params = DefinitionalT $ do
op <- lift ((fromJust . Map.lookup funName . _defOps) <$> ask)
lift (lift (op params))
evalCase :: (Exp -> (DefinitionalT m) DVal) -> DVal -> [Alt] -> (DefinitionalT m) DVal
evalCase ev0 v alts = evalBranch v $ head $ filter (\(Alt n p _b) -> match v p) alts
where
match :: DVal -> CPat -> Bool
match DUnit p = error $ "matching failure:" ++ show (DUnit, p)
match (DVal (SLoc l)) p = error $ "matching failure:" ++ show (l, p)
match (DNode (Node t0 _p)) (NodePat t1 _v) = t0 == t1
match (DVal l0) (LitPat l1) = l0 == (simpleValue l1)
match (DNode{}) DefaultPat = True
match (DVal{}) DefaultPat = True
match _ _ = False
evalBranch :: DVal -> Alt -> (DefinitionalT m) DVal
evalBranch (DNode (Node t0 vs)) (Alt n (NodePat t1 nps) body)
| t0 == t1 = do
p0 <- askEnv
let p1 = Env.insert n v p0
let p2 = Env.inserts (nps `zip` (DVal <$> vs)) p1
localEnv p2 (ev0 body)
evalBranch _ (Alt n _ body) = do
p <- askEnv
localEnv (Env.insert n v p) $ ev0 body
evalBranch pat alt = error $ "evalBranch: " ++ show (pat, alt)
funCall :: (Exp -> DefinitionalT m DVal) -> Name -> [DVal] -> DefinitionalT m DVal
funCall ev0 fn vs = do
(Def _ fps body) <- lookupFun fn
let p' = Env.inserts (fps `zip` vs) Env.empty
localEnv p' (ev0 body)
allocStore :: Name -> DefinitionalT m DVal
allocStore _ = do
(Store s) <- get
let a = Loc $ Map.size s
addr2val a
fetchStore :: DVal -> DefinitionalT m DVal
fetchStore l = do
s <- get
a <- val2addr l
heapVal2val $ Store.lookup a s
extStore :: DVal -> DVal -> DefinitionalT m ()
extStore l n = do
a <- val2addr l
v <- val2heapVal n
DefinitionalT $ modify (Store.insert a v)
evalDefinitional :: (Monad m, MonadFail m, MonadIO m) => Program -> m DVal
evalDefinitional prog = do
let ops = [ ("prim_int_add", prim_int_add)
, ("prim_int_sub", prim_int_sub)
, ("prim_int_mul", prim_int_mul)
, ("prim_int_print", prim_int_print)
, ("prim_int_eq", prim_int_eq)
, ("prim_int_gt", prim_int_gt)
]
let opsMap = Map.fromList ops
forM_ exts $ \ext -> do
when (isNothing (Map.lookup (eName ext) opsMap)) $
fail $ "Missing external: " ++ show (eName ext)
runDefinitionalT prog ops (eval (SApp "main" []))
where
exts = externals prog
prim_int_add [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SInt64 (a + b)))
prim_int_add ps = error $ "prim_int_add " ++ show ps
prim_int_sub [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SInt64 (a - b)))
prim_int_sub ps = error $ "prim_int_sub " ++ show ps
prim_int_mul [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SInt64 (a * b)))
prim_int_mul ps = error $ "prim_int_mul " ++ show ps
prim_int_eq [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SBool (a == b)))
prim_int_eq ps = error $ "prim_int_eq " ++ show ps
prim_int_gt [(DVal (SInt64 a)),(DVal (SInt64 b))] = pure (DVal (SBool (a > b)))
prim_int_gt ps = error $ "prim_int_gt " ++ show ps
prim_int_print [(DVal (SInt64 i))] = liftIO $ print i >> pure DUnit
prim_int_print ps = error $ "prim_int_print " ++ show ps
tests :: IO ()
tests = do
print =<< (evalDefinitional $ gexpToExp $ Examples.add)
print =<< (evalDefinitional $ gexpToExp $ Examples.fact)
print =<< (evalDefinitional $ gexpToExp $ Examples.sumSimple)
|
17742b7b037d8d8398678284744682e4981412a721371b12ef2c883354e01cee | acl2/acl2 | fixed-point-shift.cpp.ref.ast.lsp |
(funcdef rshift (x) (block (return (* (/ x (expt 2 1)) (expt 2 2)))))(funcdef lshift (x) (block (return (/ (/ x (expt 2 1)) (expt 2 2)))))
| null | https://raw.githubusercontent.com/acl2/acl2/c2d69bad0ed3132cc19a00cb632de8b73558b1f9/books/projects/rac/tests/yaml_test/ac_types/fixed-point-shift.cpp.ref.ast.lsp | lisp |
(funcdef rshift (x) (block (return (* (/ x (expt 2 1)) (expt 2 2)))))(funcdef lshift (x) (block (return (/ (/ x (expt 2 1)) (expt 2 2)))))
|
|
8543c3f2d24782fc8dc9749509d2b4a7609ae3c6ef870ce5e4c19632e3160c17 | russross/cownfs | common.mli | Copyright 2004 , 2005
* See the file COPYING for information about licensing and distribution .
* See the file COPYING for information about licensing and distribution. *)
type priority = Read | Write | Link
type file = FhOnly of Fh.t | FhName of Fh.t * string
val mountFileName : string
type session = int * int list * string
| null | https://raw.githubusercontent.com/russross/cownfs/cc67fae0294203a78b022d7300be8aa6c35c58af/common.mli | ocaml | Copyright 2004 , 2005
* See the file COPYING for information about licensing and distribution .
* See the file COPYING for information about licensing and distribution. *)
type priority = Read | Write | Link
type file = FhOnly of Fh.t | FhName of Fh.t * string
val mountFileName : string
type session = int * int list * string
|
|
935b86667ba1f33bf32d889f13e2f393a65dedd0282a5bd6d0624bc3f6f3948e | lemonidas/Alan-Compiler | QuadTypes.mli | type quad_elem_t =
|Quad_none (* Error Handling *)
|Quad_entry of Symbol.entry (* Symbol Table Entries *)
Dereferenced Symbol Entries
|Quad_int of string (* Constant Integers *)
|Quad_char of string (* Constant Characters *)
|Quad_string of string (* Constant Strings *)
val string_of_quad_elem_t : quad_elem_t -> string
type quad_t =
|Quad_dummy
|Quad_unit of Symbol.entry
|Quad_endu of Symbol.entry
|Quad_calc of string * quad_elem_t * quad_elem_t * quad_elem_t
|Quad_set of quad_elem_t * quad_elem_t
|Quad_array of quad_elem_t * quad_elem_t * Symbol.entry
|Quad_cond of string * quad_elem_t * quad_elem_t * (int ref)
|Quad_jump of (int ref)
|Quad_call of Symbol.entry * (quad_elem_t list)
|Quad_tailCall of Symbol.entry
|Quad_par of quad_elem_t * Symbol.pass_mode
|Quad_ret
type expr_ret_type = {
code : quad_t list;
place : quad_elem_t;
}
type cond_ret_type = {
c_code : quad_t list;
q_true: int ref list;
q_false : int ref list;
}
val return_null : unit -> expr_ret_type
val find_opposite_condition : string -> string
val equal_quad_elems : quad_elem_t * quad_elem_t -> bool
| null | https://raw.githubusercontent.com/lemonidas/Alan-Compiler/bbedcbf91028d45a2e26839790df2a1347e8bc52/QuadTypes.mli | ocaml | Error Handling
Symbol Table Entries
Constant Integers
Constant Characters
Constant Strings | type quad_elem_t =
Dereferenced Symbol Entries
val string_of_quad_elem_t : quad_elem_t -> string
type quad_t =
|Quad_dummy
|Quad_unit of Symbol.entry
|Quad_endu of Symbol.entry
|Quad_calc of string * quad_elem_t * quad_elem_t * quad_elem_t
|Quad_set of quad_elem_t * quad_elem_t
|Quad_array of quad_elem_t * quad_elem_t * Symbol.entry
|Quad_cond of string * quad_elem_t * quad_elem_t * (int ref)
|Quad_jump of (int ref)
|Quad_call of Symbol.entry * (quad_elem_t list)
|Quad_tailCall of Symbol.entry
|Quad_par of quad_elem_t * Symbol.pass_mode
|Quad_ret
type expr_ret_type = {
code : quad_t list;
place : quad_elem_t;
}
type cond_ret_type = {
c_code : quad_t list;
q_true: int ref list;
q_false : int ref list;
}
val return_null : unit -> expr_ret_type
val find_opposite_condition : string -> string
val equal_quad_elems : quad_elem_t * quad_elem_t -> bool
|
e222f24cee5b81810a0be42dffb74a2877374756c9318aeef58b61e4d5eab8c0 | botsunit/bucs | bucinet_tests.erl | -module(bucinet_tests).
-include_lib("eunit/include/eunit.hrl").
bucinet_test_() ->
{setup,
fun setup/0, fun teardown/1,
[
?_test(t_to_ip())
, ?_test(t_ip_to_string())
, ?_test(t_ip_to_binary())
, ?_test(t_active_ip())
, ?_test(t_loopback())
, ?_test(t_active_ips())
, ?_test(t_country())
]}.
setup() ->
ok.
teardown(_) ->
ok.
t_to_ip() ->
?assertMatch({192, 168, 10, 1},
bucinet:to_ip("192.168.10.1")),
?assertMatch({192, 168, 10, 1},
bucinet:to_ip(<<"192.168.10.1">>)),
?assertMatch({192, 168, 10, 1},
bucinet:to_ip(<<"192.168.10.1">>)),
?assertMatch(error,
bucinet:to_ip("223.6723.889.1")),
?assertMatch(error,
bucinet:to_ip("This is not an IP")).
t_ip_to_string() ->
?assertMatch("192.168.10.1",
bucinet:ip_to_string({192, 168, 10, 1})),
?assertMatch(error,
bucinet:ip_to_string({192, 317, 10, 1})),
?assertMatch(error,
bucinet:ip_to_string("This is not an IP")),
?assertMatch(error,
bucinet:ip_to_string({192, 168})).
t_ip_to_binary() ->
?assertMatch(<<"192.168.10.1">>,
bucinet:ip_to_binary({192, 168, 10, 1})),
?assertMatch(error,
bucinet:ip_to_binary({192, 317, 10, 1})),
?assertMatch(error,
bucinet:ip_to_binary("This is not an IP")),
?assertMatch(error,
bucinet:ip_to_binary({192, 168})).
t_active_ip() ->
?assert(bucinet:is_ip(bucinet:active_ip())).
t_loopback() ->
?assert(bucinet:is_ip(bucinet:loopback())).
t_active_ips() ->
?assert(lists:all(fun bucinet:is_ip/1, bucinet:active_ips())).
t_country() ->
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America/Los_Angeles">>},
bucinet:country("208.80.152.201")),
?assertEqual({error, unknow_ip},
bucinet:country("127.0.0.1")),
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America/Los_Angeles">>},
bucinet:country(freegeoip, "208.80.152.201")),
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America/New_York">>},
bucinet:country(ipapi, "208.80.152.201")),
?assertEqual({ok, <<"US">>, undefined, undefined},
bucinet:country(ipinfo, "208.80.152.201")),
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America\\/Los_Angeles">>},
bucinet:country(geoip, "208.80.152.201")).
| null | https://raw.githubusercontent.com/botsunit/bucs/792437befd259042efaf95e301dec019a5dd6ea4/test/bucinet_tests.erl | erlang | -module(bucinet_tests).
-include_lib("eunit/include/eunit.hrl").
bucinet_test_() ->
{setup,
fun setup/0, fun teardown/1,
[
?_test(t_to_ip())
, ?_test(t_ip_to_string())
, ?_test(t_ip_to_binary())
, ?_test(t_active_ip())
, ?_test(t_loopback())
, ?_test(t_active_ips())
, ?_test(t_country())
]}.
setup() ->
ok.
teardown(_) ->
ok.
t_to_ip() ->
?assertMatch({192, 168, 10, 1},
bucinet:to_ip("192.168.10.1")),
?assertMatch({192, 168, 10, 1},
bucinet:to_ip(<<"192.168.10.1">>)),
?assertMatch({192, 168, 10, 1},
bucinet:to_ip(<<"192.168.10.1">>)),
?assertMatch(error,
bucinet:to_ip("223.6723.889.1")),
?assertMatch(error,
bucinet:to_ip("This is not an IP")).
t_ip_to_string() ->
?assertMatch("192.168.10.1",
bucinet:ip_to_string({192, 168, 10, 1})),
?assertMatch(error,
bucinet:ip_to_string({192, 317, 10, 1})),
?assertMatch(error,
bucinet:ip_to_string("This is not an IP")),
?assertMatch(error,
bucinet:ip_to_string({192, 168})).
t_ip_to_binary() ->
?assertMatch(<<"192.168.10.1">>,
bucinet:ip_to_binary({192, 168, 10, 1})),
?assertMatch(error,
bucinet:ip_to_binary({192, 317, 10, 1})),
?assertMatch(error,
bucinet:ip_to_binary("This is not an IP")),
?assertMatch(error,
bucinet:ip_to_binary({192, 168})).
t_active_ip() ->
?assert(bucinet:is_ip(bucinet:active_ip())).
t_loopback() ->
?assert(bucinet:is_ip(bucinet:loopback())).
t_active_ips() ->
?assert(lists:all(fun bucinet:is_ip/1, bucinet:active_ips())).
t_country() ->
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America/Los_Angeles">>},
bucinet:country("208.80.152.201")),
?assertEqual({error, unknow_ip},
bucinet:country("127.0.0.1")),
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America/Los_Angeles">>},
bucinet:country(freegeoip, "208.80.152.201")),
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America/New_York">>},
bucinet:country(ipapi, "208.80.152.201")),
?assertEqual({ok, <<"US">>, undefined, undefined},
bucinet:country(ipinfo, "208.80.152.201")),
?assertEqual({ok, <<"US">>, <<"United States">>, <<"America\\/Los_Angeles">>},
bucinet:country(geoip, "208.80.152.201")).
|
|
43e4853a6eca5db7d87e0394a1de310ab989d60461be139de7f4d389711c95e1 | alang9/dynamic-graphs | gen-program.hs | import qualified Data.Graph.Dynamic.Program as Program
import qualified Data.Text.Lazy.IO as TL
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
import qualified System.IO as IO
import qualified Test.QuickCheck as QC
import Text.Read (readMaybe)
main :: IO ()
main = do
progName <- getProgName
args <- getArgs
case args of
[sizeStr] | Just size <- readMaybe sizeStr -> do
Program.IntGraphProgram sample <- head <$>
QC.sample' (QC.resize size QC.arbitrary)
TL.putStrLn $ Program.encodeProgram Program.encodeInt sample
_ -> do
IO.hPutStrLn IO.stderr $ "Usage: " ++ progName ++ " size"
exitFailure
| null | https://raw.githubusercontent.com/alang9/dynamic-graphs/b88f001850c7bee8faa62099e93172a0bb0df613/benchmarks/hs/gen-program.hs | haskell | import qualified Data.Graph.Dynamic.Program as Program
import qualified Data.Text.Lazy.IO as TL
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
import qualified System.IO as IO
import qualified Test.QuickCheck as QC
import Text.Read (readMaybe)
main :: IO ()
main = do
progName <- getProgName
args <- getArgs
case args of
[sizeStr] | Just size <- readMaybe sizeStr -> do
Program.IntGraphProgram sample <- head <$>
QC.sample' (QC.resize size QC.arbitrary)
TL.putStrLn $ Program.encodeProgram Program.encodeInt sample
_ -> do
IO.hPutStrLn IO.stderr $ "Usage: " ++ progName ++ " size"
exitFailure
|
|
f50ac69946e56d3a666309ea003904fa44eb46b5c63cea525e303f7179f12114 | coccinelle/coccinelle | parse_string_c.mli | (* the result is reversed, as that is what is useful for the caller *)
val parse_string : (string * Ast_c.isWchar) -> Ast_c.info ->
Parser_c.token list
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/57cbff0c5768e22bb2d8c20e8dae74294515c6b3/parsing_c/parse_string_c.mli | ocaml | the result is reversed, as that is what is useful for the caller | val parse_string : (string * Ast_c.isWchar) -> Ast_c.info ->
Parser_c.token list
|
87a119a20947c4145d0232ab610c5f11d0ba0a555102f903342ceb8879d47548 | klutometis/clrs | 15.1-1.scm | (require-extension syntax-case check srfi-11 foof-loop array-lib)
(require '../15.1/section)
(import section-15.1)
(let ((a (list->array 2
'((7 9 3 4 8 4)
(8 5 6 4 5 7))))
(t (list->array 2
'((2 3 1 3 4)
(2 1 2 2 1))))
(e0 2)
(e1 4)
(x0 3)
(x1 2)
(n 6))
(let-values (((f l f* l*)
(fastest-way a t e0 e1 x0 x1 n)))
(check (stations/recursive l l* n) =>
'(#f 0 1 0 0 1 1))))
| null | https://raw.githubusercontent.com/klutometis/clrs/f85a8f0036f0946c9e64dde3259a19acc62b74a1/15.1/15.1-1.scm | scheme | (require-extension syntax-case check srfi-11 foof-loop array-lib)
(require '../15.1/section)
(import section-15.1)
(let ((a (list->array 2
'((7 9 3 4 8 4)
(8 5 6 4 5 7))))
(t (list->array 2
'((2 3 1 3 4)
(2 1 2 2 1))))
(e0 2)
(e1 4)
(x0 3)
(x1 2)
(n 6))
(let-values (((f l f* l*)
(fastest-way a t e0 e1 x0 x1 n)))
(check (stations/recursive l l* n) =>
'(#f 0 1 0 0 1 1))))
|
|
68d69e24b8cf43bcc765fe5ef7eddf54b3c6b4f21e6128269d96b85bb2c71574 | evturn/haskellbook | 10.10-warm-up-and-review.hs | 1 .
-- Given the following set of consonants and vowels:
stops :: String
stops = "pbtdkg"
vowels :: String
vowels = "aeiou"
-- a)
-- Write a function that takes inputs from `stops` and `vowels` and makes
3 - tuples of all possible stop - vowel - stop combinations .
stopVowelStop :: [(Char, Char, Char)]
stopVowelStop = [(s, v, s') | s <- stops, v <- vowels, s' <- stops]
-- b)
-- Modify that function so that it only returns the combinations that
-- begin with a 'p'.
stopVowelStopP :: [(Char, Char, Char)]
stopVowelStopP = [(s, v, s') | s <- stops, v <- vowels, s' <- stops, s == 'p']
-- c)
-- Set up lists of nouns and verbs and modify the function to make tuples
-- representing possible noun-verb-noun sentences.
nouns :: [String]
nouns = [ "car"
, "computer"
, "shoe"
, "refrigerator"
]
verbs :: [String]
verbs = [ "slap"
, "slip"
, "sleep"
, "slam"
]
nounVerbNoun :: [(String, String, String)]
nounVerbNoun = [(n, v, n') | n <- nouns, v <- verbs, n' <- nouns]
2 .
-- What does the following function do and what is its type?
seekritFunc :: String -> Int
seekritFunc x = div (sum (map length (words x))) (length (words x))
-- Answer:
-- Gets the average word length of a sentence.
3 .
-- Rewrite the function above using fractional division.
seekritFunc' :: Fractional a => String -> a
seekritFunc' x =
fromIntegral (sum $ map length $ words x) /
fromIntegral (length $ words x)
| null | https://raw.githubusercontent.com/evturn/haskellbook/3d310d0ddd4221ffc5b9fd7ec6476b2a0731274a/10/10.10-warm-up-and-review.hs | haskell | Given the following set of consonants and vowels:
a)
Write a function that takes inputs from `stops` and `vowels` and makes
b)
Modify that function so that it only returns the combinations that
begin with a 'p'.
c)
Set up lists of nouns and verbs and modify the function to make tuples
representing possible noun-verb-noun sentences.
What does the following function do and what is its type?
Answer:
Gets the average word length of a sentence.
Rewrite the function above using fractional division. | 1 .
stops :: String
stops = "pbtdkg"
vowels :: String
vowels = "aeiou"
3 - tuples of all possible stop - vowel - stop combinations .
stopVowelStop :: [(Char, Char, Char)]
stopVowelStop = [(s, v, s') | s <- stops, v <- vowels, s' <- stops]
stopVowelStopP :: [(Char, Char, Char)]
stopVowelStopP = [(s, v, s') | s <- stops, v <- vowels, s' <- stops, s == 'p']
nouns :: [String]
nouns = [ "car"
, "computer"
, "shoe"
, "refrigerator"
]
verbs :: [String]
verbs = [ "slap"
, "slip"
, "sleep"
, "slam"
]
nounVerbNoun :: [(String, String, String)]
nounVerbNoun = [(n, v, n') | n <- nouns, v <- verbs, n' <- nouns]
2 .
seekritFunc :: String -> Int
seekritFunc x = div (sum (map length (words x))) (length (words x))
3 .
seekritFunc' :: Fractional a => String -> a
seekritFunc' x =
fromIntegral (sum $ map length $ words x) /
fromIntegral (length $ words x)
|
0b7a219a92dcbc6ce084bb6f2d7b6055fb807b88fc208fd219e61b628f9f3a50 | thelema/ocaml-community | thread.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
and , projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1995 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file ../../LICENSE. *)
(* *)
(***********************************************************************)
* Lightweight threads for [ 1003.1c ] and .
type t
(** The type of thread handles. *)
* { 6 Thread creation and termination }
val create : ('a -> 'b) -> 'a -> t
(** [Thread.create funct arg] creates a new thread of control,
in which the function application [funct arg]
is executed concurrently with the other threads of the program.
The application of [Thread.create]
returns the handle of the newly created thread.
The new thread terminates when the application [funct arg]
returns, either normally or by raising an uncaught exception.
In the latter case, the exception is printed on standard error,
but not propagated back to the parent thread. Similarly, the
result of the application [funct arg] is discarded and not
directly accessible to the parent thread. *)
val self : unit -> t
(** Return the thread currently executing. *)
val id : t -> int
(** Return the identifier of the given thread. A thread identifier
is an integer that identifies uniquely the thread.
It can be used to build data structures indexed by threads. *)
val exit : unit -> unit
(** Terminate prematurely the currently executing thread. *)
val kill : t -> unit
(** Terminate prematurely the thread whose handle is given. *)
* { 6 Suspending threads }
val delay: float -> unit
(** [delay d] suspends the execution of the calling thread for
[d] seconds. The other program threads continue to run during
this time. *)
val join : t -> unit
(** [join th] suspends the execution of the calling thread
until the thread [th] has terminated. *)
val wait_read : Unix.file_descr -> unit
(** See {!Thread.wait_write}.*)
val wait_write : Unix.file_descr -> unit
(** This function does nothing in this implementation. *)
val wait_timed_read : Unix.file_descr -> float -> bool
(** See {!Thread.wait_timed_read}.*)
val wait_timed_write : Unix.file_descr -> float -> bool
* Suspend the execution of the calling thread until at least
one character is available for reading ( [ wait_read ] ) or
one character can be written without blocking ( [ wait_write ] )
on the given Unix file descriptor . Wait for at most
the amount of time given as second argument ( in seconds ) .
Return [ true ] if the file descriptor is ready for input / output
and [ false ] if the timeout expired .
These functions return immediately [ true ] in the Win32
implementation .
one character is available for reading ([wait_read]) or
one character can be written without blocking ([wait_write])
on the given Unix file descriptor. Wait for at most
the amount of time given as second argument (in seconds).
Return [true] if the file descriptor is ready for input/output
and [false] if the timeout expired.
These functions return immediately [true] in the Win32
implementation. *)
val select :
Unix.file_descr list -> Unix.file_descr list ->
Unix.file_descr list -> float ->
Unix.file_descr list * Unix.file_descr list * Unix.file_descr list
* Suspend the execution of the calling thead until input / output
becomes possible on the given Unix file descriptors .
The arguments and results have the same meaning as for
[ Unix.select ] .
This function is not implemented yet under Win32 .
becomes possible on the given Unix file descriptors.
The arguments and results have the same meaning as for
[Unix.select].
This function is not implemented yet under Win32. *)
val wait_pid : int -> int * Unix.process_status
(** [wait_pid p] suspends the execution of the calling thread
until the process specified by the process identifier [p]
terminates. Returns the pid of the child caught and
its termination status, as per [Unix.wait].
This function is not implemented under MacOS. *)
val yield : unit -> unit
(** Re-schedule the calling thread without suspending it.
This function can be used to give scheduling hints,
telling the scheduler that now is a good time to
switch to other threads. *)
* { 6 Management of signals }
* Signal handling follows the POSIX thread model : signals generated
by a thread are delivered to that thread ; signals generated externally
are delivered to one of the threads that does not block it .
Each thread possesses a set of blocked signals , which can be modified
using { ! Thread.sigmask } . This set is inherited at thread creation time .
Per - thread signal masks are supported only by the system thread library
under Unix , but not under Win32 , nor by the VM thread library .
by a thread are delivered to that thread; signals generated externally
are delivered to one of the threads that does not block it.
Each thread possesses a set of blocked signals, which can be modified
using {!Thread.sigmask}. This set is inherited at thread creation time.
Per-thread signal masks are supported only by the system thread library
under Unix, but not under Win32, nor by the VM thread library. *)
val sigmask : Unix.sigprocmask_command -> int list -> int list
* [ sigmask cmd sigs ] changes the set of blocked signals for the
calling thread .
If [ cmd ] is [ SIG_SETMASK ] , blocked signals are set to those in
the list [ ] .
If [ cmd ] is [ SIG_BLOCK ] , the signals in [ sigs ] are added to
the set of blocked signals .
If [ cmd ] is [ SIG_UNBLOCK ] , the signals in [ sigs ] are removed
from the set of blocked signals .
[ sigmask ] returns the set of previously blocked signals for the thread .
calling thread.
If [cmd] is [SIG_SETMASK], blocked signals are set to those in
the list [sigs].
If [cmd] is [SIG_BLOCK], the signals in [sigs] are added to
the set of blocked signals.
If [cmd] is [SIG_UNBLOCK], the signals in [sigs] are removed
from the set of blocked signals.
[sigmask] returns the set of previously blocked signals for the thread. *)
val wait_signal : int list -> int
* [ wait_signal sigs ] suspends the execution of the calling thread
until the process receives one of the signals specified in the
list [ ] . It then returns the number of the signal received .
Signal handlers attached to the signals in [ sigs ] will not
be invoked . The signals [ sigs ] are expected to be blocked before
calling [ wait_signal ] .
until the process receives one of the signals specified in the
list [sigs]. It then returns the number of the signal received.
Signal handlers attached to the signals in [sigs] will not
be invoked. The signals [sigs] are expected to be blocked before
calling [wait_signal]. *)
| null | https://raw.githubusercontent.com/thelema/ocaml-community/ed0a2424bbf13d1b33292725e089f0d7ba94b540/otherlibs/systhreads/thread.mli | ocaml | *********************************************************************
OCaml
the special exception on linking described in file ../../LICENSE.
*********************************************************************
* The type of thread handles.
* [Thread.create funct arg] creates a new thread of control,
in which the function application [funct arg]
is executed concurrently with the other threads of the program.
The application of [Thread.create]
returns the handle of the newly created thread.
The new thread terminates when the application [funct arg]
returns, either normally or by raising an uncaught exception.
In the latter case, the exception is printed on standard error,
but not propagated back to the parent thread. Similarly, the
result of the application [funct arg] is discarded and not
directly accessible to the parent thread.
* Return the thread currently executing.
* Return the identifier of the given thread. A thread identifier
is an integer that identifies uniquely the thread.
It can be used to build data structures indexed by threads.
* Terminate prematurely the currently executing thread.
* Terminate prematurely the thread whose handle is given.
* [delay d] suspends the execution of the calling thread for
[d] seconds. The other program threads continue to run during
this time.
* [join th] suspends the execution of the calling thread
until the thread [th] has terminated.
* See {!Thread.wait_write}.
* This function does nothing in this implementation.
* See {!Thread.wait_timed_read}.
* [wait_pid p] suspends the execution of the calling thread
until the process specified by the process identifier [p]
terminates. Returns the pid of the child caught and
its termination status, as per [Unix.wait].
This function is not implemented under MacOS.
* Re-schedule the calling thread without suspending it.
This function can be used to give scheduling hints,
telling the scheduler that now is a good time to
switch to other threads. | and , projet Cristal , INRIA Rocquencourt
Copyright 1995 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
* Lightweight threads for [ 1003.1c ] and .
type t
* { 6 Thread creation and termination }
val create : ('a -> 'b) -> 'a -> t
val self : unit -> t
val id : t -> int
val exit : unit -> unit
val kill : t -> unit
* { 6 Suspending threads }
val delay: float -> unit
val join : t -> unit
val wait_read : Unix.file_descr -> unit
val wait_write : Unix.file_descr -> unit
val wait_timed_read : Unix.file_descr -> float -> bool
val wait_timed_write : Unix.file_descr -> float -> bool
* Suspend the execution of the calling thread until at least
one character is available for reading ( [ wait_read ] ) or
one character can be written without blocking ( [ wait_write ] )
on the given Unix file descriptor . Wait for at most
the amount of time given as second argument ( in seconds ) .
Return [ true ] if the file descriptor is ready for input / output
and [ false ] if the timeout expired .
These functions return immediately [ true ] in the Win32
implementation .
one character is available for reading ([wait_read]) or
one character can be written without blocking ([wait_write])
on the given Unix file descriptor. Wait for at most
the amount of time given as second argument (in seconds).
Return [true] if the file descriptor is ready for input/output
and [false] if the timeout expired.
These functions return immediately [true] in the Win32
implementation. *)
val select :
Unix.file_descr list -> Unix.file_descr list ->
Unix.file_descr list -> float ->
Unix.file_descr list * Unix.file_descr list * Unix.file_descr list
(** Suspend the execution of the calling thread until input/output
    becomes possible on the given Unix file descriptors.
    The arguments and results have the same meaning as for
    [Unix.select].
    This function is not implemented yet under Win32. *)
val wait_pid : int -> int * Unix.process_status
val yield : unit -> unit
(** {6 Management of signals} *)
(** Signal handling follows the POSIX thread model: signals generated
    by a thread are delivered to that thread; signals generated externally
    are delivered to one of the threads that does not block it.
    Each thread possesses a set of blocked signals, which can be modified
    using {!Thread.sigmask}. This set is inherited at thread creation time.
    Per-thread signal masks are supported only by the system thread library
    under Unix, but not under Win32, nor by the VM thread library. *)
val sigmask : Unix.sigprocmask_command -> int list -> int list
(** [sigmask cmd sigs] changes the set of blocked signals for the
    calling thread.
    If [cmd] is [SIG_SETMASK], blocked signals are set to those in
    the list [sigs].
    If [cmd] is [SIG_BLOCK], the signals in [sigs] are added to
    the set of blocked signals.
    If [cmd] is [SIG_UNBLOCK], the signals in [sigs] are removed
    from the set of blocked signals.
    [sigmask] returns the set of previously blocked signals for the thread. *)
val wait_signal : int list -> int
(** [wait_signal sigs] suspends the execution of the calling thread
    until the process receives one of the signals specified in the
    list [sigs]. It then returns the number of the signal received.
    Signal handlers attached to the signals in [sigs] will not
    be invoked. The signals [sigs] are expected to be blocked before
    calling [wait_signal]. *)
|
ea35dbcb3e07700649adc3c7f81381da5c0947b2e0b4a99b0395af830e64bb0e | lambe-lang/mitch | t02_sum.ml | open Mitch.Lang.Term
open Mitch.Ir.Objcode
open Mitch.Ir.Render
open Mitch.System
open Preface.Result.Monad (struct
type t = string
end)
let compile s =
return s
>>= Transpiler.run
<&> Expander.run
>>= Optimiser.run
<&> Simplifier.run
<&> Normaliser.run
let compile_01 () =
let result = compile (Inl (Int 1))
and expected = [ PUSH (INT 1); LEFT ] in
Alcotest.(check (result string string))
"compile Inl 1"
(return expected <&> to_string)
(result <&> to_string)
let compile_02 () =
let result = compile (Inr (Int 1))
and expected = [ PUSH (INT 1); RIGHT ] in
Alcotest.(check (result string string))
"compile Inr 1"
(return expected <&> to_string)
(result <&> to_string)
let compile_03 () =
let result =
compile (Case (Inl (Int 1), Abs ("x", Var "x"), Abs ("x", Var "x")))
and expected = [ PUSH (INT 1) ] in
Alcotest.(check (result string string))
"compile case (inl 1) (fun x -> x) (fun x -> x)"
(return expected <&> to_string)
(result <&> to_string)
let compile_04 () =
let result =
compile (Case (Inr (Int 1), Abs ("x", Var "x"), Abs ("x", Var "x")))
and expected = [ PUSH (INT 1) ] in
Alcotest.(check (result string string))
"compile case (inr 1) (fun x -> x) (fun x -> x)"
(return expected <&> to_string)
(result <&> to_string)
let compile_05 () =
let result = compile (Case (Inl (Int 1), Abs ("x", Int 2), Abs ("x", Var "x")))
and expected = [ PUSH (INT 2) ] in
Alcotest.(check (result string string))
"compile case (inl 1) (fun x -> 2) (fun x -> x)"
(return expected <&> to_string)
(result <&> to_string)
let compile_06 () =
let result = compile (Case (Inr (Int 1), Abs ("x", Var "x"), Abs ("x", Int 2)))
and expected = [ PUSH (INT 2) ] in
Alcotest.(check (result string string))
"compile case (inr 1) (fun x -> x) (fun x -> 2)"
(return expected <&> to_string)
(result <&> to_string)
let compile_07 () =
let result =
compile
(Case
( Inl (Inr (Int 1))
, Abs ("x", Case (Var "x", Abs ("y", Var "y"), Abs ("y", Int 2)))
, Abs ("x", Int 3) ) )
and expected = [ PUSH (INT 2) ] in
Alcotest.(check (result string string))
"compile case (inl inr 1) (fun x -> case x (fun y -> y) (fun y -> 2)) (fun \
x -> 3)"
(return expected <&> to_string)
(result <&> to_string)
let compile_08 () =
let result = compile (Case (Inl (Int 1), Abs ("x", Unit), Abs ("x", Var "x")))
and expected = [ PUSH UNIT ] in
Alcotest.(check (result string string))
"compile case (inl 1) (fun x -> unit) (fun x -> x)"
(return expected <&> to_string)
(result <&> to_string)
let compile_09 () =
let result =
compile (Abs ("y", Case (Var "y", Abs ("x", Unit), Abs ("x", Var "y"))))
and expected =
[
LAMBDA
( "y"
, [
DUP (0, "y")
; IF_LEFT
([ DROP (0, "x"); DROP (0, "y"); PUSH UNIT ], [ DROP (0, "x") ])
] )
]
in
Alcotest.(check (result string string))
"compile fun y -> case y (fun x -> unit) (fun x -> y)"
(return expected <&> to_string)
(result <&> to_string)
let compile_10 () =
let result =
compile (Abs ("y", Case (Var "y", Abs ("x", Unit), Abs ("x", Var "x"))))
and expected =
[
LAMBDA
( "y"
, [
DUP (0, "y")
; IF_LEFT
([ DROP (0, "x"); DROP (0, "y"); PUSH UNIT ], [ DROP (1, "y") ])
] )
]
in
Alcotest.(check (result string string))
"compile fun y -> case y (fun x -> unit) (fun x -> x)"
(return expected <&> to_string)
(result <&> to_string)
let compile_11 () =
let result =
compile
(Abs ("x", Case (Inl (Var "x"), Abs ("x", Var "x"), Abs ("x", Int 3))))
and expected = [ LAMBDA ("x", []) ] in
Alcotest.(check (result string string))
"compile (fun x -> case (inl x) (fun x -> x) (fun _ -> 2))"
(return expected <&> to_string)
(result <&> to_string)
let cases =
let open Alcotest in
( "Sum Compilation"
, [
test_case "compile O1" `Quick compile_01
; test_case "compile O2" `Quick compile_02
; test_case "compile O3" `Quick compile_03
; test_case "compile O4" `Quick compile_04
; test_case "compile O5" `Quick compile_05
; test_case "compile O6" `Quick compile_06
; test_case "compile O7" `Quick compile_07
; test_case "compile O8" `Quick compile_08
; test_case "compile O9" `Quick compile_09
; test_case "compile 10" `Quick compile_10
; test_case "compile 11" `Quick compile_11
] )
| null | https://raw.githubusercontent.com/lambe-lang/mitch/2c47f6627c3a219c31afd078836e9e7be3e26719/test/mitch/t02_sum.ml | ocaml |
|
|
5a8d1945255df8242dca14a6b13af5cbc6058e9250123e4943a05ec8ce3652e6 | fukamachi/caveman | skeleton.lisp | (in-package :cl-user)
(defpackage :caveman2.skeleton
(:use :cl)
(:export :make-project))
(in-package :caveman2.skeleton)
(defvar *skeleton-directory*
(asdf:system-relative-pathname :caveman2 #p"v2/skeleton/"))
(defun make-project (path &rest params &key name description author email license &allow-other-keys)
(declare (ignore name description author email license))
(let ((cl-project:*skeleton-directory* *skeleton-directory*))
(apply #'cl-project:make-project path params)))
| null | https://raw.githubusercontent.com/fukamachi/caveman/faa5f7e3b364fd7e7096af9a7bb06728b8d80441/v2/src/skeleton.lisp | lisp |
|
|
b0f2cf601e639bec678696483fc559397733af4b396585445a05550b98d4a5ee | simingwang/emqx-plugin-kafkav5 | ssl.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1999-2022. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%% Purpose: Main API module for the SSL application that implements TLS and DTLS.
%%% SSL is a legacy name.
-module(ssl).
-include_lib("public_key/include/public_key.hrl").
-include("ssl_internal.hrl").
-include("ssl_api.hrl").
-include("ssl_record.hrl").
-include("ssl_cipher.hrl").
-include("ssl_handshake.hrl").
-include("ssl_srp.hrl").
%% Needed to make documentation rendering happy
-ifndef(VSN).
-define(VSN,"unknown").
-endif.
%% Application handling
-export([start/0,
start/1,
stop/0,
clear_pem_cache/0]).
%% Socket handling
-export([connect/3,
connect/2,
connect/4,
listen/2,
transport_accept/1,
transport_accept/2,
handshake/1,
handshake/2,
handshake/3,
handshake_continue/2,
handshake_continue/3,
handshake_cancel/1,
controlling_process/2,
peername/1,
peercert/1,
sockname/1,
close/1,
close/2,
shutdown/2,
recv/2,
recv/3,
send/2,
getopts/2,
setopts/2,
getstat/1,
getstat/2
]).
%% SSL/TLS protocol handling
-export([cipher_suites/2,
cipher_suites/3,
filter_cipher_suites/2,
prepend_cipher_suites/2,
append_cipher_suites/2,
eccs/0,
eccs/1,
versions/0,
groups/0,
groups/1,
format_error/1,
renegotiate/1,
update_keys/2,
prf/5,
negotiated_protocol/1,
connection_information/1,
connection_information/2]).
%% Misc
-export([handle_options/2,
handle_options/3,
tls_version/1,
suite_to_str/1,
suite_to_openssl_str/1,
str_to_suite/1]).
-removed({ssl_accept, '_',
"use ssl_handshake/1,2,3 instead"}).
-removed({cipher_suites, 0,
"use cipher_suites/2,3 instead"}).
-removed({cipher_suites, 1,
"use cipher_suites/2,3 instead"}).
-removed([{negotiated_next_protocol,1,
"use ssl:negotiated_protocol/1 instead"}]).
-removed([{connection_info,1,
"use ssl:connection_information/[1,2] instead"}]).
-export_type([socket/0,
sslsocket/0,
socket_option/0,
active_msgs/0,
host/0,
tls_option/0,
tls_client_option/0,
tls_server_option/0,
erl_cipher_suite/0,
old_cipher_suite/0,
ciphers/0,
cipher/0,
hash/0,
key/0,
kex_algo/0,
prf_random/0,
cipher_filters/0,
sign_algo/0,
protocol_version/0,
protocol_extensions/0,
session_id/0,
error_alert/0,
tls_alert/0,
srp_param_type/0,
named_curve/0,
sign_scheme/0,
group/0]).
%% -------------------------------------------------------------------------------------------------------
-type socket() :: gen_tcp:socket(). % exported
-type socket_option() :: gen_tcp:connect_option() | gen_tcp:listen_option() | gen_udp:option(). % exported
-type sslsocket() :: any(). % exported
-type tls_option() :: tls_client_option() | tls_server_option(). % exported
-type tls_client_option() :: client_option() | common_option() | socket_option() | transport_option(). % exported
-type tls_server_option() :: server_option() | common_option() | socket_option() | transport_option(). % exported
-type active_msgs() :: {ssl, sslsocket(), Data::binary() | list()} | {ssl_closed, sslsocket()} |
{ssl_error, sslsocket(), Reason::any()} | {ssl_passive, sslsocket()}. % exported
-type transport_option() :: {cb_info, {CallbackModule::atom(), DataTag::atom(),
ClosedTag::atom(), ErrTag::atom()}} |
{cb_info, {CallbackModule::atom(), DataTag::atom(),
ClosedTag::atom(), ErrTag::atom(), PassiveTag::atom()}}.
-type host() :: hostname() | ip_address(). % exported
-type hostname() :: string().
-type ip_address() :: inet:ip_address().
-type session_id() :: binary(). % exported
-type protocol_version() :: tls_version() | dtls_version(). % exported
-type tls_version() :: 'tlsv1.2' | 'tlsv1.3' | tls_legacy_version().
-type dtls_version() :: 'dtlsv1.2' | dtls_legacy_version().
-type tls_legacy_version() :: tlsv1 | 'tlsv1.1' .
-type dtls_legacy_version() :: 'dtlsv1'.
-type verify_type() :: verify_none | verify_peer.
-type cipher() :: aes_128_cbc |
aes_256_cbc |
aes_128_gcm |
aes_256_gcm |
aes_128_ccm |
aes_256_ccm |
aes_128_ccm_8 |
aes_256_ccm_8 |
chacha20_poly1305 |
legacy_cipher(). % exported
-type legacy_cipher() :: rc4_128 |
des_cbc |
'3des_ede_cbc'.
-type hash() :: sha |
sha2() |
legacy_hash(). % exported
-type sha2() :: sha224 |
sha256 |
sha384 |
sha512.
-type legacy_hash() :: md5.
-type sign_algo() :: rsa | dsa | ecdsa | eddsa. % exported
-type sign_schemes() :: [sign_scheme()].
-type sign_scheme() :: eddsa_ed25519
| eddsa_ed448
| ecdsa_secp256r1_sha256
| ecdsa_secp384r1_sha384
| ecdsa_secp521r1_sha512
| rsassa_pss_scheme()
| sign_scheme_legacy(). % exported
-type rsassa_pss_scheme() :: rsa_pss_rsae_sha256
| rsa_pss_rsae_sha384
| rsa_pss_rsae_sha512
| rsa_pss_pss_sha256
| rsa_pss_pss_sha384
| rsa_pss_pss_sha512.
-type sign_scheme_legacy() :: rsa_pkcs1_sha256
| rsa_pkcs1_sha384
| rsa_pkcs1_sha512
| rsa_pkcs1_sha1
| ecdsa_sha1.
-type kex_algo() :: rsa |
dhe_rsa | dhe_dss |
ecdhe_ecdsa | ecdh_ecdsa | ecdh_rsa |
srp_rsa| srp_dss |
psk | dhe_psk | rsa_psk |
dh_anon | ecdh_anon | srp_anon |
any. %% TLS 1.3, exported
-type erl_cipher_suite() :: #{key_exchange := kex_algo(),
cipher := cipher(),
mac := hash() | aead,
prf := hash() | default_prf %% Old cipher suites, version dependent
}.
-type old_cipher_suite() :: {kex_algo(), cipher(), hash()} % Pre TLS 1.2
%% TLS 1.2, internally PRE TLS 1.2 will use default_prf
| {kex_algo(), cipher(), hash() | aead, hash()}.
-type named_curve() :: sect571r1 |
sect571k1 |
secp521r1 |
brainpoolP512r1 |
sect409k1 |
sect409r1 |
brainpoolP384r1 |
secp384r1 |
sect283k1 |
sect283r1 |
brainpoolP256r1 |
secp256k1 |
secp256r1 |
sect239k1 |
sect233k1 |
sect233r1 |
secp224k1 |
secp224r1 |
sect193r1 |
sect193r2 |
secp192k1 |
secp192r1 |
sect163k1 |
sect163r1 |
sect163r2 |
secp160k1 |
secp160r1 |
secp160r2. % exported
-type group() :: secp256r1 | secp384r1 | secp521r1 | ffdhe2048 |
ffdhe3072 | ffdhe4096 | ffdhe6144 | ffdhe8192. % exported
-type srp_param_type() :: srp_1024 |
srp_1536 |
srp_2048 |
srp_3072 |
srp_4096 |
srp_6144 |
srp_8192. % exported
-type error_alert() :: {tls_alert, {tls_alert(), Description::string()}}. % exported
-type tls_alert() :: close_notify |
unexpected_message |
bad_record_mac |
record_overflow |
handshake_failure |
bad_certificate |
unsupported_certificate |
certificate_revoked |
certificate_expired |
certificate_unknown |
illegal_parameter |
unknown_ca |
access_denied |
decode_error |
decrypt_error |
export_restriction|
protocol_version |
insufficient_security |
internal_error |
inappropriate_fallback |
user_canceled |
no_renegotiation |
unsupported_extension |
certificate_unobtainable |
unrecognized_name |
bad_certificate_status_response |
bad_certificate_hash_value |
unknown_psk_identity |
no_application_protocol. % exported
%% -------------------------------------------------------------------------------------------------------
-type common_option() :: {protocol, protocol()} |
{handshake, handshake_completion()} |
{cert, cert() | [cert()]} |
{certfile, cert_pem()} |
{key, key()} |
{keyfile, key_pem()} |
{password, key_password()} |
{ciphers, cipher_suites()} |
{eccs, [named_curve()]} |
{signature_algs, signature_algs()} |
{signature_algs_cert, sign_schemes()} |
{supported_groups, supported_groups()} |
{secure_renegotiate, secure_renegotiation()} |
{keep_secrets, keep_secrets()} |
{depth, allowed_cert_chain_length()} |
{verify_fun, custom_verify()} |
{crl_check, crl_check()} |
{crl_cache, crl_cache_opts()} |
{max_handshake_size, handshake_size()} |
{partial_chain, root_fun()} |
{versions, protocol_versions()} |
{user_lookup_fun, custom_user_lookup()} |
{log_level, logging_level()} |
{log_alert, log_alert()} |
{hibernate_after, hibernate_after()} |
{padding_check, padding_check()} |
{beast_mitigation, beast_mitigation()} |
{ssl_imp, ssl_imp()} |
{session_tickets, session_tickets()} |
{key_update_at, key_update_at()} |
{middlebox_comp_mode, middlebox_comp_mode()}.
-type protocol() :: tls | dtls.
-type handshake_completion() :: hello | full.
-type cert() :: public_key:der_encoded().
-type cert_pem() :: file:filename().
-type key() :: {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo',
public_key:der_encoded()} |
#{algorithm := rsa | dss | ecdsa,
engine := crypto:engine_ref(),
key_id := crypto:key_id(),
password => crypto:password()}. % exported
-type key_pem() :: file:filename().
-type key_password() :: string() | fun(() -> string()).
-type cipher_suites() :: ciphers().
-type ciphers() :: [erl_cipher_suite()] |
string(). % (according to old API) exported
-type cipher_filters() :: list({key_exchange | cipher | mac | prf,
algo_filter()}). % exported
-type algo_filter() :: fun((kex_algo()|cipher()|hash()|aead|default_prf) -> true | false).
-type keep_secrets() :: boolean().
-type secure_renegotiation() :: boolean().
-type allowed_cert_chain_length() :: integer().
-type custom_verify() :: {Verifyfun :: fun(), InitialUserState :: any()}.
-type crl_check() :: boolean() | peer | best_effort.
-type crl_cache_opts() :: {Module :: atom(),
{DbHandle :: internal | term(),
Args :: list()}}.
-type handshake_size() :: integer().
-type hibernate_after() :: timeout().
-type root_fun() :: fun().
-type protocol_versions() :: [protocol_version()].
-type signature_algs() :: [{hash(), sign_algo()} | sign_scheme()].
-type supported_groups() :: [group()].
-type custom_user_lookup() :: {Lookupfun :: fun(), UserState :: any()}.
-type padding_check() :: boolean().
-type beast_mitigation() :: one_n_minus_one | zero_n | disabled.
-type srp_identity() :: {Username :: string(), Password :: string()}.
-type psk_identity() :: string().
-type log_alert() :: boolean().
-type logging_level() :: logger:level() | none | all.
-type client_session_tickets() :: disabled | manual | auto.
-type server_session_tickets() :: disabled | stateful | stateless.
-type session_tickets() :: client_session_tickets() | server_session_tickets().
-type key_update_at() :: pos_integer().
-type bloom_filter_window_size() :: integer().
-type bloom_filter_hash_functions() :: integer().
-type bloom_filter_bits() :: integer().
-type anti_replay() :: '10k' | '100k' |
{bloom_filter_window_size(), %% number of seconds in time window
bloom_filter_hash_functions(), %% k - number of hash functions
bloom_filter_bits()}. %% m - number of bits in bit vector
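%% Illustrative note (not part of the original module): besides the '10k'
%% and '100k' presets, a custom tuple such as {10, 5, 72985} could be used,
%% i.e. a 10 second window, 5 hash functions and 72985 bits in the filter.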
-type use_ticket() :: [binary()].
-type middlebox_comp_mode() :: boolean().
-type client_early_data() :: binary().
-type server_early_data() :: disabled | enabled.
%% -------------------------------------------------------------------------------------------------------
-type client_option() :: {verify, client_verify_type()} |
{reuse_session, client_reuse_session()} |
{reuse_sessions, client_reuse_sessions()} |
{cacerts, client_cacerts()} |
{cacertfile, client_cafile()} |
{alpn_advertised_protocols, client_alpn()} |
{client_preferred_next_protocols, client_preferred_next_protocols()} |
{psk_identity, client_psk_identity()} |
{srp_identity, client_srp_identity()} |
{server_name_indication, sni()} |
{max_fragment_length, max_fragment_length()} |
{customize_hostname_check, customize_hostname_check()} |
{fallback, fallback()} |
{certificate_authorities, certificate_authorities()} |
{session_tickets, client_session_tickets()} |
{use_ticket, use_ticket()} |
{early_data, client_early_data()}.
%% {ocsp_stapling, ocsp_stapling()} |
%% {ocsp_responder_certs, ocsp_responder_certs()} |
%% {ocsp_nonce, ocsp_nonce()}.
-type client_verify_type() :: verify_type().
-type client_reuse_session() :: session_id() | {session_id(), SessionData::binary()}.
-type client_reuse_sessions() :: boolean() | save.
-type certificate_authorities() :: boolean().
-type client_cacerts() :: [public_key:der_encoded()].
-type client_cafile() :: file:filename().
-type app_level_protocol() :: binary().
-type client_alpn() :: [app_level_protocol()].
-type client_preferred_next_protocols() :: {Precedence :: server | client,
ClientPrefs :: [app_level_protocol()]} |
{Precedence :: server | client,
ClientPrefs :: [app_level_protocol()],
Default::app_level_protocol()}.
-type client_psk_identity() :: psk_identity().
-type client_srp_identity() :: srp_identity().
-type customize_hostname_check() :: list().
-type sni() :: HostName :: hostname() | disable.
-type max_fragment_length() :: undefined | 512 | 1024 | 2048 | 4096.
-type fallback() :: boolean().
-type ssl_imp() :: new | old.
%% -type ocsp_stapling() :: boolean().
%% -type ocsp_responder_certs() :: [public_key:der_encoded()].
%% -type ocsp_nonce() :: boolean().
%% -------------------------------------------------------------------------------------------------------
-type server_option() :: {cacerts, server_cacerts()} |
{cacertfile, server_cafile()} |
{dh, dh_der()} |
{dhfile, dh_file()} |
{verify, server_verify_type()} |
{fail_if_no_peer_cert, fail_if_no_peer_cert()} |
{reuse_sessions, server_reuse_sessions()} |
{reuse_session, server_reuse_session()} |
{alpn_preferred_protocols, server_alpn()} |
{next_protocols_advertised, server_next_protocol()} |
{psk_identity, server_psk_identity()} |
{sni_hosts, sni_hosts()} |
{sni_fun, sni_fun()} |
{honor_cipher_order, honor_cipher_order()} |
{honor_ecc_order, honor_ecc_order()} |
{client_renegotiation, client_renegotiation()}|
{session_tickets, server_session_tickets()} |
{anti_replay, anti_replay()} |
{cookie, cookie()} |
{early_data, server_early_data()}.
-type server_cacerts() :: [public_key:der_encoded()].
-type server_cafile() :: file:filename().
-type server_alpn() :: [app_level_protocol()].
-type server_next_protocol() :: [app_level_protocol()].
-type server_psk_identity() :: psk_identity().
-type dh_der() :: binary().
-type dh_file() :: file:filename().
-type server_verify_type() :: verify_type().
-type fail_if_no_peer_cert() :: boolean().
-type server_reuse_session() :: fun().
-type server_reuse_sessions() :: boolean().
-type sni_hosts() :: [{hostname(), [server_option() | common_option()]}].
-type sni_fun() :: fun().
-type honor_cipher_order() :: boolean().
-type honor_ecc_order() :: boolean().
-type client_renegotiation() :: boolean().
-type cookie() :: boolean().
%% -------------------------------------------------------------------------------------------------------
-type prf_random() :: client_random | server_random. % exported
-type protocol_extensions() :: #{renegotiation_info => binary(),
signature_algs => signature_algs(),
alpn => app_level_protocol(),
srp => binary(),
next_protocol => app_level_protocol(),
max_frag_enum => 1..4,
ec_point_formats => [0..2],
elliptic_curves => [public_key:oid()],
sni => hostname()}. % exported
%% -------------------------------------------------------------------------------------------------------
-type connection_info() :: [common_info() | curve_info() | ssl_options_info() | security_info()].
-type common_info() :: {protocol, protocol_version()} |
{session_id, session_id()} |
{session_resumption, boolean()} |
{selected_cipher_suite, erl_cipher_suite()} |
{sni_hostname, term()} |
{srp_username, term()}.
-type curve_info() :: {ecc, {named_curve, term()}}.
-type ssl_options_info() :: tls_option().
-type security_info() :: {client_random, binary()} |
{server_random, binary()} |
{master_secret, binary()}.
-type connection_info_items() :: [connection_info_item()].
-type connection_info_item() :: protocol |
session_id |
session_resumption |
selected_cipher_suite |
sni_hostname |
srp_username |
ecc |
client_random |
server_random |
master_secret |
keylog |
tls_options_name().
-type tls_options_name() :: atom().
%% -------------------------------------------------------------------------------------------------------
%%%--------------------------------------------------------------------
%%% API
%%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%%
%% Description: Utility function that starts the ssl application and the
%% applications that it depends on.
%% see application(3)
%%--------------------------------------------------------------------
-spec start() -> ok | {error, reason()}.
start() ->
start(temporary).
-spec start(permanent | transient | temporary) -> ok | {error, reason()}.
start(Type) ->
case application:ensure_all_started(ssl, Type) of
{ok, _} ->
ok;
Other ->
Other
end.
%%--------------------------------------------------------------------
-spec stop() -> ok.
%%
%% Description: Stops the ssl application.
%%--------------------------------------------------------------------
stop() ->
application:stop(ssl).
%%--------------------------------------------------------------------
%%
%% Description: Connect to an ssl server.
%%--------------------------------------------------------------------
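%% Illustrative client sketch (not part of the original module; host, port
%% and option values are examples only):
%%   ok = ssl:start(),
%%   {ok, Sock} = ssl:connect("server.example", 443,
%%                            [{verify, verify_peer},
%%                             {cacertfile, "cacerts.pem"}], 5000),
%%   ok = ssl:send(Sock, <<"hello">>),
%%   ok = ssl:close(Sock).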
-spec connect(TCPSocket, TLSOptions) ->
{ok, sslsocket()} |
{error, reason()} |
{option_not_a_key_value_tuple, any()} when
TCPSocket :: socket(),
TLSOptions :: [tls_client_option()].
connect(Socket, SslOptions) ->
connect(Socket, SslOptions, infinity).
-spec connect(TCPSocket, TLSOptions, Timeout) ->
{ok, sslsocket()} | {error, reason()} when
TCPSocket :: socket(),
TLSOptions :: [tls_client_option()],
Timeout :: timeout();
(Host, Port, TLSOptions) ->
{ok, sslsocket()} |
{ok, sslsocket(),Ext :: protocol_extensions()} |
{error, reason()} |
{option_not_a_key_value_tuple, any()} when
Host :: host(),
Port :: inet:port_number(),
TLSOptions :: [tls_client_option()].
connect(Socket, SslOptions0, Timeout) when is_list(SslOptions0) andalso
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
CbInfo = handle_option_cb_info(SslOptions0, tls),
Transport = element(1, CbInfo),
try handle_options(Transport, Socket, SslOptions0, client, undefined) of
{ok, Config} ->
tls_socket:upgrade(Socket, Config, Timeout)
catch
_:{error, Reason} ->
{error, Reason}
end;
connect(Host, Port, Options) ->
connect(Host, Port, Options, infinity).
-spec connect(Host, Port, TLSOptions, Timeout) ->
{ok, sslsocket()} |
{ok, sslsocket(),Ext :: protocol_extensions()} |
{error, reason()} |
{option_not_a_key_value_tuple, any()} when
Host :: host(),
Port :: inet:port_number(),
TLSOptions :: [tls_client_option()],
Timeout :: timeout().
connect(Host, Port, Options, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
try
{ok, Config} = handle_options(Options, client, Host),
case Config#config.connection_cb of
tls_gen_connection ->
tls_socket:connect(Host,Port,Config,Timeout);
dtls_gen_connection ->
dtls_socket:connect(Host,Port,Config,Timeout)
end
catch
throw:Error ->
Error
end.
%%--------------------------------------------------------------------
-spec listen(Port, Options) -> {ok, ListenSocket} | {error, reason()} when
Port::inet:port_number(),
Options::[tls_server_option()],
ListenSocket :: sslsocket().
%%
%% Description: Creates an ssl listen socket.
%%--------------------------------------------------------------------
listen(_Port, []) ->
{error, nooptions};
listen(Port, Options0) ->
try
{ok, Config} = handle_options(Options0, server),
do_listen(Port, Config, Config#config.connection_cb)
catch
Error = {error, _} ->
Error
end.
%%--------------------------------------------------------------------
%%
%% Description: Performs transport accept on an ssl listen socket
%%--------------------------------------------------------------------
-spec transport_accept(ListenSocket) -> {ok, SslSocket} |
{error, reason()} when
ListenSocket :: sslsocket(),
SslSocket :: sslsocket().
transport_accept(ListenSocket) ->
transport_accept(ListenSocket, infinity).
-spec transport_accept(ListenSocket, Timeout) -> {ok, SslSocket} |
{error, reason()} when
ListenSocket :: sslsocket(),
Timeout :: timeout(),
SslSocket :: sslsocket().
transport_accept(#sslsocket{pid = {ListenSocket,
#config{connection_cb = ConnectionCb} = Config}}, Timeout)
when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
case ConnectionCb of
tls_gen_connection ->
tls_socket:accept(ListenSocket, Config, Timeout);
dtls_gen_connection ->
dtls_socket:accept(ListenSocket, Config, Timeout)
end.
%%--------------------------------------------------------------------
%%
%% Description: Performs accept on an ssl listen socket, i.e. performs
%% ssl handshake.
%%--------------------------------------------------------------------
%% Performs the SSL/TLS/DTLS server-side handshake.
-spec handshake(HsSocket) -> {ok, SslSocket} | {ok, SslSocket, Ext} | {error, Reason} when
HsSocket :: sslsocket(),
SslSocket :: sslsocket(),
Ext :: protocol_extensions(),
Reason :: closed | timeout | error_alert().
handshake(ListenSocket) ->
handshake(ListenSocket, infinity).
-spec handshake(HsSocket, Timeout) -> {ok, SslSocket} | {ok, SslSocket, Ext} | {error, Reason} when
HsSocket :: sslsocket(),
Timeout :: timeout(),
SslSocket :: sslsocket(),
Ext :: protocol_extensions(),
Reason :: closed | timeout | error_alert();
(Socket, Options) -> {ok, SslSocket} | {ok, SslSocket, Ext} | {error, Reason} when
Socket :: socket() | sslsocket(),
SslSocket :: sslsocket(),
Options :: [server_option()],
Ext :: protocol_extensions(),
Reason :: closed | timeout | error_alert().
handshake(#sslsocket{} = Socket, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or
(Timeout == infinity) ->
ssl_gen_statem:handshake(Socket, Timeout);
%% If Socket is an ordinary socket(): upgrades a gen_tcp, or equivalent, socket to
%% an SSL socket, that is, performs the SSL/TLS server-side handshake and returns
%% the SSL socket.
%%
%% If Socket is an sslsocket(): provides extra SSL/TLS/DTLS options to those
%% specified in ssl:listen/2 and then performs the SSL/TLS/DTLS handshake.
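%% Illustrative server-side upgrade sketch (assumes a plain gen_tcp listen
%% socket; certificate paths are placeholders):
%%   {ok, LSock} = gen_tcp:listen(8443, [binary, {active, false}, {reuseaddr, true}]),
%%   {ok, TcpSock} = gen_tcp:accept(LSock),
%%   {ok, TlsSock} = ssl:handshake(TcpSock, [{certfile, "cert.pem"},
%%                                           {keyfile, "key.pem"}], 5000).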
handshake(ListenSocket, SslOptions) ->
handshake(ListenSocket, SslOptions, infinity).
-spec handshake(Socket, Options, Timeout) ->
{ok, SslSocket} |
{ok, SslSocket, Ext} |
{error, Reason} when
Socket :: socket() | sslsocket(),
SslSocket :: sslsocket(),
Options :: [server_option()],
Timeout :: timeout(),
Ext :: protocol_extensions(),
Reason :: closed | timeout | {options, any()} | error_alert().
handshake(#sslsocket{} = Socket, [], Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or
(Timeout == infinity)->
handshake(Socket, Timeout);
handshake(#sslsocket{fd = {_, _, _, Trackers}} = Socket, SslOpts, Timeout) when
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
try
Tracker = proplists:get_value(option_tracker, Trackers),
{ok, EmOpts, _} = tls_socket:get_all_opts(Tracker),
ssl_gen_statem:handshake(Socket, {SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
catch
Error = {error, _Reason} -> Error
end;
handshake(#sslsocket{pid = [Pid|_], fd = {_, _, _}} = Socket, SslOpts, Timeout) when
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
try
{ok, EmOpts, _} = dtls_packet_demux:get_all_opts(Pid),
ssl_gen_statem:handshake(Socket, {SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
catch
Error = {error, _Reason} -> Error
end;
handshake(Socket, SslOptions, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
CbInfo = handle_option_cb_info(SslOptions, tls),
Transport = element(1, CbInfo),
ConnectionCb = connection_cb(SslOptions),
try handle_options(Transport, Socket, SslOptions, server, undefined) of
{ok, #config{transport_info = CbInfo, ssl = SslOpts, emulated = EmOpts}} ->
ok = tls_socket:setopts(Transport, Socket, tls_socket:internal_inet_values()),
{ok, Port} = tls_socket:port(Transport, Socket),
{ok, SessionIdHandle} = tls_socket:session_id_tracker(ssl_unknown_listener, SslOpts),
ssl_gen_statem:handshake(ConnectionCb, Port, Socket,
{SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{}),
[{session_id_tracker, SessionIdHandle}]},
self(), CbInfo, Timeout)
catch
Error = {error, _Reason} -> Error
end.
%%--------------------------------------------------------------------
-spec handshake_continue(HsSocket, Options) ->
{ok, SslSocket} | {error, Reason} when
HsSocket :: sslsocket(),
Options :: [tls_client_option() | tls_server_option()],
SslSocket :: sslsocket(),
Reason :: closed | timeout | error_alert().
%%
%%
%% Description: Continues the handshake, possibly with newly supplied options.
%%--------------------------------------------------------------------
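%% Illustrative flow (assumes the listen socket was opened with
%% {handshake, hello}, so ssl:handshake/1 pauses after the client hello):
%%   {ok, TSock} = ssl:transport_accept(LSock),
%%   {ok, HsSock, _Ext} = ssl:handshake(TSock),
%%   {ok, Sock} = ssl:handshake_continue(HsSock, [{verify, verify_peer}]).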
handshake_continue(Socket, SSLOptions) ->
handshake_continue(Socket, SSLOptions, infinity).
%%--------------------------------------------------------------------
-spec handshake_continue(HsSocket, Options, Timeout) ->
{ok, SslSocket} | {error, Reason} when
HsSocket :: sslsocket(),
Options :: [tls_client_option() | tls_server_option()],
Timeout :: timeout(),
SslSocket :: sslsocket(),
Reason :: closed | timeout | error_alert().
%%
%%
%% Description: Continues the handshake, possibly with newly supplied options.
%%--------------------------------------------------------------------
handshake_continue(Socket, SSLOptions, Timeout) ->
ssl_gen_statem:handshake_continue(Socket, SSLOptions, Timeout).
%%--------------------------------------------------------------------
-spec handshake_cancel(#sslsocket{}) -> any().
%%
%% Description: Cancels the handshake, sending a close alert.
%%--------------------------------------------------------------------
handshake_cancel(Socket) ->
ssl_gen_statem:handshake_cancel(Socket).
%%--------------------------------------------------------------------
-spec close(SslSocket) -> ok | {error, Reason} when
SslSocket :: sslsocket(),
Reason :: any().
%%
%% Description: Close an ssl connection
%%--------------------------------------------------------------------
close(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
ssl_gen_statem:close(Pid, {close, ?DEFAULT_TIMEOUT});
close(#sslsocket{pid = {dtls, #config{dtls_handler = {_, _}}}} = DTLSListen) ->
dtls_socket:close(DTLSListen);
close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_,_,_,_}}}}) ->
Transport:close(ListenSocket).
%%--------------------------------------------------------------------
-spec close(SslSocket, How) -> ok | {ok, port()} | {ok, port(), Data} | {error,Reason} when
SslSocket :: sslsocket(),
How :: timeout() | {NewController::pid(), timeout()},
Data :: binary(),
Reason :: any().
%%
%% Description: Close an ssl connection
%%--------------------------------------------------------------------
close(#sslsocket{pid = [TLSPid|_]},
{Pid, Timeout} = DownGrade) when is_pid(TLSPid),
is_pid(Pid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
case ssl_gen_statem:close(TLSPid, {close, DownGrade}) of
ok -> %% In normal close {error, closed} is regarded as ok, as it is not interesting which side
%% that got to do the actual close. But in the downgrade case only {ok, Port} is a success.
{error, closed};
Other ->
Other
end;
close(#sslsocket{pid = [TLSPid|_]}, Timeout) when is_pid(TLSPid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
ssl_gen_statem:close(TLSPid, {close, Timeout});
close(#sslsocket{pid = {dtls = ListenSocket, #config{transport_info={Transport,_,_,_,_}}}}, _) ->
dtls_socket:close(Transport, ListenSocket);
close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_,_,_,_}}}}, _) ->
tls_socket:close(Transport, ListenSocket).
%%--------------------------------------------------------------------
-spec send(SslSocket, Data) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
Data :: iodata().
%%
%% Description: Sends data over the ssl connection
%%--------------------------------------------------------------------
send(#sslsocket{pid = [Pid]}, Data) when is_pid(Pid) ->
ssl_gen_statem:send(Pid, Data);
send(#sslsocket{pid = [_, Pid]}, Data) when is_pid(Pid) ->
tls_sender:send_data(Pid, erlang:iolist_to_iovec(Data));
send(#sslsocket{pid = {_, #config{transport_info={_, udp, _, _}}}}, _) ->
{error,enotconn}; %% Emulate connection behaviour
send(#sslsocket{pid = {dtls,_}}, _) ->
{error,enotconn}; %% Emulate connection behaviour
send(#sslsocket{pid = {ListenSocket, #config{transport_info = Info}}}, Data) ->
Transport = element(1, Info),
Transport:send(ListenSocket, Data). %% {error,enotconn}
%%--------------------------------------------------------------------
%%
%% Description: Receives data when active = false
%%--------------------------------------------------------------------
-spec recv(SslSocket, Length) -> {ok, Data} | {error, reason()} when
SslSocket :: sslsocket(),
Length :: integer(),
Data :: binary() | list() | HttpPacket,
HttpPacket :: any().
recv(Socket, Length) ->
recv(Socket, Length, infinity).
-spec recv(SslSocket, Length, Timeout) -> {ok, Data} | {error, reason()} when
SslSocket :: sslsocket(),
Length :: integer(),
Data :: binary() | list() | HttpPacket,
Timeout :: timeout(),
HttpPacket :: any().
recv(#sslsocket{pid = [Pid|_]}, Length, Timeout) when is_pid(Pid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
ssl_gen_statem:recv(Pid, Length, Timeout);
recv(#sslsocket{pid = {dtls,_}}, _, _) ->
{error,enotconn};
recv(#sslsocket{pid = {Listen,
#config{transport_info = Info}}},_,_) when is_port(Listen)->
Transport = element(1, Info),
Transport:recv(Listen, 0). %% {error,enotconn}
%%--------------------------------------------------------------------
-spec controlling_process(SslSocket, NewOwner) -> ok | {error, Reason} when
SslSocket :: sslsocket(),
NewOwner :: pid(),
Reason :: any().
%%
%% Description: Changes process that receives the messages when active = true
%% or once.
%%--------------------------------------------------------------------
controlling_process(#sslsocket{pid = [Pid|_]}, NewOwner) when is_pid(Pid), is_pid(NewOwner) ->
ssl_gen_statem:new_user(Pid, NewOwner);
controlling_process(#sslsocket{pid = {dtls, _}},
NewOwner) when is_pid(NewOwner) ->
ok; %% Meaningless but let it be allowed to conform with TLS
controlling_process(#sslsocket{pid = {Listen,
#config{transport_info = {Transport,_,_,_,_}}}},
NewOwner) when is_port(Listen),
is_pid(NewOwner) ->
%% Meaningless but let it be allowed to conform with normal sockets
Transport:controlling_process(Listen, NewOwner).
%%--------------------------------------------------------------------
-spec connection_information(SslSocket) -> {ok, Result} | {error, reason()} when
SslSocket :: sslsocket(),
Result :: connection_info().
%%
%% Description: Return SSL information for the connection
%%--------------------------------------------------------------------
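%% Illustrative usage (Sock is assumed to be a connected TLS socket):
%%   {ok, Info} = ssl:connection_information(Sock),
%%   Suite = proplists:get_value(selected_cipher_suite, Info).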
connection_information(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
case ssl_gen_statem:connection_information(Pid, false) of
{ok, Info} ->
{ok, [Item || Item = {_Key, Value} <- Info, Value =/= undefined]};
Error ->
Error
end;
connection_information(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn};
connection_information(#sslsocket{pid = {dtls,_}}) ->
{error,enotconn}.
%%--------------------------------------------------------------------
-spec connection_information(SslSocket, Items) -> {ok, Result} | {error, reason()} when
SslSocket :: sslsocket(),
Items :: connection_info_items(),
Result :: connection_info().
%%
%% Description: Return SSL information for the connection
%%--------------------------------------------------------------------
connection_information(#sslsocket{pid = [Pid|_]}, Items) when is_pid(Pid) ->
case ssl_gen_statem:connection_information(Pid, include_security_info(Items)) of
{ok, Info} ->
{ok, [Item || Item = {Key, Value} <- Info, lists:member(Key, Items),
Value =/= undefined]};
Error ->
Error
end.
%%--------------------------------------------------------------------
-spec peername(SslSocket) -> {ok, {Address, Port}} |
{error, reason()} when
SslSocket :: sslsocket(),
Address :: inet:ip_address(),
Port :: inet:port_number().
%%
%% Description: same as inet:peername/1.
%%--------------------------------------------------------------------
peername(#sslsocket{pid = [Pid|_], fd = {Transport, Socket,_}}) when is_pid(Pid)->
dtls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = [Pid|_], fd = {Transport, Socket,_,_}}) when is_pid(Pid)->
tls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = {dtls, #config{dtls_handler = {_Pid,_}}}}) ->
dtls_socket:peername(dtls, undefined);
peername(#sslsocket{pid = {ListenSocket, #config{transport_info = {Transport,_,_,_,_}}}}) ->
tls_socket:peername(Transport, ListenSocket); %% Will return {error, enotconn}
peername(#sslsocket{pid = {dtls,_}}) ->
{error,enotconn}.
%%--------------------------------------------------------------------
-spec peercert(SslSocket) -> {ok, Cert} | {error, reason()} when
SslSocket :: sslsocket(),
Cert :: public_key:der_encoded().
%%
%% Description: Returns the peercert.
%%--------------------------------------------------------------------
peercert(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
case ssl_gen_statem:peer_certificate(Pid) of
{ok, undefined} ->
{error, no_peercert};
Result ->
Result
end;
peercert(#sslsocket{pid = {dtls, _}}) ->
{error, enotconn};
peercert(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn}.
%%--------------------------------------------------------------------
-spec negotiated_protocol(SslSocket) -> {ok, Protocol} | {error, Reason} when
SslSocket :: sslsocket(),
Protocol :: binary(),
Reason :: protocol_not_negotiated.
%%
%% Description: Returns the protocol that has been negotiated. If no
%% protocol has been negotiated, {error, protocol_not_negotiated} is returned.
%%--------------------------------------------------------------------
negotiated_protocol(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
ssl_gen_statem:negotiated_protocol(Pid).
%%--------------------------------------------------------------------
-spec cipher_suites(Description, Version) -> ciphers() when
Description :: default | all | exclusive | anonymous | exclusive_anonymous,
Version :: protocol_version().
%% Description: Returns all default and all supported cipher suites for a
%% TLS/DTLS version
%%--------------------------------------------------------------------
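%% Illustrative call (not part of the original module):
%%   ssl:cipher_suites(default, 'tlsv1.3')
%% returns the default suites for TLS 1.3 as maps, e.g.
%%   #{key_exchange => any, cipher => aes_256_gcm, mac => aead, prf => sha384}.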
cipher_suites(Description, Version) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
cipher_suites(Description, tls_record:protocol_version(Version));
cipher_suites(Description, Version) when Version == 'dtlsv1.2';
Version == 'dtlsv1'->
cipher_suites(Description, dtls_record:protocol_version(Version));
cipher_suites(Description, Version) ->
[ssl_cipher_format:suite_bin_to_map(Suite) || Suite <- supported_suites(Description, Version)].
%%--------------------------------------------------------------------
-spec cipher_suites(Description, Version, rfc | openssl) -> [string()] when
Description :: default | all | exclusive | anonymous,
Version :: protocol_version().
%% Description: Returns all default and all supported cipher suites for a
%% TLS/DTLS version
%%--------------------------------------------------------------------
cipher_suites(Description, Version, StringType) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
cipher_suites(Description, tls_record:protocol_version(Version), StringType);
cipher_suites(Description, Version, StringType) when Version == 'dtlsv1.2';
Version == 'dtlsv1'->
cipher_suites(Description, dtls_record:protocol_version(Version), StringType);
cipher_suites(Description, Version, rfc) ->
[ssl_cipher_format:suite_map_to_str(ssl_cipher_format:suite_bin_to_map(Suite))
|| Suite <- supported_suites(Description, Version)];
cipher_suites(Description, Version, openssl) ->
[ssl_cipher_format:suite_map_to_openssl_str(ssl_cipher_format:suite_bin_to_map(Suite))
|| Suite <- supported_suites(Description, Version)].
%%--------------------------------------------------------------------
-spec filter_cipher_suites(Suites, Filters) -> Ciphers when
Suites :: ciphers(),
Filters :: cipher_filters(),
Ciphers :: ciphers().
%% Description: Removes cipher suites if any of the filter functions returns false
%% for any part of the cipher suite. This function also calls default filter functions
%% to make sure the cipher suites are supported by crypto.
%%--------------------------------------------------------------------
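%% Illustrative example (not part of the original module): keep only
%% ECDHE key exchanges among the default TLS 1.2 suites.
%%   Suites = ssl:cipher_suites(default, 'tlsv1.2'),
%%   ssl:filter_cipher_suites(Suites,
%%       [{key_exchange, fun(ecdhe_rsa) -> true;
%%                          (ecdhe_ecdsa) -> true;
%%                          (_) -> false
%%                       end}]).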
filter_cipher_suites(Suites, Filters0) ->
#{key_exchange_filters := KexF,
cipher_filters := CipherF,
mac_filters := MacF,
prf_filters := PrfF}
= ssl_cipher:crypto_support_filters(),
Filters = #{key_exchange_filters => add_filter(proplists:get_value(key_exchange, Filters0), KexF),
cipher_filters => add_filter(proplists:get_value(cipher, Filters0), CipherF),
mac_filters => add_filter(proplists:get_value(mac, Filters0), MacF),
prf_filters => add_filter(proplists:get_value(prf, Filters0), PrfF)},
ssl_cipher:filter_suites(Suites, Filters).
%%--------------------------------------------------------------------
-spec prepend_cipher_suites(Preferred, Suites) -> ciphers() when
Preferred :: ciphers() | cipher_filters(),
Suites :: ciphers().
%% Description: Make <Preferred> suites become the most preferred
%% suites, that is, put them at the head of the cipher suite list
%% and remove them from <Suites> if present. <Preferred> may be a
%% list of cipher suites or a list of filters, in which case the
%% filters are used on Suites to extract the preferred
%% cipher list.
%% --------------------------------------------------------------------
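%% Illustrative example (not part of the original module): move the
%% ChaCha20-Poly1305 suites to the front of the default list.
%%   Suites = ssl:cipher_suites(default, 'tlsv1.2'),
%%   ssl:prepend_cipher_suites([{cipher, fun(chacha20_poly1305) -> true;
%%                                           (_) -> false
%%                                        end}], Suites).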
prepend_cipher_suites([First | _] = Preferred, Suites0) when is_map(First) ->
Suites = Preferred ++ (Suites0 -- Preferred),
Suites;
prepend_cipher_suites(Filters, Suites) ->
Preferred = filter_cipher_suites(Suites, Filters),
Preferred ++ (Suites -- Preferred).
%%--------------------------------------------------------------------
-spec append_cipher_suites(Deferred, Suites) -> ciphers() when
Deferred :: ciphers() | cipher_filters(),
Suites :: ciphers().
%% Description: Make <Deferred> suites become the
%% least preferred suites, that is, put them at the end of the cipher suite list
%% and remove them from <Suites> if present.
%%
%%--------------------------------------------------------------------
append_cipher_suites([First | _] = Deferred, Suites0) when is_map(First)->
Suites = (Suites0 -- Deferred) ++ Deferred,
Suites;
append_cipher_suites(Filters, Suites) ->
Deferred = filter_cipher_suites(Suites, Filters),
(Suites -- Deferred) ++ Deferred.
%%--------------------------------------------------------------------
-spec eccs() -> NamedCurves when
NamedCurves :: [named_curve()].
%% Description: returns all supported curves across all versions
%%--------------------------------------------------------------------
eccs() ->
Curves = tls_v1:ecc_curves(all), % only tls_v1 has named curves right now
eccs_filter_supported(Curves).
%%--------------------------------------------------------------------
-spec eccs(Version) -> NamedCurves when
Version :: protocol_version(),
NamedCurves :: [named_curve()].
%% Description: returns the curves supported for a given version of
%% ssl/tls.
%%--------------------------------------------------------------------
eccs('dtlsv1') ->
eccs('tlsv1.1');
eccs('dtlsv1.2') ->
eccs('tlsv1.2');
eccs(Version) when Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
Curves = tls_v1:ecc_curves(all),
eccs_filter_supported(Curves).
eccs_filter_supported(Curves) ->
CryptoCurves = crypto:ec_curves(),
lists:filter(fun(Curve) -> proplists:get_bool(Curve, CryptoCurves) end,
Curves).
%%--------------------------------------------------------------------
-spec groups() -> [group()].
%% Description: returns all supported groups (TLS 1.3 and later)
%%--------------------------------------------------------------------
groups() ->
tls_v1:groups(4).
%%--------------------------------------------------------------------
-spec groups(default) -> [group()].
%% Description: returns the default groups (TLS 1.3 and later)
%%--------------------------------------------------------------------
groups(default) ->
tls_v1:default_groups(4).
%%--------------------------------------------------------------------
-spec getopts(SslSocket, OptionNames) ->
{ok, [gen_tcp:option()]} | {error, reason()} when
SslSocket :: sslsocket(),
OptionNames :: [gen_tcp:option_name()].
%%
%% Description: Gets options
%%--------------------------------------------------------------------
getopts(#sslsocket{pid = [Pid|_]}, OptionTags) when is_pid(Pid), is_list(OptionTags) ->
ssl_gen_statem:get_opts(Pid, OptionTags);
getopts(#sslsocket{pid = {dtls, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket, OptionTags) when is_list(OptionTags) ->
try dtls_socket:getopts(Transport, ListenSocket, OptionTags) of
{ok, _} = Result ->
Result;
{error, InetError} ->
{error, {options, {socket_options, OptionTags, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, OptionTags, Error}}}
end;
getopts(#sslsocket{pid = {_, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket,
OptionTags) when is_list(OptionTags) ->
try tls_socket:getopts(Transport, ListenSocket, OptionTags) of
{ok, _} = Result ->
Result;
{error, InetError} ->
{error, {options, {socket_options, OptionTags, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, OptionTags, Error}}}
end;
getopts(#sslsocket{}, OptionTags) ->
{error, {options, {socket_options, OptionTags}}}.
%%--------------------------------------------------------------------
-spec setopts(SslSocket, Options) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
Options :: [gen_tcp:option()].
%%
%% Description: Sets options
%%--------------------------------------------------------------------
setopts(#sslsocket{pid = [Pid, Sender]}, Options0) when is_pid(Pid), is_list(Options0) ->
try proplists:expand([{binary, [{mode, binary}]},
{list, [{mode, list}]}], Options0) of
Options ->
case proplists:get_value(packet, Options, undefined) of
undefined ->
ssl_gen_statem:set_opts(Pid, Options);
PacketOpt ->
case tls_sender:setopts(Sender, [{packet, PacketOpt}]) of
ok ->
ssl_gen_statem:set_opts(Pid, Options);
Error ->
Error
end
end
catch
_:_ ->
{error, {options, {not_a_proplist, Options0}}}
end;
setopts(#sslsocket{pid = [Pid|_]}, Options0) when is_pid(Pid), is_list(Options0) ->
try proplists:expand([{binary, [{mode, binary}]},
{list, [{mode, list}]}], Options0) of
Options ->
ssl_gen_statem:set_opts(Pid, Options)
catch
_:_ ->
{error, {options, {not_a_proplist, Options0}}}
end;
setopts(#sslsocket{pid = {dtls, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket, Options) when is_list(Options) ->
try dtls_socket:setopts(Transport, ListenSocket, Options) of
ok ->
ok;
{error, InetError} ->
{error, {options, {socket_options, Options, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, Options, Error}}}
end;
setopts(#sslsocket{pid = {_, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket, Options) when is_list(Options) ->
try tls_socket:setopts(Transport, ListenSocket, Options) of
ok ->
ok;
{error, InetError} ->
{error, {options, {socket_options, Options, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, Options, Error}}}
end;
setopts(#sslsocket{}, Options) ->
{error, {options,{not_a_proplist, Options}}}.
%%---------------------------------------------------------------
-spec getstat(SslSocket) ->
{ok, OptionValues} | {error, inet:posix()} when
SslSocket :: sslsocket(),
OptionValues :: [{inet:stat_option(), integer()}].
%%
%% Description: Get all statistic options for a socket.
%%--------------------------------------------------------------------
getstat(Socket) ->
getstat(Socket, inet:stats()).
%%---------------------------------------------------------------
-spec getstat(SslSocket, Options) ->
{ok, OptionValues} | {error, inet:posix()} when
SslSocket :: sslsocket(),
Options :: [inet:stat_option()],
OptionValues :: [{inet:stat_option(), integer()}].
%%
%% Description: Get one or more statistic options for a socket.
%%--------------------------------------------------------------------
getstat(#sslsocket{pid = {dtls, #config{transport_info = {Transport, _, _, _, _},
dtls_handler = {Listener, _}}}},
Options) when is_list(Options) ->
dtls_socket:getstat(Transport, Listener, Options);
getstat(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _, _}}}},
Options) when is_port(Listen), is_list(Options) ->
tls_socket:getstat(Transport, Listen, Options);
getstat(#sslsocket{pid = [Pid|_], fd = {Transport, Socket, _, _}},
Options) when is_pid(Pid), is_list(Options) ->
tls_socket:getstat(Transport, Socket, Options);
getstat(#sslsocket{pid = [Pid|_], fd = {Transport, Socket, _}},
Options) when is_pid(Pid), is_list(Options) ->
dtls_socket:getstat(Transport, Socket, Options).
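%% Illustrative sketch (not part of the original module): fetching a subset of
%% the inet statistics for a connection; TLSSocket is assumed to be an
%% established sslsocket().
%%   {ok, Counters} = ssl:getstat(TLSSocket, [recv_oct, send_oct]),
%%   %% Counters is of the form [{recv_oct, N}, {send_oct, M}]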
%%---------------------------------------------------------------
-spec shutdown(SslSocket, How) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
How :: read | write | read_write.
%%
%% Description: Same as gen_tcp:shutdown/2
%%--------------------------------------------------------------------
shutdown(#sslsocket{pid = {Listen, #config{transport_info = Info}}},
How) when is_port(Listen) ->
Transport = element(1, Info),
Transport:shutdown(Listen, How);
shutdown(#sslsocket{pid = {dtls,_}},_) ->
{error, enotconn};
shutdown(#sslsocket{pid = [Pid|_]}, How) when is_pid(Pid) ->
ssl_gen_statem:shutdown(Pid, How).
%%--------------------------------------------------------------------
-spec sockname(SslSocket) ->
{ok, {Address, Port}} | {error, reason()} when
SslSocket :: sslsocket(),
Address :: inet:ip_address(),
Port :: inet:port_number().
%%
%% Description: Same as inet:sockname/1
%%--------------------------------------------------------------------
sockname(#sslsocket{pid = {Listen, #config{transport_info = {Transport,_,_,_,_}}}}) when is_port(Listen) ->
tls_socket:sockname(Transport, Listen);
sockname(#sslsocket{pid = {dtls, #config{dtls_handler = {Pid, _}}}}) ->
dtls_packet_demux:sockname(Pid);
sockname(#sslsocket{pid = [Pid|_], fd = {Transport, Socket,_}}) when is_pid(Pid) ->
dtls_socket:sockname(Transport, Socket);
sockname(#sslsocket{pid = [Pid| _], fd = {Transport, Socket,_,_}}) when is_pid(Pid) ->
tls_socket:sockname(Transport, Socket).
%%---------------------------------------------------------------
-spec versions() -> [VersionInfo] when
VersionInfo :: {ssl_app, string()} |
{supported | available | implemented, [tls_version()]} |
{supported_dtls | available_dtls | implemented_dtls, [dtls_version()]}.
%%
%% Description: Returns a list of relevant versions.
%%--------------------------------------------------------------------
versions() ->
ConfTLSVsns = tls_record:supported_protocol_versions(),
ConfDTLSVsns = dtls_record:supported_protocol_versions(),
ImplementedTLSVsns = ?ALL_AVAILABLE_VERSIONS,
ImplementedDTLSVsns = ?ALL_AVAILABLE_DATAGRAM_VERSIONS,
TLSCryptoSupported = fun(Vsn) ->
tls_record:sufficient_crypto_support(Vsn)
end,
DTLSCryptoSupported = fun(Vsn) ->
tls_record:sufficient_crypto_support(dtls_v1:corresponding_tls_version(Vsn))
end,
SupportedTLSVsns = [tls_record:protocol_version(Vsn) || Vsn <- ConfTLSVsns, TLSCryptoSupported(Vsn)],
SupportedDTLSVsns = [dtls_record:protocol_version(Vsn) || Vsn <- ConfDTLSVsns, DTLSCryptoSupported(Vsn)],
AvailableTLSVsns = [Vsn || Vsn <- ImplementedTLSVsns, TLSCryptoSupported(tls_record:protocol_version(Vsn))],
AvailableDTLSVsns = [Vsn || Vsn <- ImplementedDTLSVsns, DTLSCryptoSupported(dtls_record:protocol_version(Vsn))],
[{ssl_app, ?VSN},
{supported, SupportedTLSVsns},
{supported_dtls, SupportedDTLSVsns},
{available, AvailableTLSVsns},
{available_dtls, AvailableDTLSVsns},
{implemented, ImplementedTLSVsns},
{implemented_dtls, ImplementedDTLSVsns}
].
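%% Illustrative sketch (not part of the original module): the returned proplist
%% contains one entry per tag above; output is abbreviated and depends on the
%% runtime's crypto support, e.g.
%%   1> ssl:versions().
%%   [{ssl_app,"10.7"},
%%    {supported,['tlsv1.3','tlsv1.2']},
%%    {supported_dtls,['dtlsv1.2']},
%%    {available,['tlsv1.3','tlsv1.2','tlsv1.1',tlsv1]},
%%    ...]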
%%---------------------------------------------------------------
-spec renegotiate(SslSocket) -> ok | {error, reason()} when
SslSocket :: sslsocket().
%%
%% Description: Initiates a renegotiation.
%%--------------------------------------------------------------------
renegotiate(#sslsocket{pid = [Pid, Sender |_]}) when is_pid(Pid),
is_pid(Sender) ->
case tls_sender:renegotiate(Sender) of
{ok, Write} ->
tls_dtls_connection:renegotiation(Pid, Write);
Error ->
Error
end;
renegotiate(#sslsocket{pid = [Pid |_]}) when is_pid(Pid) ->
tls_dtls_connection:renegotiation(Pid);
renegotiate(#sslsocket{pid = {dtls,_}}) ->
{error, enotconn};
renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
{error, enotconn}.
%%---------------------------------------------------------------
-spec update_keys(SslSocket, Type) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
Type :: write | read_write.
%%
%% Description: Initiate a key update.
%%--------------------------------------------------------------------
update_keys(#sslsocket{pid = [Pid, Sender |_]}, Type0) when is_pid(Pid) andalso
is_pid(Sender) andalso
(Type0 =:= write orelse
Type0 =:= read_write) ->
Type = case Type0 of
write ->
update_not_requested;
read_write ->
update_requested
end,
tls_connection_1_3:send_key_update(Sender, Type);
update_keys(_, Type) ->
{error, {illegal_parameter, Type}}.
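%% Illustrative sketch (not part of the original module): requesting a TLS-1.3
%% key update in both directions on an established connection.
%%   ok = ssl:update_keys(TLSSocket, read_write).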
%%--------------------------------------------------------------------
-spec prf(SslSocket, Secret, Label, Seed, WantedLength) ->
{ok, binary()} | {error, reason()} when
SslSocket :: sslsocket(),
Secret :: binary() | 'master_secret',
Label::binary(),
Seed :: [binary() | prf_random()],
WantedLength :: non_neg_integer().
%%
%% Description: Use an SSL session's TLS PRF to generate key material
%%--------------------------------------------------------------------
prf(#sslsocket{pid = [Pid|_]},
Secret, Label, Seed, WantedLength) when is_pid(Pid) ->
tls_dtls_connection:prf(Pid, Secret, Label, Seed, WantedLength);
prf(#sslsocket{pid = {dtls,_}}, _,_,_,_) ->
{error, enotconn};
prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) ->
{error, enotconn}.
%%--------------------------------------------------------------------
-spec clear_pem_cache() -> ok.
%%
%% Description: Clear the PEM cache
%%--------------------------------------------------------------------
clear_pem_cache() ->
ssl_pem_cache:clear().
%%---------------------------------------------------------------
-spec format_error({error, Reason}) -> string() when
Reason :: any().
%%
%% Description: Creates error string.
%%--------------------------------------------------------------------
format_error({error, Reason}) ->
format_error(Reason);
format_error(Reason) when is_list(Reason) ->
Reason;
format_error(closed) ->
"TLS connection is closed";
format_error({tls_alert, {_, Description}}) ->
Description;
format_error({options,{FileType, File, Reason}}) when FileType == cacertfile;
FileType == certfile;
FileType == keyfile;
FileType == dhfile ->
Error = file_error_format(Reason),
file_desc(FileType) ++ File ++ ": " ++ Error;
format_error({options, {socket_options, Option, Error}}) ->
lists:flatten(io_lib:format("Invalid transport socket option ~p: ~s", [Option, format_error(Error)]));
format_error({options, {socket_options, Option}}) ->
lists:flatten(io_lib:format("Invalid socket option: ~p", [Option]));
format_error({options, Options}) ->
lists:flatten(io_lib:format("Invalid TLS option: ~p", [Options]));
format_error(Error) ->
case inet:format_error(Error) of
"unknown POSIX" ++ _ ->
unexpected_format(Error);
Other ->
Other
end.
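%% Illustrative sketch (not part of the original module), derived from the
%% clauses above:
%%   1> ssl:format_error(closed).
%%   "TLS connection is closed"
%%   2> ssl:format_error({options, {keyfile, "no_file.pem", {error, enoent}}}).
%%   "Invalid key file no_file.pem: no such file or directory"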
tls_version({3, _} = Version) ->
Version;
tls_version({254, _} = Version) ->
dtls_v1:corresponding_tls_version(Version).
%%--------------------------------------------------------------------
-spec suite_to_str(CipherSuite) -> string() when
CipherSuite :: erl_cipher_suite();
(CipherSuite) -> string() when
%% For internal use!
CipherSuite :: #{key_exchange := null,
cipher := null,
mac := null,
prf := null}.
%%
%% Description: Return the string representation of a cipher suite.
%%--------------------------------------------------------------------
suite_to_str(Cipher) ->
ssl_cipher_format:suite_map_to_str(Cipher).
%%--------------------------------------------------------------------
-spec suite_to_openssl_str(CipherSuite) -> string() when
CipherSuite :: erl_cipher_suite().
%%
%% Description: Return the string representation of a cipher suite.
%%--------------------------------------------------------------------
suite_to_openssl_str(Cipher) ->
ssl_cipher_format:suite_map_to_openssl_str(Cipher).
%%
%%--------------------------------------------------------------------
-spec str_to_suite(CipherSuiteName) -> erl_cipher_suite() | {error, {not_recognized, CipherSuiteName}} when
CipherSuiteName :: string().
%%
%% Description: Return the map representation of a cipher suite.
%%--------------------------------------------------------------------
str_to_suite(CipherSuiteName) ->
try
        %% Note in TLS-1.3 OpenSSL conforms to RFC names
        %% so if CipherSuiteName starts with TLS this
        %% function will call ssl_cipher_format:suite_str_to_map
        %% so both RFC names and legacy OpenSSL names of supported
%% cipher suites will be handled
ssl_cipher_format:suite_openssl_str_to_map(CipherSuiteName)
catch
_:_ ->
{error, {not_recognized, CipherSuiteName}}
end.
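%% Illustrative sketch (not part of the original module); the exact mappings
%% are defined by ssl_cipher_format:
%%   1> ssl:str_to_suite("ECDHE-RSA-AES256-GCM-SHA384").
%%   #{key_exchange => ecdhe_rsa, cipher => aes_256_gcm, mac => aead, prf => sha384}
%%   2> ssl:suite_to_openssl_str(v(1)).
%%   "ECDHE-RSA-AES256-GCM-SHA384"
%%   3> ssl:str_to_suite("NOT-A-SUITE").
%%   {error, {not_recognized, "NOT-A-SUITE"}}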
%%%--------------------------------------------------------------
%%% Internal functions
%%%--------------------------------------------------------------------
supported_suites(exclusive, {3,Minor}) ->
tls_v1:exclusive_suites(Minor);
supported_suites(exclusive, {254, Minor}) ->
dtls_v1:exclusive_suites(Minor);
supported_suites(default, Version) ->
ssl_cipher:suites(Version);
supported_suites(all, Version) ->
ssl_cipher:all_suites(Version);
supported_suites(anonymous, Version) ->
ssl_cipher:anonymous_suites(Version);
supported_suites(exclusive_anonymous, {3, Minor}) ->
tls_v1:exclusive_anonymous_suites(Minor);
supported_suites(exclusive_anonymous, {254, Minor}) ->
dtls_v1:exclusive_anonymous_suites(Minor).
do_listen(Port, #config{transport_info = {Transport, _, _, _,_}} = Config, tls_gen_connection) ->
tls_socket:listen(Transport, Port, Config);
do_listen(Port, Config, dtls_gen_connection) ->
dtls_socket:listen(Port, Config).
-spec handle_options([any()], client | server) -> {ok, #config{}};
([any()], ssl_options()) -> ssl_options().
handle_options(Opts, Role) ->
handle_options(undefined, undefined, Opts, Role, undefined).
handle_options(Opts, Role, InheritedSslOpts) ->
handle_options(undefined, undefined, Opts, Role, InheritedSslOpts).
%% Handle ssl options at handshake, handshake_continue
handle_options(_, _, Opts0, Role, InheritedSslOpts) when is_map(InheritedSslOpts) ->
{SslOpts, _} = expand_options(Opts0, ?RULES),
process_options(SslOpts, InheritedSslOpts, #{role => Role,
rules => ?RULES});
%% Handle all options in listen, connect and handshake
handle_options(Transport, Socket, Opts0, Role, Host) ->
{SslOpts0, SockOpts0} = expand_options(Opts0, ?RULES),
%% Ensure all options are evaluated at startup
SslOpts1 = add_missing_options(SslOpts0, ?RULES),
SslOpts2 = #{protocol := Protocol}
= process_options(SslOpts1,
#{},
#{role => Role,
host => Host,
rules => ?RULES}),
maybe_client_warn_no_verify(SslOpts2, Role),
SslOpts = maps:without([warn_verify_none], SslOpts2),
%% Handle special options
{Sock, Emulated} = emulated_options(Transport, Socket, Protocol, SockOpts0),
ConnetionCb = connection_cb(Protocol),
CbInfo = handle_option_cb_info(Opts0, Protocol),
{ok, #config{
ssl = SslOpts,
emulated = Emulated,
inet_ssl = Sock,
inet_user = Sock,
transport_info = CbInfo,
connection_cb = ConnetionCb
}}.
%% process_options(SSLOptions, OptionsMap, Env) where
%% SSLOptions is the following tuple:
%%  {InOptions, SkippedOptions, Counter}
%%
%% The list of options is processed in multiple passes. When
%% processing an option all dependencies must already be resolved.
%% If there are unresolved dependencies the option will be
%% skipped and processed in a subsequent pass.
%% Counter is equal to the number of unprocessed options at
%% the beginning of a pass. Its value must monotonically decrease
%% after each successful pass.
%% If the value of the counter is unchanged at the end of a pass,
%% the processing stops due to faulty input data.
process_options({[], [], _}, OptionsMap, _Env) ->
OptionsMap;
process_options({[], [_|_] = Skipped, Counter}, OptionsMap, Env)
when length(Skipped) < Counter ->
%% Continue handling options if current pass was successful
process_options({Skipped, [], length(Skipped)}, OptionsMap, Env);
process_options({[], [_|_], _Counter}, _OptionsMap, _Env) ->
throw({error, faulty_configuration});
process_options({[{K0,V} = E|T], S, Counter}, OptionsMap0, Env) ->
K = maybe_map_key_internal(K0),
case check_dependencies(K, OptionsMap0, Env) of
true ->
OptionsMap = handle_option(K, V, OptionsMap0, Env),
process_options({T, S, Counter}, OptionsMap, Env);
false ->
%% Skip option for next pass
process_options({T, [E|S], Counter}, OptionsMap0, Env)
end.
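%% Illustrative walk-through (not part of the original module) of the pass
%% mechanism above, with a shortened input where Counter = 2:
%%   {[{verify_fun, Fun}, {verify, verify_peer}], [], 2}
%% Pass one skips verify_fun (its dependency 'verify' is not yet bound in the
%% options map) and handles verify; pass two restarts as
%%   {[{verify_fun, Fun}], [], 1}
%% and can now resolve verify_fun. If a pass ends without shrinking the list of
%% unprocessed options, {error, faulty_configuration} is thrown.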
handle_option(anti_replay = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(anti_replay = Option, Value0,
#{session_tickets := SessionTickets,
versions := Versions} = OptionsMap, #{rules := Rules}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_option_dependency(Option, session_tickets, [SessionTickets], [stateless]),
case SessionTickets of
stateless ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
_ ->
OptionsMap#{Option => default_value(Option, Rules)}
end;
handle_option(beast_mitigation = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(beast_mitigation = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(cacertfile = Option, unbound, #{cacerts := CaCerts,
verify := Verify,
verify_fun := VerifyFun} = OptionsMap, _Env)
when Verify =:= verify_none orelse
Verify =:= 0 ->
Value = validate_option(Option, ca_cert_default(verify_none, VerifyFun, CaCerts)),
OptionsMap#{Option => Value};
handle_option(cacertfile = Option, unbound, #{cacerts := CaCerts,
verify := Verify,
verify_fun := VerifyFun} = OptionsMap, _Env)
when Verify =:= verify_peer orelse
Verify =:= 1 orelse
Verify =:= 2 ->
Value = validate_option(Option, ca_cert_default(verify_peer, VerifyFun, CaCerts)),
OptionsMap#{Option => Value};
handle_option(cacertfile = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(ciphers = Option, unbound, #{versions := Versions} = OptionsMap, #{rules := Rules}) ->
Value = handle_cipher_option(default_value(Option, Rules), Versions),
OptionsMap#{Option => Value};
handle_option(ciphers = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
Value = handle_cipher_option(Value0, Versions),
OptionsMap#{Option => Value};
handle_option(client_renegotiation = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, true, Role),
OptionsMap#{Option => Value};
handle_option(client_renegotiation = Option, Value0,
#{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_role(server_only, Role, Option, Value0),
assert_option_dependency(Option, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(early_data = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(early_data = Option, Value0, #{session_tickets := SessionTickets,
versions := Versions} = OptionsMap,
#{role := server = Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_option_dependency(Option, session_tickets, [SessionTickets],
[stateful, stateless]),
Value = validate_option(Option, Value0, Role),
OptionsMap#{Option => Value};
handle_option(early_data = Option, Value0, #{session_tickets := SessionTickets,
use_ticket := UseTicket,
versions := Versions} = OptionsMap,
#{role := client = Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_option_dependency(Option, session_tickets, [SessionTickets],
[manual, auto]),
case UseTicket of
undefined when SessionTickets =/= auto ->
throw({error, {options, dependency, {Option, use_ticket}}});
_ ->
ok
end,
Value = validate_option(Option, Value0, Role),
OptionsMap#{Option => Value};
handle_option(eccs = Option, unbound, #{versions := [HighestVersion|_]} = OptionsMap, #{rules := _Rules}) ->
Value = handle_eccs_option(eccs(), HighestVersion),
OptionsMap#{Option => Value};
handle_option(eccs = Option, Value0, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_eccs_option(Value0, HighestVersion),
OptionsMap#{Option => Value};
handle_option(fallback = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(client, false, Role),
OptionsMap#{Option => Value};
handle_option(fallback = Option, Value0, OptionsMap, #{role := Role}) ->
assert_role(client_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(certificate_authorities = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(client, false, Role),
OptionsMap#{Option => Value};
handle_option(certificate_authorities = Option, Value0, #{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_role(client_only, Role, Option, Value0),
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(cookie = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, true, Role),
OptionsMap#{Option => Value};
handle_option(cookie = Option, Value0, #{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_role(server_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(honor_cipher_order = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, false, Role),
OptionsMap#{Option => Value};
handle_option(honor_cipher_order = Option, Value0, OptionsMap, #{role := Role}) ->
assert_role(server_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(honor_ecc_order = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, false, Role),
OptionsMap#{Option => Value};
handle_option(honor_ecc_order = Option, Value0, OptionsMap, #{role := Role}) ->
assert_role(server_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(keyfile = Option, unbound, #{certfile := CertFile} = OptionsMap, _Env) ->
Value = validate_option(Option, CertFile),
OptionsMap#{Option => Value};
handle_option(key_update_at = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(key_update_at = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(next_protocols_advertised = Option, unbound, OptionsMap,
#{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(next_protocols_advertised = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(next_protocols_advertised, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(next_protocol_selector = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = default_value(Option, Rules),
OptionsMap#{Option => Value};
handle_option(next_protocol_selector = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(client_preferred_next_protocols, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = make_next_protocol_selector(
validate_option(client_preferred_next_protocols, Value0)),
OptionsMap#{Option => Value};
handle_option(padding_check = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(padding_check = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(password = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{password => Value};
handle_option(password = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{password => Value};
handle_option(psk_identity = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(psk_identity = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(secure_renegotiate = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(secure_renegotiate= Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(secure_renegotiate, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(reuse_session = Option, unbound, OptionsMap, #{role := Role}) ->
Value =
case Role of
client ->
undefined;
server ->
fun(_, _, _, _) -> true end
end,
OptionsMap#{Option => Value};
handle_option(reuse_session = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(reuse_session, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
%% TODO: validate based on role
handle_option(reuse_sessions = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(reuse_sessions = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(reuse_sessions, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(server_name_indication = Option, unbound, OptionsMap, #{host := Host,
role := Role}) ->
Value = default_option_role(client, server_name_indication_default(Host), Role),
OptionsMap#{Option => Value};
handle_option(server_name_indication = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(session_tickets = Option, unbound, OptionsMap, #{role := Role,
rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules), Role),
OptionsMap#{Option => Value};
handle_option(session_tickets = Option, Value0, #{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0, Role),
OptionsMap#{Option => Value};
handle_option(signature_algs = Option, unbound, #{versions := [HighestVersion | _] = Versions} = OptionsMap, #{role := Role}) ->
Value =
handle_hashsigns_option(
default_option_role_sign_algs(
server,
tls_v1:default_signature_algs(Versions),
Role,
HighestVersion),
tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(signature_algs = Option, Value0, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_hashsigns_option(Value0, tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(signature_algs_cert = Option, unbound, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
%% Do not send by default
Value = handle_signature_algorithms_option(undefined, tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(signature_algs_cert = Option, Value0, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_signature_algorithms_option(Value0, tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(sni_fun = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = default_value(Option, Rules),
OptionsMap#{Option => Value};
handle_option(sni_fun = Option, Value0, OptionsMap, _Env) ->
validate_option(Option, Value0),
OptHosts = maps:get(sni_hosts, OptionsMap, undefined),
Value =
case {Value0, OptHosts} of
{undefined, _} ->
Value0;
{_, []} ->
Value0;
_ ->
throw({error, {conflict_options, [sni_fun, sni_hosts]}})
end,
OptionsMap#{Option => Value};
handle_option(srp_identity = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(srp_identity = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(srp_identity, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(supported_groups = Option, unbound, #{versions := [HighestVersion|_]} = OptionsMap, #{rules := _Rules}) ->
Value = handle_supported_groups_option(groups(default), HighestVersion),
OptionsMap#{Option => Value};
handle_option(supported_groups = Option, Value0,
#{versions := [HighestVersion|_] = Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = handle_supported_groups_option(Value0, HighestVersion),
OptionsMap#{Option => Value};
handle_option(use_ticket = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(use_ticket = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(user_lookup_fun = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(user_lookup_fun = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(verify = Option, unbound, OptionsMap, #{rules := Rules}) ->
handle_verify_option(default_value(Option, Rules), OptionsMap#{warn_verify_none => true});
handle_option(verify = _Option, Value, OptionsMap, _Env) ->
handle_verify_option(Value, OptionsMap);
handle_option(verify_fun = Option, unbound, #{verify := Verify} = OptionsMap, #{rules := Rules})
when Verify =:= verify_none ->
OptionsMap#{Option => default_value(Option, Rules)};
handle_option(verify_fun = Option, unbound, #{verify := Verify} = OptionsMap, _Env)
when Verify =:= verify_peer ->
OptionsMap#{Option => undefined};
handle_option(verify_fun = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(versions = Option, unbound, #{protocol := Protocol} = OptionsMap, _Env) ->
RecordCb = record_cb(Protocol),
Vsns0 = RecordCb:supported_protocol_versions(),
Value = lists:sort(fun RecordCb:is_higher/2, Vsns0),
OptionsMap#{Option => Value};
handle_option(versions = Option, Vsns0, #{protocol := Protocol} = OptionsMap, _Env) ->
validate_option(versions, Vsns0),
RecordCb = record_cb(Protocol),
Vsns1 = [RecordCb:protocol_version(Vsn) || Vsn <- Vsns0],
Value = lists:sort(fun RecordCb:is_higher/2, Vsns1),
OptionsMap#{Option => Value};
%% Special options
handle_option(cb_info = Option, unbound, #{protocol := Protocol} = OptionsMap, _Env) ->
Default = default_cb_info(Protocol),
validate_option(Option, Default),
Value = handle_cb_info(Default),
OptionsMap#{Option => Value};
handle_option(cb_info = Option, Value0, OptionsMap, _Env) ->
validate_option(Option, Value0),
Value = handle_cb_info(Value0),
OptionsMap#{Option => Value};
%% Generic case
handle_option(Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value}.
handle_option_cb_info(Options, Protocol) ->
Value = proplists:get_value(cb_info, Options, default_cb_info(Protocol)),
#{cb_info := CbInfo} = handle_option(cb_info, Value, #{protocol => Protocol}, #{}),
CbInfo.
maybe_map_key_internal(client_preferred_next_protocols) ->
next_protocol_selector;
maybe_map_key_internal(K) ->
K.
maybe_map_key_external(next_protocol_selector) ->
client_preferred_next_protocols;
maybe_map_key_external(K) ->
K.
check_dependencies(K, OptionsMap, Env) ->
Rules = maps:get(rules, Env),
Deps = get_dependencies(K, Rules),
case Deps of
[] ->
true;
L ->
option_already_defined(K,OptionsMap) orelse
dependecies_already_defined(L, OptionsMap)
end.
%% Handle options that are not present in the map
get_dependencies(K, _) when K =:= cb_info orelse K =:= log_alert->
[];
get_dependencies(K, Rules) ->
{_, Deps} = maps:get(K, Rules),
Deps.
option_already_defined(K, Map) ->
maps:get(K, Map, unbound) =/= unbound.
dependecies_already_defined(L, OptionsMap) ->
Fun = fun (E) -> option_already_defined(E, OptionsMap) end,
lists:all(Fun, L).
expand_options(Opts0, Rules) ->
Opts1 = proplists:expand([{binary, [{mode, binary}]},
{list, [{mode, list}]}], Opts0),
Opts2 = handle_option_format(Opts1, []),
%% Remove deprecated ssl_imp option
Opts = proplists:delete(ssl_imp, Opts2),
AllOpts = maps:keys(Rules),
SockOpts = lists:foldl(fun(Key, PropList) -> proplists:delete(Key, PropList) end,
Opts,
AllOpts ++
[ssl_imp, %% TODO: remove ssl_imp
cb_info,
client_preferred_next_protocols, %% next_protocol_selector
                                log_alert]), %% obsoleted by log_level
SslOpts0 = Opts -- SockOpts,
SslOpts = {SslOpts0, [], length(SslOpts0)},
{SslOpts, SockOpts}.
add_missing_options({L0, S, _C}, Rules) ->
Fun = fun(K0, Acc) ->
K = maybe_map_key_external(K0),
case proplists:is_defined(K, Acc) of
true ->
Acc;
false ->
Default = unbound,
[{K, Default}|Acc]
end
end,
AllOpts = maps:keys(Rules),
L = lists:foldl(Fun, L0, AllOpts),
{L, S, length(L)}.
default_value(Key, Rules) ->
{Default, _} = maps:get(Key, Rules, {undefined, []}),
Default.
assert_role(client_only, client, _, _) ->
ok;
assert_role(server_only, server, _, _) ->
ok;
assert_role(client_only, _, _, undefined) ->
ok;
assert_role(server_only, _, _, undefined) ->
ok;
assert_role(Type, _, Key, _) ->
throw({error, {option, Type, Key}}).
assert_option_dependency(Option, OptionDep, Values0, AllowedValues) ->
case is_dtls_configured(Values0) of
true ->
            %% TODO: Check option dependency for DTLS
ok;
false ->
%% special handling for version
Values =
case OptionDep of
versions ->
lists:map(fun tls_record:protocol_version/1, Values0);
_ ->
Values0
end,
Set1 = sets:from_list(Values),
Set2 = sets:from_list(AllowedValues),
case sets:size(sets:intersection(Set1, Set2)) > 0 of
true ->
ok;
false ->
throw({error, {options, dependency,
{Option, {OptionDep, AllowedValues}}}})
end
end.
is_dtls_configured(Versions) ->
Fun = fun (Version) when Version =:= {254, 253} orelse
Version =:= {254, 255} ->
true;
(_) ->
false
end,
lists:any(Fun, Versions).
validate_option(Option, Value) ->
validate_option(Option, Value, undefined).
%%
validate_option(Opt, Value, _)
when Opt =:= alpn_advertised_protocols orelse
Opt =:= alpn_preferred_protocols,
is_list(Value) ->
validate_binary_list(Opt, Value),
Value;
validate_option(Opt, Value, _)
when Opt =:= alpn_advertised_protocols orelse
Opt =:= alpn_preferred_protocols,
Value =:= undefined ->
undefined;
validate_option(anti_replay, '10k', _) ->
    %% n = 10000
    %% p = 0.030003564 (1 in 33)
    %% m = 72985 (8.91KiB)
    %% k = 5
{10, 5, 72985};
validate_option(anti_replay, '100k', _) ->
    %% n = 100000
    %% p = 0.03000428 (1 in 33)
    %% m = 729845 (89.09KiB)
    %% k = 5
{10, 5, 729845};
validate_option(anti_replay, Value, _)
when (is_tuple(Value) andalso
tuple_size(Value) =:= 3) ->
Value;
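%% Illustrative sketch (not part of the original module): the pre-defined
%% Bloom-filter profiles above are selected with {anti_replay, '10k'} or
%% {anti_replay, '100k'}; a custom filter is passed directly as a tuple
%% {WindowSeconds, HashFunctions, Bits}, e.g. {anti_replay, {10, 5, 72985}}.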
validate_option(beast_mitigation, Value, _)
when Value == one_n_minus_one orelse
Value == zero_n orelse
Value == disabled ->
Value;
%% certfile must be present in some cases otherwise it can be set
%% to the empty string.
validate_option(cacertfile, undefined, _) ->
<<>>;
validate_option(cacertfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(cacertfile, Value, _)
when is_list(Value), Value =/= ""->
binary_filename(Value);
validate_option(cacerts, Value, _)
when Value == undefined;
is_list(Value) ->
Value;
validate_option(cb_info, {V1, V2, V3, V4} = Value, _)
when is_atom(V1),
is_atom(V2),
is_atom(V3),
is_atom(V4) ->
Value;
validate_option(cb_info, {V1, V2, V3, V4, V5} = Value, _)
when is_atom(V1),
is_atom(V2),
is_atom(V3),
is_atom(V4),
is_atom(V5) ->
Value;
validate_option(cert, Value, _) when Value == undefined;
is_list(Value)->
Value;
validate_option(cert, Value, _) when Value == undefined;
is_binary(Value)->
[Value];
validate_option(certificate_authorities, Value, _) when is_boolean(Value)->
Value;
validate_option(certfile, undefined = Value, _) ->
Value;
validate_option(certfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(certfile, Value, _)
when is_list(Value) ->
binary_filename(Value);
validate_option(client_preferred_next_protocols, {Precedence, PreferredProtocols}, _)
when is_list(PreferredProtocols) ->
validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
validate_npn_ordering(Precedence),
{Precedence, PreferredProtocols, ?NO_PROTOCOL};
validate_option(client_preferred_next_protocols,
{Precedence, PreferredProtocols, Default} = Value, _)
when is_list(PreferredProtocols), is_binary(Default),
byte_size(Default) > 0, byte_size(Default) < 256 ->
validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
validate_npn_ordering(Precedence),
Value;
validate_option(client_preferred_next_protocols, undefined, _) ->
undefined;
validate_option(client_renegotiation, Value, _)
when is_boolean(Value) ->
Value;
validate_option(cookie, Value, _)
when is_boolean(Value) ->
Value;
validate_option(crl_cache, {Cb, {_Handle, Options}} = Value, _)
when is_atom(Cb) and is_list(Options) ->
Value;
validate_option(crl_check, Value, _)
when is_boolean(Value) ->
Value;
validate_option(crl_check, Value, _)
when (Value == best_effort) or
(Value == peer) ->
Value;
validate_option(customize_hostname_check, Value, _)
when is_list(Value) ->
Value;
validate_option(depth, Value, _)
when is_integer(Value),
Value >= 0, Value =< 255->
Value;
validate_option(dh, Value, _)
when Value == undefined;
is_binary(Value) ->
Value;
validate_option(dhfile, undefined = Value, _) ->
Value;
validate_option(dhfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(dhfile, Value, _)
when is_list(Value), Value =/= "" ->
binary_filename(Value);
validate_option(early_data, Value, server)
when Value =:= disabled orelse
Value =:= enabled ->
Value;
validate_option(early_data = Option, Value, server) ->
throw({error,
{options, role, {Option, {Value, {server, [disabled, enabled]}}}}});
validate_option(early_data, Value, client)
when is_binary(Value) ->
Value;
validate_option(early_data = Option, Value, client) ->
throw({error,
{options, type, {Option, {Value, not_binary}}}});
validate_option(erl_dist, Value, _)
when is_boolean(Value) ->
Value;
validate_option(fail_if_no_peer_cert, Value, _)
when is_boolean(Value) ->
Value;
validate_option(fallback, Value, _)
when is_boolean(Value) ->
Value;
validate_option(handshake, hello = Value, _) ->
Value;
validate_option(handshake, full = Value, _) ->
Value;
validate_option(hibernate_after, undefined, _) -> %% Backwards compatibility
infinity;
validate_option(hibernate_after, infinity, _) ->
infinity;
validate_option(hibernate_after, Value, _)
when is_integer(Value), Value >= 0 ->
Value;
validate_option(honor_cipher_order, Value, _)
when is_boolean(Value) ->
Value;
validate_option(honor_ecc_order, Value, _)
when is_boolean(Value) ->
Value;
validate_option(keep_secrets, Value, _) when is_boolean(Value) ->
Value;
validate_option(key, undefined, _) ->
undefined;
validate_option(key, {KeyType, Value}, _)
when is_binary(Value),
KeyType == rsa; %% Backwards compatibility
KeyType == dsa; %% Backwards compatibility
KeyType == 'RSAPrivateKey';
KeyType == 'DSAPrivateKey';
KeyType == 'ECPrivateKey';
KeyType == 'PrivateKeyInfo' ->
{KeyType, Value};
validate_option(key, #{algorithm := _} = Value, _) ->
Value;
validate_option(keyfile, undefined, _) ->
<<>>;
validate_option(keyfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(keyfile, Value, _)
when is_list(Value), Value =/= "" ->
binary_filename(Value);
validate_option(key_update_at, Value, _)
when is_integer(Value) andalso
Value > 0 ->
Value;
validate_option(log_level, Value, _) when
is_atom(Value) andalso
(Value =:= none orelse
Value =:= all orelse
Value =:= emergency orelse
Value =:= alert orelse
Value =:= critical orelse
Value =:= error orelse
Value =:= warning orelse
Value =:= notice orelse
Value =:= info orelse
Value =:= debug) ->
Value;
%% RFC 6066, Section 4
validate_option(max_fragment_length, I, _)
when I == ?MAX_FRAGMENT_LENGTH_BYTES_1;
I == ?MAX_FRAGMENT_LENGTH_BYTES_2;
I == ?MAX_FRAGMENT_LENGTH_BYTES_3;
I == ?MAX_FRAGMENT_LENGTH_BYTES_4 ->
I;
validate_option(max_fragment_length, undefined, _) ->
undefined;
validate_option(max_handshake_size, Value, _)
when is_integer(Value) andalso
Value =< ?MAX_UNIT24 ->
Value;
validate_option(middlebox_comp_mode, Value, _)
when is_boolean(Value) ->
Value;
validate_option(next_protocols_advertised, Value, _) when is_list(Value) ->
validate_binary_list(next_protocols_advertised, Value),
Value;
validate_option(next_protocols_advertised, undefined, _) ->
undefined;
validate_option(ocsp_nonce, Value, _)
when Value =:= true orelse
Value =:= false ->
Value;
%% The OCSP responders' certificates can be given as a suggestion and
%% will be used to verify the OCSP response.
validate_option(ocsp_responder_certs, Value, _)
when is_list(Value) ->
[public_key:pkix_decode_cert(CertDer, plain) || CertDer <- Value,
is_binary(CertDer)];
validate_option(ocsp_stapling, Value, _)
when Value =:= true orelse
Value =:= false ->
Value;
validate_option(padding_check, Value, _)
when is_boolean(Value) ->
Value;
validate_option(partial_chain, Value, _)
when is_function(Value) ->
Value;
validate_option(password, Value, _)
when is_list(Value) ->
Value;
validate_option(password, Value, _)
when is_function(Value, 0) ->
Value;
validate_option(protocol, Value = tls, _) ->
Value;
validate_option(protocol, Value = dtls, _) ->
Value;
validate_option(psk_identity, undefined, _) ->
undefined;
validate_option(psk_identity, Identity, _)
when is_list(Identity), Identity =/= "", length(Identity) =< 65535 ->
binary_filename(Identity);
validate_option(renegotiate_at, Value, _) when is_integer(Value) ->
erlang:min(Value, ?DEFAULT_RENEGOTIATE_AT);
validate_option(reuse_session, undefined, _) ->
undefined;
validate_option(reuse_session, Value, _)
when is_function(Value) ->
Value;
validate_option(reuse_session, Value, _)
when is_binary(Value) ->
Value;
validate_option(reuse_session, {Id, Data} = Value, _)
when is_binary(Id) andalso
is_binary(Data) ->
Value;
validate_option(reuse_sessions, Value, _)
when is_boolean(Value) ->
Value;
validate_option(reuse_sessions, save = Value, _) ->
Value;
validate_option(secure_renegotiate, Value, _)
when is_boolean(Value) ->
Value;
validate_option(server_name_indication, Value, _)
when is_list(Value) ->
%% RFC 6066, Section 3: Currently, the only server names supported are
    %% DNS hostnames
%% case inet_parse:domain(Value) of
%% false ->
%% throw({error, {options, {{Opt, Value}}}});
%% true ->
%% Value
%% end;
%%
%% But the definition seems very diffuse, so let all strings through
%% and leave it up to public_key to decide...
Value;
validate_option(server_name_indication, undefined, _) ->
undefined;
validate_option(server_name_indication, disable, _) ->
disable;
validate_option(session_tickets, Value, server)
when Value =:= disabled orelse
Value =:= stateful orelse
Value =:= stateless ->
Value;
validate_option(session_tickets, Value, server) ->
throw({error,
{options, role,
{session_tickets,
{Value, {server, [disabled, stateful, stateless]}}}}});
validate_option(session_tickets, Value, client)
when Value =:= disabled orelse
Value =:= manual orelse
Value =:= auto ->
Value;
validate_option(session_tickets, Value, client) ->
throw({error,
{options, role,
{session_tickets,
{Value, {client, [disabled, manual, auto]}}}}});
validate_option(sni_fun, undefined, _) ->
undefined;
validate_option(sni_fun, Fun, _)
when is_function(Fun) ->
Fun;
validate_option(sni_hosts, [], _) ->
[];
validate_option(sni_hosts, [{Hostname, SSLOptions} | Tail], _)
when is_list(Hostname) ->
RecursiveSNIOptions = proplists:get_value(sni_hosts, SSLOptions, undefined),
case RecursiveSNIOptions of
undefined ->
[{Hostname, validate_options(SSLOptions)} |
validate_option(sni_hosts, Tail)];
_ ->
throw({error, {options, {sni_hosts, RecursiveSNIOptions}}})
end;
validate_option(srp_identity, undefined, _) ->
undefined;
validate_option(srp_identity, {Username, Password}, _)
when is_list(Username),
is_list(Password), Username =/= "",
length(Username) =< 255 ->
{unicode:characters_to_binary(Username),
unicode:characters_to_binary(Password)};
validate_option(user_lookup_fun, undefined, _) ->
undefined;
validate_option(user_lookup_fun, {Fun, _} = Value, _)
when is_function(Fun, 3) ->
Value;
validate_option(use_ticket, Value, _)
when is_list(Value) ->
Value;
validate_option(verify, Value, _)
when Value == verify_none; Value == verify_peer ->
Value;
validate_option(verify_fun, undefined, _) ->
undefined;
%% Backwards compatibility
validate_option(verify_fun, Fun, _) when is_function(Fun) ->
{fun(_,{bad_cert, _} = Reason, OldFun) ->
case OldFun([Reason]) of
true ->
{valid, OldFun};
false ->
{fail, Reason}
end;
(_,{extension, _}, UserState) ->
{unknown, UserState};
(_, valid, UserState) ->
{valid, UserState};
(_, valid_peer, UserState) ->
{valid, UserState}
end, Fun};
validate_option(verify_fun, {Fun, _} = Value, _) when is_function(Fun) ->
Value;
validate_option(versions, Versions, _) ->
validate_versions(Versions, Versions);
validate_option(Opt, undefined = Value, _) ->
AllOpts = maps:keys(?RULES),
case lists:member(Opt, AllOpts) of
true ->
Value;
false ->
throw({error, {options, {Opt, Value}}})
end;
validate_option(Opt, Value, _) ->
throw({error, {options, {Opt, Value}}}).
handle_cb_info({V1, V2, V3, V4}) ->
{V1,V2,V3,V4, list_to_atom(atom_to_list(V2) ++ "_passive")};
handle_cb_info(CbInfo) ->
CbInfo.
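%% Illustrative sketch (not part of the original module): a four-tuple cb_info
%% is padded with a passive tag derived from the data tag, e.g.
%%   handle_cb_info({gen_tcp, tcp, tcp_closed, tcp_error}) returns
%%   {gen_tcp, tcp, tcp_closed, tcp_error, tcp_passive}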
handle_hashsigns_option(Value, Version) when is_list(Value)
andalso Version >= {3, 4} ->
case tls_v1:signature_schemes(Version, Value) of
[] ->
throw({error, {options,
no_supported_signature_schemes,
{signature_algs, Value}}});
_ ->
Value
end;
handle_hashsigns_option(Value, Version) when is_list(Value)
andalso Version =:= {3, 3} ->
case tls_v1:signature_algs(Version, Value) of
[] ->
throw({error, {options, no_supported_algorithms, {signature_algs, Value}}});
_ ->
Value
end;
handle_hashsigns_option(_, Version) when Version =:= {3, 3} ->
handle_hashsigns_option(tls_v1:default_signature_algs([Version]), Version);
handle_hashsigns_option(_, _Version) ->
undefined.
handle_signature_algorithms_option(Value, Version) when is_list(Value)
andalso Version >= {3, 4} ->
case tls_v1:signature_schemes(Version, Value) of
[] ->
throw({error, {options,
no_supported_signature_schemes,
{signature_algs_cert, Value}}});
_ ->
Value
end;
handle_signature_algorithms_option(_, _Version) ->
undefined.
validate_options([]) ->
[];
validate_options([{Opt, Value} | Tail]) ->
[{Opt, validate_option(Opt, Value)} | validate_options(Tail)].
validate_npn_ordering(client) ->
ok;
validate_npn_ordering(server) ->
ok;
validate_npn_ordering(Value) ->
throw({error, {options, {client_preferred_next_protocols, {invalid_precedence, Value}}}}).
validate_binary_list(Opt, List) ->
lists:foreach(
fun(Bin) when is_binary(Bin),
byte_size(Bin) > 0,
byte_size(Bin) < 256 ->
ok;
(Bin) ->
throw({error, {options, {Opt, {invalid_protocol, Bin}}}})
end, List).
validate_versions([], Versions) ->
Versions;
validate_versions([Version | Rest], Versions) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
case tls_record:sufficient_crypto_support(Version) of
true ->
tls_validate_versions(Rest, Versions);
false ->
throw({error, {options, {insufficient_crypto_support, {Version, {versions, Versions}}}}})
end;
validate_versions([Version | Rest], Versions) when Version == 'dtlsv1';
Version == 'dtlsv1.2'->
DTLSVer = dtls_record:protocol_version(Version),
case tls_record:sufficient_crypto_support(dtls_v1:corresponding_tls_version(DTLSVer)) of
true ->
dtls_validate_versions(Rest, Versions);
false ->
throw({error, {options, {insufficient_crypto_support, {Version, {versions, Versions}}}}})
end;
validate_versions([Version| _], Versions) ->
throw({error, {options, {Version, {versions, Versions}}}}).
tls_validate_versions([], Versions) ->
tls_validate_version_gap(Versions);
tls_validate_versions([Version | Rest], Versions) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
tls_validate_versions(Rest, Versions);
tls_validate_versions([Version| _], Versions) ->
throw({error, {options, {Version, {versions, Versions}}}}).
%% Do not allow configuration of TLS 1.3 with a gap where TLS 1.2 is not supported
%% as that configuration can trigger the built in version downgrade protection
%% mechanism and the handshake can fail with an Illegal Parameter alert.
tls_validate_version_gap(Versions) ->
case lists:member('tlsv1.3', Versions) of
true when length(Versions) >= 2 ->
case lists:member('tlsv1.2', Versions) of
true ->
Versions;
false ->
throw({error, {options, missing_version, {'tlsv1.2', {versions, Versions}}}})
end;
_ ->
Versions
end.
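%% Illustrative sketch (not part of the original module): the gap check above
%% rejects e.g. {versions, ['tlsv1.3', 'tlsv1.1']} with
%%   {error, {options, missing_version, {'tlsv1.2', {versions, ['tlsv1.3','tlsv1.1']}}}}
%% while ['tlsv1.3'] alone or ['tlsv1.3','tlsv1.2','tlsv1.1'] are accepted.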
dtls_validate_versions([], Versions) ->
Versions;
dtls_validate_versions([Version | Rest], Versions) when Version == 'dtlsv1';
Version == 'dtlsv1.2'->
dtls_validate_versions(Rest, Versions);
dtls_validate_versions([Ver| _], Versions) ->
throw({error, {options, {Ver, {versions, Versions}}}}).
%% The option cacerts overrides cacertfile
ca_cert_default(_,_, [_|_]) ->
undefined;
ca_cert_default(verify_none, _, _) ->
undefined;
ca_cert_default(verify_peer, {Fun,_}, _) when is_function(Fun) ->
undefined;
%% Server that wants to verify_peer and has no verify_fun must have
%% some trusted certs.
ca_cert_default(verify_peer, undefined, _) ->
"".
emulated_options(undefined, undefined, Protocol, Opts) ->
case Protocol of
tls ->
tls_socket:emulated_options(Opts);
dtls ->
dtls_socket:emulated_options(Opts)
end;
emulated_options(Transport, Socket, Protocol, Opts) ->
EmulatedOptions = tls_socket:emulated_options(),
{ok, Original} = tls_socket:getopts(Transport, Socket, EmulatedOptions),
{Inet, Emulated0} = emulated_options(undefined, undefined, Protocol, Opts),
{Inet, lists:ukeymerge(1, Emulated0, Original)}.
handle_cipher_option(Value, Versions) when is_list(Value) ->
try binary_cipher_suites(Versions, Value) of
Suites ->
Suites
catch
exit:_ ->
throw({error, {options, {ciphers, Value}}});
error:_->
throw({error, {options, {ciphers, Value}}})
end.
binary_cipher_suites([{3,4} = Version], []) ->
    %% Defaults to all supported suites that do
    %% not require explicit configuration in TLS-1.3
    %% only mode.
default_binary_suites(exclusive, Version);
binary_cipher_suites([Version| _], []) ->
    %% Defaults to all supported suites that do
    %% not require explicit configuration
default_binary_suites(default, Version);
binary_cipher_suites(Versions, [Map|_] = Ciphers0) when is_map(Map) ->
Ciphers = [ssl_cipher_format:suite_map_to_bin(C) || C <- Ciphers0],
binary_cipher_suites(Versions, Ciphers);
binary_cipher_suites(Versions, [Tuple|_] = Ciphers0) when is_tuple(Tuple) ->
Ciphers = [ssl_cipher_format:suite_map_to_bin(tuple_to_map(C)) || C <- Ciphers0],
binary_cipher_suites(Versions, Ciphers);
binary_cipher_suites(Versions, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
All = all_suites(Versions),
case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of
[] ->
            %% Defaults to all supported suites that do
            %% not require explicit configuration
binary_cipher_suites(Versions, []);
Ciphers ->
Ciphers
end;
binary_cipher_suites(Versions, [Head | _] = Ciphers0) when is_list(Head) ->
%% Format: ["RC4-SHA","RC4-MD5"]
Ciphers = [ssl_cipher_format:suite_openssl_str_to_map(C) || C <- Ciphers0],
binary_cipher_suites(Versions, Ciphers);
binary_cipher_suites(Versions, Ciphers0) ->
    %% Format: "RC4-SHA:RC4-MD5"
Ciphers = [ssl_cipher_format:suite_openssl_str_to_map(C) || C <- string:lexemes(Ciphers0, ":")],
binary_cipher_suites(Versions, Ciphers).
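%% Illustrative sketch (not part of the original module): these ciphers option
%% shapes all normalize to the same binary suite representation here, assuming
%% the suites are supported by the configured versions:
%%   {ciphers, [#{key_exchange => ecdhe_rsa, cipher => aes_256_gcm,
%%                mac => aead, prf => sha384}]}
%%   {ciphers, ["ECDHE-RSA-AES256-GCM-SHA384"]}
%%   {ciphers, "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256"}
%% Suites not supported by the configured versions are filtered out; if nothing
%% remains, the version defaults are used instead.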
default_binary_suites(exclusive, {_, Minor}) ->
ssl_cipher:filter_suites(tls_v1:exclusive_suites(Minor));
default_binary_suites(default, Version) ->
ssl_cipher:filter_suites(ssl_cipher:suites(Version)).
all_suites([{3, 4 = Minor}]) ->
tls_v1:exclusive_suites(Minor);
all_suites([{3, 4} = Version0, Version1 |_]) ->
all_suites([Version0]) ++
ssl_cipher:all_suites(Version1) ++
ssl_cipher:anonymous_suites(Version1);
all_suites([Version|_]) ->
ssl_cipher:all_suites(Version) ++
ssl_cipher:anonymous_suites(Version).
tuple_to_map({Kex, Cipher, Mac}) ->
#{key_exchange => Kex,
cipher => Cipher,
mac => Mac,
prf => default_prf};
tuple_to_map({Kex, Cipher, Mac, Prf}) ->
#{key_exchange => Kex,
cipher => Cipher,
mac => tuple_to_map_mac(Cipher, Mac),
prf => Prf}.
%% Backwards compatible
tuple_to_map_mac(aes_128_gcm, _) ->
aead;
tuple_to_map_mac(aes_256_gcm, _) ->
aead;
tuple_to_map_mac(chacha20_poly1305, _) ->
aead;
tuple_to_map_mac(_, MAC) ->
MAC.
handle_eccs_option(Value, Version) when is_list(Value) ->
{_Major, Minor} = tls_version(Version),
try tls_v1:ecc_curves(Minor, Value) of
Curves -> #elliptic_curves{elliptic_curve_list = Curves}
catch
exit:_ -> throw({error, {options, {eccs, Value}}});
error:_ -> throw({error, {options, {eccs, Value}}})
end.
handle_supported_groups_option(Value, Version) when is_list(Value) ->
{_Major, Minor} = tls_version(Version),
try tls_v1:groups(Minor, Value) of
Groups -> #supported_groups{supported_groups = Groups}
catch
exit:_ -> throw({error, {options, {supported_groups, Value}}});
error:_ -> throw({error, {options, {supported_groups, Value}}})
end.
unexpected_format(Error) ->
lists:flatten(io_lib:format("Unexpected error: ~p", [Error])).
file_error_format({error, Error})->
case file:format_error(Error) of
"unknown POSIX error" ->
"decoding error";
Str ->
Str
end;
file_error_format(_) ->
"decoding error".
file_desc(cacertfile) ->
"Invalid CA certificate file ";
file_desc(certfile) ->
"Invalid certificate file ";
file_desc(keyfile) ->
"Invalid key file ";
file_desc(dhfile) ->
"Invalid DH params file ".
detect(_Pred, []) ->
undefined;
detect(Pred, [H|T]) ->
case Pred(H) of
true ->
H;
_ ->
detect(Pred, T)
end.
make_next_protocol_selector(undefined) ->
undefined;
make_next_protocol_selector({client, AllProtocols, DefaultProtocol}) ->
fun(AdvertisedProtocols) ->
case detect(fun(PreferredProtocol) ->
lists:member(PreferredProtocol, AdvertisedProtocols)
end, AllProtocols) of
undefined ->
DefaultProtocol;
PreferredProtocol ->
PreferredProtocol
end
end;
make_next_protocol_selector({server, AllProtocols, DefaultProtocol}) ->
fun(AdvertisedProtocols) ->
case detect(fun(PreferredProtocol) ->
lists:member(PreferredProtocol, AllProtocols)
end,
AdvertisedProtocols) of
undefined ->
DefaultProtocol;
PreferredProtocol ->
PreferredProtocol
end
end.
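%% Illustrative sketch (not part of the original module): with the option
%%   {client_preferred_next_protocols, {client, [<<"h2">>, <<"http/1.1">>], <<"http/1.1">>}}
%% the selector built above picks the first of the client's preferred protocols
%% that the server advertised, falling back to <<"http/1.1">> when none match.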
connection_cb(tls) ->
tls_gen_connection;
connection_cb(dtls) ->
dtls_gen_connection;
connection_cb(Opts) ->
connection_cb(proplists:get_value(protocol, Opts, tls)).
record_cb(tls) ->
tls_record;
record_cb(dtls) ->
dtls_record;
record_cb(Opts) ->
record_cb(proplists:get_value(protocol, Opts, tls)).
binary_filename(FileName) ->
Enc = file:native_name_encoding(),
unicode:characters_to_binary(FileName, unicode, Enc).
%% Assert that basic options are in the format {Key, Value}
%% with a few exceptions and phase out log_alert
handle_option_format([], Acc) ->
lists:reverse(Acc);
handle_option_format([{log_alert, Bool} | Rest], Acc) when is_boolean(Bool) ->
case proplists:get_value(log_level, Acc ++ Rest, undefined) of
undefined ->
handle_option_format(Rest, [{log_level,
map_log_level(Bool)} | Acc]);
_ ->
handle_option_format(Rest, Acc)
end;
handle_option_format([{Key,_} = Opt | Rest], Acc) when is_atom(Key) ->
handle_option_format(Rest, [Opt | Acc]);
%% Handle exceptions
handle_option_format([{raw,_,_,_} = Opt | Rest], Acc) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([inet = Opt | Rest], Acc) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([inet6 = Opt | Rest], Acc) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([Value | _], _) ->
throw({option_not_a_key_value_tuple, Value}).
map_log_level(true) ->
notice;
map_log_level(false) ->
none.
handle_verify_option(verify_none, #{fail_if_no_peer_cert := false} = OptionsMap) ->
OptionsMap#{verify => verify_none};
handle_verify_option(verify_none, #{fail_if_no_peer_cert := true}) ->
throw({error, {options, incompatible,
{verify, verify_none},
{fail_if_no_peer_cert, true}}});
%% The option 'verify' is simulated by the configured 'verify_fun' that is mostly
%% hidden from the end user. When 'verify' is set to verify_none, the option
%% 'verify_fun' is also set to a default verify-none-verify_fun when processing
%% the configuration. If 'verify' is later changed from verify_none to verify_peer,
%% the 'verify_fun' must also be changed to undefined. When 'verify_fun' is set to
%% undefined, public_key's default verify_fun will be used, which performs a full
%% verification.
handle_verify_option(verify_peer, #{verify := verify_none} = OptionsMap) ->
OptionsMap#{verify => verify_peer,
verify_fun => undefined};
handle_verify_option(verify_peer, OptionsMap) ->
OptionsMap#{verify => verify_peer};
handle_verify_option(Value, _) ->
throw({error, {options, {verify, Value}}}).
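%% Illustrative sketch (not part of the original module): combining
%% {verify, verify_none} with {fail_if_no_peer_cert, true} is rejected above as
%%   {error, {options, incompatible, {verify, verify_none}, {fail_if_no_peer_cert, true}}}
%% while changing verify_none to verify_peer also resets verify_fun to undefined
%% so that public_key's default verification is used.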
%% Added to handle default values for signature_algs in TLS 1.3
default_option_role_sign_algs(_, Value, _, Version) when Version >= {3,4} ->
Value;
default_option_role_sign_algs(Role, Value, Role, _) ->
Value;
default_option_role_sign_algs(_, _, _, _) ->
undefined.
default_option_role(Role, Value, Role) ->
Value;
default_option_role(_,_,_) ->
undefined.
default_cb_info(tls) ->
{gen_tcp, tcp, tcp_closed, tcp_error, tcp_passive};
default_cb_info(dtls) ->
{gen_udp, udp, udp_closed, udp_error, udp_passive}.
include_security_info([]) ->
false;
include_security_info([Item | Items]) ->
case lists:member(Item, [client_random, server_random, master_secret, keylog]) of
true ->
true;
false ->
include_security_info(Items)
end.
server_name_indication_default(Host) when is_list(Host) ->
    %% SNI should not contain a trailing dot, which a hostname may have
string:strip(Host, right, $.);
server_name_indication_default(_) ->
undefined.
add_filter(undefined, Filters) ->
Filters;
add_filter(Filter, Filters) ->
[Filter | Filters].
maybe_client_warn_no_verify(#{verify := verify_none,
warn_verify_none := true,
log_level := LogLevel}, client) ->
ssl_logger:log(warning, LogLevel, #{description => "Authenticity is not established by certificate path validation",
reason => "Option {verify, verify_peer} and cacertfile/cacerts is missing"}, #{});
maybe_client_warn_no_verify(_,_) ->
%% Warning not needed. Note client certificate validation is optional in TLS
ok.
| null | https://raw.githubusercontent.com/simingwang/emqx-plugin-kafkav5/bbf919e56dbc8fd2d4c1c541084532f844a11cbc/_build/default/rel/emqx_plugin_kafka/lib/ssl-10.7/src/ssl.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
SSL is a legacy name.
Needed to make documentation rendering happy
Application handling
Socket handling
SSL/TLS protocol handling
Misc
-------------------------------------------------------------------------------------------------------
exported
exported
exported
exported
exported
exported
exported
exported
exported
exported
exported
exported
exported
exported
TLS 1.3 , exported
Old cipher suites, version dependent
Pre TLS 1.2
TLS 1.2, internally PRE TLS 1.2 will use default_prf
exported
exported
exported
exported
exported
-------------------------------------------------------------------------------------------------------
exported
(according to old API) exported
exported
k - number of hash functions
m - number of bits in bit vector
-------------------------------------------------------------------------------------------------------
{ocsp_stapling, ocsp_stapling()} |
{ocsp_responder_certs, ocsp_responder_certs()} |
{ocsp_nonce, ocsp_nonce()}.
-type ocsp_stapling() :: boolean().
-type ocsp_nonce() :: boolean().
-------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------
exported
exported
-------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------
API
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Utility function that starts the ssl and applications
that it depends on.
see application(3)
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Stops the ssl application.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Connect to an ssl server.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Creates an ssl listen socket.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Performs transport accept on an ssl listen socket
--------------------------------------------------------------------
--------------------------------------------------------------------
ssl handshake.
--------------------------------------------------------------------
If Socket is an ordinary socket(): upgrades a gen_tcp, or equivalent, socket to
an SSL socket, that is, performs the SSL/TLS server-side handshake and returns
the SSL socket.
If Socket is an sslsocket(): provides extra SSL/TLS/DTLS options to those
--------------------------------------------------------------------
Description: Continues the handshake possible with newly supplied options.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Continues the handshake possible with newly supplied options.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Cancels the handshakes sending a close alert.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Close an ssl connection
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Close an ssl connection
--------------------------------------------------------------------
In normal close {error, closed} is regarded as ok, as it is not interesting which side
that got to do the actual close. But in the downgrade case only {ok, Port} is a success.
--------------------------------------------------------------------
Description: Sends data over the ssl connection
--------------------------------------------------------------------
Emulate connection behaviour
Emulate connection behaviour
{error,enotconn}
--------------------------------------------------------------------
Description: Receives data when active = false
--------------------------------------------------------------------
{error,enotconn}
--------------------------------------------------------------------
Description: Changes process that receives the messages when active = true
or once.
--------------------------------------------------------------------
Meaningless but let it be allowed to conform with TLS
Meaningless but let it be allowed to conform with normal sockets
--------------------------------------------------------------------
Description: Return SSL information for the connection
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Return SSL information for the connection
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: same as inet:peername/1.
--------------------------------------------------------------------
Will return {error, enotconn}
--------------------------------------------------------------------
Description: Returns the peercert.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Returns the protocol that has been negotiated. If no
protocol has been negotiated will return {error, protocol_not_negotiated}
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Returns all default and all supported cipher suites for a
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Returns all default and all supported cipher suites for a
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Removes cipher suites if any of the filter functions returns false
for any part of the cipher suite. This function also calls default filter functions
to make sure the cipher suites are supported by crypto.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Make <Preferred> suites become the most preferred
suites, that is, put them at the head of the cipher suite list
and remove them from <Suites> if present. <Preferred> may be a
list of cipher suites or a list of filters, in which case the
filters are used on Suites to extract the preferred
cipher list.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Make <Deferred> suites become the
least preferred suites, that is, put them at the end of the cipher suite list
and remove them from <Suites> if present.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: returns all supported curves across all versions
--------------------------------------------------------------------
only tls_v1 has named curves right now
--------------------------------------------------------------------
Description: returns the curves supported for a given version of
ssl/tls.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: returns all supported groups (TLS 1.3 and later)
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: returns the default groups (TLS 1.3 and later)
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Gets options
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Sets options
--------------------------------------------------------------------
---------------------------------------------------------------
Description: Get all statistic options for a socket.
--------------------------------------------------------------------
---------------------------------------------------------------
--------------------------------------------------------------------
---------------------------------------------------------------
Description: Same as gen_tcp:shutdown/2
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Same as inet:sockname/1
--------------------------------------------------------------------
---------------------------------------------------------------
Description: Returns a list of relevant versions.
--------------------------------------------------------------------
---------------------------------------------------------------
Description: Initiates a renegotiation.
--------------------------------------------------------------------
---------------------------------------------------------------
Description: Initiate a key update.
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Clear the PEM cache
--------------------------------------------------------------------
---------------------------------------------------------------
Description: Creates error string.
--------------------------------------------------------------------
--------------------------------------------------------------------
For internal use!
Description: Return the string representation of a cipher suite.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Return the string representation of a cipher suite.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Return the map representation of a cipher suite.
--------------------------------------------------------------------
cipher suites will be handled
--------------------------------------------------------------
--------------------------------------------------------------------
Handle ssl options at handshake, handshake_continue
Handle all options in listen, connect and handshake
Ensure all options are evaluated at startup
Handle special options
The list of options is processed in multiple passes. When
processing an option all dependencies must already be resolved.
If there are unresolved dependencies the option will be
skipped and processed in a subsequent pass.
Counter is equal to the number of unprocessed options at
the beginning of a pass. Its value must monotonically decrease
after each successful pass.
If the value of the counter is unchanged at the end of a pass,
the processing stops due to faulty input data.
Continue handling options if current pass was successful
Skip option for next pass
TODO: validate based on role
Do not send by default
Special options
Handle options that are not present in the map
Remove deprecated ssl_imp option
TODO: remove ssl_imp
next_protocol_selector
special handling for version
certfile must be present in some cases otherwise it can be set
to the empty string.
Backwards compatibility
Backwards compatibility
Backwards compatibility
RFC 6066, Section 4
The OCSP responders' certificates can be given as a suggestion and
will be used to verify the OCSP response.
RFC 6066, Section 3: Currently, the only server names supported are
case inet_parse:domain(Value) of
false ->
throw({error, {options, {{Opt, Value}}}});
true ->
Value
end;
But the definition seems very diffuse, so let all strings through
and leave it up to public_key to decide...
Backwards compatibility
Do not allow configuration of TLS 1.3 with a gap where TLS 1.2 is not supported
as that configuration can trigger the built in version downgrade protection
mechanism and the handshake can fail with an Illegal Parameter alert.
some trusted certs.
Defaults to all supported suites that does
only mode.
Defaults to all supported suites that does
not require explicit configuration
Defaults to all supported suites that does
not require explicit configuration
Format: ["RC4-SHA","RC4-MD5"]
Backwards compatible
Assert that basic options are on the format {Key, Value}
Handle exceptions
The option 'verify' is simulated by the configured 'verify_fun' that is mostly
hidden from the end user. When 'verify' is set to verify_none, the option
'verify_fun' is also set to a default verify-none-verify_fun when processing
the configuration. If 'verify' is later changed from verify_none to verify_peer,
the 'verify_fun' must also be changed to undefined. When 'verify_fun' is set to
undefined, public_key's default verify_fun will be used that performs a full
verification.
SNI should not contain a trailing dot, which a hostname may have
Warning not needed. Note client certificate validation is optional in TLS | %% Copyright Ericsson AB 1999-2022. All Rights Reserved.
%% Licensed under the Apache License, Version 2.0 (the "License");
%% distributed under the License is distributed on an "AS IS" BASIS,
%% Purpose: Main API module for the SSL application that implements TLS and DTLS
-module(ssl).
-include_lib("public_key/include/public_key.hrl").
-include("ssl_internal.hrl").
-include("ssl_api.hrl").
-include("ssl_record.hrl").
-include("ssl_cipher.hrl").
-include("ssl_handshake.hrl").
-include("ssl_srp.hrl").
-ifndef(VSN).
-define(VSN,"unknown").
-endif.
-export([start/0,
start/1,
stop/0,
clear_pem_cache/0]).
-export([connect/3,
connect/2,
connect/4,
listen/2,
transport_accept/1,
transport_accept/2,
handshake/1,
handshake/2,
handshake/3,
handshake_continue/2,
handshake_continue/3,
handshake_cancel/1,
controlling_process/2,
peername/1,
peercert/1,
sockname/1,
close/1,
close/2,
shutdown/2,
recv/2,
recv/3,
send/2,
getopts/2,
setopts/2,
getstat/1,
getstat/2
]).
-export([cipher_suites/2,
cipher_suites/3,
filter_cipher_suites/2,
prepend_cipher_suites/2,
append_cipher_suites/2,
eccs/0,
eccs/1,
versions/0,
groups/0,
groups/1,
format_error/1,
renegotiate/1,
update_keys/2,
prf/5,
negotiated_protocol/1,
connection_information/1,
connection_information/2]).
-export([handle_options/2,
handle_options/3,
tls_version/1,
suite_to_str/1,
suite_to_openssl_str/1,
str_to_suite/1]).
-removed({ssl_accept, '_',
"use ssl_handshake/1,2,3 instead"}).
-removed({cipher_suites, 0,
"use cipher_suites/2,3 instead"}).
-removed({cipher_suites, 1,
"use cipher_suites/2,3 instead"}).
-removed([{negotiated_next_protocol,1,
"use ssl:negotiated_protocol/1 instead"}]).
-removed([{connection_info,1,
"use ssl:connection_information/[1,2] instead"}]).
-export_type([socket/0,
sslsocket/0,
socket_option/0,
active_msgs/0,
host/0,
tls_option/0,
tls_client_option/0,
tls_server_option/0,
erl_cipher_suite/0,
old_cipher_suite/0,
ciphers/0,
cipher/0,
hash/0,
key/0,
kex_algo/0,
prf_random/0,
cipher_filters/0,
sign_algo/0,
protocol_version/0,
protocol_extensions/0,
session_id/0,
error_alert/0,
tls_alert/0,
srp_param_type/0,
named_curve/0,
sign_scheme/0,
group/0]).
-type active_msgs() :: {ssl, sslsocket(), Data::binary() | list()} | {ssl_closed, sslsocket()} |
                       {ssl_passive, sslsocket()} | {ssl_error, sslsocket(), Reason::any()}.
-type transport_option() :: {cb_info, {CallbackModule::atom(), DataTag::atom(),
ClosedTag::atom(), ErrTag::atom()}} |
{cb_info, {CallbackModule::atom(), DataTag::atom(),
ClosedTag::atom(), ErrTag::atom(), PassiveTag::atom()}}.
-type hostname() :: string().
-type ip_address() :: inet:ip_address().
-type tls_version() :: 'tlsv1.2' | 'tlsv1.3' | tls_legacy_version().
-type dtls_version() :: 'dtlsv1.2' | dtls_legacy_version().
-type tls_legacy_version() :: tlsv1 | 'tlsv1.1' .
-type dtls_legacy_version() :: 'dtlsv1'.
-type verify_type() :: verify_none | verify_peer.
-type cipher() :: aes_128_cbc |
aes_256_cbc |
aes_128_gcm |
aes_256_gcm |
aes_128_ccm |
aes_256_ccm |
aes_128_ccm_8 |
aes_256_ccm_8 |
chacha20_poly1305 |
                  legacy_cipher().
-type legacy_cipher() :: rc4_128 |
des_cbc |
'3des_ede_cbc'.
-type hash() :: sha |
sha2() |
                  legacy_hash().
-type sha2() :: sha224 |
sha256 |
sha384 |
sha512.
-type legacy_hash() :: md5.
-type sign_schemes() :: [sign_scheme()].
-type sign_scheme() :: eddsa_ed25519
| eddsa_ed448
| ecdsa_secp256r1_sha256
| ecdsa_secp384r1_sha384
| ecdsa_secp521r1_sha512
| rsassa_pss_scheme()
                  | sign_scheme_legacy().
-type rsassa_pss_scheme() :: rsa_pss_rsae_sha256
| rsa_pss_rsae_sha384
| rsa_pss_rsae_sha512
| rsa_pss_pss_sha256
| rsa_pss_pss_sha384
| rsa_pss_pss_sha512.
-type sign_scheme_legacy() :: rsa_pkcs1_sha256
| rsa_pkcs1_sha384
| rsa_pkcs1_sha512
| rsa_pkcs1_sha1
| ecdsa_sha1.
-type kex_algo() :: rsa |
dhe_rsa | dhe_dss |
ecdhe_ecdsa | ecdh_ecdsa | ecdh_rsa |
srp_rsa| srp_dss |
psk | dhe_psk | rsa_psk |
dh_anon | ecdh_anon | srp_anon |
                  any.
-type erl_cipher_suite() :: #{key_exchange := kex_algo(),
cipher := cipher(),
mac := hash() | aead,
                  prf := hash() | aead
                 }.
-type old_cipher_suite() :: {kex_algo(), cipher(), hash()}
                          | {kex_algo(), cipher(), hash() | aead, hash()}.
-type named_curve() :: sect571r1 |
sect571k1 |
secp521r1 |
brainpoolP512r1 |
sect409k1 |
sect409r1 |
brainpoolP384r1 |
secp384r1 |
sect283k1 |
sect283r1 |
brainpoolP256r1 |
secp256k1 |
secp256r1 |
sect239k1 |
sect233k1 |
sect233r1 |
secp224k1 |
secp224r1 |
sect193r1 |
sect193r2 |
secp192k1 |
secp192r1 |
sect163k1 |
sect163r1 |
sect163r2 |
secp160k1 |
secp160r1 |
                  secp160r2.
-type group() :: secp256r1 | secp384r1 | secp521r1 | ffdhe2048 |
                 ffdhe3072 | ffdhe4096 | ffdhe6144 | ffdhe8192.
-type srp_param_type() :: srp_1024 |
srp_1536 |
srp_2048 |
srp_3072 |
srp_4096 |
srp_6144 |
                  srp_8192.
-type tls_alert() :: close_notify |
unexpected_message |
bad_record_mac |
record_overflow |
handshake_failure |
bad_certificate |
unsupported_certificate |
certificate_revoked |
certificate_expired |
certificate_unknown |
illegal_parameter |
unknown_ca |
access_denied |
decode_error |
decrypt_error |
export_restriction|
protocol_version |
insufficient_security |
internal_error |
inappropriate_fallback |
user_canceled |
no_renegotiation |
unsupported_extension |
certificate_unobtainable |
unrecognized_name |
bad_certificate_status_response |
bad_certificate_hash_value |
unknown_psk_identity |
                  no_application_protocol.
-type common_option() :: {protocol, protocol()} |
{handshake, handshake_completion()} |
{cert, cert() | [cert()]} |
{certfile, cert_pem()} |
{key, key()} |
{keyfile, key_pem()} |
{password, key_password()} |
{ciphers, cipher_suites()} |
{eccs, [named_curve()]} |
{signature_algs, signature_algs()} |
{signature_algs_cert, sign_schemes()} |
{supported_groups, supported_groups()} |
{secure_renegotiate, secure_renegotiation()} |
{keep_secrets, keep_secrets()} |
{depth, allowed_cert_chain_length()} |
{verify_fun, custom_verify()} |
{crl_check, crl_check()} |
{crl_cache, crl_cache_opts()} |
{max_handshake_size, handshake_size()} |
{partial_chain, root_fun()} |
{versions, protocol_versions()} |
{user_lookup_fun, custom_user_lookup()} |
{log_level, logging_level()} |
{log_alert, log_alert()} |
{hibernate_after, hibernate_after()} |
{padding_check, padding_check()} |
{beast_mitigation, beast_mitigation()} |
{ssl_imp, ssl_imp()} |
{session_tickets, session_tickets()} |
{key_update_at, key_update_at()} |
{middlebox_comp_mode, middlebox_comp_mode()}.
-type protocol() :: tls | dtls.
-type handshake_completion() :: hello | full.
-type cert() :: public_key:der_encoded().
-type cert_pem() :: file:filename().
-type key() :: {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo',
public_key:der_encoded()} |
#{algorithm := rsa | dss | ecdsa,
engine := crypto:engine_ref(),
key_id := crypto:key_id(),
                  password => crypto:password()}.
-type key_pem() :: file:filename().
-type key_password() :: string() | fun(() -> string()).
-type cipher_suites() :: ciphers().
-type ciphers() :: [erl_cipher_suite()] |
                   string().
-type cipher_filters() :: list({key_exchange | cipher | mac | prf,
                                algo_filter()}).
-type algo_filter() :: fun((kex_algo()|cipher()|hash()|aead|default_prf) -> true | false).
-type keep_secrets() :: boolean().
-type secure_renegotiation() :: boolean().
-type allowed_cert_chain_length() :: integer().
-type custom_verify() :: {Verifyfun :: fun(), InitialUserState :: any()}.
-type crl_check() :: boolean() | peer | best_effort.
-type crl_cache_opts() :: {Module :: atom(),
{DbHandle :: internal | term(),
Args :: list()}}.
-type handshake_size() :: integer().
-type hibernate_after() :: timeout().
-type root_fun() :: fun().
-type protocol_versions() :: [protocol_version()].
-type signature_algs() :: [{hash(), sign_algo()} | sign_scheme()].
-type supported_groups() :: [group()].
-type custom_user_lookup() :: {Lookupfun :: fun(), UserState :: any()}.
-type padding_check() :: boolean().
-type beast_mitigation() :: one_n_minus_one | zero_n | disabled.
-type srp_identity() :: {Username :: string(), Password :: string()}.
-type psk_identity() :: string().
-type log_alert() :: boolean().
-type logging_level() :: logger:level() | none | all.
-type client_session_tickets() :: disabled | manual | auto.
-type server_session_tickets() :: disabled | stateful | stateless.
-type session_tickets() :: client_session_tickets() | server_session_tickets().
-type key_update_at() :: pos_integer().
-type bloom_filter_window_size() :: integer().
-type bloom_filter_hash_functions() :: integer().
-type bloom_filter_bits() :: integer().
-type anti_replay() :: '10k' | '100k' |
                       {bloom_filter_window_size(),    %% number of seconds in time window
                        bloom_filter_hash_functions(), %% k - number of hash functions
                        bloom_filter_bits()}.          %% m - number of bits in bit vector
-type use_ticket() :: [binary()].
-type middlebox_comp_mode() :: boolean().
-type client_early_data() :: binary().
-type server_early_data() :: disabled | enabled.
-type client_option() :: {verify, client_verify_type()} |
{reuse_session, client_reuse_session()} |
{reuse_sessions, client_reuse_sessions()} |
{cacerts, client_cacerts()} |
{cacertfile, client_cafile()} |
{alpn_advertised_protocols, client_alpn()} |
{client_preferred_next_protocols, client_preferred_next_protocols()} |
{psk_identity, client_psk_identity()} |
{srp_identity, client_srp_identity()} |
{server_name_indication, sni()} |
{max_fragment_length, max_fragment_length()} |
{customize_hostname_check, customize_hostname_check()} |
{fallback, fallback()} |
{certificate_authorities, certificate_authorities()} |
{session_tickets, client_session_tickets()} |
{use_ticket, use_ticket()} |
{early_data, client_early_data()}.
-type client_verify_type() :: verify_type().
-type client_reuse_session() :: session_id() | {session_id(), SessionData::binary()}.
-type client_reuse_sessions() :: boolean() | save.
-type certificate_authorities() :: boolean().
-type client_cacerts() :: [public_key:der_encoded()].
-type client_cafile() :: file:filename().
-type app_level_protocol() :: binary().
-type client_alpn() :: [app_level_protocol()].
-type client_preferred_next_protocols() :: {Precedence :: server | client,
ClientPrefs :: [app_level_protocol()]} |
{Precedence :: server | client,
ClientPrefs :: [app_level_protocol()],
Default::app_level_protocol()}.
-type client_psk_identity() :: psk_identity().
-type client_srp_identity() :: srp_identity().
-type customize_hostname_check() :: list().
-type sni() :: HostName :: hostname() | disable.
-type max_fragment_length() :: undefined | 512 | 1024 | 2048 | 4096.
-type fallback() :: boolean().
-type ssl_imp() :: new | old.
%% -type ocsp_responder_certs() :: [public_key:der_encoded()].
-type server_option() :: {cacerts, server_cacerts()} |
{cacertfile, server_cafile()} |
{dh, dh_der()} |
{dhfile, dh_file()} |
{verify, server_verify_type()} |
{fail_if_no_peer_cert, fail_if_no_peer_cert()} |
{reuse_sessions, server_reuse_sessions()} |
{reuse_session, server_reuse_session()} |
{alpn_preferred_protocols, server_alpn()} |
{next_protocols_advertised, server_next_protocol()} |
{psk_identity, server_psk_identity()} |
{sni_hosts, sni_hosts()} |
{sni_fun, sni_fun()} |
{honor_cipher_order, honor_cipher_order()} |
{honor_ecc_order, honor_ecc_order()} |
{client_renegotiation, client_renegotiation()}|
{session_tickets, server_session_tickets()} |
{anti_replay, anti_replay()} |
{cookie, cookie()} |
{early_data, server_early_data()}.
-type server_cacerts() :: [public_key:der_encoded()].
-type server_cafile() :: file:filename().
-type server_alpn() :: [app_level_protocol()].
-type server_next_protocol() :: [app_level_protocol()].
-type server_psk_identity() :: psk_identity().
-type dh_der() :: binary().
-type dh_file() :: file:filename().
-type server_verify_type() :: verify_type().
-type fail_if_no_peer_cert() :: boolean().
-type server_reuse_session() :: fun().
-type server_reuse_sessions() :: boolean().
-type sni_hosts() :: [{hostname(), [server_option() | common_option()]}].
-type sni_fun() :: fun().
-type honor_cipher_order() :: boolean().
-type honor_ecc_order() :: boolean().
-type client_renegotiation() :: boolean().
-type cookie() :: boolean().
-type protocol_extensions() :: #{renegotiation_info => binary(),
signature_algs => signature_algs(),
alpn => app_level_protocol(),
srp => binary(),
next_protocol => app_level_protocol(),
max_frag_enum => 1..4,
ec_point_formats => [0..2],
elliptic_curves => [public_key:oid()],
                  sni => hostname()}.
-type connection_info() :: [common_info() | curve_info() | ssl_options_info() | security_info()].
-type common_info() :: {protocol, protocol_version()} |
{session_id, session_id()} |
{session_resumption, boolean()} |
{selected_cipher_suite, erl_cipher_suite()} |
{sni_hostname, term()} |
{srp_username, term()}.
-type curve_info() :: {ecc, {named_curve, term()}}.
-type ssl_options_info() :: tls_option().
-type security_info() :: {client_random, binary()} |
{server_random, binary()} |
{master_secret, binary()}.
-type connection_info_items() :: [connection_info_item()].
-type connection_info_item() :: protocol |
session_id |
session_resumption |
selected_cipher_suite |
sni_hostname |
srp_username |
ecc |
client_random |
server_random |
master_secret |
keylog |
tls_options_name().
-type tls_options_name() :: atom().
-spec start() -> ok | {error, reason()}.
start() ->
start(temporary).
-spec start(permanent | transient | temporary) -> ok | {error, reason()}.
start(Type) ->
case application:ensure_all_started(ssl, Type) of
{ok, _} ->
ok;
Other ->
Other
end.
-spec stop() -> ok.
stop() ->
application:stop(ssl).
-spec connect(TCPSocket, TLSOptions) ->
{ok, sslsocket()} |
{error, reason()} |
{option_not_a_key_value_tuple, any()} when
TCPSocket :: socket(),
TLSOptions :: [tls_client_option()].
connect(Socket, SslOptions) ->
connect(Socket, SslOptions, infinity).
-spec connect(TCPSocket, TLSOptions, Timeout) ->
{ok, sslsocket()} | {error, reason()} when
TCPSocket :: socket(),
TLSOptions :: [tls_client_option()],
Timeout :: timeout();
(Host, Port, TLSOptions) ->
{ok, sslsocket()} |
{ok, sslsocket(),Ext :: protocol_extensions()} |
{error, reason()} |
{option_not_a_key_value_tuple, any()} when
Host :: host(),
Port :: inet:port_number(),
TLSOptions :: [tls_client_option()].
connect(Socket, SslOptions0, Timeout) when is_list(SslOptions0) andalso
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
CbInfo = handle_option_cb_info(SslOptions0, tls),
Transport = element(1, CbInfo),
try handle_options(Transport, Socket, SslOptions0, client, undefined) of
{ok, Config} ->
tls_socket:upgrade(Socket, Config, Timeout)
catch
_:{error, Reason} ->
{error, Reason}
end;
connect(Host, Port, Options) ->
connect(Host, Port, Options, infinity).
-spec connect(Host, Port, TLSOptions, Timeout) ->
{ok, sslsocket()} |
{ok, sslsocket(),Ext :: protocol_extensions()} |
{error, reason()} |
{option_not_a_key_value_tuple, any()} when
Host :: host(),
Port :: inet:port_number(),
TLSOptions :: [tls_client_option()],
Timeout :: timeout().
connect(Host, Port, Options, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
try
{ok, Config} = handle_options(Options, client, Host),
case Config#config.connection_cb of
tls_gen_connection ->
tls_socket:connect(Host,Port,Config,Timeout);
dtls_gen_connection ->
dtls_socket:connect(Host,Port,Config,Timeout)
end
catch
throw:Error ->
Error
end.
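%% Illustrative usage sketch (not part of the original module). The host name,
%% port and the "cacerts.pem" file below are placeholders.
%%
%%   {ok, Sock} = ssl:connect("server.example.com", 443,
%%                            [{verify, verify_peer},
%%                             {cacertfile, "cacerts.pem"},
%%                             {versions, ['tlsv1.3', 'tlsv1.2']},
%%                             {active, false}],
%%                            5000),
%%   ok = ssl:send(Sock, <<"ping">>),
%%   {ok, _Reply} = ssl:recv(Sock, 0, 5000),
%%   ok = ssl:close(Sock).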
-spec listen(Port, Options) -> {ok, ListenSocket} | {error, reason()} when
Port::inet:port_number(),
Options::[tls_server_option()],
ListenSocket :: sslsocket().
listen(_Port, []) ->
{error, nooptions};
listen(Port, Options0) ->
try
{ok, Config} = handle_options(Options0, server),
do_listen(Port, Config, Config#config.connection_cb)
catch
Error = {error, _} ->
Error
end.
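%% Illustrative usage sketch (not part of the original module); the certificate
%% and key file names are placeholders.
%%
%%   {ok, LSock} = ssl:listen(8443, [{certfile, "server.pem"},
%%                                   {keyfile, "server.key"},
%%                                   {reuseaddr, true},
%%                                   {active, false}]).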
-spec transport_accept(ListenSocket) -> {ok, SslSocket} |
{error, reason()} when
ListenSocket :: sslsocket(),
SslSocket :: sslsocket().
transport_accept(ListenSocket) ->
transport_accept(ListenSocket, infinity).
-spec transport_accept(ListenSocket, Timeout) -> {ok, SslSocket} |
{error, reason()} when
ListenSocket :: sslsocket(),
Timeout :: timeout(),
SslSocket :: sslsocket().
transport_accept(#sslsocket{pid = {ListenSocket,
#config{connection_cb = ConnectionCb} = Config}}, Timeout)
when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
case ConnectionCb of
tls_gen_connection ->
tls_socket:accept(ListenSocket, Config, Timeout);
dtls_gen_connection ->
dtls_socket:accept(ListenSocket, Config, Timeout)
end.
%% Description: Performs accept on an ssl listen socket, i.e. performs the
%% ssl handshake.
%% Performs the SSL/TLS/DTLS server-side handshake.
-spec handshake(HsSocket) -> {ok, SslSocket} | {ok, SslSocket, Ext} | {error, Reason} when
HsSocket :: sslsocket(),
SslSocket :: sslsocket(),
Ext :: protocol_extensions(),
Reason :: closed | timeout | error_alert().
handshake(ListenSocket) ->
handshake(ListenSocket, infinity).
-spec handshake(HsSocket, Timeout) -> {ok, SslSocket} | {ok, SslSocket, Ext} | {error, Reason} when
HsSocket :: sslsocket(),
Timeout :: timeout(),
SslSocket :: sslsocket(),
Ext :: protocol_extensions(),
Reason :: closed | timeout | error_alert();
(Socket, Options) -> {ok, SslSocket} | {ok, SslSocket, Ext} | {error, Reason} when
Socket :: socket() | sslsocket(),
SslSocket :: sslsocket(),
Options :: [server_option()],
Ext :: protocol_extensions(),
Reason :: closed | timeout | error_alert().
handshake(#sslsocket{} = Socket, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or
(Timeout == infinity) ->
ssl_gen_statem:handshake(Socket, Timeout);
%% specified in ssl:listen/2 and then performs the SSL/TLS/DTLS handshake.
handshake(ListenSocket, SslOptions) ->
handshake(ListenSocket, SslOptions, infinity).
-spec handshake(Socket, Options, Timeout) ->
{ok, SslSocket} |
{ok, SslSocket, Ext} |
{error, Reason} when
Socket :: socket() | sslsocket(),
SslSocket :: sslsocket(),
Options :: [server_option()],
Timeout :: timeout(),
Ext :: protocol_extensions(),
Reason :: closed | timeout | {options, any()} | error_alert().
handshake(#sslsocket{} = Socket, [], Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or
(Timeout == infinity)->
handshake(Socket, Timeout);
handshake(#sslsocket{fd = {_, _, _, Trackers}} = Socket, SslOpts, Timeout) when
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
try
Tracker = proplists:get_value(option_tracker, Trackers),
{ok, EmOpts, _} = tls_socket:get_all_opts(Tracker),
ssl_gen_statem:handshake(Socket, {SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
catch
Error = {error, _Reason} -> Error
end;
handshake(#sslsocket{pid = [Pid|_], fd = {_, _, _}} = Socket, SslOpts, Timeout) when
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
try
{ok, EmOpts, _} = dtls_packet_demux:get_all_opts(Pid),
ssl_gen_statem:handshake(Socket, {SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
catch
Error = {error, _Reason} -> Error
end;
handshake(Socket, SslOptions, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
CbInfo = handle_option_cb_info(SslOptions, tls),
Transport = element(1, CbInfo),
ConnetionCb = connection_cb(SslOptions),
try handle_options(Transport, Socket, SslOptions, server, undefined) of
{ok, #config{transport_info = CbInfo, ssl = SslOpts, emulated = EmOpts}} ->
ok = tls_socket:setopts(Transport, Socket, tls_socket:internal_inet_values()),
{ok, Port} = tls_socket:port(Transport, Socket),
{ok, SessionIdHandle} = tls_socket:session_id_tracker(ssl_unknown_listener, SslOpts),
ssl_gen_statem:handshake(ConnetionCb, Port, Socket,
{SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{}),
[{session_id_tracker, SessionIdHandle}]},
self(), CbInfo, Timeout)
catch
Error = {error, _Reason} -> Error
end.
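%% Illustrative accept-loop sketch (not part of the original module). LSock is
%% assumed to come from ssl:listen/2, and handle_connection/1 is a hypothetical
%% handler function.
%%
%%   accept_loop(LSock) ->
%%       {ok, HsSock} = ssl:transport_accept(LSock),
%%       {ok, Sock} = ssl:handshake(HsSock, 5000),
%%       Pid = spawn(fun() -> handle_connection(Sock) end),
%%       ok = ssl:controlling_process(Sock, Pid),
%%       accept_loop(LSock).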
-spec handshake_continue(HsSocket, Options) ->
{ok, SslSocket} | {error, Reason} when
HsSocket :: sslsocket(),
Options :: [tls_client_option() | tls_server_option()],
SslSocket :: sslsocket(),
Reason :: closed | timeout | error_alert().
handshake_continue(Socket, SSLOptions) ->
handshake_continue(Socket, SSLOptions, infinity).
-spec handshake_continue(HsSocket, Options, Timeout) ->
{ok, SslSocket} | {error, Reason} when
HsSocket :: sslsocket(),
Options :: [tls_client_option() | tls_server_option()],
Timeout :: timeout(),
SslSocket :: sslsocket(),
Reason :: closed | timeout | error_alert().
handshake_continue(Socket, SSLOptions, Timeout) ->
ssl_gen_statem:handshake_continue(Socket, SSLOptions, Timeout).
-spec handshake_cancel(#sslsocket{}) -> any().
handshake_cancel(Socket) ->
ssl_gen_statem:handshake_cancel(Socket).
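%% Illustrative sketch (not part of the original module) of a paused handshake:
%% with {handshake, hello} the accept side gets the client hello extensions and
%% can pick options per connection before continuing, or cancel the handshake.
%%
%%   {ok, HsSock} = ssl:transport_accept(LSock),
%%   {ok, PausedSock, Ext} = ssl:handshake(HsSock, [{handshake, hello}], 5000),
%%   case maps:get(sni, Ext, undefined) of
%%       undefined -> ssl:handshake_cancel(PausedSock);
%%       _SNI      -> {ok, _Sock} = ssl:handshake_continue(PausedSock, [], 5000)
%%   end.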
-spec close(SslSocket) -> ok | {error, Reason} when
SslSocket :: sslsocket(),
Reason :: any().
close(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
ssl_gen_statem:close(Pid, {close, ?DEFAULT_TIMEOUT});
close(#sslsocket{pid = {dtls, #config{dtls_handler = {_, _}}}} = DTLSListen) ->
dtls_socket:close(DTLSListen);
close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_,_,_,_}}}}) ->
Transport:close(ListenSocket).
-spec close(SslSocket, How) -> ok | {ok, port()} | {ok, port(), Data} | {error,Reason} when
SslSocket :: sslsocket(),
How :: timeout() | {NewController::pid(), timeout()},
Data :: binary(),
Reason :: any().
close(#sslsocket{pid = [TLSPid|_]},
{Pid, Timeout} = DownGrade) when is_pid(TLSPid),
is_pid(Pid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
case ssl_gen_statem:close(TLSPid, {close, DownGrade}) of
ok ->
            {error, closed};
Other ->
Other
end;
close(#sslsocket{pid = [TLSPid|_]}, Timeout) when is_pid(TLSPid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
ssl_gen_statem:close(TLSPid, {close, Timeout});
close(#sslsocket{pid = {dtls = ListenSocket, #config{transport_info={Transport,_,_,_,_}}}}, _) ->
dtls_socket:close(Transport, ListenSocket);
close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_,_,_,_}}}}, _) ->
tls_socket:close(Transport, ListenSocket).
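%% Illustrative sketch (not part of the original module): a downgrade close,
%% where the underlying transport socket is handed over to NewOwner instead of
%% being closed; already received application data may also be returned.
%%
%%   case ssl:close(Sock, {NewOwner, 5000}) of
%%       {ok, TCPSocket}       -> {TCPSocket, <<>>};
%%       {ok, TCPSocket, Data} -> {TCPSocket, Data};
%%       {error, Reason}       -> {error, Reason}
%%   end.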
-spec send(SslSocket, Data) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
Data :: iodata().
send(#sslsocket{pid = [Pid]}, Data) when is_pid(Pid) ->
ssl_gen_statem:send(Pid, Data);
send(#sslsocket{pid = [_, Pid]}, Data) when is_pid(Pid) ->
tls_sender:send_data(Pid, erlang:iolist_to_iovec(Data));
send(#sslsocket{pid = {_, #config{transport_info={_, udp, _, _}}}}, _) ->
    {error,enotconn};
send(#sslsocket{pid = {dtls,_}}, _) ->
    {error,enotconn};
send(#sslsocket{pid = {ListenSocket, #config{transport_info = Info}}}, Data) ->
    Transport = element(1, Info),
    Transport:send(ListenSocket, Data).
-spec recv(SslSocket, Length) -> {ok, Data} | {error, reason()} when
SslSocket :: sslsocket(),
Length :: integer(),
Data :: binary() | list() | HttpPacket,
HttpPacket :: any().
recv(Socket, Length) ->
recv(Socket, Length, infinity).
-spec recv(SslSocket, Length, Timeout) -> {ok, Data} | {error, reason()} when
SslSocket :: sslsocket(),
Length :: integer(),
Data :: binary() | list() | HttpPacket,
Timeout :: timeout(),
HttpPacket :: any().
recv(#sslsocket{pid = [Pid|_]}, Length, Timeout) when is_pid(Pid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
ssl_gen_statem:recv(Pid, Length, Timeout);
recv(#sslsocket{pid = {dtls,_}}, _, _) ->
{error,enotconn};
recv(#sslsocket{pid = {Listen,
#config{transport_info = Info}}},_,_) when is_port(Listen)->
Transport = element(1, Info),
    Transport:recv(Listen, 0).
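%% Illustrative sketch (not part of the original module), assuming Sock was
%% opened with {active, false}: send a request and read whatever is available.
%%
%%   ok = ssl:send(Sock, ["HEAD / HTTP/1.1\r\n", "Host: server.example.com\r\n\r\n"]),
%%   {ok, Response} = ssl:recv(Sock, 0, 10000).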
-spec controlling_process(SslSocket, NewOwner) -> ok | {error, Reason} when
SslSocket :: sslsocket(),
NewOwner :: pid(),
Reason :: any().
controlling_process(#sslsocket{pid = [Pid|_]}, NewOwner) when is_pid(Pid), is_pid(NewOwner) ->
ssl_gen_statem:new_user(Pid, NewOwner);
controlling_process(#sslsocket{pid = {dtls, _}},
NewOwner) when is_pid(NewOwner) ->
    ok;
controlling_process(#sslsocket{pid = {Listen,
#config{transport_info = {Transport,_,_,_,_}}}},
NewOwner) when is_port(Listen),
is_pid(NewOwner) ->
Transport:controlling_process(Listen, NewOwner).
-spec connection_information(SslSocket) -> {ok, Result} | {error, reason()} when
SslSocket :: sslsocket(),
Result :: connection_info().
connection_information(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
case ssl_gen_statem:connection_information(Pid, false) of
{ok, Info} ->
{ok, [Item || Item = {_Key, Value} <- Info, Value =/= undefined]};
Error ->
Error
end;
connection_information(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn};
connection_information(#sslsocket{pid = {dtls,_}}) ->
{error,enotconn}.
-spec connection_information(SslSocket, Items) -> {ok, Result} | {error, reason()} when
SslSocket :: sslsocket(),
Items :: connection_info_items(),
Result :: connection_info().
connection_information(#sslsocket{pid = [Pid|_]}, Items) when is_pid(Pid) ->
case ssl_gen_statem:connection_information(Pid, include_security_info(Items)) of
{ok, Info} ->
{ok, [Item || Item = {Key, Value} <- Info, lists:member(Key, Items),
Value =/= undefined]};
Error ->
Error
end.
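%% Illustrative sketch (not part of the original module):
%%
%%   {ok, Info} = ssl:connection_information(Sock, [protocol, selected_cipher_suite]),
%%   {protocol, Version} = lists:keyfind(protocol, 1, Info),
%%   {selected_cipher_suite, Suite} = lists:keyfind(selected_cipher_suite, 1, Info).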
-spec peername(SslSocket) -> {ok, {Address, Port}} |
{error, reason()} when
SslSocket :: sslsocket(),
Address :: inet:ip_address(),
Port :: inet:port_number().
peername(#sslsocket{pid = [Pid|_], fd = {Transport, Socket,_}}) when is_pid(Pid)->
dtls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = [Pid|_], fd = {Transport, Socket,_,_}}) when is_pid(Pid)->
tls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = {dtls, #config{dtls_handler = {_Pid,_}}}}) ->
dtls_socket:peername(dtls, undefined);
peername(#sslsocket{pid = {ListenSocket, #config{transport_info = {Transport,_,_,_,_}}}}) ->
    tls_socket:peername(Transport, ListenSocket);
peername(#sslsocket{pid = {dtls,_}}) ->
{error,enotconn}.
-spec peercert(SslSocket) -> {ok, Cert} | {error, reason()} when
SslSocket :: sslsocket(),
Cert :: public_key:der_encoded().
peercert(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
case ssl_gen_statem:peer_certificate(Pid) of
{ok, undefined} ->
{error, no_peercert};
Result ->
Result
end;
peercert(#sslsocket{pid = {dtls, _}}) ->
{error, enotconn};
peercert(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn}.
-spec negotiated_protocol(SslSocket) -> {ok, Protocol} | {error, Reason} when
SslSocket :: sslsocket(),
Protocol :: binary(),
Reason :: protocol_not_negotiated.
negotiated_protocol(#sslsocket{pid = [Pid|_]}) when is_pid(Pid) ->
ssl_gen_statem:negotiated_protocol(Pid).
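%% Illustrative sketch (not part of the original module): with ALPN configured,
%% e.g. {alpn_advertised_protocols, [<<"h2">>, <<"http/1.1">>]} on the client,
%% the negotiated protocol can be inspected after the handshake.
%%
%%   case ssl:negotiated_protocol(Sock) of
%%       {ok, <<"h2">>}                   -> http2;
%%       {ok, _Other}                     -> http1;
%%       {error, protocol_not_negotiated} -> no_alpn
%%   end.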
-spec cipher_suites(Description, Version) -> ciphers() when
Description :: default | all | exclusive | anonymous | exclusive_anonymous,
Version :: protocol_version().
%% TLS/DTLS version
cipher_suites(Description, Version) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
cipher_suites(Description, tls_record:protocol_version(Version));
cipher_suites(Description, Version) when Version == 'dtlsv1.2';
Version == 'dtlsv1'->
cipher_suites(Description, dtls_record:protocol_version(Version));
cipher_suites(Description, Version) ->
[ssl_cipher_format:suite_bin_to_map(Suite) || Suite <- supported_suites(Description, Version)].
-spec cipher_suites(Description, Version, rfc | openssl) -> [string()] when
Description :: default | all | exclusive | anonymous,
Version :: protocol_version().
%% TLS/DTLS version
cipher_suites(Description, Version, StringType) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
cipher_suites(Description, tls_record:protocol_version(Version), StringType);
cipher_suites(Description, Version, StringType) when Version == 'dtlsv1.2';
Version == 'dtlsv1'->
cipher_suites(Description, dtls_record:protocol_version(Version), StringType);
cipher_suites(Description, Version, rfc) ->
[ssl_cipher_format:suite_map_to_str(ssl_cipher_format:suite_bin_to_map(Suite))
|| Suite <- supported_suites(Description, Version)];
cipher_suites(Description, Version, openssl) ->
[ssl_cipher_format:suite_map_to_openssl_str(ssl_cipher_format:suite_bin_to_map(Suite))
|| Suite <- supported_suites(Description, Version)].
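%% Illustrative sketch (not part of the original module): the default suites
%% for TLS 1.2 as maps and as RFC name strings.
%%
%%   Suites = ssl:cipher_suites(default, 'tlsv1.2'),
%%   Names  = ssl:cipher_suites(default, 'tlsv1.2', rfc).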
-spec filter_cipher_suites(Suites, Filters) -> Ciphers when
Suites :: ciphers(),
Filters :: cipher_filters(),
Ciphers :: ciphers().
filter_cipher_suites(Suites, Filters0) ->
#{key_exchange_filters := KexF,
cipher_filters := CipherF,
mac_filters := MacF,
prf_filters := PrfF}
= ssl_cipher:crypto_support_filters(),
Filters = #{key_exchange_filters => add_filter(proplists:get_value(key_exchange, Filters0), KexF),
cipher_filters => add_filter(proplists:get_value(cipher, Filters0), CipherF),
mac_filters => add_filter(proplists:get_value(mac, Filters0), MacF),
prf_filters => add_filter(proplists:get_value(prf, Filters0), PrfF)},
ssl_cipher:filter_suites(Suites, Filters).
-spec prepend_cipher_suites(Preferred, Suites) -> ciphers() when
Preferred :: ciphers() | cipher_filters(),
Suites :: ciphers().
prepend_cipher_suites([First | _] = Preferred, Suites0) when is_map(First) ->
Suites = Preferred ++ (Suites0 -- Preferred),
Suites;
prepend_cipher_suites(Filters, Suites) ->
Preferred = filter_cipher_suites(Suites, Filters),
Preferred ++ (Suites -- Preferred).
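%% Illustrative sketch (not part of the original module): keep only ECDHE key
%% exchanges and move the AES-GCM suites to the front; the result can be passed
%% in the ciphers option.
%%
%%   All   = ssl:cipher_suites(all, 'tlsv1.2'),
%%   Ecdhe = ssl:filter_cipher_suites(All,
%%               [{key_exchange, fun(ecdhe_rsa)   -> true;
%%                                  (ecdhe_ecdsa) -> true;
%%                                  (_)           -> false
%%                               end}]),
%%   Pref  = ssl:prepend_cipher_suites(
%%               [{cipher, fun(aes_128_gcm) -> true;
%%                            (aes_256_gcm) -> true;
%%                            (_)           -> false
%%                         end}],
%%               Ecdhe).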
-spec append_cipher_suites(Deferred, Suites) -> ciphers() when
Deferred :: ciphers() | cipher_filters(),
Suites :: ciphers().
append_cipher_suites([First | _] = Deferred, Suites0) when is_map(First)->
Suites = (Suites0 -- Deferred) ++ Deferred,
Suites;
append_cipher_suites(Filters, Suites) ->
Deferred = filter_cipher_suites(Suites, Filters),
(Suites -- Deferred) ++ Deferred.
-spec eccs() -> NamedCurves when
NamedCurves :: [named_curve()].
eccs() ->
    Curves = tls_v1:ecc_curves(all),
eccs_filter_supported(Curves).
-spec eccs(Version) -> NamedCurves when
Version :: protocol_version(),
NamedCurves :: [named_curve()].
eccs('dtlsv1') ->
eccs('tlsv1.1');
eccs('dtlsv1.2') ->
eccs('tlsv1.2');
eccs(Version) when Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
Curves = tls_v1:ecc_curves(all),
eccs_filter_supported(Curves).
eccs_filter_supported(Curves) ->
CryptoCurves = crypto:ec_curves(),
lists:filter(fun(Curve) -> proplists:get_bool(Curve, CryptoCurves) end,
Curves).
-spec groups() -> [group()].
groups() ->
tls_v1:groups(4).
-spec groups(default) -> [group()].
groups(default) ->
tls_v1:default_groups(4).
-spec getopts(SslSocket, OptionNames) ->
{ok, [gen_tcp:option()]} | {error, reason()} when
SslSocket :: sslsocket(),
OptionNames :: [gen_tcp:option_name()].
getopts(#sslsocket{pid = [Pid|_]}, OptionTags) when is_pid(Pid), is_list(OptionTags) ->
ssl_gen_statem:get_opts(Pid, OptionTags);
getopts(#sslsocket{pid = {dtls, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket, OptionTags) when is_list(OptionTags) ->
try dtls_socket:getopts(Transport, ListenSocket, OptionTags) of
{ok, _} = Result ->
Result;
{error, InetError} ->
{error, {options, {socket_options, OptionTags, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, OptionTags, Error}}}
end;
getopts(#sslsocket{pid = {_, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket,
OptionTags) when is_list(OptionTags) ->
try tls_socket:getopts(Transport, ListenSocket, OptionTags) of
{ok, _} = Result ->
Result;
{error, InetError} ->
{error, {options, {socket_options, OptionTags, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, OptionTags, Error}}}
end;
getopts(#sslsocket{}, OptionTags) ->
{error, {options, {socket_options, OptionTags}}}.
-spec setopts(SslSocket, Options) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
Options :: [gen_tcp:option()].
setopts(#sslsocket{pid = [Pid, Sender]}, Options0) when is_pid(Pid), is_list(Options0) ->
try proplists:expand([{binary, [{mode, binary}]},
{list, [{mode, list}]}], Options0) of
Options ->
case proplists:get_value(packet, Options, undefined) of
undefined ->
ssl_gen_statem:set_opts(Pid, Options);
PacketOpt ->
case tls_sender:setopts(Sender, [{packet, PacketOpt}]) of
ok ->
ssl_gen_statem:set_opts(Pid, Options);
Error ->
Error
end
end
catch
_:_ ->
{error, {options, {not_a_proplist, Options0}}}
end;
setopts(#sslsocket{pid = [Pid|_]}, Options0) when is_pid(Pid), is_list(Options0) ->
try proplists:expand([{binary, [{mode, binary}]},
{list, [{mode, list}]}], Options0) of
Options ->
ssl_gen_statem:set_opts(Pid, Options)
catch
_:_ ->
{error, {options, {not_a_proplist, Options0}}}
end;
setopts(#sslsocket{pid = {dtls, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket, Options) when is_list(Options) ->
try dtls_socket:setopts(Transport, ListenSocket, Options) of
ok ->
ok;
{error, InetError} ->
{error, {options, {socket_options, Options, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, Options, Error}}}
end;
setopts(#sslsocket{pid = {_, #config{transport_info = {Transport,_,_,_,_}}}} = ListenSocket, Options) when is_list(Options) ->
try tls_socket:setopts(Transport, ListenSocket, Options) of
ok ->
ok;
{error, InetError} ->
{error, {options, {socket_options, Options, InetError}}}
catch
_:Error ->
{error, {options, {socket_options, Options, Error}}}
end;
setopts(#sslsocket{}, Options) ->
{error, {options,{not_a_proplist, Options}}}.
-spec getstat(SslSocket) ->
{ok, OptionValues} | {error, inet:posix()} when
SslSocket :: sslsocket(),
OptionValues :: [{inet:stat_option(), integer()}].
getstat(Socket) ->
getstat(Socket, inet:stats()).
-spec getstat(SslSocket, Options) ->
{ok, OptionValues} | {error, inet:posix()} when
SslSocket :: sslsocket(),
Options :: [inet:stat_option()],
OptionValues :: [{inet:stat_option(), integer()}].
%% Description: Get one or more statistic options for a socket.
getstat(#sslsocket{pid = {dtls, #config{transport_info = {Transport, _, _, _, _},
dtls_handler = {Listener, _}}}},
Options) when is_list(Options) ->
dtls_socket:getstat(Transport, Listener, Options);
getstat(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _, _}}}},
Options) when is_port(Listen), is_list(Options) ->
tls_socket:getstat(Transport, Listen, Options);
getstat(#sslsocket{pid = [Pid|_], fd = {Transport, Socket, _, _}},
Options) when is_pid(Pid), is_list(Options) ->
tls_socket:getstat(Transport, Socket, Options);
getstat(#sslsocket{pid = [Pid|_], fd = {Transport, Socket, _}},
Options) when is_pid(Pid), is_list(Options) ->
dtls_socket:getstat(Transport, Socket, Options).
-spec shutdown(SslSocket, How) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
How :: read | write | read_write.
shutdown(#sslsocket{pid = {Listen, #config{transport_info = Info}}},
How) when is_port(Listen) ->
Transport = element(1, Info),
Transport:shutdown(Listen, How);
shutdown(#sslsocket{pid = {dtls,_}},_) ->
{error, enotconn};
shutdown(#sslsocket{pid = [Pid|_]}, How) when is_pid(Pid) ->
ssl_gen_statem:shutdown(Pid, How).
-spec sockname(SslSocket) ->
{ok, {Address, Port}} | {error, reason()} when
SslSocket :: sslsocket(),
Address :: inet:ip_address(),
Port :: inet:port_number().
sockname(#sslsocket{pid = {Listen, #config{transport_info = {Transport,_,_,_,_}}}}) when is_port(Listen) ->
tls_socket:sockname(Transport, Listen);
sockname(#sslsocket{pid = {dtls, #config{dtls_handler = {Pid, _}}}}) ->
dtls_packet_demux:sockname(Pid);
sockname(#sslsocket{pid = [Pid|_], fd = {Transport, Socket,_}}) when is_pid(Pid) ->
dtls_socket:sockname(Transport, Socket);
sockname(#sslsocket{pid = [Pid| _], fd = {Transport, Socket,_,_}}) when is_pid(Pid) ->
tls_socket:sockname(Transport, Socket).
-spec versions() -> [VersionInfo] when
VersionInfo :: {ssl_app, string()} |
{supported | available | implemented, [tls_version()]} |
{supported_dtls | available_dtls | implemented_dtls, [dtls_version()]}.
versions() ->
ConfTLSVsns = tls_record:supported_protocol_versions(),
ConfDTLSVsns = dtls_record:supported_protocol_versions(),
ImplementedTLSVsns = ?ALL_AVAILABLE_VERSIONS,
ImplementedDTLSVsns = ?ALL_AVAILABLE_DATAGRAM_VERSIONS,
TLSCryptoSupported = fun(Vsn) ->
tls_record:sufficient_crypto_support(Vsn)
end,
DTLSCryptoSupported = fun(Vsn) ->
tls_record:sufficient_crypto_support(dtls_v1:corresponding_tls_version(Vsn))
end,
SupportedTLSVsns = [tls_record:protocol_version(Vsn) || Vsn <- ConfTLSVsns, TLSCryptoSupported(Vsn)],
SupportedDTLSVsns = [dtls_record:protocol_version(Vsn) || Vsn <- ConfDTLSVsns, DTLSCryptoSupported(Vsn)],
AvailableTLSVsns = [Vsn || Vsn <- ImplementedTLSVsns, TLSCryptoSupported(tls_record:protocol_version(Vsn))],
AvailableDTLSVsns = [Vsn || Vsn <- ImplementedDTLSVsns, DTLSCryptoSupported(dtls_record:protocol_version(Vsn))],
[{ssl_app, ?VSN},
{supported, SupportedTLSVsns},
{supported_dtls, SupportedDTLSVsns},
{available, AvailableTLSVsns},
{available_dtls, AvailableDTLSVsns},
{implemented, ImplementedTLSVsns},
{implemented_dtls, ImplementedDTLSVsns}
].
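%% Illustrative sketch (not part of the original module): check whether TLS 1.3
%% is available with the current configuration and crypto support.
%%
%%   Available  = proplists:get_value(available, ssl:versions()),
%%   Tls13Ready = lists:member('tlsv1.3', Available).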
-spec renegotiate(SslSocket) -> ok | {error, reason()} when
SslSocket :: sslsocket().
renegotiate(#sslsocket{pid = [Pid, Sender |_]}) when is_pid(Pid),
is_pid(Sender) ->
case tls_sender:renegotiate(Sender) of
{ok, Write} ->
tls_dtls_connection:renegotiation(Pid, Write);
Error ->
Error
end;
renegotiate(#sslsocket{pid = [Pid |_]}) when is_pid(Pid) ->
tls_dtls_connection:renegotiation(Pid);
renegotiate(#sslsocket{pid = {dtls,_}}) ->
{error, enotconn};
renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
{error, enotconn}.
-spec update_keys(SslSocket, Type) -> ok | {error, reason()} when
SslSocket :: sslsocket(),
Type :: write | read_write.
update_keys(#sslsocket{pid = [Pid, Sender |_]}, Type0) when is_pid(Pid) andalso
is_pid(Sender) andalso
(Type0 =:= write orelse
Type0 =:= read_write) ->
Type = case Type0 of
write ->
update_not_requested;
read_write ->
update_requested
end,
tls_connection_1_3:send_key_update(Sender, Type);
update_keys(_, Type) ->
{error, {illegal_parameter, Type}}.
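%% Illustrative sketch (not part of the original module): on a TLS 1.3
%% connection, refresh our write keys, or ask the peer to update as well.
%%
%%   ok = ssl:update_keys(Sock, write),
%%   ok = ssl:update_keys(Sock, read_write).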
-spec prf(SslSocket, Secret, Label, Seed, WantedLength) ->
{ok, binary()} | {error, reason()} when
SslSocket :: sslsocket(),
Secret :: binary() | 'master_secret',
Label::binary(),
Seed :: [binary() | prf_random()],
WantedLength :: non_neg_integer().
%% Description: Use an ssl session's TLS PRF to generate key material.
prf(#sslsocket{pid = [Pid|_]},
Secret, Label, Seed, WantedLength) when is_pid(Pid) ->
tls_dtls_connection:prf(Pid, Secret, Label, Seed, WantedLength);
prf(#sslsocket{pid = {dtls,_}}, _,_,_,_) ->
{error, enotconn};
prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) ->
{error, enotconn}.
-spec clear_pem_cache() -> ok.
clear_pem_cache() ->
ssl_pem_cache:clear().
-spec format_error({error, Reason}) -> string() when
Reason :: any().
format_error({error, Reason}) ->
format_error(Reason);
format_error(Reason) when is_list(Reason) ->
Reason;
format_error(closed) ->
"TLS connection is closed";
format_error({tls_alert, {_, Description}}) ->
Description;
format_error({options,{FileType, File, Reason}}) when FileType == cacertfile;
FileType == certfile;
FileType == keyfile;
FileType == dhfile ->
Error = file_error_format(Reason),
file_desc(FileType) ++ File ++ ": " ++ Error;
format_error({options, {socket_options, Option, Error}}) ->
lists:flatten(io_lib:format("Invalid transport socket option ~p: ~s", [Option, format_error(Error)]));
format_error({options, {socket_options, Option}}) ->
lists:flatten(io_lib:format("Invalid socket option: ~p", [Option]));
format_error({options, Options}) ->
lists:flatten(io_lib:format("Invalid TLS option: ~p", [Options]));
format_error(Error) ->
case inet:format_error(Error) of
"unknown POSIX" ++ _ ->
unexpected_format(Error);
Other ->
Other
end.
tls_version({3, _} = Version) ->
Version;
tls_version({254, _} = Version) ->
dtls_v1:corresponding_tls_version(Version).
-spec suite_to_str(CipherSuite) -> string() when
CipherSuite :: erl_cipher_suite();
(CipherSuite) -> string() when
CipherSuite :: #{key_exchange := null,
cipher := null,
mac := null,
prf := null}.
suite_to_str(Cipher) ->
ssl_cipher_format:suite_map_to_str(Cipher).
-spec suite_to_openssl_str(CipherSuite) -> string() when
CipherSuite :: erl_cipher_suite().
suite_to_openssl_str(Cipher) ->
ssl_cipher_format:suite_map_to_openssl_str(Cipher).
-spec str_to_suite(CipherSuiteName) -> erl_cipher_suite() | {error, {not_recognized, CipherSuiteName}} when
CipherSuiteName :: string().
str_to_suite(CipherSuiteName) ->
try
        %% Note in TLS-1.3 OpenSSL conforms to RFC names
        %% so if CipherSuiteName starts with TLS this
        %% function will call ssl_cipher_format:suite_str_to_map
        %% so both RFC names and legacy OpenSSL names of supported
ssl_cipher_format:suite_openssl_str_to_map(CipherSuiteName)
catch
_:_ ->
{error, {not_recognized, CipherSuiteName}}
end.
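%% Illustrative sketch (not part of the original module): converting between
%% cipher suite representations.
%%
%%   Suite   = ssl:str_to_suite("ECDHE-RSA-AES256-GCM-SHA384"),
%%   RfcName = ssl:suite_to_str(Suite).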
%% Internal functions
supported_suites(exclusive, {3,Minor}) ->
tls_v1:exclusive_suites(Minor);
supported_suites(exclusive, {254, Minor}) ->
dtls_v1:exclusive_suites(Minor);
supported_suites(default, Version) ->
ssl_cipher:suites(Version);
supported_suites(all, Version) ->
ssl_cipher:all_suites(Version);
supported_suites(anonymous, Version) ->
ssl_cipher:anonymous_suites(Version);
supported_suites(exclusive_anonymous, {3, Minor}) ->
tls_v1:exclusive_anonymous_suites(Minor);
supported_suites(exclusive_anonymous, {254, Minor}) ->
dtls_v1:exclusive_anonymous_suites(Minor).
do_listen(Port, #config{transport_info = {Transport, _, _, _,_}} = Config, tls_gen_connection) ->
tls_socket:listen(Transport, Port, Config);
do_listen(Port, Config, dtls_gen_connection) ->
dtls_socket:listen(Port, Config).
-spec handle_options([any()], client | server) -> {ok, #config{}};
([any()], ssl_options()) -> ssl_options().
handle_options(Opts, Role) ->
handle_options(undefined, undefined, Opts, Role, undefined).
handle_options(Opts, Role, InheritedSslOpts) ->
handle_options(undefined, undefined, Opts, Role, InheritedSslOpts).
handle_options(_, _, Opts0, Role, InheritedSslOpts) when is_map(InheritedSslOpts) ->
{SslOpts, _} = expand_options(Opts0, ?RULES),
process_options(SslOpts, InheritedSslOpts, #{role => Role,
rules => ?RULES});
handle_options(Transport, Socket, Opts0, Role, Host) ->
{SslOpts0, SockOpts0} = expand_options(Opts0, ?RULES),
SslOpts1 = add_missing_options(SslOpts0, ?RULES),
SslOpts2 = #{protocol := Protocol}
= process_options(SslOpts1,
#{},
#{role => Role,
host => Host,
rules => ?RULES}),
maybe_client_warn_no_verify(SslOpts2, Role),
SslOpts = maps:without([warn_verify_none], SslOpts2),
{Sock, Emulated} = emulated_options(Transport, Socket, Protocol, SockOpts0),
ConnetionCb = connection_cb(Protocol),
CbInfo = handle_option_cb_info(Opts0, Protocol),
{ok, #config{
ssl = SslOpts,
emulated = Emulated,
inet_ssl = Sock,
inet_user = Sock,
transport_info = CbInfo,
connection_cb = ConnetionCb
}}.
%% process_options(SSLOptions, OptionsMap, Env) where
%% SSLOptions is the following tuple:
%% {InOptions, SkippedOptions, Counter}
process_options({[], [], _}, OptionsMap, _Env) ->
OptionsMap;
process_options({[], [_|_] = Skipped, Counter}, OptionsMap, Env)
when length(Skipped) < Counter ->
process_options({Skipped, [], length(Skipped)}, OptionsMap, Env);
process_options({[], [_|_], _Counter}, _OptionsMap, _Env) ->
throw({error, faulty_configuration});
process_options({[{K0,V} = E|T], S, Counter}, OptionsMap0, Env) ->
K = maybe_map_key_internal(K0),
case check_dependencies(K, OptionsMap0, Env) of
true ->
OptionsMap = handle_option(K, V, OptionsMap0, Env),
process_options({T, S, Counter}, OptionsMap, Env);
false ->
process_options({T, [E|S], Counter}, OptionsMap0, Env)
end.
handle_option(anti_replay = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(anti_replay = Option, Value0,
#{session_tickets := SessionTickets,
versions := Versions} = OptionsMap, #{rules := Rules}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_option_dependency(Option, session_tickets, [SessionTickets], [stateless]),
case SessionTickets of
stateless ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
_ ->
OptionsMap#{Option => default_value(Option, Rules)}
end;
handle_option(beast_mitigation = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(beast_mitigation = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(cacertfile = Option, unbound, #{cacerts := CaCerts,
verify := Verify,
verify_fun := VerifyFun} = OptionsMap, _Env)
when Verify =:= verify_none orelse
Verify =:= 0 ->
Value = validate_option(Option, ca_cert_default(verify_none, VerifyFun, CaCerts)),
OptionsMap#{Option => Value};
handle_option(cacertfile = Option, unbound, #{cacerts := CaCerts,
verify := Verify,
verify_fun := VerifyFun} = OptionsMap, _Env)
when Verify =:= verify_peer orelse
Verify =:= 1 orelse
Verify =:= 2 ->
Value = validate_option(Option, ca_cert_default(verify_peer, VerifyFun, CaCerts)),
OptionsMap#{Option => Value};
handle_option(cacertfile = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(ciphers = Option, unbound, #{versions := Versions} = OptionsMap, #{rules := Rules}) ->
Value = handle_cipher_option(default_value(Option, Rules), Versions),
OptionsMap#{Option => Value};
handle_option(ciphers = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
Value = handle_cipher_option(Value0, Versions),
OptionsMap#{Option => Value};
handle_option(client_renegotiation = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, true, Role),
OptionsMap#{Option => Value};
handle_option(client_renegotiation = Option, Value0,
#{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_role(server_only, Role, Option, Value0),
assert_option_dependency(Option, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(early_data = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(early_data = Option, Value0, #{session_tickets := SessionTickets,
versions := Versions} = OptionsMap,
#{role := server = Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_option_dependency(Option, session_tickets, [SessionTickets],
[stateful, stateless]),
Value = validate_option(Option, Value0, Role),
OptionsMap#{Option => Value};
handle_option(early_data = Option, Value0, #{session_tickets := SessionTickets,
use_ticket := UseTicket,
versions := Versions} = OptionsMap,
#{role := client = Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_option_dependency(Option, session_tickets, [SessionTickets],
[manual, auto]),
case UseTicket of
undefined when SessionTickets =/= auto ->
throw({error, {options, dependency, {Option, use_ticket}}});
_ ->
ok
end,
Value = validate_option(Option, Value0, Role),
OptionsMap#{Option => Value};
handle_option(eccs = Option, unbound, #{versions := [HighestVersion|_]} = OptionsMap, #{rules := _Rules}) ->
Value = handle_eccs_option(eccs(), HighestVersion),
OptionsMap#{Option => Value};
handle_option(eccs = Option, Value0, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_eccs_option(Value0, HighestVersion),
OptionsMap#{Option => Value};
handle_option(fallback = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(client, false, Role),
OptionsMap#{Option => Value};
handle_option(fallback = Option, Value0, OptionsMap, #{role := Role}) ->
assert_role(client_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(certificate_authorities = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(client, false, Role),
OptionsMap#{Option => Value};
handle_option(certificate_authorities = Option, Value0, #{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_role(client_only, Role, Option, Value0),
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(cookie = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, true, Role),
OptionsMap#{Option => Value};
handle_option(cookie = Option, Value0, #{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
assert_role(server_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(honor_cipher_order = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, false, Role),
OptionsMap#{Option => Value};
handle_option(honor_cipher_order = Option, Value0, OptionsMap, #{role := Role}) ->
assert_role(server_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(honor_ecc_order = Option, unbound, OptionsMap, #{role := Role}) ->
Value = default_option_role(server, false, Role),
OptionsMap#{Option => Value};
handle_option(honor_ecc_order = Option, Value0, OptionsMap, #{role := Role}) ->
assert_role(server_only, Role, Option, Value0),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(keyfile = Option, unbound, #{certfile := CertFile} = OptionsMap, _Env) ->
Value = validate_option(Option, CertFile),
OptionsMap#{Option => Value};
handle_option(key_update_at = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(key_update_at = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(next_protocols_advertised = Option, unbound, OptionsMap,
#{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(next_protocols_advertised = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(next_protocols_advertised, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(next_protocol_selector = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = default_value(Option, Rules),
OptionsMap#{Option => Value};
handle_option(next_protocol_selector = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(client_preferred_next_protocols, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = make_next_protocol_selector(
validate_option(client_preferred_next_protocols, Value0)),
OptionsMap#{Option => Value};
handle_option(padding_check = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(padding_check = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(password = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{password => Value};
handle_option(password = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{password => Value};
handle_option(psk_identity = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(psk_identity = Option, Value0, #{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(secure_renegotiate = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(secure_renegotiate= Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(secure_renegotiate, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(reuse_session = Option, unbound, OptionsMap, #{role := Role}) ->
Value =
case Role of
client ->
undefined;
server ->
fun(_, _, _, _) -> true end
end,
OptionsMap#{Option => Value};
handle_option(reuse_session = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(reuse_session, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(reuse_sessions = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(reuse_sessions = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(reuse_sessions, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(server_name_indication = Option, unbound, OptionsMap, #{host := Host,
role := Role}) ->
Value = default_option_role(client, server_name_indication_default(Host), Role),
OptionsMap#{Option => Value};
handle_option(server_name_indication = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(session_tickets = Option, unbound, OptionsMap, #{role := Role,
rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules), Role),
OptionsMap#{Option => Value};
handle_option(session_tickets = Option, Value0, #{versions := Versions} = OptionsMap, #{role := Role}) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0, Role),
OptionsMap#{Option => Value};
handle_option(signature_algs = Option, unbound, #{versions := [HighestVersion | _] = Versions} = OptionsMap, #{role := Role}) ->
Value =
handle_hashsigns_option(
default_option_role_sign_algs(
server,
tls_v1:default_signature_algs(Versions),
Role,
HighestVersion),
tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(signature_algs = Option, Value0, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_hashsigns_option(Value0, tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(signature_algs_cert = Option, unbound, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_signature_algorithms_option(undefined, tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(signature_algs_cert = Option, Value0, #{versions := [HighestVersion|_]} = OptionsMap, _Env) ->
Value = handle_signature_algorithms_option(Value0, tls_version(HighestVersion)),
OptionsMap#{Option => Value};
handle_option(sni_fun = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = default_value(Option, Rules),
OptionsMap#{Option => Value};
handle_option(sni_fun = Option, Value0, OptionsMap, _Env) ->
validate_option(Option, Value0),
OptHosts = maps:get(sni_hosts, OptionsMap, undefined),
Value =
case {Value0, OptHosts} of
{undefined, _} ->
Value0;
{_, []} ->
Value0;
_ ->
throw({error, {conflict_options, [sni_fun, sni_hosts]}})
end,
OptionsMap#{Option => Value};
handle_option(srp_identity = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(srp_identity = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(srp_identity, versions, Versions,
['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(supported_groups = Option, unbound, #{versions := [HighestVersion|_]} = OptionsMap, #{rules := _Rules}) ->
Value = handle_supported_groups_option(groups(default), HighestVersion),
OptionsMap#{Option => Value};
handle_option(supported_groups = Option, Value0,
#{versions := [HighestVersion|_] = Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = handle_supported_groups_option(Value0, HighestVersion),
OptionsMap#{Option => Value};
handle_option(use_ticket = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(use_ticket = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1.3']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(user_lookup_fun = Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(user_lookup_fun = Option, Value0,
#{versions := Versions} = OptionsMap, _Env) ->
assert_option_dependency(Option, versions, Versions, ['tlsv1','tlsv1.1','tlsv1.2']),
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(verify = Option, unbound, OptionsMap, #{rules := Rules}) ->
handle_verify_option(default_value(Option, Rules), OptionsMap#{warn_verify_none => true});
handle_option(verify = _Option, Value, OptionsMap, _Env) ->
handle_verify_option(Value, OptionsMap);
handle_option(verify_fun = Option, unbound, #{verify := Verify} = OptionsMap, #{rules := Rules})
when Verify =:= verify_none ->
OptionsMap#{Option => default_value(Option, Rules)};
handle_option(verify_fun = Option, unbound, #{verify := Verify} = OptionsMap, _Env)
when Verify =:= verify_peer ->
OptionsMap#{Option => undefined};
handle_option(verify_fun = Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value};
handle_option(versions = Option, unbound, #{protocol := Protocol} = OptionsMap, _Env) ->
RecordCb = record_cb(Protocol),
Vsns0 = RecordCb:supported_protocol_versions(),
Value = lists:sort(fun RecordCb:is_higher/2, Vsns0),
OptionsMap#{Option => Value};
handle_option(versions = Option, Vsns0, #{protocol := Protocol} = OptionsMap, _Env) ->
validate_option(versions, Vsns0),
RecordCb = record_cb(Protocol),
Vsns1 = [RecordCb:protocol_version(Vsn) || Vsn <- Vsns0],
Value = lists:sort(fun RecordCb:is_higher/2, Vsns1),
OptionsMap#{Option => Value};
handle_option(cb_info = Option, unbound, #{protocol := Protocol} = OptionsMap, _Env) ->
Default = default_cb_info(Protocol),
validate_option(Option, Default),
Value = handle_cb_info(Default),
OptionsMap#{Option => Value};
handle_option(cb_info = Option, Value0, OptionsMap, _Env) ->
validate_option(Option, Value0),
Value = handle_cb_info(Value0),
OptionsMap#{Option => Value};
%% Generic case
handle_option(Option, unbound, OptionsMap, #{rules := Rules}) ->
Value = validate_option(Option, default_value(Option, Rules)),
OptionsMap#{Option => Value};
handle_option(Option, Value0, OptionsMap, _Env) ->
Value = validate_option(Option, Value0),
OptionsMap#{Option => Value}.
handle_option_cb_info(Options, Protocol) ->
Value = proplists:get_value(cb_info, Options, default_cb_info(Protocol)),
#{cb_info := CbInfo} = handle_option(cb_info, Value, #{protocol => Protocol}, #{}),
CbInfo.
maybe_map_key_internal(client_preferred_next_protocols) ->
next_protocol_selector;
maybe_map_key_internal(K) ->
K.
maybe_map_key_external(next_protocol_selector) ->
client_preferred_next_protocols;
maybe_map_key_external(K) ->
K.
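%% The user-facing option client_preferred_next_protocols is stored internally under
%% the key next_protocol_selector; the two helpers above translate between the names.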
check_dependencies(K, OptionsMap, Env) ->
Rules = maps:get(rules, Env),
Deps = get_dependencies(K, Rules),
case Deps of
[] ->
true;
L ->
option_already_defined(K,OptionsMap) orelse
dependecies_already_defined(L, OptionsMap)
end.
get_dependencies(K, _) when K =:= cb_info orelse K =:= log_alert->
[];
get_dependencies(K, Rules) ->
{_, Deps} = maps:get(K, Rules),
Deps.
option_already_defined(K, Map) ->
maps:get(K, Map, unbound) =/= unbound.
dependecies_already_defined(L, OptionsMap) ->
Fun = fun (E) -> option_already_defined(E, OptionsMap) end,
lists:all(Fun, L).
expand_options(Opts0, Rules) ->
Opts1 = proplists:expand([{binary, [{mode, binary}]},
{list, [{mode, list}]}], Opts0),
Opts2 = handle_option_format(Opts1, []),
Opts = proplists:delete(ssl_imp, Opts2),
AllOpts = maps:keys(Rules),
SockOpts = lists:foldl(fun(Key, PropList) -> proplists:delete(Key, PropList) end,
Opts,
                           AllOpts ++
                               [ssl_imp,
                                cb_info,
                                client_preferred_next_protocols,
                                log_alert]),   %% log_alert is obsoleted by log_level
SslOpts0 = Opts -- SockOpts,
SslOpts = {SslOpts0, [], length(SslOpts0)},
{SslOpts, SockOpts}.
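%% add_missing_options/2 (below) inserts every rule key the caller did not supply with
%% the placeholder value 'unbound', so that handle_option/4 can later substitute the
%% default from ?RULES.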
add_missing_options({L0, S, _C}, Rules) ->
Fun = fun(K0, Acc) ->
K = maybe_map_key_external(K0),
case proplists:is_defined(K, Acc) of
true ->
Acc;
false ->
Default = unbound,
[{K, Default}|Acc]
end
end,
AllOpts = maps:keys(Rules),
L = lists:foldl(Fun, L0, AllOpts),
{L, S, length(L)}.
default_value(Key, Rules) ->
{Default, _} = maps:get(Key, Rules, {undefined, []}),
Default.
assert_role(client_only, client, _, _) ->
ok;
assert_role(server_only, server, _, _) ->
ok;
assert_role(client_only, _, _, undefined) ->
ok;
assert_role(server_only, _, _, undefined) ->
ok;
assert_role(Type, _, Key, _) ->
throw({error, {option, Type, Key}}).
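%% assert_option_dependency/4 checks, for TLS, that at least one configured protocol
%% version is among AllowedValues. Illustrative example: with versions [{3,1}] (mapped
%% to ['tlsv1']), asserting padding_check against ['tlsv1'] succeeds, while a
%% TLS-1.3-only configuration would throw {error, {options, dependency, ...}}.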
assert_option_dependency(Option, OptionDep, Values0, AllowedValues) ->
case is_dtls_configured(Values0) of
true ->
            %% TODO: Check option dependency for DTLS
ok;
false ->
Values =
case OptionDep of
versions ->
lists:map(fun tls_record:protocol_version/1, Values0);
_ ->
Values0
end,
Set1 = sets:from_list(Values),
Set2 = sets:from_list(AllowedValues),
case sets:size(sets:intersection(Set1, Set2)) > 0 of
true ->
ok;
false ->
throw({error, {options, dependency,
{Option, {OptionDep, AllowedValues}}}})
end
end.
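%% {254,253} and {254,255} are the on-the-wire version tuples of DTLS 1.2 and DTLS 1.0.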
is_dtls_configured(Versions) ->
Fun = fun (Version) when Version =:= {254, 253} orelse
Version =:= {254, 255} ->
true;
(_) ->
false
end,
lists:any(Fun, Versions).
validate_option(Option, Value) ->
validate_option(Option, Value, undefined).
validate_option(Opt, Value, _)
when Opt =:= alpn_advertised_protocols orelse
Opt =:= alpn_preferred_protocols,
is_list(Value) ->
validate_binary_list(Opt, Value),
Value;
validate_option(Opt, Value, _)
when Opt =:= alpn_advertised_protocols orelse
Opt =:= alpn_preferred_protocols,
Value =:= undefined ->
undefined;
validate_option(anti_replay, '10k', _) ->
    %% n = 10000
    %% p = 0.030003564 (1 in 33)
    %% m = 72985 (8.91 KiB)
    %% k = 5
{10, 5, 72985};
validate_option(anti_replay, '100k', _) ->
    %% n = 100000
    %% p = 0.03000428 (1 in 33)
    %% m = 729845 (89.09 KiB)
    %% k = 5
{10, 5, 729845};
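%% A custom anti_replay value is {WindowSizeSeconds, HashFunctions, FilterBits} for the
%% Bloom filter used by the stateless session-ticket anti-replay protection.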
validate_option(anti_replay, Value, _)
when (is_tuple(Value) andalso
tuple_size(Value) =:= 3) ->
Value;
validate_option(beast_mitigation, Value, _)
when Value == one_n_minus_one orelse
Value == zero_n orelse
Value == disabled ->
Value;
validate_option(cacertfile, undefined, _) ->
<<>>;
validate_option(cacertfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(cacertfile, Value, _)
when is_list(Value), Value =/= ""->
binary_filename(Value);
validate_option(cacerts, Value, _)
when Value == undefined;
is_list(Value) ->
Value;
validate_option(cb_info, {V1, V2, V3, V4} = Value, _)
when is_atom(V1),
is_atom(V2),
is_atom(V3),
is_atom(V4) ->
Value;
validate_option(cb_info, {V1, V2, V3, V4, V5} = Value, _)
when is_atom(V1),
is_atom(V2),
is_atom(V3),
is_atom(V4),
is_atom(V5) ->
Value;
validate_option(cert, Value, _) when Value == undefined;
is_list(Value)->
Value;
validate_option(cert, Value, _) when Value == undefined;
is_binary(Value)->
[Value];
validate_option(certificate_authorities, Value, _) when is_boolean(Value)->
Value;
validate_option(certfile, undefined = Value, _) ->
Value;
validate_option(certfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(certfile, Value, _)
when is_list(Value) ->
binary_filename(Value);
validate_option(client_preferred_next_protocols, {Precedence, PreferredProtocols}, _)
when is_list(PreferredProtocols) ->
validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
validate_npn_ordering(Precedence),
{Precedence, PreferredProtocols, ?NO_PROTOCOL};
validate_option(client_preferred_next_protocols,
{Precedence, PreferredProtocols, Default} = Value, _)
when is_list(PreferredProtocols), is_binary(Default),
byte_size(Default) > 0, byte_size(Default) < 256 ->
validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
validate_npn_ordering(Precedence),
Value;
validate_option(client_preferred_next_protocols, undefined, _) ->
undefined;
validate_option(client_renegotiation, Value, _)
when is_boolean(Value) ->
Value;
validate_option(cookie, Value, _)
when is_boolean(Value) ->
Value;
validate_option(crl_cache, {Cb, {_Handle, Options}} = Value, _)
when is_atom(Cb) and is_list(Options) ->
Value;
validate_option(crl_check, Value, _)
when is_boolean(Value) ->
Value;
validate_option(crl_check, Value, _)
when (Value == best_effort) or
(Value == peer) ->
Value;
validate_option(customize_hostname_check, Value, _)
when is_list(Value) ->
Value;
validate_option(depth, Value, _)
when is_integer(Value),
Value >= 0, Value =< 255->
Value;
validate_option(dh, Value, _)
when Value == undefined;
is_binary(Value) ->
Value;
validate_option(dhfile, undefined = Value, _) ->
Value;
validate_option(dhfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(dhfile, Value, _)
when is_list(Value), Value =/= "" ->
binary_filename(Value);
validate_option(early_data, Value, server)
when Value =:= disabled orelse
Value =:= enabled ->
Value;
validate_option(early_data = Option, Value, server) ->
throw({error,
{options, role, {Option, {Value, {server, [disabled, enabled]}}}}});
validate_option(early_data, Value, client)
when is_binary(Value) ->
Value;
validate_option(early_data = Option, Value, client) ->
throw({error,
{options, type, {Option, {Value, not_binary}}}});
validate_option(erl_dist, Value, _)
when is_boolean(Value) ->
Value;
validate_option(fail_if_no_peer_cert, Value, _)
when is_boolean(Value) ->
Value;
validate_option(fallback, Value, _)
when is_boolean(Value) ->
Value;
validate_option(handshake, hello = Value, _) ->
Value;
validate_option(handshake, full = Value, _) ->
Value;
validate_option(hibernate_after, undefined, _) ->  %% Backwards compatibility
    infinity;
validate_option(hibernate_after, infinity, _) ->
infinity;
validate_option(hibernate_after, Value, _)
when is_integer(Value), Value >= 0 ->
Value;
validate_option(honor_cipher_order, Value, _)
when is_boolean(Value) ->
Value;
validate_option(honor_ecc_order, Value, _)
when is_boolean(Value) ->
Value;
validate_option(keep_secrets, Value, _) when is_boolean(Value) ->
Value;
validate_option(key, undefined, _) ->
undefined;
validate_option(key, {KeyType, Value}, _)
when is_binary(Value),
KeyType == 'RSAPrivateKey';
KeyType == 'DSAPrivateKey';
KeyType == 'ECPrivateKey';
KeyType == 'PrivateKeyInfo' ->
{KeyType, Value};
validate_option(key, #{algorithm := _} = Value, _) ->
Value;
validate_option(keyfile, undefined, _) ->
<<>>;
validate_option(keyfile, Value, _)
when is_binary(Value) ->
Value;
validate_option(keyfile, Value, _)
when is_list(Value), Value =/= "" ->
binary_filename(Value);
validate_option(key_update_at, Value, _)
when is_integer(Value) andalso
Value > 0 ->
Value;
validate_option(log_level, Value, _) when
is_atom(Value) andalso
(Value =:= none orelse
Value =:= all orelse
Value =:= emergency orelse
Value =:= alert orelse
Value =:= critical orelse
Value =:= error orelse
Value =:= warning orelse
Value =:= notice orelse
Value =:= info orelse
Value =:= debug) ->
Value;
validate_option(max_fragment_length, I, _)
when I == ?MAX_FRAGMENT_LENGTH_BYTES_1;
I == ?MAX_FRAGMENT_LENGTH_BYTES_2;
I == ?MAX_FRAGMENT_LENGTH_BYTES_3;
I == ?MAX_FRAGMENT_LENGTH_BYTES_4 ->
I;
validate_option(max_fragment_length, undefined, _) ->
undefined;
validate_option(max_handshake_size, Value, _)
when is_integer(Value) andalso
Value =< ?MAX_UNIT24 ->
Value;
validate_option(middlebox_comp_mode, Value, _)
when is_boolean(Value) ->
Value;
validate_option(next_protocols_advertised, Value, _) when is_list(Value) ->
validate_binary_list(next_protocols_advertised, Value),
Value;
validate_option(next_protocols_advertised, undefined, _) ->
undefined;
validate_option(ocsp_nonce, Value, _)
when Value =:= true orelse
Value =:= false ->
Value;
validate_option(ocsp_responder_certs, Value, _)
when is_list(Value) ->
[public_key:pkix_decode_cert(CertDer, plain) || CertDer <- Value,
is_binary(CertDer)];
validate_option(ocsp_stapling, Value, _)
when Value =:= true orelse
Value =:= false ->
Value;
validate_option(padding_check, Value, _)
when is_boolean(Value) ->
Value;
validate_option(partial_chain, Value, _)
when is_function(Value) ->
Value;
validate_option(password, Value, _)
when is_list(Value) ->
Value;
validate_option(password, Value, _)
when is_function(Value, 0) ->
Value;
validate_option(protocol, Value = tls, _) ->
Value;
validate_option(protocol, Value = dtls, _) ->
Value;
validate_option(psk_identity, undefined, _) ->
undefined;
validate_option(psk_identity, Identity, _)
when is_list(Identity), Identity =/= "", length(Identity) =< 65535 ->
binary_filename(Identity);
validate_option(renegotiate_at, Value, _) when is_integer(Value) ->
erlang:min(Value, ?DEFAULT_RENEGOTIATE_AT);
validate_option(reuse_session, undefined, _) ->
undefined;
validate_option(reuse_session, Value, _)
when is_function(Value) ->
Value;
validate_option(reuse_session, Value, _)
when is_binary(Value) ->
Value;
validate_option(reuse_session, {Id, Data} = Value, _)
when is_binary(Id) andalso
is_binary(Data) ->
Value;
validate_option(reuse_sessions, Value, _)
when is_boolean(Value) ->
Value;
validate_option(reuse_sessions, save = Value, _) ->
Value;
validate_option(secure_renegotiate, Value, _)
when is_boolean(Value) ->
Value;
validate_option(server_name_indication, Value, _)
when is_list(Value) ->
    %% RFC 6066, Section 3: Currently, the only server names supported are DNS hostnames
Value;
validate_option(server_name_indication, undefined, _) ->
undefined;
validate_option(server_name_indication, disable, _) ->
disable;
validate_option(session_tickets, Value, server)
when Value =:= disabled orelse
Value =:= stateful orelse
Value =:= stateless ->
Value;
validate_option(session_tickets, Value, server) ->
throw({error,
{options, role,
{session_tickets,
{Value, {server, [disabled, stateful, stateless]}}}}});
validate_option(session_tickets, Value, client)
when Value =:= disabled orelse
Value =:= manual orelse
Value =:= auto ->
Value;
validate_option(session_tickets, Value, client) ->
throw({error,
{options, role,
{session_tickets,
{Value, {client, [disabled, manual, auto]}}}}});
validate_option(sni_fun, undefined, _) ->
undefined;
validate_option(sni_fun, Fun, _)
when is_function(Fun) ->
Fun;
validate_option(sni_hosts, [], _) ->
[];
validate_option(sni_hosts, [{Hostname, SSLOptions} | Tail], _)
when is_list(Hostname) ->
RecursiveSNIOptions = proplists:get_value(sni_hosts, SSLOptions, undefined),
case RecursiveSNIOptions of
undefined ->
[{Hostname, validate_options(SSLOptions)} |
validate_option(sni_hosts, Tail)];
_ ->
throw({error, {options, {sni_hosts, RecursiveSNIOptions}}})
end;
validate_option(srp_identity, undefined, _) ->
undefined;
validate_option(srp_identity, {Username, Password}, _)
when is_list(Username),
is_list(Password), Username =/= "",
length(Username) =< 255 ->
{unicode:characters_to_binary(Username),
unicode:characters_to_binary(Password)};
validate_option(user_lookup_fun, undefined, _) ->
undefined;
validate_option(user_lookup_fun, {Fun, _} = Value, _)
when is_function(Fun, 3) ->
Value;
validate_option(use_ticket, Value, _)
when is_list(Value) ->
Value;
validate_option(verify, Value, _)
when Value == verify_none; Value == verify_peer ->
Value;
validate_option(verify_fun, undefined, _) ->
undefined;
validate_option(verify_fun, Fun, _) when is_function(Fun) ->
{fun(_,{bad_cert, _} = Reason, OldFun) ->
case OldFun([Reason]) of
true ->
{valid, OldFun};
false ->
{fail, Reason}
end;
(_,{extension, _}, UserState) ->
{unknown, UserState};
(_, valid, UserState) ->
{valid, UserState};
(_, valid_peer, UserState) ->
{valid, UserState}
end, Fun};
validate_option(verify_fun, {Fun, _} = Value, _) when is_function(Fun) ->
Value;
validate_option(versions, Versions, _) ->
validate_versions(Versions, Versions);
validate_option(Opt, undefined = Value, _) ->
AllOpts = maps:keys(?RULES),
case lists:member(Opt, AllOpts) of
true ->
Value;
false ->
throw({error, {options, {Opt, Value}}})
end;
validate_option(Opt, Value, _) ->
throw({error, {options, {Opt, Value}}}).
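%% handle_cb_info/1 (below): an old-style four-tuple cb_info lacks the passive message
%% tag, so it is derived from the data tag (e.g. tcp -> tcp_passive).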
handle_cb_info({V1, V2, V3, V4}) ->
{V1,V2,V3,V4, list_to_atom(atom_to_list(V2) ++ "_passive")};
handle_cb_info(CbInfo) ->
CbInfo.
handle_hashsigns_option(Value, Version) when is_list(Value)
andalso Version >= {3, 4} ->
case tls_v1:signature_schemes(Version, Value) of
[] ->
throw({error, {options,
no_supported_signature_schemes,
{signature_algs, Value}}});
_ ->
Value
end;
handle_hashsigns_option(Value, Version) when is_list(Value)
andalso Version =:= {3, 3} ->
case tls_v1:signature_algs(Version, Value) of
[] ->
throw({error, {options, no_supported_algorithms, {signature_algs, Value}}});
_ ->
Value
end;
handle_hashsigns_option(_, Version) when Version =:= {3, 3} ->
handle_hashsigns_option(tls_v1:default_signature_algs([Version]), Version);
handle_hashsigns_option(_, _Version) ->
undefined.
handle_signature_algorithms_option(Value, Version) when is_list(Value)
andalso Version >= {3, 4} ->
case tls_v1:signature_schemes(Version, Value) of
[] ->
throw({error, {options,
no_supported_signature_schemes,
{signature_algs_cert, Value}}});
_ ->
Value
end;
handle_signature_algorithms_option(_, _Version) ->
undefined.
validate_options([]) ->
[];
validate_options([{Opt, Value} | Tail]) ->
[{Opt, validate_option(Opt, Value)} | validate_options(Tail)].
validate_npn_ordering(client) ->
ok;
validate_npn_ordering(server) ->
ok;
validate_npn_ordering(Value) ->
throw({error, {options, {client_preferred_next_protocols, {invalid_precedence, Value}}}}).
validate_binary_list(Opt, List) ->
lists:foreach(
fun(Bin) when is_binary(Bin),
byte_size(Bin) > 0,
byte_size(Bin) < 256 ->
ok;
(Bin) ->
throw({error, {options, {Opt, {invalid_protocol, Bin}}}})
end, List).
validate_versions([], Versions) ->
Versions;
validate_versions([Version | Rest], Versions) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
case tls_record:sufficient_crypto_support(Version) of
true ->
tls_validate_versions(Rest, Versions);
false ->
throw({error, {options, {insufficient_crypto_support, {Version, {versions, Versions}}}}})
end;
validate_versions([Version | Rest], Versions) when Version == 'dtlsv1';
Version == 'dtlsv1.2'->
DTLSVer = dtls_record:protocol_version(Version),
case tls_record:sufficient_crypto_support(dtls_v1:corresponding_tls_version(DTLSVer)) of
true ->
dtls_validate_versions(Rest, Versions);
false ->
throw({error, {options, {insufficient_crypto_support, {Version, {versions, Versions}}}}})
end;
validate_versions([Version| _], Versions) ->
throw({error, {options, {Version, {versions, Versions}}}}).
tls_validate_versions([], Versions) ->
tls_validate_version_gap(Versions);
tls_validate_versions([Version | Rest], Versions) when Version == 'tlsv1.3';
Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1 ->
tls_validate_versions(Rest, Versions);
tls_validate_versions([Version| _], Versions) ->
throw({error, {options, {Version, {versions, Versions}}}}).
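%% TLS 1.3 must not be configured with a version gap: when 'tlsv1.3' is combined with
%% older versions, 'tlsv1.2' has to be part of the list as well (checked below).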
tls_validate_version_gap(Versions) ->
case lists:member('tlsv1.3', Versions) of
true when length(Versions) >= 2 ->
case lists:member('tlsv1.2', Versions) of
true ->
Versions;
false ->
throw({error, {options, missing_version, {'tlsv1.2', {versions, Versions}}}})
end;
_ ->
Versions
end.
dtls_validate_versions([], Versions) ->
Versions;
dtls_validate_versions([Version | Rest], Versions) when Version == 'dtlsv1';
Version == 'dtlsv1.2'->
dtls_validate_versions(Rest, Versions);
dtls_validate_versions([Ver| _], Versions) ->
throw({error, {options, {Ver, {versions, Versions}}}}).
%% The option cacerts overrides cacertsfile
ca_cert_default(_,_, [_|_]) ->
undefined;
ca_cert_default(verify_none, _, _) ->
undefined;
ca_cert_default(verify_peer, {Fun,_}, _) when is_function(Fun) ->
undefined;
%% Server that wants to verify_peer and has no verify_fun must have
%% some trusted certs.
ca_cert_default(verify_peer, undefined, _) ->
"".
emulated_options(undefined, undefined, Protocol, Opts) ->
case Protocol of
tls ->
tls_socket:emulated_options(Opts);
dtls ->
dtls_socket:emulated_options(Opts)
end;
emulated_options(Transport, Socket, Protocol, Opts) ->
EmulatedOptions = tls_socket:emulated_options(),
{ok, Original} = tls_socket:getopts(Transport, Socket, EmulatedOptions),
{Inet, Emulated0} = emulated_options(undefined, undefined, Protocol, Opts),
{Inet, lists:ukeymerge(1, Emulated0, Original)}.
handle_cipher_option(Value, Versions) when is_list(Value) ->
try binary_cipher_suites(Versions, Value) of
Suites ->
Suites
catch
exit:_ ->
throw({error, {options, {ciphers, Value}}});
error:_->
throw({error, {options, {ciphers, Value}}})
end.
binary_cipher_suites([{3,4} = Version], []) ->
    %% Defaults to all supported suites that do
    %% not require explicit configuration (TLS-1.3
    %% only mode).
default_binary_suites(exclusive, Version);
binary_cipher_suites([Version| _], []) ->
default_binary_suites(default, Version);
binary_cipher_suites(Versions, [Map|_] = Ciphers0) when is_map(Map) ->
Ciphers = [ssl_cipher_format:suite_map_to_bin(C) || C <- Ciphers0],
binary_cipher_suites(Versions, Ciphers);
binary_cipher_suites(Versions, [Tuple|_] = Ciphers0) when is_tuple(Tuple) ->
Ciphers = [ssl_cipher_format:suite_map_to_bin(tuple_to_map(C)) || C <- Ciphers0],
binary_cipher_suites(Versions, Ciphers);
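%% Already-binary suites are filtered against everything supported by the configured
%% versions; if none of them is supported, the defaults are used instead.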
binary_cipher_suites(Versions, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
All = all_suites(Versions),
case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of
[] ->
binary_cipher_suites(Versions, []);
Ciphers ->
Ciphers
end;
binary_cipher_suites(Versions, [Head | _] = Ciphers0) when is_list(Head) ->
Ciphers = [ssl_cipher_format:suite_openssl_str_to_map(C) || C <- Ciphers0],
binary_cipher_suites(Versions, Ciphers);
binary_cipher_suites(Versions, Ciphers0) ->
    %% Format: "RC4-SHA:RC4-MD5"
Ciphers = [ssl_cipher_format:suite_openssl_str_to_map(C) || C <- string:lexemes(Ciphers0, ":")],
binary_cipher_suites(Versions, Ciphers).
default_binary_suites(exclusive, {_, Minor}) ->
ssl_cipher:filter_suites(tls_v1:exclusive_suites(Minor));
default_binary_suites(default, Version) ->
ssl_cipher:filter_suites(ssl_cipher:suites(Version)).
all_suites([{3, 4 = Minor}]) ->
tls_v1:exclusive_suites(Minor);
all_suites([{3, 4} = Version0, Version1 |_]) ->
all_suites([Version0]) ++
ssl_cipher:all_suites(Version1) ++
ssl_cipher:anonymous_suites(Version1);
all_suites([Version|_]) ->
ssl_cipher:all_suites(Version) ++
ssl_cipher:anonymous_suites(Version).
tuple_to_map({Kex, Cipher, Mac}) ->
#{key_exchange => Kex,
cipher => Cipher,
mac => Mac,
prf => default_prf};
tuple_to_map({Kex, Cipher, Mac, Prf}) ->
#{key_exchange => Kex,
cipher => Cipher,
mac => tuple_to_map_mac(Cipher, Mac),
prf => Prf}.
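%% AEAD ciphers (AES-GCM, ChaCha20-Poly1305) carry no separate MAC algorithm, hence 'aead'.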
tuple_to_map_mac(aes_128_gcm, _) ->
aead;
tuple_to_map_mac(aes_256_gcm, _) ->
aead;
tuple_to_map_mac(chacha20_poly1305, _) ->
aead;
tuple_to_map_mac(_, MAC) ->
MAC.
handle_eccs_option(Value, Version) when is_list(Value) ->
{_Major, Minor} = tls_version(Version),
try tls_v1:ecc_curves(Minor, Value) of
Curves -> #elliptic_curves{elliptic_curve_list = Curves}
catch
exit:_ -> throw({error, {options, {eccs, Value}}});
error:_ -> throw({error, {options, {eccs, Value}}})
end.
handle_supported_groups_option(Value, Version) when is_list(Value) ->
{_Major, Minor} = tls_version(Version),
try tls_v1:groups(Minor, Value) of
Groups -> #supported_groups{supported_groups = Groups}
catch
exit:_ -> throw({error, {options, {supported_groups, Value}}});
error:_ -> throw({error, {options, {supported_groups, Value}}})
end.
unexpected_format(Error) ->
lists:flatten(io_lib:format("Unexpected error: ~p", [Error])).
file_error_format({error, Error})->
case file:format_error(Error) of
"unknown POSIX error" ->
"decoding error";
Str ->
Str
end;
file_error_format(_) ->
"decoding error".
file_desc(cacertfile) ->
"Invalid CA certificate file ";
file_desc(certfile) ->
"Invalid certificate file ";
file_desc(keyfile) ->
"Invalid key file ";
file_desc(dhfile) ->
"Invalid DH params file ".
detect(_Pred, []) ->
undefined;
detect(Pred, [H|T]) ->
case Pred(H) of
true ->
H;
_ ->
detect(Pred, T)
end.
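%% ALPN/NPN selection: the client walks its preferred protocols and picks the first one
%% advertised by the peer; the server walks the peer's advertised protocols and picks
%% the first one it supports itself. DefaultProtocol is used when nothing matches.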
make_next_protocol_selector(undefined) ->
undefined;
make_next_protocol_selector({client, AllProtocols, DefaultProtocol}) ->
fun(AdvertisedProtocols) ->
case detect(fun(PreferredProtocol) ->
lists:member(PreferredProtocol, AdvertisedProtocols)
end, AllProtocols) of
undefined ->
DefaultProtocol;
PreferredProtocol ->
PreferredProtocol
end
end;
make_next_protocol_selector({server, AllProtocols, DefaultProtocol}) ->
fun(AdvertisedProtocols) ->
case detect(fun(PreferredProtocol) ->
lists:member(PreferredProtocol, AllProtocols)
end,
AdvertisedProtocols) of
undefined ->
DefaultProtocol;
PreferredProtocol ->
PreferredProtocol
end
end.
connection_cb(tls) ->
tls_gen_connection;
connection_cb(dtls) ->
dtls_gen_connection;
connection_cb(Opts) ->
connection_cb(proplists:get_value(protocol, Opts, tls)).
record_cb(tls) ->
tls_record;
record_cb(dtls) ->
dtls_record;
record_cb(Opts) ->
record_cb(proplists:get_value(protocol, Opts, tls)).
binary_filename(FileName) ->
Enc = file:native_name_encoding(),
unicode:characters_to_binary(FileName, unicode, Enc).
%% Assert that basic options are on the format {Key, Value}
%% with a few exceptions and phase out log_alert
handle_option_format([], Acc) ->
lists:reverse(Acc);
handle_option_format([{log_alert, Bool} | Rest], Acc) when is_boolean(Bool) ->
case proplists:get_value(log_level, Acc ++ Rest, undefined) of
undefined ->
handle_option_format(Rest, [{log_level,
map_log_level(Bool)} | Acc]);
_ ->
handle_option_format(Rest, Acc)
end;
handle_option_format([{Key,_} = Opt | Rest], Acc) when is_atom(Key) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([{raw,_,_,_} = Opt | Rest], Acc) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([inet = Opt | Rest], Acc) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([inet6 = Opt | Rest], Acc) ->
handle_option_format(Rest, [Opt | Acc]);
handle_option_format([Value | _], _) ->
throw({option_not_a_key_value_tuple, Value}).
map_log_level(true) ->
notice;
map_log_level(false) ->
none.
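%% verify_none combined with fail_if_no_peer_cert = true is rejected as incompatible;
%% switching from verify_none to verify_peer resets verify_fun to undefined so that
%% public_key's default verification is used.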
handle_verify_option(verify_none, #{fail_if_no_peer_cert := false} = OptionsMap) ->
OptionsMap#{verify => verify_none};
handle_verify_option(verify_none, #{fail_if_no_peer_cert := true}) ->
throw({error, {options, incompatible,
{verify, verify_none},
{fail_if_no_peer_cert, true}}});
%% The verify_none default verify_fun is only kept as part of
%% the configuration. If 'verify' is later changed from verify_none to verify_peer,
%% the verify_fun is reset to undefined, and public_key's default verify_fun
%% will be used that performs a full validation.
handle_verify_option(verify_peer, #{verify := verify_none} = OptionsMap) ->
OptionsMap#{verify => verify_peer,
verify_fun => undefined};
handle_verify_option(verify_peer, OptionsMap) ->
OptionsMap#{verify => verify_peer};
handle_verify_option(Value, _) ->
throw({error, {options, {verify, Value}}}).
%% Added to handle default values for signature_algs in TLS 1.3
default_option_role_sign_algs(_, Value, _, Version) when Version >= {3,4} ->
Value;
default_option_role_sign_algs(Role, Value, Role, _) ->
Value;
default_option_role_sign_algs(_, _, _, _) ->
undefined.
default_option_role(Role, Value, Role) ->
Value;
default_option_role(_,_,_) ->
undefined.
default_cb_info(tls) ->
{gen_tcp, tcp, tcp_closed, tcp_error, tcp_passive};
default_cb_info(dtls) ->
{gen_udp, udp, udp_closed, udp_error, udp_passive}.
include_security_info([]) ->
false;
include_security_info([Item | Items]) ->
case lists:member(Item, [client_random, server_random, master_secret, keylog]) of
true ->
true;
false ->
include_security_info(Items)
end.
server_name_indication_default(Host) when is_list(Host) ->
string:strip(Host, right, $.);
server_name_indication_default(_) ->
undefined.
add_filter(undefined, Filters) ->
Filters;
add_filter(Filter, Filters) ->
[Filter | Filters].
maybe_client_warn_no_verify(#{verify := verify_none,
warn_verify_none := true,
log_level := LogLevel}, client) ->
ssl_logger:log(warning, LogLevel, #{description => "Authenticity is not established by certificate path validation",
reason => "Option {verify, verify_peer} and cacertfile/cacerts is missing"}, #{});
maybe_client_warn_no_verify(_,_) ->
ok.
|
3583b3cd48250b0fe28e5b291e995979e2e5dbb3ecc0da3fe53655059622e10e | deadpendency/deadpendency | TheMain.hs | module CRC.TheMain
( theMain,
)
where
import CRC.AppGoogleScopes (AppGoogleScopes)
import CRC.Loader.ComponentDetailsLoader (loadComponentDetails)
import CRC.Loader.ConfigLoader
import CRC.Model.AppContext (AppContext (..))
import CRC.Serve.Server (runServer)
import Common.Loader.CommonConfigLoader (loadCommonConfig)
import Common.Loader.GHAppAuthLoader (loadGitHubAppAuth)
import Common.Loader.GoogleEnvLoader (loadGoogleEnv)
import Common.Loader.HttpManagerLoader (loadHttpManager)
import Common.Loader.InstanceConfigLoader (loadInstanceConfig)
import Common.Loader.SecretLoader
import Common.Model.GitHub.GHAppRawPrivateKey
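-- Wires the service together: load configuration and Google credentials, obtain the
-- GitHub App secret, pre-authenticate the app, then hand the assembled AppContext to
-- the HTTP server.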
theMain :: IO ()
theMain = do
commonConfig <- loadCommonConfig
config <- loadConfig
httpManager <- loadHttpManager
instanceConfig <- loadInstanceConfig httpManager
googleEnv <- loadGoogleEnv @AppGoogleScopes httpManager
let webhookPrivateKeyName = config ^. #_githubPrivateKeySecretName
appId = config ^. #_appId
ghAppPrivateKey <- GHAppRawPrivateKey <$> loadSecret googleEnv webhookPrivateKeyName
ghAppAuthGlobal <- loadGitHubAppAuth appId ghAppPrivateKey True -- preload as crc will always do the initial install auth
let componentDetails = loadComponentDetails
let appContext =
AppContext
{ _googleEnv = googleEnv,
_commonConfig = commonConfig,
_instanceConfig = instanceConfig,
_componentDetails = componentDetails,
_ghAppAuthGlobal = ghAppAuthGlobal
}
runServer appContext
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/check-run-creator/src/CRC/TheMain.hs | haskell | preload as crc will always do the initial install auth | module CRC.TheMain
( theMain,
)
where
import CRC.AppGoogleScopes (AppGoogleScopes)
import CRC.Loader.ComponentDetailsLoader (loadComponentDetails)
import CRC.Loader.ConfigLoader
import CRC.Model.AppContext (AppContext (..))
import CRC.Serve.Server (runServer)
import Common.Loader.CommonConfigLoader (loadCommonConfig)
import Common.Loader.GHAppAuthLoader (loadGitHubAppAuth)
import Common.Loader.GoogleEnvLoader (loadGoogleEnv)
import Common.Loader.HttpManagerLoader (loadHttpManager)
import Common.Loader.InstanceConfigLoader (loadInstanceConfig)
import Common.Loader.SecretLoader
import Common.Model.GitHub.GHAppRawPrivateKey
theMain :: IO ()
theMain = do
commonConfig <- loadCommonConfig
config <- loadConfig
httpManager <- loadHttpManager
instanceConfig <- loadInstanceConfig httpManager
googleEnv <- loadGoogleEnv @AppGoogleScopes httpManager
let webhookPrivateKeyName = config ^. #_githubPrivateKeySecretName
appId = config ^. #_appId
  ghAppPrivateKey <- GHAppRawPrivateKey <$> loadSecret googleEnv webhookPrivateKeyName
  ghAppAuthGlobal <- loadGitHubAppAuth appId ghAppPrivateKey True
  let componentDetails = loadComponentDetails
let appContext =
AppContext
{ _googleEnv = googleEnv,
_commonConfig = commonConfig,
_instanceConfig = instanceConfig,
_componentDetails = componentDetails,
_ghAppAuthGlobal = ghAppAuthGlobal
}
runServer appContext
|
79e2288d71738657af938d696081ecfee0dff1bf9f76888bc8ae2ef725484a54 | ocaml-omake/omake | omake_value_print.mli |
val pp_print_target : Omake_value_type.target Lm_printf.t
val pp_print_wild_list : Lm_wild.in_patt list Lm_printf.t
val pp_print_source_list : ('a * Omake_value_type.source_core) list Lm_printf.t
val pp_print_value : Omake_value_type.t Lm_printf.t
val pp_print_simple_value : Omake_value_type.t Lm_printf.t
val pp_print_value_list : Omake_value_type.t list Lm_printf.t
val pp_print_path : Omake_value_type.path Lm_printf.t
val pp_print_item : Omake_value_type.item Lm_printf.t
val pp_print_exn : Omake_value_type.omake_error Lm_printf.t
(* Helpers, used in printing and for $(Fun.arity) function *)
val fun_arity :
Omake_value_type.keyword_param_value list ->
Omake_ir.param list -> Omake_ir.arity
val curry_fun_arity :
Omake_value_type.param_value list ->
Omake_value_type.keyword_param_value list ->
Omake_ir.param list -> Omake_value_type.keyword_value list -> Omake_ir.arity
| null | https://raw.githubusercontent.com/ocaml-omake/omake/08b2a83fb558f6eb6847566cbe1a562230da2b14/src/ir/omake_value_print.mli | ocaml | Helpers, used in printing and for $(Fun.arity) function |
val pp_print_target : Omake_value_type.target Lm_printf.t
val pp_print_wild_list : Lm_wild.in_patt list Lm_printf.t
val pp_print_source_list : ('a * Omake_value_type.source_core) list Lm_printf.t
val pp_print_value : Omake_value_type.t Lm_printf.t
val pp_print_simple_value : Omake_value_type.t Lm_printf.t
val pp_print_value_list : Omake_value_type.t list Lm_printf.t
val pp_print_path : Omake_value_type.path Lm_printf.t
val pp_print_item : Omake_value_type.item Lm_printf.t
val pp_print_exn : Omake_value_type.omake_error Lm_printf.t
val fun_arity :
Omake_value_type.keyword_param_value list ->
Omake_ir.param list -> Omake_ir.arity
val curry_fun_arity :
Omake_value_type.param_value list ->
Omake_value_type.keyword_param_value list ->
Omake_ir.param list -> Omake_value_type.keyword_value list -> Omake_ir.arity
|
5f091e5a882eacde0c25c745ab9f169b91295bd848fe52bc44f18d418eaa6fb8 | ucsd-progsys/dsolve | baseinst.ml | let _ = if true then assert false else 102
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/tests/POPL2008/baseinst.ml | ocaml | let _ = if true then assert false else 102
|
|
a0ed771b7567e4f7e775be10f1ef10474812d88b92ff527c9c46b15a2ce18416 | goldfirere/video-resources | Records.hs | # LANGUAGE TypeFamilies , FlexibleInstances , DataKinds , StandaloneKindSignatures ,
PolyKinds , MultiParamTypeClasses #
PolyKinds, MultiParamTypeClasses #-}
module Records where
import Data.Kind
data A = MkA { a_field :: Maybe B }
data B = MKB { b_field :: Maybe C }
data C = MkC { c_field :: [D] }
data D = MkD { d_field :: E }
data E = MkE { e_field :: Maybe Int }
type List = []
type Sort :: Type -> (Type, Maybe (Type -> Type))
type family Sort field_type where
Sort (Maybe a) = '(a, Just Maybe)
Sort (List a) = '(a, Just List)
Sort other = '(other, Nothing)
type family Fst a where
Fst '(a, _) = a
type family Snd a where
Snd '(_, b) = b
class Snd (Sort field) ~ wrapper => FieldType field wrapper where
get :: Maybe r -> (r -> field) -> Maybe (Fst (Sort field))
infixl 9 `get`
instance FieldType (Maybe b) (Just Maybe) where
get = (>>=)
instance FieldType (List a) (Just List) where
get (Just x) f | y:_ <- f x = Just y
| otherwise = Nothing
get Nothing _ = Nothing
instance Sort something_else ~ '(something_else, Nothing) => FieldType something_else Nothing where
get x f = fmap f x
goal :: A -> Maybe Int
goal a = Just a `get` a_field `get` b_field `get` c_field `get` d_field `get` e_field
| null | https://raw.githubusercontent.com/goldfirere/video-resources/8bc06dd701f308564c13ac1b802f8812158636d0/2021-02-22-closed-type-family-trick/Records.hs | haskell | # LANGUAGE TypeFamilies , FlexibleInstances , DataKinds , StandaloneKindSignatures ,
PolyKinds , MultiParamTypeClasses #
PolyKinds, MultiParamTypeClasses #-}
module Records where
import Data.Kind
data A = MkA { a_field :: Maybe B }
data B = MKB { b_field :: Maybe C }
data C = MkC { c_field :: [D] }
data D = MkD { d_field :: E }
data E = MkE { e_field :: Maybe Int }
type List = []
type Sort :: Type -> (Type, Maybe (Type -> Type))
type family Sort field_type where
Sort (Maybe a) = '(a, Just Maybe)
Sort (List a) = '(a, Just List)
Sort other = '(other, Nothing)
type family Fst a where
Fst '(a, _) = a
type family Snd a where
Snd '(_, b) = b
class Snd (Sort field) ~ wrapper => FieldType field wrapper where
get :: Maybe r -> (r -> field) -> Maybe (Fst (Sort field))
infixl 9 `get`
instance FieldType (Maybe b) (Just Maybe) where
get = (>>=)
instance FieldType (List a) (Just List) where
get (Just x) f | y:_ <- f x = Just y
| otherwise = Nothing
get Nothing _ = Nothing
instance Sort something_else ~ '(something_else, Nothing) => FieldType something_else Nothing where
get x f = fmap f x
goal :: A -> Maybe Int
goal a = Just a `get` a_field `get` b_field `get` c_field `get` d_field `get` e_field
|
|
7429b9065a24df267a0457e7916d726c3334e673eb855fcbf5a943710857965a | melange-re/melange | res_parens.ml | open Import.Ast_406
module ParsetreeViewer = Res_parsetree_viewer
type kind = Parenthesized | Braced of Location.t | Nothing
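(* Roughly: [Parenthesized] means the printer must wrap the node in parentheses,
   [Braced loc] keeps the user-written braces at [loc], and [Nothing] prints the node
   as-is. *)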
let expr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| _ ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| _ -> Nothing
end
let callExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| _ ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| _ when ParsetreeViewer.isUnaryExpression expr || ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let structureExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| _ when ParsetreeViewer.hasAttributes expr.pexp_attributes &&
not (ParsetreeViewer.isJsxExpression expr) -> Parenthesized
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| _ -> Nothing
end
let unaryExprOperand expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| expr when
ParsetreeViewer.isUnaryExpression expr ||
ParsetreeViewer.isBinaryExpression expr
-> Parenthesized
| {pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_extension _ (* readability? maybe remove *)
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let binaryExprOperand ~isLhs expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc = Pexp_constraint _ | Pexp_fun _ | Pexp_function _ | Pexp_newtype _} -> Parenthesized
| expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| expr when ParsetreeViewer.isTernaryExpr expr -> Parenthesized
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
} when isLhs -> Parenthesized
| _ -> Nothing
end
let subBinaryExprOperand parentOperator childOperator =
let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
let precChild = ParsetreeViewer.operatorPrecedence childOperator in
precParent > precChild ||
(precParent == precChild &&
not (ParsetreeViewer.flattenableOperators parentOperator childOperator)) ||
(* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *)
(parentOperator = "||" && childOperator = "&&")
let rhsBinaryExprOperand parentOperator rhs =
match rhs.Parsetree.pexp_desc with
| Parsetree.Pexp_apply(
{pexp_attributes = [];
pexp_desc = Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}},
[_, _left; _, _right]
) when ParsetreeViewer.isBinaryOperator operator &&
not (operatorLoc.loc_ghost && operator = "^") ->
let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
let precChild = ParsetreeViewer.operatorPrecedence operator in
precParent == precChild
| _ -> false
let flattenOperandRhs parentOperator rhs =
match rhs.Parsetree.pexp_desc with
| Parsetree.Pexp_apply(
{pexp_desc = Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}},
[_, _left; _, _right]
) when ParsetreeViewer.isBinaryOperator operator &&
not (operatorLoc.loc_ghost && operator = "^") ->
let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
let precChild = ParsetreeViewer.operatorPrecedence operator in
precParent >= precChild || rhs.pexp_attributes <> []
| Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
) -> false
| Pexp_fun _ when ParsetreeViewer.isUnderscoreApplySugar rhs -> false
| Pexp_fun _
| Pexp_newtype _
| Pexp_setfield _
| Pexp_constraint _ -> true
| _ when ParsetreeViewer.isTernaryExpr rhs -> true
| _ -> false
let lazyOrAssertExprRhs expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| {pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let isNegativeConstant constant =
let isNeg txt =
let len = String.length txt in
len > 0 && (String.get [@doesNotRaise]) txt 0 = '-'
in
match constant with
| Parsetree.Pconst_integer (i, _) | Pconst_float (i, _) when isNeg i -> true
| _ -> false
let fieldExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| expr when
ParsetreeViewer.isBinaryExpression expr ||
ParsetreeViewer.isUnaryExpression expr
-> Parenthesized
| {pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constant c } when isNegativeConstant c -> Parenthesized
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_extension _ (* %extension.x vs (%extension).x *)
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let setFieldExprRhs expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| _ -> Nothing
end
let ternaryOperand expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| {pexp_desc = Pexp_fun _ | Pexp_newtype _} ->
let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr expr in
begin match returnExpr.pexp_desc with
| Pexp_constraint _ -> Parenthesized
| _ -> Nothing
end
| _ -> Nothing
end
let startsWithMinus txt =
let len = String.length txt in
if len == 0 then
false
else
let s = (String.get [@doesNotRaise]) txt 0 in
s = '-'
let jsxPropExpr expr =
match expr.Parsetree.pexp_desc with
| Parsetree.Pexp_let _
| Pexp_sequence _
| Pexp_letexception _
| Pexp_letmodule _
| Pexp_open _ -> Nothing
| _ ->
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
begin match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc =
Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));
pexp_attributes = []}
when startsWithMinus x -> Parenthesized
| {Parsetree.pexp_desc =
Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ |
Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ |
Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ |
Pexp_let _ | Pexp_tuple _;
pexp_attributes = []
} -> Nothing
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
); pexp_attributes = []} -> Nothing
| _ -> Parenthesized
end
end
let jsxChildExpr expr =
match expr.Parsetree.pexp_desc with
| Parsetree.Pexp_let _
| Pexp_sequence _
| Pexp_letexception _
| Pexp_letmodule _
| Pexp_open _ -> Nothing
| _ ->
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
begin match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| _ ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));
pexp_attributes = []
} when startsWithMinus x -> Parenthesized
| {Parsetree.pexp_desc =
Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ |
Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ |
Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ |
Pexp_let _;
pexp_attributes = []
} -> Nothing
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
); pexp_attributes = []} -> Nothing
| expr when ParsetreeViewer.isJsxExpression expr -> Nothing
| _ -> Parenthesized
end
end
let binaryExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = _::_} as expr
when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| _ -> Nothing
end
let modTypeFunctorReturn modType = match modType with
| {Parsetree.pmty_desc = Pmty_with _} -> true
| _ -> false
(* Add parens for readability:
     module type Functor = SetLike => Set with type t = A.t
   This is actually:
     module type Functor = (SetLike => Set) with type t = A.t
*)
let modTypeWithOperand modType = match modType with
| {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true
| _ -> false
let modExprFunctorConstraint modType = match modType with
| {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true
| _ -> false
let bracedExpr expr = match expr.Parsetree.pexp_desc with
| Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
) -> false
| Pexp_constraint _ -> true
| _ -> false
let includeModExpr modExpr = match modExpr.Parsetree.pmod_desc with
| Parsetree.Pmod_constraint _ -> true
| _ -> false
let arrowReturnTypExpr typExpr = match typExpr.Parsetree.ptyp_desc with
| Parsetree.Ptyp_arrow _ -> true
| _ -> false
let patternRecordRowRhs (pattern : Parsetree.pattern) =
match pattern.ppat_desc with
| Ppat_constraint ({ppat_desc = Ppat_unpack _}, {ptyp_desc = Ptyp_package _}) -> false
| Ppat_constraint _ -> true
| _ -> false
| null | https://raw.githubusercontent.com/melange-re/melange/d6f41989ec092eea5a623171fe5e54e17fde0d10/jscomp/napkin/res_parens.ml | ocaml | readability? maybe remove
a && b || c, add parens to (a && b) for readability, who knows the difference by heart…
%extension.x vs (%extension).x | open Import.Ast_406
module ParsetreeViewer = Res_parsetree_viewer
type kind = Parenthesized | Braced of Location.t | Nothing
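(* Each classifier below decides how an expression must be printed in a given
   context: Parenthesized = wrap it in parens, Braced loc = keep the
   user-written braces recorded at loc, Nothing = print it as-is. *)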
let expr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| _ ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| _ -> Nothing
end
let callExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| _ ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| _ when ParsetreeViewer.isUnaryExpression expr || ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let structureExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| _ when ParsetreeViewer.hasAttributes expr.pexp_attributes &&
not (ParsetreeViewer.isJsxExpression expr) -> Parenthesized
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| _ -> Nothing
end
let unaryExprOperand expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| expr when
ParsetreeViewer.isUnaryExpression expr ||
ParsetreeViewer.isBinaryExpression expr
-> Parenthesized
| {pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let binaryExprOperand ~isLhs expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc = Pexp_constraint _ | Pexp_fun _ | Pexp_function _ | Pexp_newtype _} -> Parenthesized
| expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| expr when ParsetreeViewer.isTernaryExpr expr -> Parenthesized
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
} when isLhs -> Parenthesized
| _ -> Nothing
end
let subBinaryExprOperand parentOperator childOperator =
let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
let precChild = ParsetreeViewer.operatorPrecedence childOperator in
precParent > precChild ||
(precParent == precChild &&
not (ParsetreeViewer.flattenableOperators parentOperator childOperator)) ||
(parentOperator = "||" && childOperator = "&&")
let rhsBinaryExprOperand parentOperator rhs =
match rhs.Parsetree.pexp_desc with
| Parsetree.Pexp_apply(
{pexp_attributes = [];
pexp_desc = Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}},
[_, _left; _, _right]
) when ParsetreeViewer.isBinaryOperator operator &&
not (operatorLoc.loc_ghost && operator = "^") ->
let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
let precChild = ParsetreeViewer.operatorPrecedence operator in
precParent == precChild
| _ -> false
let flattenOperandRhs parentOperator rhs =
match rhs.Parsetree.pexp_desc with
| Parsetree.Pexp_apply(
{pexp_desc = Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}},
[_, _left; _, _right]
) when ParsetreeViewer.isBinaryOperator operator &&
not (operatorLoc.loc_ghost && operator = "^") ->
let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
let precChild = ParsetreeViewer.operatorPrecedence operator in
precParent >= precChild || rhs.pexp_attributes <> []
| Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
) -> false
| Pexp_fun _ when ParsetreeViewer.isUnderscoreApplySugar rhs -> false
| Pexp_fun _
| Pexp_newtype _
| Pexp_setfield _
| Pexp_constraint _ -> true
| _ when ParsetreeViewer.isTernaryExpr rhs -> true
| _ -> false
let lazyOrAssertExprRhs expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| {pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let isNegativeConstant constant =
let isNeg txt =
let len = String.length txt in
len > 0 && (String.get [@doesNotRaise]) txt 0 = '-'
in
match constant with
| Parsetree.Pconst_integer (i, _) | Pconst_float (i, _) when isNeg i -> true
| _ -> false
let fieldExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = attrs} when
begin match ParsetreeViewer.filterParsingAttrs attrs with
| _::_ -> true
| [] -> false
end
-> Parenthesized
| expr when
ParsetreeViewer.isBinaryExpression expr ||
ParsetreeViewer.isUnaryExpression expr
-> Parenthesized
| {pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constant c } when isNegativeConstant c -> Parenthesized
| {pexp_desc = Pexp_fun _}
when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
| {pexp_desc =
Pexp_lazy _
| Pexp_assert _
| Pexp_extension _
| Pexp_fun _
| Pexp_newtype _
| Pexp_function _
| Pexp_constraint _
| Pexp_setfield _
| Pexp_match _
| Pexp_try _
| Pexp_while _
| Pexp_for _
| Pexp_ifthenelse _
} -> Parenthesized
| _ -> Nothing
end
let setFieldExprRhs expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| _ -> Nothing
end
let ternaryOperand expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
)} -> Nothing
| {pexp_desc = Pexp_constraint _ } -> Parenthesized
| {pexp_desc = Pexp_fun _ | Pexp_newtype _} ->
let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr expr in
begin match returnExpr.pexp_desc with
| Pexp_constraint _ -> Parenthesized
| _ -> Nothing
end
| _ -> Nothing
end
let startsWithMinus txt =
let len = String.length txt in
if len == 0 then
false
else
let s = (String.get [@doesNotRaise]) txt 0 in
s = '-'
let jsxPropExpr expr =
match expr.Parsetree.pexp_desc with
| Parsetree.Pexp_let _
| Pexp_sequence _
| Pexp_letexception _
| Pexp_letmodule _
| Pexp_open _ -> Nothing
| _ ->
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
begin match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_desc =
Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));
pexp_attributes = []}
when startsWithMinus x -> Parenthesized
| {Parsetree.pexp_desc =
Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ |
Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ |
Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ |
Pexp_let _ | Pexp_tuple _;
pexp_attributes = []
} -> Nothing
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
); pexp_attributes = []} -> Nothing
| _ -> Parenthesized
end
end
let jsxChildExpr expr =
match expr.Parsetree.pexp_desc with
| Parsetree.Pexp_let _
| Pexp_sequence _
| Pexp_letexception _
| Pexp_letmodule _
| Pexp_open _ -> Nothing
| _ ->
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
begin match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| _ ->
begin match expr with
| {Parsetree.pexp_desc = Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));
pexp_attributes = []
} when startsWithMinus x -> Parenthesized
| {Parsetree.pexp_desc =
Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ |
Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ |
Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ |
Pexp_let _;
pexp_attributes = []
} -> Nothing
| {Parsetree.pexp_desc = Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
); pexp_attributes = []} -> Nothing
| expr when ParsetreeViewer.isJsxExpression expr -> Nothing
| _ -> Parenthesized
end
end
let binaryExpr expr =
let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
match optBraces with
| Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
| None ->
begin match expr with
| {Parsetree.pexp_attributes = _::_} as expr
when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
| _ -> Nothing
end
let modTypeFunctorReturn modType = match modType with
| {Parsetree.pmty_desc = Pmty_with _} -> true
| _ -> false
(* Add parens for readability:
     module type Functor = SetLike => Set with type t = A.t
   This is actually:
     module type Functor = (SetLike => Set) with type t = A.t
*)
let modTypeWithOperand modType = match modType with
| {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true
| _ -> false
let modExprFunctorConstraint modType = match modType with
| {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true
| _ -> false
let bracedExpr expr = match expr.Parsetree.pexp_desc with
| Pexp_constraint (
{pexp_desc = Pexp_pack _},
{ptyp_desc = Ptyp_package _}
) -> false
| Pexp_constraint _ -> true
| _ -> false
let includeModExpr modExpr = match modExpr.Parsetree.pmod_desc with
| Parsetree.Pmod_constraint _ -> true
| _ -> false
let arrowReturnTypExpr typExpr = match typExpr.Parsetree.ptyp_desc with
| Parsetree.Ptyp_arrow _ -> true
| _ -> false
let patternRecordRowRhs (pattern : Parsetree.pattern) =
match pattern.ppat_desc with
| Ppat_constraint ({ppat_desc = Ppat_unpack _}, {ptyp_desc = Ptyp_package _}) -> false
| Ppat_constraint _ -> true
| _ -> false
|
7308f55a9325218583ff3b2b5de6b7607db358a66855ff11c2740b2a38b54ad6 | cedlemo/OCaml-GI-ctypes-bindings-generator | Tool_item.ml | open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
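(* Note: the tool item is exposed as a raw untyped pointer (unit ptr), so these
   bindings do not distinguish a GtkToolItem from any other GObject pointer at
   the OCaml type level. *)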
let create =
foreign "gtk_tool_item_new" (void @-> returning (ptr t_typ))
let get_ellipsize_mode =
foreign "gtk_tool_item_get_ellipsize_mode" (t_typ @-> returning (Ellipsize_mode.t_view))
let get_expand =
foreign "gtk_tool_item_get_expand" (t_typ @-> returning (bool))
let get_homogeneous =
foreign "gtk_tool_item_get_homogeneous" (t_typ @-> returning (bool))
let get_icon_size =
foreign "gtk_tool_item_get_icon_size" (t_typ @-> returning (int32_t))
let get_is_important =
foreign "gtk_tool_item_get_is_important" (t_typ @-> returning (bool))
let get_orientation =
foreign "gtk_tool_item_get_orientation" (t_typ @-> returning (Orientation.t_view))
let get_proxy_menu_item =
foreign "gtk_tool_item_get_proxy_menu_item" (t_typ @-> string @-> returning (ptr_opt Widget.t_typ))
let get_relief_style =
foreign "gtk_tool_item_get_relief_style" (t_typ @-> returning (Relief_style.t_view))
let get_text_alignment =
foreign "gtk_tool_item_get_text_alignment" (t_typ @-> returning (float))
let get_text_orientation =
foreign "gtk_tool_item_get_text_orientation" (t_typ @-> returning (Orientation.t_view))
let get_text_size_group =
foreign "gtk_tool_item_get_text_size_group" (t_typ @-> returning (ptr Size_group.t_typ))
let get_toolbar_style =
foreign "gtk_tool_item_get_toolbar_style" (t_typ @-> returning (Toolbar_style.t_view))
let get_use_drag_window =
foreign "gtk_tool_item_get_use_drag_window" (t_typ @-> returning (bool))
let get_visible_horizontal =
foreign "gtk_tool_item_get_visible_horizontal" (t_typ @-> returning (bool))
let get_visible_vertical =
foreign "gtk_tool_item_get_visible_vertical" (t_typ @-> returning (bool))
let rebuild_menu =
foreign "gtk_tool_item_rebuild_menu" (t_typ @-> returning (void))
let retrieve_proxy_menu_item =
foreign "gtk_tool_item_retrieve_proxy_menu_item" (t_typ @-> returning (ptr Widget.t_typ))
let set_expand =
foreign "gtk_tool_item_set_expand" (t_typ @-> bool @-> returning (void))
let set_homogeneous =
foreign "gtk_tool_item_set_homogeneous" (t_typ @-> bool @-> returning (void))
let set_is_important =
foreign "gtk_tool_item_set_is_important" (t_typ @-> bool @-> returning (void))
let set_proxy_menu_item =
foreign "gtk_tool_item_set_proxy_menu_item" (t_typ @-> string @-> ptr_opt Widget.t_typ @-> returning (void))
let set_tooltip_markup =
foreign "gtk_tool_item_set_tooltip_markup" (t_typ @-> string @-> returning (void))
let set_tooltip_text =
foreign "gtk_tool_item_set_tooltip_text" (t_typ @-> string @-> returning (void))
let set_use_drag_window =
foreign "gtk_tool_item_set_use_drag_window" (t_typ @-> bool @-> returning (void))
let set_visible_horizontal =
foreign "gtk_tool_item_set_visible_horizontal" (t_typ @-> bool @-> returning (void))
let set_visible_vertical =
foreign "gtk_tool_item_set_visible_vertical" (t_typ @-> bool @-> returning (void))
let toolbar_reconfigured =
foreign "gtk_tool_item_toolbar_reconfigured" (t_typ @-> returning (void))
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Tool_item.ml | ocaml | open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_tool_item_new" (void @-> returning (ptr t_typ))
let get_ellipsize_mode =
foreign "gtk_tool_item_get_ellipsize_mode" (t_typ @-> returning (Ellipsize_mode.t_view))
let get_expand =
foreign "gtk_tool_item_get_expand" (t_typ @-> returning (bool))
let get_homogeneous =
foreign "gtk_tool_item_get_homogeneous" (t_typ @-> returning (bool))
let get_icon_size =
foreign "gtk_tool_item_get_icon_size" (t_typ @-> returning (int32_t))
let get_is_important =
foreign "gtk_tool_item_get_is_important" (t_typ @-> returning (bool))
let get_orientation =
foreign "gtk_tool_item_get_orientation" (t_typ @-> returning (Orientation.t_view))
let get_proxy_menu_item =
foreign "gtk_tool_item_get_proxy_menu_item" (t_typ @-> string @-> returning (ptr_opt Widget.t_typ))
let get_relief_style =
foreign "gtk_tool_item_get_relief_style" (t_typ @-> returning (Relief_style.t_view))
let get_text_alignment =
foreign "gtk_tool_item_get_text_alignment" (t_typ @-> returning (float))
let get_text_orientation =
foreign "gtk_tool_item_get_text_orientation" (t_typ @-> returning (Orientation.t_view))
let get_text_size_group =
foreign "gtk_tool_item_get_text_size_group" (t_typ @-> returning (ptr Size_group.t_typ))
let get_toolbar_style =
foreign "gtk_tool_item_get_toolbar_style" (t_typ @-> returning (Toolbar_style.t_view))
let get_use_drag_window =
foreign "gtk_tool_item_get_use_drag_window" (t_typ @-> returning (bool))
let get_visible_horizontal =
foreign "gtk_tool_item_get_visible_horizontal" (t_typ @-> returning (bool))
let get_visible_vertical =
foreign "gtk_tool_item_get_visible_vertical" (t_typ @-> returning (bool))
let rebuild_menu =
foreign "gtk_tool_item_rebuild_menu" (t_typ @-> returning (void))
let retrieve_proxy_menu_item =
foreign "gtk_tool_item_retrieve_proxy_menu_item" (t_typ @-> returning (ptr Widget.t_typ))
let set_expand =
foreign "gtk_tool_item_set_expand" (t_typ @-> bool @-> returning (void))
let set_homogeneous =
foreign "gtk_tool_item_set_homogeneous" (t_typ @-> bool @-> returning (void))
let set_is_important =
foreign "gtk_tool_item_set_is_important" (t_typ @-> bool @-> returning (void))
let set_proxy_menu_item =
foreign "gtk_tool_item_set_proxy_menu_item" (t_typ @-> string @-> ptr_opt Widget.t_typ @-> returning (void))
let set_tooltip_markup =
foreign "gtk_tool_item_set_tooltip_markup" (t_typ @-> string @-> returning (void))
let set_tooltip_text =
foreign "gtk_tool_item_set_tooltip_text" (t_typ @-> string @-> returning (void))
let set_use_drag_window =
foreign "gtk_tool_item_set_use_drag_window" (t_typ @-> bool @-> returning (void))
let set_visible_horizontal =
foreign "gtk_tool_item_set_visible_horizontal" (t_typ @-> bool @-> returning (void))
let set_visible_vertical =
foreign "gtk_tool_item_set_visible_vertical" (t_typ @-> bool @-> returning (void))
let toolbar_reconfigured =
foreign "gtk_tool_item_toolbar_reconfigured" (t_typ @-> returning (void))
|
|
df1943abc5c875892cf2dda5c386a216841e955ce3dac40f50c24a41b976d39c | furkan3ayraktar/clojure-polylith-realworld-example-app | store_test.clj | (ns clojure.realworld.comment.store-test
(:require [clj-time.core :as t]
[clojure.java.jdbc :as jdbc]
[clojure.realworld.comment.store :as store]
[clojure.realworld.database.interface :as database]
[clojure.test :refer [deftest is use-fixtures]]))
(defn test-db
([] {:classname "org.sqlite.JDBC"
:subprotocol "sqlite"
:subname "test.db"})
([_] (test-db)))
(defn prepare-for-tests [f]
(with-redefs [database/db test-db]
(let [db (test-db)]
(database/generate-db db)
(f)
(database/drop-db db))))
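;; Each test runs against a throwaway SQLite database: the fixture rebinds
;; database/db to the local test.db file, creates the schema, runs the test,
;; and finally drops the database again.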
(use-fixtures :each prepare-for-tests)
(deftest comments--no-comments--return-empty-vector
(let [res (store/comments 1)]
(is (= [] res))))
(deftest comments--some-comments--return-all-comments
(let [_ (jdbc/insert-multi! (test-db) :comment [{:articleId 1 :body "body1"}
{:articleId 1 :body "body2"}
{:articleId 1 :body "body3"}
{:articleId 1 :body "body4"}])
res (store/comments 1)]
(is (= 4 (count res)))))
(deftest find-by-id--comment-exists--return-comment
(let [_ (jdbc/insert! (test-db) :comment {:body "body"})
comment (store/find-by-id 1)]
(is (= "body" (:body comment)))))
(deftest find-by-id--comment-does-not-exist--return-nil
(let [comment (store/find-by-id 1)]
(is (nil? comment))))
(deftest add-comment!--test
(let [now (t/now)
comment {:body "body"
:createdAt now
:updatedAt now
:userId 1
:articleId 1}
res (store/add-comment! comment)
added (store/find-by-id 1)]
(is (= (assoc comment :id 1
:createdAt (-> comment :createdAt str)
:updatedAt (-> comment :updatedAt str))
added))
(is (= 1 res))))
(deftest delete-comment!--test
(let [now (t/now)
_ (store/add-comment! {:body "body"
:createdAt now
:updatedAt now
:userId 1
:articleId 1})
comment-before (store/find-by-id 1)
_ (store/delete-comment! 1)
comment-after (store/find-by-id 1)]
(is (not (nil? comment-before)))
(is (nil? comment-after))))
| null | https://raw.githubusercontent.com/furkan3ayraktar/clojure-polylith-realworld-example-app/7703ee7af93887ea600d1c05e400255303a6ed47/components/comment/test/clojure/realworld/comment/store_test.clj | clojure | (ns clojure.realworld.comment.store-test
(:require [clj-time.core :as t]
[clojure.java.jdbc :as jdbc]
[clojure.realworld.comment.store :as store]
[clojure.realworld.database.interface :as database]
[clojure.test :refer [deftest is use-fixtures]]))
(defn test-db
([] {:classname "org.sqlite.JDBC"
:subprotocol "sqlite"
:subname "test.db"})
([_] (test-db)))
(defn prepare-for-tests [f]
(with-redefs [database/db test-db]
(let [db (test-db)]
(database/generate-db db)
(f)
(database/drop-db db))))
(use-fixtures :each prepare-for-tests)
(deftest comments--no-comments--return-empty-vector
(let [res (store/comments 1)]
(is (= [] res))))
(deftest comments--some-comments--return-all-comments
(let [_ (jdbc/insert-multi! (test-db) :comment [{:articleId 1 :body "body1"}
{:articleId 1 :body "body2"}
{:articleId 1 :body "body3"}
{:articleId 1 :body "body4"}])
res (store/comments 1)]
(is (= 4 (count res)))))
(deftest find-by-id--comment-exists--return-comment
(let [_ (jdbc/insert! (test-db) :comment {:body "body"})
comment (store/find-by-id 1)]
(is (= "body" (:body comment)))))
(deftest find-by-id--comment-does-not-exist--return-nil
(let [comment (store/find-by-id 1)]
(is (nil? comment))))
(deftest add-comment!--test
(let [now (t/now)
comment {:body "body"
:createdAt now
:updatedAt now
:userId 1
:articleId 1}
res (store/add-comment! comment)
added (store/find-by-id 1)]
(is (= (assoc comment :id 1
:createdAt (-> comment :createdAt str)
:updatedAt (-> comment :updatedAt str))
added))
(is (= 1 res))))
(deftest delete-comment!--test
(let [now (t/now)
_ (store/add-comment! {:body "body"
:createdAt now
:updatedAt now
:userId 1
:articleId 1})
comment-before (store/find-by-id 1)
_ (store/delete-comment! 1)
comment-after (store/find-by-id 1)]
(is (not (nil? comment-before)))
(is (nil? comment-after))))
|
|
2ba6b19acbed719ac93e43e628f83f0a6595faba852beb79f6670b3a99cea549 | NorfairKing/smos | Actions.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Smos.Docs.Site.Handler.Actions
( getSmosActionsR,
)
where
import Data.List
import Smos.Actions
import Smos.Docs.Site.Foundation
import Smos.Types
getSmosActionsR :: Handler Html
getSmosActionsR =
defaultLayout $ do
setSmosTitle "Actions"
setDescriptionIdemp "A full reference list of all actions that can be bound to keys"
$(widgetFile "smos-actions")
actionTable :: [AnyAction] -> Widget
actionTable aas =
let ws = map actionRow aas
in [whamlet|
<table .is-bordered .is-striped .is-fullwidth>
<colgroup>
<col span="1" style="width: 15%;">
<col span="1" style="width: 85%;">
<thead>
<tr>
<th>
Name
<th>
Description
<tbody>
$forall w <- ws
^{w}
|]
actionRow :: AnyAction -> Widget
actionRow aa =
let actionName = actionNameText $ anyActionName aa
in [whamlet|
<tr>
<td>
<a name=#{actionName} href="##{actionName}">
<code>
#{actionName}
<td>
#{anyActionDescription aa}
|]
| null | https://raw.githubusercontent.com/NorfairKing/smos/3bb953f467d83aacec1ce1dd59b687c87258559e/smos-docs-site/src/Smos/Docs/Site/Handler/Actions.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE QuasiQuotes #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Smos.Docs.Site.Handler.Actions
( getSmosActionsR,
)
where
import Data.List
import Smos.Actions
import Smos.Docs.Site.Foundation
import Smos.Types
getSmosActionsR :: Handler Html
getSmosActionsR =
defaultLayout $ do
setSmosTitle "Actions"
setDescriptionIdemp "A full reference list of all actions that can be bound to keys"
$(widgetFile "smos-actions")
actionTable :: [AnyAction] -> Widget
actionTable aas =
let ws = map actionRow aas
in [whamlet|
<table .is-bordered .is-striped .is-fullwidth>
<colgroup>
<col span="1" style="width: 15%;">
<col span="1" style="width: 85%;">
<thead>
<tr>
<th>
Name
<th>
Description
<tbody>
$forall w <- ws
^{w}
|]
actionRow :: AnyAction -> Widget
actionRow aa =
let actionName = actionNameText $ anyActionName aa
in [whamlet|
<tr>
<td>
<a name=#{actionName} href="##{actionName}">
<code>
#{actionName}
<td>
#{anyActionDescription aa}
|]
|
5142545a77ac72d5314ff8f58d7704bd5e7d39f5c9a8b3416b75e8e791588733 | ghc/testsuite | tcfail156.hs | # LANGUAGE ExistentialQuantification #
-- Illegal existential context on a newtype
module ShouldFail where
newtype Foo = forall a . Foo a
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/tcfail156.hs | haskell | Illegal existential context on a newtype | # LANGUAGE ExistentialQuantification #
{-# LANGUAGE ExistentialQuantification #-}
module ShouldFail where
newtype Foo = forall a . Foo a
|
ac151f7561a8925b2136443f499212fc606bdff75725ce358cdbf657f3cb69ce | jrh13/hol-light | holby.ml | (* ========================================================================= *)
(* A HOL "by" tactic, doing Mizar-like things, trying something that is *)
(* sufficient for HOL's basic rules, trying a few other things like *)
(* arithmetic, and finally if all else fails using MESON_TAC[]. *)
(* ========================================================================= *)
(* ------------------------------------------------------------------------- *)
(* More refined net lookup that double-checks conditions like matchability. *)
(* ------------------------------------------------------------------------- *)
let matching_enter tm y net =
enter [] (tm,((fun tm' -> can (term_match [] tm) tm'),y)) net;;
let unconditional_enter (tm,y) net =
enter [] (tm,((fun t -> true),y)) net;;
let conditional_enter (tm,condy) net =
enter [] (tm,condy) net;;
let careful_lookup tm net =
map snd (filter (fun (c,y) -> c tm) (lookup tm net));;
(* ------------------------------------------------------------------------- *)
(* Transform theorem list to simplify, eliminate redundant connectives and *)
(* split the problem into (generally multiple) subproblems. Then, call the *)
(* prover given as the first argument on each component. *)
(* ------------------------------------------------------------------------- *)
let SPLIT_THEN =
let action_false th f oths = th
and action_true th f oths = f oths
and action_conj th f oths =
f (CONJUNCT1 th :: CONJUNCT2 th :: oths)
and action_disj th f oths =
let th1 = f (ASSUME(lhand(concl th)) :: oths)
and th2 = f (ASSUME(rand(concl th)) :: oths) in
DISJ_CASES th th1 th2
and action_taut tm =
let pfun = PART_MATCH lhs (TAUT tm) in
let prule th = EQ_MP (pfun (concl th)) th in
lhand tm,(fun th f oths -> f(prule th :: oths)) in
let enet = itlist unconditional_enter
[`F`,action_false;
`T`,action_true;
`p /\ q`,action_conj;
`p \/ q`,action_disj;
action_taut `(p ==> q) <=> ~p \/ q`;
action_taut `~F <=> T`;
action_taut `~T <=> F`;
action_taut `~(~p) <=> p`;
action_taut `~(p /\ q) <=> ~p \/ ~q`;
action_taut `~(p \/ q) <=> ~p /\ ~q`;
action_taut `~(p ==> q) <=> p /\ ~q`;
action_taut `p /\ F <=> F`;
action_taut `F /\ p <=> F`;
action_taut `p /\ T <=> p`;
action_taut `T /\ p <=> p`;
action_taut `p \/ F <=> p`;
action_taut `F \/ p <=> p`;
action_taut `p \/ T <=> T`;
action_taut `T \/ p <=> T`]
(let tm,act = action_taut `~(p <=> q) <=> p /\ ~q \/ ~p /\ q` in
let cond tm = type_of(rand(rand tm)) = bool_ty in
conditional_enter (tm,(cond,act))
(let tm,act = action_taut `(p <=> q) <=> p /\ q \/ ~p /\ ~q` in
let cond tm = type_of(rand tm) = bool_ty in
conditional_enter (tm,(cond,act)) empty_net)) in
fun prover ->
let rec splitthen splat tosplit =
match tosplit with
[] -> prover (rev splat)
| th::oths ->
let funs = careful_lookup (concl th) enet in
if funs = [] then splitthen (th::splat) oths
else (hd funs) th (splitthen splat) oths in
splitthen [];;
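(* Sketch of intended use (illustrative, not part of the original file):
     SPLIT_THEN prover [th1; ...; thn]
   strips T/F, splits conjunctions, case-splits disjunctions and rewrites
   implications and negations inward, so that "prover" is only ever applied
   to lists of atomic or negated-atomic theorems; see PRECHECKER_THEN below. *)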
(* ------------------------------------------------------------------------- *)
(* A similar thing that also introduces constants (but not functions) *)
(* and does some slight first-order simplification like trivial miniscoping. *)
(* ------------------------------------------------------------------------- *)
let SPLIT_FOL_THEN =
let action_false th f splat oths = th
and action_true th f splat oths = f oths
and action_conj th f splat oths =
f (CONJUNCT1 th :: CONJUNCT2 th :: oths)
and action_disj th f splat oths =
let th1 = f (ASSUME(lhand(concl th)) :: oths)
and th2 = f (ASSUME(rand(concl th)) :: oths) in
DISJ_CASES th th1 th2
and action_exists th f splat oths =
let v,bod = dest_exists(concl th) in
let vars = itlist (union o thm_frees) (oths @ splat) (thm_frees th) in
let v' = variant vars v in
let th' = ASSUME (subst [v',v] bod) in
CHOOSE (v',th) (f (th'::oths))
and action_taut tm =
let pfun = PART_MATCH lhs (TAUT tm) in
let prule th = EQ_MP (pfun (concl th)) th in
lhand tm,(fun th f splat oths -> f(prule th :: oths))
and action_fol tm =
let pfun = PART_MATCH lhs (prove(tm,MESON_TAC[])) in
let prule th = EQ_MP (pfun (concl th)) th in
lhand tm,(fun th f splat oths -> f(prule th :: oths)) in
let enet = itlist unconditional_enter
[`F`,action_false;
`T`,action_true;
`p /\ q`,action_conj;
`p \/ q`,action_disj;
`?x. P x`,action_exists;
action_taut `~(~p) <=> p`;
action_taut `~(p /\ q) <=> ~p \/ ~q`;
action_taut `~(p \/ q) <=> ~p /\ ~q`;
action_fol `~(!x. P x) <=> (?x. ~(P x))`;
action_fol `(!x. P x /\ Q x) <=> (!x. P x) /\ (!x. Q x)`]
empty_net in
fun prover ->
let rec splitthen splat tosplit =
match tosplit with
[] -> prover (rev splat)
| th::oths ->
let funs = careful_lookup (concl th) enet in
if funs = [] then splitthen (th::splat) oths
else (hd funs) th (splitthen splat) splat oths in
splitthen [];;
(* ------------------------------------------------------------------------- *)
(* Do the basic "semantic correlates" stuff. *)
(* This is more like NNF than Freek's version. *)
(* ------------------------------------------------------------------------- *)
let CORRELATE_RULE =
PURE_REWRITE_RULE
[TAUT `(a <=> b) <=> (a ==> b) /\ (b ==> a)`;
TAUT `(a ==> b) <=> ~a \/ b`;
DE_MORGAN_THM;
TAUT `~(~a) <=> a`;
TAUT `~T <=> F`;
TAUT `~F <=> T`;
TAUT `T /\ p <=> p`;
TAUT `p /\ T <=> p`;
TAUT `F /\ p <=> F`;
TAUT `p /\ F <=> F`;
TAUT `T \/ p <=> T`;
TAUT `p \/ T <=> T`;
TAUT `F \/ p <=> p`;
TAUT `p \/ F <=> p`;
GSYM CONJ_ASSOC; GSYM DISJ_ASSOC;
prove(`(?x. P x) <=> ~(!x. ~(P x))`,MESON_TAC[])];;
(* ------------------------------------------------------------------------- *)
(* Look for an immediate contradictory pair of theorems. This is quadratic, *)
(* but I doubt if that's much of an issue in practice. We could do something *)
(* fancier, but need to be careful over alpha-equivalence if sorting. *)
(* ------------------------------------------------------------------------- *)
let THMLIST_CONTR_RULE =
let CONTR_PAIR_THM = UNDISCH_ALL(TAUT `p ==> ~p ==> F`)
and p_tm = `p:bool` in
fun ths ->
let ths_n,ths_p = partition (is_neg o concl) ths in
let th_n = find (fun thn -> let tm = rand(concl thn) in
exists (aconv tm o concl) ths_p) ths_n in
let tm = rand(concl th_n) in
let th_p = find (aconv tm o concl) ths_p in
itlist PROVE_HYP [th_p; th_n] (INST [tm,p_tm] CONTR_PAIR_THM);;
(* ------------------------------------------------------------------------- *)
(* Hence something similar to Freek's "prechecker". *)
(* ------------------------------------------------------------------------- *)
let PRECHECKER_THEN prover =
SPLIT_THEN (fun ths -> try THMLIST_CONTR_RULE ths
with Failure _ ->
SPLIT_FOL_THEN prover (map CORRELATE_RULE ths));;
(* ------------------------------------------------------------------------- *)
(* Lazy equations for use in congruence closure. *)
(* ------------------------------------------------------------------------- *)
type lazyeq = Lazy of (term * term) * (unit -> thm);;
let cache f =
let store = ref TRUTH in
fun () -> let th = !store in
if is_eq(concl th) then th else
let th' = f() in
(store := th'; th');;
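(* Note: "store" initially holds TRUTH, whose conclusion is not an equation,
   so the first call runs f() and caches the resulting equational theorem;
   later calls return the cached theorem without re-proving it. *)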
let lazy_eq th =
Lazy((dest_eq(concl th)),(fun () -> th));;
let lazy_eval (Lazy(_,f)) = f();;
let REFL' t = Lazy((t,t),cache(fun () -> REFL t));;
let SYM' = fun (Lazy((t,t'),f)) -> Lazy((t',t),cache(fun () -> SYM(f ())));;
let TRANS' =
fun (Lazy((s,s'),f)) (Lazy((t,t'),g)) ->
if not(aconv s' t) then failwith "TRANS'"
else Lazy((s,t'),cache(fun () -> TRANS (f ()) (g ())));;
let MK_COMB' =
fun (Lazy((s,s'),f),Lazy((t,t'),g)) ->
Lazy((mk_comb(s,t),mk_comb(s',t')),cache(fun () -> MK_COMB (f (),g ())));;
let concl' = fun (Lazy(tmp,g)) -> tmp;;
(* ------------------------------------------------------------------------- *)
(* Successors of a term, and predecessor function. *)
(* ------------------------------------------------------------------------- *)
let successors tm =
try let f,x = dest_comb tm in [f;x]
with Failure _ -> [];;
let predecessor_function tms =
itlist (fun x -> itlist (fun y f -> (y |-> insert x (tryapplyd f y [])) f)
(successors x))
tms undefined;;
(* ------------------------------------------------------------------------- *)
(* A union-find structure for equivalences, with theorems for edges. *)
(* ------------------------------------------------------------------------- *)
type termnode = Nonterminal of lazyeq | Terminal of term * term list;;
type termequivalence = Equivalence of (term,termnode)func;;
let rec terminus (Equivalence f as eqv) a =
match (apply f a) with
Nonterminal(th) -> let b = snd(concl' th) in
let th',n = terminus eqv b in
TRANS' th th',n
| Terminal(t,n) -> (REFL' t,n);;
let tryterminus eqv a =
try terminus eqv a with Failure _ -> (REFL' a,[a]);;
let canonize eqv a = fst(tryterminus eqv a);;
let equate th (Equivalence f as eqv) =
let a,b = concl' th in
let (ath,na) = tryterminus eqv a
and (bth,nb) = tryterminus eqv b in
let a' = snd(concl' ath) and b' = snd(concl' bth) in
Equivalence
(if a' = b' then f else
if length na <= length nb then
let th' = TRANS' (TRANS' (SYM' ath) th) bth in
(a' |-> Nonterminal th') ((b' |-> Terminal(b',na@nb)) f)
else
let th' = TRANS'(SYM'(TRANS' th bth)) ath in
(b' |-> Nonterminal th') ((a' |-> Terminal(a',na@nb)) f));;
let unequal = Equivalence undefined;;
let equated (Equivalence f) = dom f;;
let prove_equal eqv (s,t) =
let sth = canonize eqv s and tth = canonize eqv t in
TRANS' (canonize eqv s) (SYM'(canonize eqv t));;
let equivalence_class eqv a = snd(tryterminus eqv a);;
(* ------------------------------------------------------------------------- *)
(* Prove composite terms equivalent based on 1-step congruence. *)
(* ------------------------------------------------------------------------- *)
let provecongruent eqv (tm1,tm2) =
let f1,x1 = dest_comb tm1
and f2,x2 = dest_comb tm2 in
MK_COMB'(prove_equal eqv (f1,f2),prove_equal eqv (x1,x2));;
(* ------------------------------------------------------------------------- *)
(* Merge equivalence classes given equation "th", using congruence closure. *)
(* ------------------------------------------------------------------------- *)
let rec emerge th (eqv,pfn) =
let s,t = concl' th in
let sth = canonize eqv s and tth = canonize eqv t in
let s' = snd(concl' sth) and t' = snd(concl' tth) in
if s' = t' then (eqv,pfn) else
let sp = tryapplyd pfn s' [] and tp = tryapplyd pfn t' [] in
let eqv' = equate th eqv in
let stth = canonize eqv' s' in
let sttm = snd(concl' stth) in
let pfn' = (sttm |-> union sp tp) pfn in
itlist (fun (u,v) (eqv,pfn as eqp) ->
try let thuv = provecongruent eqv (u,v) in emerge thuv eqp
with Failure _ -> eqp)
(allpairs (fun u v -> (u,v)) sp tp) (eqv',pfn');;
(* ------------------------------------------------------------------------- *)
(* Find subterms of "tm" that contain as a subterm one of the "tms" terms. *)
(* This is intended to be more efficient than the obvious "find_terms ...". *)
(* ------------------------------------------------------------------------- *)
let rec supersubterms tms tm =
let ltms,tms' =
if mem tm tms then [tm],filter (fun t -> t <> tm) tms
else [],tms in
if tms' = [] then ltms else
let stms =
try let l,r = dest_comb tm in
union (supersubterms tms' l) (supersubterms tms' r)
with Failure _ -> [] in
if stms = [] then ltms
else tm::stms;;
(* ------------------------------------------------------------------------- *)
(* Find an appropriate term universe for overall terms "tms". *)
(* ------------------------------------------------------------------------- *)
let term_universe tms =
setify (itlist ((@) o supersubterms tms) tms []);;
(* ------------------------------------------------------------------------- *)
(* Congruence closure of "eqs" over term universe "tms". *)
(* ------------------------------------------------------------------------- *)
let congruence_closure tms eqs =
let pfn = predecessor_function tms in
let eqv,_ = itlist emerge eqs (unequal,pfn) in
eqv;;
(* ------------------------------------------------------------------------- *)
(* Prove that "eq" follows from "eqs" by congruence closure. *)
(* ------------------------------------------------------------------------- *)
let CCPROVE eqs eq =
let tps = dest_eq eq :: map concl' eqs in
let otms = itlist (fun (x,y) l -> x::y::l) tps [] in
let tms = term_universe(setify otms) in
let eqv = congruence_closure tms eqs in
prove_equal eqv (dest_eq eq);;
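(* For example (illustrative only), from the hypothesis `f a = a` the closure
   justifies `f(f a) = a`:
     lazy_eval (CCPROVE [lazy_eq (ASSUME `f a = a`)] `f(f a) = a`);;
   The corresponding implication appears among the test cases at the end. *)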
(* ------------------------------------------------------------------------- *)
(* Inference rule for `eq1 /\ ... /\ eqn ==> eq` *)
(* ------------------------------------------------------------------------- *)
let CONGRUENCE_CLOSURE tm =
if is_imp tm then
let eqs,eq = dest_imp tm in
DISCH eqs (lazy_eval(CCPROVE (map lazy_eq (CONJUNCTS(ASSUME eqs))) eq))
else lazy_eval(CCPROVE [] tm);;
(* ------------------------------------------------------------------------- *)
(* Inference rule for contradictoriness of set of +ve and -ve eqns. *)
(* ------------------------------------------------------------------------- *)
let CONGRUENCE_CLOSURE_CONTR ths =
let nths,pths = partition (is_neg o concl) ths in
let peqs = filter (is_eq o concl) pths
and neqs = filter (is_eq o rand o concl) nths in
let tps = map (dest_eq o concl) peqs @ map (dest_eq o rand o concl) neqs in
let otms = itlist (fun (x,y) l -> x::y::l) tps [] in
let tms = term_universe(setify otms) in
let eqv = congruence_closure tms (map lazy_eq peqs) in
let prover th =
let eq = dest_eq(rand(concl th)) in
let lth = prove_equal eqv eq in
EQ_MP (EQF_INTRO th) (lazy_eval lth) in
tryfind prover neqs;;
(* ------------------------------------------------------------------------- *)
(* Attempt to prove equality between terms/formulas based on equivalence. *)
(* Note that ABS sideconditions are only checked at inference-time... *)
(* ------------------------------------------------------------------------- *)
let ABS' v =
fun (Lazy((s,t),f)) ->
Lazy((mk_abs(v,s),mk_abs(v,t)),
cache(fun () -> ABS v (f ())));;
let ALPHA_EQ' s' t' =
fun (Lazy((s,t),f) as inp) ->
if s' = s && t' = t then inp else
Lazy((s',t'),
cache(fun () -> EQ_MP (ALPHA (mk_eq(s,t)) (mk_eq(s',t')))
(f ())));;
let rec PROVE_EQUAL eqv (tm1,tm2 as tmp) =
if tm1 = tm2 then REFL' tm1 else
try prove_equal eqv tmp with Failure _ ->
if is_comb tm1 && is_comb tm2 then
let f1,x1 = dest_comb tm1
and f2,x2 = dest_comb tm2 in
MK_COMB'(PROVE_EQUAL eqv (f1,f2),PROVE_EQUAL eqv (x1,x2))
else if is_abs tm1 && is_abs tm2 then
let x1,bod1 = dest_abs tm1
and x2,bod2 = dest_abs tm2 in
let gv = genvar(type_of x1) in
ALPHA_EQ' tm1 tm2
(ABS' x1 (PROVE_EQUAL eqv (vsubst[gv,x1] bod1,vsubst[gv,x2] bod2)))
else failwith "PROVE_EQUAL";;
let PROVE_EQUIVALENT eqv tm1 tm2 = lazy_eval (PROVE_EQUAL eqv (tm1,tm2));;
(* ------------------------------------------------------------------------- *)
(* Complementary version for formulas. *)
(* ------------------------------------------------------------------------- *)
let PROVE_COMPLEMENTARY eqv th1 th2 =
let tm1 = concl th1 and tm2 = concl th2 in
if is_neg tm1 then
let th = PROVE_EQUIVALENT eqv (rand tm1) tm2 in
EQ_MP (EQF_INTRO th1) (EQ_MP (SYM th) th2)
else if is_neg tm2 then
let th = PROVE_EQUIVALENT eqv (rand tm2) tm1 in
EQ_MP (EQF_INTRO th2) (EQ_MP (SYM th) th1)
else failwith "PROVE_COMPLEMENTARY";;
(* ------------------------------------------------------------------------- *)
(* Check equality under equivalence with "env" mapping for first term. *)
(* ------------------------------------------------------------------------- *)
let rec test_eq eqv (tm1,tm2) env =
if is_comb tm1 && is_comb tm2 then
let f1,x1 = dest_comb tm1
and f2,x2 = dest_comb tm2 in
test_eq eqv (f1,f2) env && test_eq eqv (x1,x2) env
else if is_abs tm1 && is_abs tm2 then
let x1,bod1 = dest_abs tm1
and x2,bod2 = dest_abs tm2 in
let gv = genvar(type_of x1) in
test_eq eqv (vsubst[gv,x1] bod1,vsubst[gv,x2] bod2) env
else if is_var tm1 && can (rev_assoc tm1) env then
test_eq eqv (rev_assoc tm1 env,tm2) []
else can (prove_equal eqv) (tm1,tm2);;
(* ------------------------------------------------------------------------- *)
(* Map a term to its equivalence class modulo equivalence *)
(* ------------------------------------------------------------------------- *)
let rec term_equivs eqv tm =
let l = equivalence_class eqv tm in
if l <> [tm] then l
else if is_comb tm then
let f,x = dest_comb tm in
allpairs (curry mk_comb) (term_equivs eqv f) (term_equivs eqv x)
else if is_abs tm then
let v,bod = dest_abs tm in
let gv = genvar(type_of v) in
map (fun t -> alpha v (mk_abs(gv,t))) (term_equivs eqv (vsubst [gv,v] bod))
else [tm];;
(* ------------------------------------------------------------------------- *)
(* Replace "outer" universal variables with genvars. This is "outer" in the *)
(* second sense, i.e. universals not in scope of an existential or negation. *)
(* ------------------------------------------------------------------------- *)
let rec GENSPEC th =
let tm = concl th in
if is_forall tm then
let v = bndvar(rand tm) in
let gv = genvar(type_of v) in
GENSPEC(SPEC gv th)
else if is_conj tm then
let th1,th2 = CONJ_PAIR th in
CONJ (GENSPEC th1) (GENSPEC th2)
else if is_disj tm then
let th1 = GENSPEC(ASSUME(lhand tm))
and th2 = GENSPEC(ASSUME(rand tm)) in
let th3 = DISJ1 th1 (concl th2)
and th4 = DISJ2 (concl th1) th2 in
DISJ_CASES th th3 th4
else th;;
(* ------------------------------------------------------------------------- *)
(* Simple first-order matching. *)
(* ------------------------------------------------------------------------- *)
let rec term_fmatch vars vtm ctm env =
if mem vtm vars then
if can (rev_assoc vtm) env then
term_fmatch vars (rev_assoc vtm env) ctm env
else if aconv vtm ctm then env else (ctm,vtm)::env
else if is_comb vtm && is_comb ctm then
let fv,xv = dest_comb vtm
and fc,xc = dest_comb ctm in
term_fmatch vars fv fc (term_fmatch vars xv xc env)
else if is_abs vtm && is_abs ctm then
let xv,bodv = dest_abs vtm
and xc,bodc = dest_abs ctm in
let gv = genvar(type_of xv) and gc = genvar(type_of xc) in
let gbodv = vsubst [gv,xv] bodv
and gbodc = vsubst [gc,xc] bodc in
term_fmatch (gv::vars) gbodv gbodc ((gc,gv)::env)
else if vtm = ctm then env
else failwith "term_fmatch";;
let rec check_consistency env =
match env with
[] -> true
| (c,v)::es -> forall (fun (c',v') -> v' <> v || c' = c) es;;
let separate_insts env =
let tyin = itlist (fun (c,v) -> type_match (type_of v) (type_of c))
env [] in
let ifn(c,v) = (inst tyin c,inst tyin v) in
let tmin = setify (map ifn env) in
if check_consistency tmin then (tmin,tyin)
else failwith "separate_insts";;
let first_order_match vars vtm ctm env =
let env' = term_fmatch vars vtm ctm env in
if can separate_insts env' then env' else failwith "first_order_match";;
(* ------------------------------------------------------------------------- *)
(* Try to match all leaves to negation of auxiliary propositions. *)
(* ------------------------------------------------------------------------- *)
let matchleaves =
let rec matchleaves vars vtm ctms env cont =
if is_conj vtm then
try matchleaves vars (rand vtm) ctms env cont
with Failure _ -> matchleaves vars (lhand vtm) ctms env cont
else if is_disj vtm then
matchleaves vars (lhand vtm) ctms env
(fun e -> matchleaves vars (rand vtm) ctms e cont)
else
tryfind (fun ctm -> cont (first_order_match vars vtm ctm env)) ctms in
fun vars vtm ctms env -> matchleaves vars vtm ctms env (fun e -> e);;
(* ------------------------------------------------------------------------- *)
(* Now actually do the refutation once theorem is instantiated. *)
(* ------------------------------------------------------------------------- *)
let rec REFUTE_LEAVES eqv cths th =
let tm = concl th in
if is_conj tm then
try REFUTE_LEAVES eqv cths (CONJUNCT1 th)
with Failure _ -> REFUTE_LEAVES eqv cths (CONJUNCT2 th)
else if is_disj tm then
let th1 = REFUTE_LEAVES eqv cths (ASSUME(lhand tm))
and th2 = REFUTE_LEAVES eqv cths (ASSUME(rand tm)) in
DISJ_CASES th th1 th2
else
tryfind (PROVE_COMPLEMENTARY eqv th) cths;;
(* ------------------------------------------------------------------------- *)
(* Hence the Mizar "unifier" for given universal formula. *)
(* ------------------------------------------------------------------------- *)
let negate tm = if is_neg tm then rand tm else mk_neg tm;;
let MIZAR_UNIFIER eqv ths th =
let gth = GENSPEC th in
let vtm = concl gth in
let vars = subtract (frees vtm) (frees(concl th))
and ctms = map (negate o concl) ths in
let allctms = itlist (union o term_equivs eqv) ctms [] in
let env = matchleaves vars vtm allctms [] in
let tmin,tyin = separate_insts env in
REFUTE_LEAVES eqv ths (PINST tyin tmin gth);;
(* ------------------------------------------------------------------------- *)
(* Deduce disequalities of subterms and add symmetric versions at the end. *)
(* ------------------------------------------------------------------------- *)
let rec DISEQUALITIES ths =
match ths with
[] -> []
| th::oths ->
let t1,t2 = dest_eq (rand(concl th)) in
let f1,args1 = strip_comb t1
and f2,args2 = strip_comb t2 in
if f1 <> f2 || length args1 <> length args2
then th::(GSYM th)::(DISEQUALITIES oths) else
let zargs = zip args1 args2 in
let diffs = filter (fun (a1,a2) -> a1 <> a2) zargs in
if length diffs <> 1 then th::(GSYM th)::(DISEQUALITIES oths) else
let eths = map (fun (a1,a2) ->
if a1 = a2 then REFL a1 else ASSUME(mk_eq(a1,a2))) zargs in
let th1 = rev_itlist (fun x y -> MK_COMB(y,x)) eths (REFL f1) in
let th2 =
MP (GEN_REWRITE_RULE I [GSYM CONTRAPOS_THM] (DISCH_ALL th1)) th in
th::(GSYM th)::(DISEQUALITIES(th2::oths));;
(* ------------------------------------------------------------------------- *)
(* Get such a starting inequality from complementary literals. *)
(* ------------------------------------------------------------------------- *)
let ATOMINEQUALITIES th1 th2 =
let t1 = concl th1 and t2' = concl th2 in
let t2 = dest_neg t2' in
let f1,args1 = strip_comb t1
and f2,args2 = strip_comb t2 in
if f1 <> f2 || length args1 <> length args2 then [] else
let zargs = zip args1 args2 in
let diffs = filter (fun (a1,a2) -> a1 <> a2) zargs in
if length diffs <> 1 then [] else
let eths = map (fun (a1,a2) ->
if a1 = a2 then REFL a1 else ASSUME(mk_eq(a1,a2))) zargs in
let th3 = rev_itlist (fun x y -> MK_COMB(y,x)) eths (REFL f1) in
let th4 = EQ_MP (TRANS th3 (EQF_INTRO th2)) th1 in
let th5 = NOT_INTRO(itlist (DISCH o mk_eq) diffs th4) in
[itlist PROVE_HYP [th1; th2] th5];;
(* ------------------------------------------------------------------------- *)
(* Basic prover. *)
(* ------------------------------------------------------------------------- *)
let BASIC_MIZARBY ths =
try let nths,pths = partition (is_neg o concl) ths in
let peqs,pneqs = partition (is_eq o concl) pths
and neqs,nneqs = partition (is_eq o rand o concl) nths in
let tps = map (dest_eq o concl) peqs @
map (dest_eq o rand o concl) neqs in
let otms = itlist (fun (x,y) l -> x::y::l) tps [] in
let tms = term_universe(setify otms) in
let eqv = congruence_closure tms (map lazy_eq peqs) in
let eqprover th =
let s,t = dest_eq(rand(concl th)) in
let th' = PROVE_EQUIVALENT eqv s t in
EQ_MP (EQF_INTRO th) th'
and contrprover thp thn =
let th = PROVE_EQUIVALENT eqv (concl thp) (rand(concl thn)) in
EQ_MP (TRANS th (EQF_INTRO thn)) thp in
try tryfind eqprover neqs with Failure _ ->
try tryfind (fun thp -> tryfind (contrprover thp) nneqs) pneqs
with Failure _ ->
let new_neqs = unions(allpairs ATOMINEQUALITIES pneqs nneqs) in
let allths = pneqs @ nneqs @ peqs @ DISEQUALITIES(neqs @ new_neqs) in
tryfind (MIZAR_UNIFIER eqv allths)
(filter (is_forall o concl) allths)
with Failure _ -> failwith "BASIC_MIZARBY";;
(* ------------------------------------------------------------------------- *)
(* Put it all together. *)
(* ------------------------------------------------------------------------- *)
let MIZAR_REFUTER ths = PRECHECKER_THEN BASIC_MIZARBY ths;;
(* ------------------------------------------------------------------------- *)
(* The Mizar prover for getting a conclusion from hypotheses. *)
(* ------------------------------------------------------------------------- *)
let MIZAR_BY =
let pth = TAUT `(~p ==> F) <=> p` and p_tm = `p:bool` in
fun ths tm ->
let tm' = mk_neg tm in
let th0 = ASSUME tm' in
let th1 = MIZAR_REFUTER (th0::ths) in
EQ_MP (INST [tm,p_tm] pth) (DISCH tm' th1);;
(* ------------------------------------------------------------------------- *)
(* As a standalone prover of formulas. *)
(* ------------------------------------------------------------------------- *)
let MIZAR_RULE tm = MIZAR_BY [] tm;;
(* ------------------------------------------------------------------------- *)
(* Some additional stuff for HOL. *)
(* ------------------------------------------------------------------------- *)
let HOL_BY =
let BETASET_CONV =
TOP_DEPTH_CONV GEN_BETA_CONV THENC REWRITE_CONV[IN_ELIM_THM]
and BUILTIN_CONV tm =
try EQT_ELIM(NUM_REDUCE_CONV tm) with Failure _ ->
try EQT_ELIM(REAL_RAT_REDUCE_CONV tm) with Failure _ ->
try ARITH_RULE tm with Failure _ ->
try REAL_ARITH tm with Failure _ ->
failwith "BUILTIN_CONV" in
fun ths tm ->
try MIZAR_BY ths tm with Failure _ ->
try tryfind (fun th -> PART_MATCH I th tm) ths with Failure _ ->
try let avs,bod = strip_forall tm in
let gvs = map (genvar o type_of) avs in
let gtm = vsubst (zip gvs avs) bod in
let th = tryfind (fun th -> PART_MATCH I th gtm) ths in
let gth = GENL gvs th in
EQ_MP (ALPHA (concl gth) tm) gth
with Failure _ -> try
(let ths' = map BETA_RULE ths
and th' = TOP_DEPTH_CONV BETA_CONV tm in
let tm' = rand(concl th') in
try EQ_MP (SYM th') (tryfind (fun th -> PART_MATCH I th tm') ths)
with Failure _ -> try EQ_MP (SYM th') (BUILTIN_CONV tm')
with Failure _ ->
let ths'' = map (CONV_RULE BETASET_CONV) ths'
and th'' = TRANS th' (BETASET_CONV tm') in
EQ_MP (SYM th'') (MESON ths'' (rand(concl th''))))
with Failure _ -> failwith "HOL_BY";;
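(* So HOL_BY tries, in order: the Mizar-style refuter above, direct matching
   against one of the supplied theorems (possibly after stripping universals),
   beta/set-comprehension reduction plus the arithmetic decision procedures,
   and finally MESON on the reduced goal. *)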
(* ------------------------------------------------------------------------- *)
(* Standalone prover, breaking down an implication first. *)
(* ------------------------------------------------------------------------- *)
let HOL_RULE tm =
try let l,r = dest_imp tm in
DISCH l (HOL_BY (CONJUNCTS(ASSUME l)) r)
with Failure _ -> HOL_BY [] tm;;
(* ------------------------------------------------------------------------- *)
(* Tautology examples (Pelletier problems). *)
(* ------------------------------------------------------------------------- *)
let prop_1 = time HOL_RULE
`p ==> q <=> ~q ==> ~p`;;
let prop_2 = time HOL_RULE
`~ ~p <=> p`;;
let prop_3 = time HOL_RULE
`~(p ==> q) ==> q ==> p`;;
let prop_4 = time HOL_RULE
`~p ==> q <=> ~q ==> p`;;
let prop_5 = time HOL_RULE
`(p \/ q ==> p \/ r) ==> p \/ (q ==> r)`;;
let prop_6 = time HOL_RULE
`p \/ ~p`;;
let prop_7 = time HOL_RULE
`p \/ ~ ~ ~p`;;
let prop_8 = time HOL_RULE
`((p ==> q) ==> p) ==> p`;;
let prop_9 = time HOL_RULE
`(p \/ q) /\ (~p \/ q) /\ (p \/ ~q) ==> ~(~q \/ ~q)`;;
let prop_10 = time HOL_RULE
`(q ==> r) /\ (r ==> p /\ q) /\ (p ==> q /\ r) ==> (p <=> q)`;;
let prop_11 = time HOL_RULE
`p <=> p`;;
let prop_12 = time HOL_RULE
`((p <=> q) <=> r) <=> (p <=> (q <=> r))`;;
let prop_13 = time HOL_RULE
`p \/ q /\ r <=> (p \/ q) /\ (p \/ r)`;;
let prop_14 = time HOL_RULE
`(p <=> q) <=> (q \/ ~p) /\ (~q \/ p)`;;
let prop_15 = time HOL_RULE
`p ==> q <=> ~p \/ q`;;
let prop_16 = time HOL_RULE
`(p ==> q) \/ (q ==> p)`;;
let prop_17 = time HOL_RULE
`p /\ (q ==> r) ==> s <=> (~p \/ q \/ s) /\ (~p \/ ~r \/ s)`;;
(* ------------------------------------------------------------------------- *)
(* Congruence closure examples. *)
(* ------------------------------------------------------------------------- *)
time HOL_RULE
`(f(f(f(f(f(x))))) = x) /\ (f(f(f(x))) = x) ==> (f(x) = x)`;;
time HOL_RULE
`(f(f(f(f(f(f(x)))))) = x) /\ (f(f(f(f(x)))) = x) ==> (f(f(x)) = x)`;;
time HOL_RULE `(f a = a) ==> (f(f a) = a)`;;
time HOL_RULE
`(a = f a) /\ ((g b (f a))=(f (f a))) /\ ((g a b)=(f (g b a)))
==> (g a b = a)`;;
time HOL_RULE
`((s(s(s(s(s(s(s(s(s(s(s(s(s(s(s a)))))))))))))))=a) /\
((s (s (s (s (s (s (s (s (s (s a))))))))))=a) /\
((s (s (s (s (s (s a))))))=a)
==> (a = s a)`;;
time HOL_RULE `(u = v) ==> (P u <=> P v)`;;
time HOL_RULE
`(b + c + d + e + f + g + h + i + j + k + l + m =
m + l + k + j + i + h + g + f + e + d + c + b)
==> (a + b + c + d + e + f + g + h + i + j + k + l + m =
a + m + l + k + j + i + h + g + f + e + d + c + b)`;;
time HOL_RULE
`(f(f(f(f(a)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(f(f(a)))) ==> P(f(a))`;;
time HOL_RULE
`((a = b) \/ (c = d)) /\ ((a = c) \/ (b = d)) ==> (a = d) \/ (b = c)`;;
(* ------------------------------------------------------------------------- *)
(* Various combined examples. *)
(* ------------------------------------------------------------------------- *)
time HOL_RULE
`(f(f(f(f(a:A)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b:A)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(a)) /\ ~(f(f(f(a))) = f(a)) ==> ?x. P(f(f(f(x))))`;;
time HOL_RULE
`(f(f(f(f(a:A)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b:A)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(a))
==> P(f(f(f(a))))`;;
time HOL_RULE
`(f(f(f(f(a:A)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b:A)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(a))
==> ?x. P(f(f(f(x))))`;;
time HOL_RULE
`(a = f a) /\ ((g b (f a))=(f (f a))) /\ ((g a b)=(f (g b a))) /\
(!x y. ~P (g x y))
==> ~P(a)`;;
time HOL_RULE
`(!x y. x + y = y + x) /\ (1 + 2 = x) /\ (x = 3) ==> (3 = 2 + 1)`;;
time HOL_RULE
`(!x:num y. x + y = y + x) ==> (1 + 2 = 2 + 1)`;;
time HOL_RULE
`(!x:num y. ~(x + y = y + x)) ==> ~(1 + 2 = 2 + 1)`;;
time HOL_RULE
`(1 + 2 = 2 + 1) ==> ?x:num y. x + y = y + x`;;
time HOL_RULE
`(1 + x = x + 1) ==> ?x:num y. x + y = y + x`;;
time (HOL_BY []) `?x. P x ==> !y. P y`;;
(* ------------------------------------------------------------------------- *)
(* Testing the HOL extensions. *)
(* ------------------------------------------------------------------------- *)
time HOL_RULE `1 + 1 = 2`;;
time HOL_RULE `(\x. x + 1) 2 = 2 + 1`;;
time HOL_RULE `!x. x < 2 ==> 2 * x <= 3`;;
time HOL_RULE `y IN {x | x < 2} <=> y < 2`;;
time HOL_RULE `(!x. (x = a) \/ x > a) ==> (1 + x = a) \/ 1 + x > a`;;
time HOL_RULE `(\(x,y). x + y)(1,2) + 5 = (1 + 2) + 5`;;
(* ------------------------------------------------------------------------- *)
(* These and only these should go to MESON. *)
(* ------------------------------------------------------------------------- *)
print_string "***** Now the following (only) should use MESON";
print_newline();;
time HOL_RULE `?x y. x = y`;;
time HOL_RULE `(!Y X Z. p(X,Y) /\ p(Y,Z) ==> p(X,Z)) /\
(!Y X Z. q(X,Y) /\ q(Y,Z) ==> q(X,Z)) /\
(!Y X. q(X,Y) ==> q(Y,X)) /\
(!X Y. p(X,Y) \/ q(X,Y))
==> p(a,b) \/ q(c,d)`;;
time HOL_BY [PAIR_EQ] `(1,2) IN {(x,y) | x < y} <=> 1 < 2`;;
HOL_BY [] `?x. !y. P x ==> P y`;;
| null | https://raw.githubusercontent.com/jrh13/hol-light/8822aa45052e2ec5f9608e79aa3ec7bc84abedf8/Examples/holby.ml | ocaml | =========================================================================
arithmetic, and finally if all else fails using MESON_TAC[].
=========================================================================
-------------------------------------------------------------------------
More refined net lookup that double-checks conditions like matchability.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
split the problem into (generally multiple) subproblems. Then, call the
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Do the basic "semantic correlates" stuff.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Look for an immediate contradictory pair of theorems. This is quadratic,
but I doubt if that's much of an issue in practice. We could do something
fancier, but need to be careful over alpha-equivalence if sorting.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Lazy equations for use in congruence closure.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Successors of a term, and predecessor function.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
A union-find structure for equivalences, with theorems for edges.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Merge equivalence classes given equation "th", using congruence closure.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Find subterms of "tm" that contain as a subterm one of the "tms" terms.
This is intended to be more efficient than the obvious "find_terms ...".
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Find an appropriate term universe for overall terms "tms".
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Inference rule for `eq1 /\ ... /\ eqn ==> eq`
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Inference rule for contradictoriness of set of +ve and -ve eqns.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Attempt to prove equality between terms/formulas based on equivalence.
Note that ABS sideconditions are only checked at inference-time...
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Complementary version for formulas.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Map a term to its equivalence class modulo equivalence
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Try to match all leaves to negation of auxiliary propositions.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Now actually do the refutation once theorem is instantiated.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Deduce disequalities of subterms and add symmetric versions at the end.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Get such a starting inequality from complementary literals.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Basic prover.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Put it all together.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
As a standalone prover of formulas.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Tautology examples (Pelletier problems).
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Congruence closure examples.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Various combined examples.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
------------------------------------------------------------------------- | A HOL "by" tactic, doing Mizar-like things, trying something that is
sufficient for HOL's basic rules, trying a few other things like
let matching_enter tm y net =
enter [] (tm,((fun tm' -> can (term_match [] tm) tm'),y)) net;;
let unconditional_enter (tm,y) net =
enter [] (tm,((fun t -> true),y)) net;;
let conditional_enter (tm,condy) net =
enter [] (tm,condy) net;;
let careful_lookup tm net =
map snd (filter (fun (c,y) -> c tm) (lookup tm net));;
Transform theorem list to simplify, eliminate redundant connectives and
split the problem into (generally multiple) subproblems. Then, call the
prover given as the first argument on each component.
let SPLIT_THEN =
let action_false th f oths = th
and action_true th f oths = f oths
and action_conj th f oths =
f (CONJUNCT1 th :: CONJUNCT2 th :: oths)
and action_disj th f oths =
let th1 = f (ASSUME(lhand(concl th)) :: oths)
and th2 = f (ASSUME(rand(concl th)) :: oths) in
DISJ_CASES th th1 th2
and action_taut tm =
let pfun = PART_MATCH lhs (TAUT tm) in
let prule th = EQ_MP (pfun (concl th)) th in
lhand tm,(fun th f oths -> f(prule th :: oths)) in
let enet = itlist unconditional_enter
[`F`,action_false;
`T`,action_true;
`p /\ q`,action_conj;
`p \/ q`,action_disj;
action_taut `(p ==> q) <=> ~p \/ q`;
action_taut `~F <=> T`;
action_taut `~T <=> F`;
action_taut `~(~p) <=> p`;
action_taut `~(p /\ q) <=> ~p \/ ~q`;
action_taut `~(p \/ q) <=> ~p /\ ~q`;
action_taut `~(p ==> q) <=> p /\ ~q`;
action_taut `p /\ F <=> F`;
action_taut `F /\ p <=> F`;
action_taut `p /\ T <=> p`;
action_taut `T /\ p <=> p`;
action_taut `p \/ F <=> p`;
action_taut `F \/ p <=> p`;
action_taut `p \/ T <=> T`;
action_taut `T \/ p <=> T`]
(let tm,act = action_taut `~(p <=> q) <=> p /\ ~q \/ ~p /\ q` in
let cond tm = type_of(rand(rand tm)) = bool_ty in
conditional_enter (tm,(cond,act))
(let tm,act = action_taut `(p <=> q) <=> p /\ q \/ ~p /\ ~q` in
let cond tm = type_of(rand tm) = bool_ty in
conditional_enter (tm,(cond,act)) empty_net)) in
fun prover ->
let rec splitthen splat tosplit =
match tosplit with
[] -> prover (rev splat)
| th::oths ->
let funs = careful_lookup (concl th) enet in
if funs = [] then splitthen (th::splat) oths
else (hd funs) th (splitthen splat) oths in
splitthen [];;
A similar thing that also introduces constants (but not functions)
and does some slight first-order simplification like trivial miniscoping.
let SPLIT_FOL_THEN =
let action_false th f splat oths = th
and action_true th f splat oths = f oths
and action_conj th f splat oths =
f (CONJUNCT1 th :: CONJUNCT2 th :: oths)
and action_disj th f splat oths =
let th1 = f (ASSUME(lhand(concl th)) :: oths)
and th2 = f (ASSUME(rand(concl th)) :: oths) in
DISJ_CASES th th1 th2
and action_exists th f splat oths =
let v,bod = dest_exists(concl th) in
let vars = itlist (union o thm_frees) (oths @ splat) (thm_frees th) in
let v' = variant vars v in
let th' = ASSUME (subst [v',v] bod) in
CHOOSE (v',th) (f (th'::oths))
and action_taut tm =
let pfun = PART_MATCH lhs (TAUT tm) in
let prule th = EQ_MP (pfun (concl th)) th in
lhand tm,(fun th f splat oths -> f(prule th :: oths))
and action_fol tm =
let pfun = PART_MATCH lhs (prove(tm,MESON_TAC[])) in
let prule th = EQ_MP (pfun (concl th)) th in
lhand tm,(fun th f splat oths -> f(prule th :: oths)) in
let enet = itlist unconditional_enter
[`F`,action_false;
`T`,action_true;
`p /\ q`,action_conj;
`p \/ q`,action_disj;
`?x. P x`,action_exists;
action_taut `~(~p) <=> p`;
action_taut `~(p /\ q) <=> ~p \/ ~q`;
action_taut `~(p \/ q) <=> ~p /\ ~q`;
action_fol `~(!x. P x) <=> (?x. ~(P x))`;
action_fol `(!x. P x /\ Q x) <=> (!x. P x) /\ (!x. Q x)`]
empty_net in
fun prover ->
let rec splitthen splat tosplit =
match tosplit with
[] -> prover (rev splat)
| th::oths ->
let funs = careful_lookup (concl th) enet in
if funs = [] then splitthen (th::splat) oths
else (hd funs) th (splitthen splat) splat oths in
splitthen [];;
This is more like NNF than Mizar's version.
let CORRELATE_RULE =
PURE_REWRITE_RULE
[TAUT `(a <=> b) <=> (a ==> b) /\ (b ==> a)`;
TAUT `(a ==> b) <=> ~a \/ b`;
DE_MORGAN_THM;
TAUT `~(~a) <=> a`;
TAUT `~T <=> F`;
TAUT `~F <=> T`;
TAUT `T /\ p <=> p`;
TAUT `p /\ T <=> p`;
TAUT `F /\ p <=> F`;
TAUT `p /\ F <=> F`;
TAUT `T \/ p <=> T`;
TAUT `p \/ T <=> T`;
TAUT `F \/ p <=> p`;
TAUT `p \/ F <=> p`;
GSYM CONJ_ASSOC; GSYM DISJ_ASSOC;
prove(`(?x. P x) <=> ~(!x. ~(P x))`,MESON_TAC[])];;
let THMLIST_CONTR_RULE =
let CONTR_PAIR_THM = UNDISCH_ALL(TAUT `p ==> ~p ==> F`)
and p_tm = `p:bool` in
fun ths ->
let ths_n,ths_p = partition (is_neg o concl) ths in
let th_n = find (fun thn -> let tm = rand(concl thn) in
exists (aconv tm o concl) ths_p) ths_n in
let tm = rand(concl th_n) in
let th_p = find (aconv tm o concl) ths_p in
itlist PROVE_HYP [th_p; th_n] (INST [tm,p_tm] CONTR_PAIR_THM);;
Hence something similar to Mizar's "prechecker".
let PRECHECKER_THEN prover =
SPLIT_THEN (fun ths -> try THMLIST_CONTR_RULE ths
with Failure _ ->
SPLIT_FOL_THEN prover (map CORRELATE_RULE ths));;
type lazyeq = Lazy of (term * term) * (unit -> thm);;
let cache f =
let store = ref TRUTH in
fun () -> let th = !store in
if is_eq(concl th) then th else
let th' = f() in
(store := th'; th');;
let lazy_eq th =
Lazy((dest_eq(concl th)),(fun () -> th));;
let lazy_eval (Lazy(_,f)) = f();;
let REFL' t = Lazy((t,t),cache(fun () -> REFL t));;
let SYM' = fun (Lazy((t,t'),f)) -> Lazy((t',t),cache(fun () -> SYM(f ())));;
let TRANS' =
fun (Lazy((s,s'),f)) (Lazy((t,t'),g)) ->
if not(aconv s' t) then failwith "TRANS'"
else Lazy((s,t'),cache(fun () -> TRANS (f ()) (g ())));;
let MK_COMB' =
fun (Lazy((s,s'),f),Lazy((t,t'),g)) ->
Lazy((mk_comb(s,t),mk_comb(s',t')),cache(fun () -> MK_COMB (f (),g ())));;
let concl' = fun (Lazy(tmp,g)) -> tmp;;
let successors tm =
try let f,x = dest_comb tm in [f;x]
with Failure _ -> [];;
let predecessor_function tms =
itlist (fun x -> itlist (fun y f -> (y |-> insert x (tryapplyd f y [])) f)
(successors x))
tms undefined;;
type termnode = Nonterminal of lazyeq | Terminal of term * term list;;
type termequivalence = Equivalence of (term,termnode)func;;
let rec terminus (Equivalence f as eqv) a =
match (apply f a) with
Nonterminal(th) -> let b = snd(concl' th) in
let th',n = terminus eqv b in
TRANS' th th',n
| Terminal(t,n) -> (REFL' t,n);;
let tryterminus eqv a =
try terminus eqv a with Failure _ -> (REFL' a,[a]);;
let canonize eqv a = fst(tryterminus eqv a);;
let equate th (Equivalence f as eqv) =
let a,b = concl' th in
let (ath,na) = tryterminus eqv a
and (bth,nb) = tryterminus eqv b in
let a' = snd(concl' ath) and b' = snd(concl' bth) in
Equivalence
(if a' = b' then f else
if length na <= length nb then
let th' = TRANS' (TRANS' (SYM' ath) th) bth in
(a' |-> Nonterminal th') ((b' |-> Terminal(b',na@nb)) f)
else
let th' = TRANS'(SYM'(TRANS' th bth)) ath in
(b' |-> Nonterminal th') ((a' |-> Terminal(a',na@nb)) f));;
let unequal = Equivalence undefined;;
let equated (Equivalence f) = dom f;;
let prove_equal eqv (s,t) =
let sth = canonize eqv s and tth = canonize eqv t in
TRANS' (canonize eqv s) (SYM'(canonize eqv t));;
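(* Illustrative note (added, not in the original source): once equations such
   as "a = b" and "b = c" have been merged into the equivalence structure,
   prove_equal justifies "a = c" by composing the canonization proof of each
   side with TRANS' and SYM'. *)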
let equivalence_class eqv a = snd(tryterminus eqv a);;
Prove composite terms equivalent based on 1-step congruence.
let provecongruent eqv (tm1,tm2) =
let f1,x1 = dest_comb tm1
and f2,x2 = dest_comb tm2 in
MK_COMB'(prove_equal eqv (f1,f2),prove_equal eqv (x1,x2));;
let rec emerge th (eqv,pfn) =
let s,t = concl' th in
let sth = canonize eqv s and tth = canonize eqv t in
let s' = snd(concl' sth) and t' = snd(concl' tth) in
if s' = t' then (eqv,pfn) else
let sp = tryapplyd pfn s' [] and tp = tryapplyd pfn t' [] in
let eqv' = equate th eqv in
let stth = canonize eqv' s' in
let sttm = snd(concl' stth) in
let pfn' = (sttm |-> union sp tp) pfn in
itlist (fun (u,v) (eqv,pfn as eqp) ->
try let thuv = provecongruent eqv (u,v) in emerge thuv eqp
with Failure _ -> eqp)
(allpairs (fun u v -> (u,v)) sp tp) (eqv',pfn');;
let rec supersubterms tms tm =
let ltms,tms' =
if mem tm tms then [tm],filter (fun t -> t <> tm) tms
else [],tms in
if tms' = [] then ltms else
let stms =
try let l,r = dest_comb tm in
union (supersubterms tms' l) (supersubterms tms' r)
with Failure _ -> [] in
if stms = [] then ltms
else tm::stms;;
let term_universe tms =
setify (itlist ((@) o supersubterms tms) tms []);;
Congruence closure of "eqs" over term universe "tms".
let congruence_closure tms eqs =
let pfn = predecessor_function tms in
let eqv,_ = itlist emerge eqs (unequal,pfn) in
eqv;;
Prove that "eq" follows from "eqs" by congruence closure.
let CCPROVE eqs eq =
let tps = dest_eq eq :: map concl' eqs in
let otms = itlist (fun (x,y) l -> x::y::l) tps [] in
let tms = term_universe(setify otms) in
let eqv = congruence_closure tms eqs in
prove_equal eqv (dest_eq eq);;
let CONGRUENCE_CLOSURE tm =
if is_imp tm then
let eqs,eq = dest_imp tm in
DISCH eqs (lazy_eval(CCPROVE (map lazy_eq (CONJUNCTS(ASSUME eqs))) eq))
else lazy_eval(CCPROVE [] tm);;
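(* Example (added note): a purely equational goal such as
   `(f(f(f(f(f(x))))) = x) /\ (f(f(f(x))) = x) ==> (f(x) = x)` from the
   examples at the end of this file should be provable by CONGRUENCE_CLOSURE
   alone, with no propositional reasoning needed. *)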
let CONGRUENCE_CLOSURE_CONTR ths =
let nths,pths = partition (is_neg o concl) ths in
let peqs = filter (is_eq o concl) pths
and neqs = filter (is_eq o rand o concl) nths in
let tps = map (dest_eq o concl) peqs @ map (dest_eq o rand o concl) neqs in
let otms = itlist (fun (x,y) l -> x::y::l) tps [] in
let tms = term_universe(setify otms) in
let eqv = congruence_closure tms (map lazy_eq peqs) in
let prover th =
let eq = dest_eq(rand(concl th)) in
let lth = prove_equal eqv eq in
EQ_MP (EQF_INTRO th) (lazy_eval lth) in
tryfind prover neqs;;
let ABS' v =
fun (Lazy((s,t),f)) ->
Lazy((mk_abs(v,s),mk_abs(v,t)),
cache(fun () -> ABS v (f ())));;
let ALPHA_EQ' s' t' =
fun (Lazy((s,t),f) as inp) ->
if s' = s && t' = t then inp else
Lazy((s',t'),
cache(fun () -> EQ_MP (ALPHA (mk_eq(s,t)) (mk_eq(s',t')))
(f ())));;
let rec PROVE_EQUAL eqv (tm1,tm2 as tmp) =
if tm1 = tm2 then REFL' tm1 else
try prove_equal eqv tmp with Failure _ ->
if is_comb tm1 && is_comb tm2 then
let f1,x1 = dest_comb tm1
and f2,x2 = dest_comb tm2 in
MK_COMB'(PROVE_EQUAL eqv (f1,f2),PROVE_EQUAL eqv (x1,x2))
else if is_abs tm1 && is_abs tm2 then
let x1,bod1 = dest_abs tm1
and x2,bod2 = dest_abs tm2 in
let gv = genvar(type_of x1) in
ALPHA_EQ' tm1 tm2
(ABS' x1 (PROVE_EQUAL eqv (vsubst[gv,x1] bod1,vsubst[gv,x2] bod2)))
else failwith "PROVE_EQUAL";;
let PROVE_EQUIVALENT eqv tm1 tm2 = lazy_eval (PROVE_EQUAL eqv (tm1,tm2));;
let PROVE_COMPLEMENTARY eqv th1 th2 =
let tm1 = concl th1 and tm2 = concl th2 in
if is_neg tm1 then
let th = PROVE_EQUIVALENT eqv (rand tm1) tm2 in
EQ_MP (EQF_INTRO th1) (EQ_MP (SYM th) th2)
else if is_neg tm2 then
let th = PROVE_EQUIVALENT eqv (rand tm2) tm1 in
EQ_MP (EQF_INTRO th2) (EQ_MP (SYM th) th1)
else failwith "PROVE_COMPLEMENTARY";;
Check equality under equivalence with "env" mapping for first term.
let rec test_eq eqv (tm1,tm2) env =
if is_comb tm1 && is_comb tm2 then
let f1,x1 = dest_comb tm1
and f2,x2 = dest_comb tm2 in
test_eq eqv (f1,f2) env && test_eq eqv (x1,x2) env
else if is_abs tm1 && is_abs tm2 then
let x1,bod1 = dest_abs tm1
and x2,bod2 = dest_abs tm2 in
let gv = genvar(type_of x1) in
test_eq eqv (vsubst[gv,x1] bod1,vsubst[gv,x2] bod2) env
else if is_var tm1 && can (rev_assoc tm1) env then
test_eq eqv (rev_assoc tm1 env,tm2) []
else can (prove_equal eqv) (tm1,tm2);;
let rec term_equivs eqv tm =
let l = equivalence_class eqv tm in
if l <> [tm] then l
else if is_comb tm then
let f,x = dest_comb tm in
allpairs (curry mk_comb) (term_equivs eqv f) (term_equivs eqv x)
else if is_abs tm then
let v,bod = dest_abs tm in
let gv = genvar(type_of v) in
map (fun t -> alpha v (mk_abs(gv,t))) (term_equivs eqv (vsubst [gv,v] bod))
else [tm];;
Replace "outer" universal variables with genvars. This is "outer" in the
second sense, i.e. universals not in scope of an existential or negation.
let rec GENSPEC th =
let tm = concl th in
if is_forall tm then
let v = bndvar(rand tm) in
let gv = genvar(type_of v) in
GENSPEC(SPEC gv th)
else if is_conj tm then
let th1,th2 = CONJ_PAIR th in
CONJ (GENSPEC th1) (GENSPEC th2)
else if is_disj tm then
let th1 = GENSPEC(ASSUME(lhand tm))
and th2 = GENSPEC(ASSUME(rand tm)) in
let th3 = DISJ1 th1 (concl th2)
and th4 = DISJ2 (concl th1) th2 in
DISJ_CASES th th3 th4
else th;;
Simple first-order matching.
let rec term_fmatch vars vtm ctm env =
if mem vtm vars then
if can (rev_assoc vtm) env then
term_fmatch vars (rev_assoc vtm env) ctm env
else if aconv vtm ctm then env else (ctm,vtm)::env
else if is_comb vtm && is_comb ctm then
let fv,xv = dest_comb vtm
and fc,xc = dest_comb ctm in
term_fmatch vars fv fc (term_fmatch vars xv xc env)
else if is_abs vtm && is_abs ctm then
let xv,bodv = dest_abs vtm
and xc,bodc = dest_abs ctm in
let gv = genvar(type_of xv) and gc = genvar(type_of xc) in
let gbodv = vsubst [gv,xv] bodv
and gbodc = vsubst [gc,xc] bodc in
term_fmatch (gv::vars) gbodv gbodc ((gc,gv)::env)
else if vtm = ctm then env
else failwith "term_fmatch";;
let rec check_consistency env =
match env with
[] -> true
| (c,v)::es -> forall (fun (c',v') -> v' <> v || c' = c) es;;
let separate_insts env =
let tyin = itlist (fun (c,v) -> type_match (type_of v) (type_of c))
env [] in
let ifn(c,v) = (inst tyin c,inst tyin v) in
let tmin = setify (map ifn env) in
if check_consistency tmin then (tmin,tyin)
else failwith "separate_insts";;
let first_order_match vars vtm ctm env =
let env' = term_fmatch vars vtm ctm env in
if can separate_insts env' then env' else failwith "first_order_match";;
let matchleaves =
let rec matchleaves vars vtm ctms env cont =
if is_conj vtm then
try matchleaves vars (rand vtm) ctms env cont
with Failure _ -> matchleaves vars (lhand vtm) ctms env cont
else if is_disj vtm then
matchleaves vars (lhand vtm) ctms env
(fun e -> matchleaves vars (rand vtm) ctms e cont)
else
tryfind (fun ctm -> cont (first_order_match vars vtm ctm env)) ctms in
fun vars vtm ctms env -> matchleaves vars vtm ctms env (fun e -> e);;
let rec REFUTE_LEAVES eqv cths th =
let tm = concl th in
if is_conj tm then
try REFUTE_LEAVES eqv cths (CONJUNCT1 th)
with Failure _ -> REFUTE_LEAVES eqv cths (CONJUNCT2 th)
else if is_disj tm then
let th1 = REFUTE_LEAVES eqv cths (ASSUME(lhand tm))
and th2 = REFUTE_LEAVES eqv cths (ASSUME(rand tm)) in
DISJ_CASES th th1 th2
else
tryfind (PROVE_COMPLEMENTARY eqv th) cths;;
Hence the Mizar "unifier" for given universal formula.
let negate tm = if is_neg tm then rand tm else mk_neg tm;;
let MIZAR_UNIFIER eqv ths th =
let gth = GENSPEC th in
let vtm = concl gth in
let vars = subtract (frees vtm) (frees(concl th))
and ctms = map (negate o concl) ths in
let allctms = itlist (union o term_equivs eqv) ctms [] in
let env = matchleaves vars vtm allctms [] in
let tmin,tyin = separate_insts env in
REFUTE_LEAVES eqv ths (PINST tyin tmin gth);;
let rec DISEQUALITIES ths =
match ths with
[] -> []
| th::oths ->
let t1,t2 = dest_eq (rand(concl th)) in
let f1,args1 = strip_comb t1
and f2,args2 = strip_comb t2 in
if f1 <> f2 || length args1 <> length args2
then th::(GSYM th)::(DISEQUALITIES oths) else
let zargs = zip args1 args2 in
let diffs = filter (fun (a1,a2) -> a1 <> a2) zargs in
if length diffs <> 1 then th::(GSYM th)::(DISEQUALITIES oths) else
let eths = map (fun (a1,a2) ->
if a1 = a2 then REFL a1 else ASSUME(mk_eq(a1,a2))) zargs in
let th1 = rev_itlist (fun x y -> MK_COMB(y,x)) eths (REFL f1) in
let th2 =
MP (GEN_REWRITE_RULE I [GSYM CONTRAPOS_THM] (DISCH_ALL th1)) th in
th::(GSYM th)::(DISEQUALITIES(th2::oths));;
let ATOMINEQUALITIES th1 th2 =
let t1 = concl th1 and t2' = concl th2 in
let t2 = dest_neg t2' in
let f1,args1 = strip_comb t1
and f2,args2 = strip_comb t2 in
if f1 <> f2 || length args1 <> length args2 then [] else
let zargs = zip args1 args2 in
let diffs = filter (fun (a1,a2) -> a1 <> a2) zargs in
if length diffs <> 1 then [] else
let eths = map (fun (a1,a2) ->
if a1 = a2 then REFL a1 else ASSUME(mk_eq(a1,a2))) zargs in
let th3 = rev_itlist (fun x y -> MK_COMB(y,x)) eths (REFL f1) in
let th4 = EQ_MP (TRANS th3 (EQF_INTRO th2)) th1 in
let th5 = NOT_INTRO(itlist (DISCH o mk_eq) diffs th4) in
[itlist PROVE_HYP [th1; th2] th5];;
let BASIC_MIZARBY ths =
try let nths,pths = partition (is_neg o concl) ths in
let peqs,pneqs = partition (is_eq o concl) pths
and neqs,nneqs = partition (is_eq o rand o concl) nths in
let tps = map (dest_eq o concl) peqs @
map (dest_eq o rand o concl) neqs in
let otms = itlist (fun (x,y) l -> x::y::l) tps [] in
let tms = term_universe(setify otms) in
let eqv = congruence_closure tms (map lazy_eq peqs) in
let eqprover th =
let s,t = dest_eq(rand(concl th)) in
let th' = PROVE_EQUIVALENT eqv s t in
EQ_MP (EQF_INTRO th) th'
and contrprover thp thn =
let th = PROVE_EQUIVALENT eqv (concl thp) (rand(concl thn)) in
EQ_MP (TRANS th (EQF_INTRO thn)) thp in
try tryfind eqprover neqs with Failure _ ->
try tryfind (fun thp -> tryfind (contrprover thp) nneqs) pneqs
with Failure _ ->
let new_neqs = unions(allpairs ATOMINEQUALITIES pneqs nneqs) in
let allths = pneqs @ nneqs @ peqs @ DISEQUALITIES(neqs @ new_neqs) in
tryfind (MIZAR_UNIFIER eqv allths)
(filter (is_forall o concl) allths)
with Failure _ -> failwith "BASIC_MIZARBY";;
let MIZAR_REFUTER ths = PRECHECKER_THEN BASIC_MIZARBY ths;;
The Mizar prover for getting a conclusion from hypotheses.
let MIZAR_BY =
let pth = TAUT `(~p ==> F) <=> p` and p_tm = `p:bool` in
fun ths tm ->
let tm' = mk_neg tm in
let th0 = ASSUME tm' in
let th1 = MIZAR_REFUTER (th0::ths) in
EQ_MP (INST [tm,p_tm] pth) (DISCH tm' th1);;
let MIZAR_RULE tm = MIZAR_BY [] tm;;
Some additional stuff for HOL.
let HOL_BY =
let BETASET_CONV =
TOP_DEPTH_CONV GEN_BETA_CONV THENC REWRITE_CONV[IN_ELIM_THM]
and BUILTIN_CONV tm =
try EQT_ELIM(NUM_REDUCE_CONV tm) with Failure _ ->
try EQT_ELIM(REAL_RAT_REDUCE_CONV tm) with Failure _ ->
try ARITH_RULE tm with Failure _ ->
try REAL_ARITH tm with Failure _ ->
failwith "BUILTIN_CONV" in
fun ths tm ->
try MIZAR_BY ths tm with Failure _ ->
try tryfind (fun th -> PART_MATCH I th tm) ths with Failure _ ->
try let avs,bod = strip_forall tm in
let gvs = map (genvar o type_of) avs in
let gtm = vsubst (zip gvs avs) bod in
let th = tryfind (fun th -> PART_MATCH I th gtm) ths in
let gth = GENL gvs th in
EQ_MP (ALPHA (concl gth) tm) gth
with Failure _ -> try
(let ths' = map BETA_RULE ths
and th' = TOP_DEPTH_CONV BETA_CONV tm in
let tm' = rand(concl th') in
try EQ_MP (SYM th') (tryfind (fun th -> PART_MATCH I th tm') ths)
with Failure _ -> try EQ_MP (SYM th') (BUILTIN_CONV tm')
with Failure _ ->
let ths'' = map (CONV_RULE BETASET_CONV) ths'
and th'' = TRANS th' (BETASET_CONV tm') in
EQ_MP (SYM th'') (MESON ths'' (rand(concl th''))))
with Failure _ -> failwith "HOL_BY";;
Standalone prover, breaking down an implication first.
let HOL_RULE tm =
try let l,r = dest_imp tm in
DISCH l (HOL_BY (CONJUNCTS(ASSUME l)) r)
with Failure _ -> HOL_BY [] tm;;
let prop_1 = time HOL_RULE
`p ==> q <=> ~q ==> ~p`;;
let prop_2 = time HOL_RULE
`~ ~p <=> p`;;
let prop_3 = time HOL_RULE
`~(p ==> q) ==> q ==> p`;;
let prop_4 = time HOL_RULE
`~p ==> q <=> ~q ==> p`;;
let prop_5 = time HOL_RULE
`(p \/ q ==> p \/ r) ==> p \/ (q ==> r)`;;
let prop_6 = time HOL_RULE
`p \/ ~p`;;
let prop_7 = time HOL_RULE
`p \/ ~ ~ ~p`;;
let prop_8 = time HOL_RULE
`((p ==> q) ==> p) ==> p`;;
let prop_9 = time HOL_RULE
`(p \/ q) /\ (~p \/ q) /\ (p \/ ~q) ==> ~(~q \/ ~q)`;;
let prop_10 = time HOL_RULE
`(q ==> r) /\ (r ==> p /\ q) /\ (p ==> q /\ r) ==> (p <=> q)`;;
let prop_11 = time HOL_RULE
`p <=> p`;;
let prop_12 = time HOL_RULE
`((p <=> q) <=> r) <=> (p <=> (q <=> r))`;;
let prop_13 = time HOL_RULE
`p \/ q /\ r <=> (p \/ q) /\ (p \/ r)`;;
let prop_14 = time HOL_RULE
`(p <=> q) <=> (q \/ ~p) /\ (~q \/ p)`;;
let prop_15 = time HOL_RULE
`p ==> q <=> ~p \/ q`;;
let prop_16 = time HOL_RULE
`(p ==> q) \/ (q ==> p)`;;
let prop_17 = time HOL_RULE
`p /\ (q ==> r) ==> s <=> (~p \/ q \/ s) /\ (~p \/ ~r \/ s)`;;
time HOL_RULE
`(f(f(f(f(f(x))))) = x) /\ (f(f(f(x))) = x) ==> (f(x) = x)`;;
time HOL_RULE
`(f(f(f(f(f(f(x)))))) = x) /\ (f(f(f(f(x)))) = x) ==> (f(f(x)) = x)`;;
time HOL_RULE `(f a = a) ==> (f(f a) = a)`;;
time HOL_RULE
`(a = f a) /\ ((g b (f a))=(f (f a))) /\ ((g a b)=(f (g b a)))
==> (g a b = a)`;;
time HOL_RULE
`((s(s(s(s(s(s(s(s(s(s(s(s(s(s(s a)))))))))))))))=a) /\
((s (s (s (s (s (s (s (s (s (s a))))))))))=a) /\
((s (s (s (s (s (s a))))))=a)
==> (a = s a)`;;
time HOL_RULE `(u = v) ==> (P u <=> P v)`;;
time HOL_RULE
`(b + c + d + e + f + g + h + i + j + k + l + m =
m + l + k + j + i + h + g + f + e + d + c + b)
==> (a + b + c + d + e + f + g + h + i + j + k + l + m =
a + m + l + k + j + i + h + g + f + e + d + c + b)`;;
time HOL_RULE
`(f(f(f(f(a)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(f(f(a)))) ==> P(f(a))`;;
time HOL_RULE
`((a = b) \/ (c = d)) /\ ((a = c) \/ (b = d)) ==> (a = d) \/ (b = c)`;;
time HOL_RULE
`(f(f(f(f(a:A)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b:A)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(a)) /\ ~(f(f(f(a))) = f(a)) ==> ?x. P(f(f(f(x))))`;;
time HOL_RULE
`(f(f(f(f(a:A)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b:A)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(a))
==> P(f(f(f(a))))`;;
time HOL_RULE
`(f(f(f(f(a:A)))) = a) /\ (f(f(f(f(f(f(a)))))) = a) /\
something(irrelevant) /\ (11 + 12 = 23) /\
(f(f(f(f(b:A)))) = f(f(f(f(f(f(f(f(f(f(c))))))))))) /\
~(otherthing) /\ ~(f(a) = a) /\ ~(f(b) = b) /\
P(f(a))
==> ?x. P(f(f(f(x))))`;;
time HOL_RULE
`(a = f a) /\ ((g b (f a))=(f (f a))) /\ ((g a b)=(f (g b a))) /\
(!x y. ~P (g x y))
==> ~P(a)`;;
time HOL_RULE
`(!x y. x + y = y + x) /\ (1 + 2 = x) /\ (x = 3) ==> (3 = 2 + 1)`;;
time HOL_RULE
`(!x:num y. x + y = y + x) ==> (1 + 2 = 2 + 1)`;;
time HOL_RULE
`(!x:num y. ~(x + y = y + x)) ==> ~(1 + 2 = 2 + 1)`;;
time HOL_RULE
`(1 + 2 = 2 + 1) ==> ?x:num y. x + y = y + x`;;
time HOL_RULE
`(1 + x = x + 1) ==> ?x:num y. x + y = y + x`;;
time (HOL_BY []) `?x. P x ==> !y. P y`;;
Testing the HOL extensions.
time HOL_RULE `1 + 1 = 2`;;
time HOL_RULE `(\x. x + 1) 2 = 2 + 1`;;
time HOL_RULE `!x. x < 2 ==> 2 * x <= 3`;;
time HOL_RULE `y IN {x | x < 2} <=> y < 2`;;
time HOL_RULE `(!x. (x = a) \/ x > a) ==> (1 + x = a) \/ 1 + x > a`;;
time HOL_RULE `(\(x,y). x + y)(1,2) + 5 = (1 + 2) + 5`;;
These and only these should go to MESON.
print_string "***** Now the following (only) should use MESON";
print_newline();;
time HOL_RULE `?x y. x = y`;;
time HOL_RULE `(!Y X Z. p(X,Y) /\ p(Y,Z) ==> p(X,Z)) /\
(!Y X Z. q(X,Y) /\ q(Y,Z) ==> q(X,Z)) /\
(!Y X. q(X,Y) ==> q(Y,X)) /\
(!X Y. p(X,Y) \/ q(X,Y))
==> p(a,b) \/ q(c,d)`;;
time HOL_BY [PAIR_EQ] `(1,2) IN {(x,y) | x < y} <=> 1 < 2`;;
HOL_BY [] `?x. !y. P x ==> P y`;;
|
428e7fc98e884fe50c9fef2c905a4124578b92f0ec1448a373767f877aa53fe6 | plumatic/fnhouse | handlers.clj | (ns fnhouse.handlers
"Utilities for turning a set of fnhouse handlers into an API description.
A fnhouse handler is an ordinary Clojure function that accepts a map
with two keys:
:request is a Ring-style request [1] (see fnhouse.schemas/Request)
:resources is an arbitrary map of resources (e.g., database handles).
By default, the name of the function specifies the path and method of
the handler (overridable with :path and :method metadata). The handler
must also be annotated with metadata describing schemas [2] for the required
resources, key portions of the request (uri-args, query-params and body),
and response bodies.
The simplest way to specify this data is to use a `defnk` from
plumbing.core [2], which can simultaneously destructure items from the
resources and request, and produce the necessary corresponding schema
annotations.
For example, here is an example of a minimal fnhouse handler:
(defnk unimaginative$GET
{:responses {200 String}}
[]
{:body \"Hello, world!\"})
which defines a GET handler at path /unimaginative, which always returns
the string \"Hello, world!\".
A more complex example that illustrates most of the features of fnhouse:
(s/defschema Idea {:name String :difficulty Double})
(defn hammock$:id$ideas$POST
\"Save a new idea to hammock :id, and return the list of existing ideas\"
{:responses {200 [Idea]}}
[[:request
[:uri-args id :- Long]
[:query-params {hard? :- Boolean false}]
body :- Idea]
[:resources ideas-atom]]
{:body ((swap! ideas-atom update-in [id] conj
(if hard? (update-in idea [:difficulty] * 2) idea))
id)})
This is a handler that accepts POSTS at URIs like /hammock/12/ideas,
with an optional Boolean query-param hard?, and a body that matches the
Idea schema, adds the Idea to hammock 12, and returns the list of all
current ideas at hammock 12. The state of ideas is maintained in ideas-atom,
which is explicitly passed in as a resource (assigned the default schema
of s/Any by defnk).
This handler can be called as an ordinary Clojure function (i.e., in tests),
and runtime schema checking can be turned on following instructions in [2].
The handler can also be turned into an API description by calling nss->handlers-fn
(or related functions) and then passing in the map of resources, like:
((nss->handlers-fn {\"\" 'my-namespace})
{:ideas-atom (atom {})})
With this API description, you can do many things. Out of the box, there is support
for:
- Turning the full API into a normal Ring handler using fnhouse.routes
- Enabling schema checking and coercion using fnhouse.middleware
(so, e.g., the Long id in uri-args is automatically parsed for you)
- Producing minimal API docs
- Generating model classes and client libraries for ClojureScript and
Objective C using, e.g., coax [4]
For a complete example, see the included 'examples/guesthouse' project.
[1] -clojure
[2]
[3]
[4] "
(:use plumbing.core)
(:require
[clojure.string :as str]
[schema.core :as s]
[plumbing.fnk.pfnk :as pfnk]
[plumbing.fnk.schema :as fnk-schema]
[fnhouse.schemas :as schemas]
[fnhouse.routes :as routes])
(:import [clojure.lang Symbol Var]))
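(comment
  ;; Hedged sketch of the workflow described in the namespace docstring above.
  ;; `guesthouse.core` and `:ideas-atom` are illustrative names only.
  (def make-api (nss->handlers-fn {"" 'guesthouse.core}))
  (def api (make-api {:ideas-atom (atom {})}))
  ;; `api` is now a seq of AnnotatedHandlers that fnhouse.routes can turn
  ;; into an ordinary Ring handler.
  )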
(def ^:dynamic ^String *path-separator*
"The string to be used as a path separator in fnhouse fn names."
"$")
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Private schemas
(s/defschema Resources
"A map of external resoures to be injected into a fnhouse handler"
schemas/KeywordMap)
(s/defschema AnnotatedProtoHandler
"A bundle of a raw fnhouse handler with its HandlerInfo"
{:info schemas/HandlerInfo
:proto-handler (s/=> schemas/Response
{:request schemas/Request
:resources Resources})})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Extracting handler info from a fnhouse handler var
(defn ^:private ensure-leading-slash [^String s]
(if (.startsWith s "/")
s
(str "/" s)))
(s/defn ^:private path-and-method
"Extract the path and method from the var name, or :path & :method
overrides in metadata. (You must pass both overrides, or neither)."
[var :- Var]
(or (not-empty (select-keys (meta var) [:path :method]))
(let [var-name (-> var meta (safe-get :name) name)
last-idx (.lastIndexOf var-name *path-separator*)]
{:path (-> var-name (subs 0 last-idx) (str/replace *path-separator* "/") ensure-leading-slash)
:method (-> var-name (subs (inc last-idx)) str/lower-case keyword)})))
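;; Illustration (added): with the default "$" separator, a handler var named
;; hammock$:id$ideas$POST maps to {:path "/hammock/:id/ideas", :method :post},
;; matching the naming convention described in the namespace docstring.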
(s/defn ^:private source-map
[var :- Var]
(-> (meta var)
(select-keys [:line :column :file :ns :name])
(update-in [:name] name)
(update-in [:ns] str)))
(defnk source-map->str [ns name file line]
(format "%s/%s (%s:%s)" ns name file line))
(defn ^:private default-map-schema [schema]
(if (= schema s/Any) {s/Keyword String} schema))
(s/defn var->handler-info :- schemas/HandlerInfo
"Extract the handler info for the function referred to by the specified var."
[var :- Var
extra-info-fn]
(letk [[method path] (path-and-method var)
[{doc ""} {responses {}}] (meta var)
[{resources {}} {request {}}] (pfnk/input-schema @var)
[{uri-args s/Any} {query-params s/Any} {body nil}] request]
(let [source-map (source-map var)
explicit-uri-args (dissoc (default-map-schema uri-args) s/Keyword)
raw-declared-args (routes/uri-arg-ks path)
declared-args (set raw-declared-args)
undeclared-args (remove declared-args (keys explicit-uri-args))
info {:path path
:method method
:description doc
:request {:query-params (default-map-schema query-params)
:body body
:uri-args (merge
(map-from-keys (constantly String) declared-args)
explicit-uri-args)}
:responses responses
:resources resources
:source-map source-map
:annotations (extra-info-fn var)}]
(when-let [error (s/check schemas/HandlerInfo info)]
(throw (IllegalArgumentException. (format "%s in %s" error (source-map->str source-map)))))
(fnk-schema/assert-iae
(empty? undeclared-args)
"Undeclared args %s in %s" (vec undeclared-args) (source-map->str source-map))
(fnk-schema/assert-iae
(or (not (boolean body)) (boolean (#{:post :put :patch} method)))
"Body only allowed in post or put method in %s" (source-map->str source-map))
(fnk-schema/assert-iae
(every? #{:resources :request s/Keyword} (keys (pfnk/input-schema @var)))
"Disallowed non- :request or :resources bindings in %s: %s"
(source-map->str source-map) (keys (pfnk/input-schema @var)))
(fnk-schema/assert-iae
(apply distinct? ::sentinel raw-declared-args) "Duplicate uri-args %s in %s"
(vec raw-declared-args) (source-map->str source-map))
info)))
(s/defn var->annotated-handler :- AnnotatedProtoHandler
"Take a Var corresponding to a fnhouse handler and return an AnnotatedProtoHandler."
[var :- Var
extra-info-fn]
{:info (var->handler-info var extra-info-fn)
:proto-handler (pfnk/fn->fnk (fn redefable [m] (@var m))
(pfnk/io-schemata @var))})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Extracting ProtoHandlers and injecting resources to make them Handlers
(s/defn curry-resources :- (s/=> schemas/API Resources)
"Take a sequence of AnnotatedProtoHandlers and return a function from resources
to a set of normal AnnotatedHandlers with appropriate resources injected.
Each handler only gets the specific top-level resources it asks for in its
schema."
[proto-handlers :- [AnnotatedProtoHandler]]
(pfnk/fn->fnk
(fn [all-resources]
(for [proto-handler proto-handlers]
(letk [[proto-handler info] proto-handler]
(let [resources (select-keys all-resources (keys (:resources (pfnk/input-schema proto-handler))))]
{:info info
:handler (pfnk/fn->fnk
(fn [request] (proto-handler {:request request :resources resources}))
(update-in (pfnk/io-schemata proto-handler) [0] :request {}))}))))
[(->> proto-handlers
(map #(:resources (pfnk/input-schema (:proto-handler %)) {}))
(reduce fnk-schema/union-input-schemata {}))
schemas/API]))
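;; Illustration (added): if one handler destructures [:resources ideas-atom]
;; and another a hypothetical [:resources db], the curried function hands each
;; handler only the resource keys it declares; extra keys in the resource map
;; are never injected.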
(s/defn ^:private fnhouse-handler? [var :- Var]
(and (fn? @var) (:responses (meta var))))
(s/defn apply-path-prefix :- schemas/HandlerInfo
"Add a prefix to handler-info, which must consist of one or more complete path
segments without URI args."
[handler-info :- schemas/HandlerInfo prefix :- String]
(fnk-schema/assert-iae
(empty? (routes/uri-arg-ks prefix)) "Path prefix %s cannot contain uri args" prefix)
(update-in handler-info [:path] (partial str (ensure-leading-slash prefix))))
(s/defn ns->handler-fns :- [AnnotatedProtoHandler]
"Take a namespace, return a seq of all the AnnotatedProtoHandlers corresponding to
fnhouse handlers in that namespace."
[ns-sym :- Symbol
extra-info-fn]
(for [var (vals (ns-interns ns-sym))
:when (fnhouse-handler? var)]
(var->annotated-handler var extra-info-fn)))
(s/defn nss->proto-handlers :- [AnnotatedProtoHandler]
"Take a map from path prefix string to namespace symbol.
Sucks up all the fnhouse handlers in each namespace, and prefixes each handler's
path with the corresponding path prefix. Finally, returns the resulting set of
handlers."
[prefix->ns-sym :- {(s/named String "path prefix")
(s/named Symbol "namespace")}
& [extra-info-fn :- (s/maybe (s/=> s/Any Var))]]
(->> prefix->ns-sym
(mapcat (fn [[prefix ns-sym]]
(cond->> (ns->handler-fns ns-sym (or extra-info-fn (constantly nil)))
(seq prefix) (map (fn [annotated-handler]
(update-in annotated-handler
[:info] apply-path-prefix prefix))))))))
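;; Illustration (added): a prefix map such as {"v1" 'my.handlers} (hypothetical
;; namespace) would expose a handler defined at /unimaginative under
;; /v1/unimaginative.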
(s/defn nss->handlers-fn :- (s/=> schemas/API Resources)
"Partially build an API from a map of prefix string to namespace symbols.
Returns a function that takes in the resources needed to construct the API,
and gives a seq of AnnotatedHandlers with the resources partialed in."
[prefix->ns-sym :- {(s/named String "path prefix")
(s/named Symbol "namespace")}
& [extra-info-fn :- (s/=> s/Any Var)]]
(-> prefix->ns-sym
(nss->proto-handlers extra-info-fn)
curry-resources))
| null | https://raw.githubusercontent.com/plumatic/fnhouse/e27ec5649a91470b63285ef9e28c6a77494f7ffd/src/fnhouse/handlers.clj | clojure |
Private schemas
Extracting handler info from a fnhouse handler var
| (ns fnhouse.handlers
"Utilities for turning a set of fnhouse handlers into an API description.
A fnhouse handler is an ordinary Clojure function that accepts a map
with two keys:
:request is a Ring-style request [1] (see fnhouse.schemas/Request)
:resources is an arbitrary map of resources (e.g., database handles).
By default, the name of the function specifies the path and method of
the handler (overridable with :path and :method metadata). The handler
must also be annotated with metadata describing schemas [2] for the required
resources, key portions of the request (uri-args, query-params and body),
and response bodies.
The simplest way to specify this data is to use a `defnk` from
plumbing.core [2], which can simultaneously destructure items from the
resources and request, and produce the necessary corresponding schema
annotations.
For example, here is an example of a minimal fnhouse handler:
(defnk unimaginative$GET
{:responses {200 String}}
[]
{:body \"Hello, world!\"})
which defines a GET handler at path /unimaginative, which always returns
the string \"Hello, world!\".
A more complex example that illustrates most of the features of fnhouse:
(s/defschema Idea {:name String :difficulty Double})
(defn hammock$:id$ideas$POST
\"Save a new idea to hammock :id, and return the list of existing ideas\"
{:responses {200 [Idea]}}
[[:request
[:uri-args id :- Long]
[:query-params {hard? :- Boolean false}]
body :- Idea]
[:resources ideas-atom]]
{:body ((swap! ideas-atom update-in [id] conj
(if hard? (update-in idea [:difficulty] * 2) idea))
id)})
This is a handler that accepts POSTS at URIs like /hammock/12/ideas,
with an optional Boolean query-param hard?, and a body that matches the
Idea schema, adds the Idea to hammock 12, and returns the list of all
current ideas at hammock 12. The state of ideas is maintained in ideas-atom,
which is explicitly passed in as a resource (assigned the default schema
of s/Any by defnk).
This handler can be called as an ordinary Clojure function (i.e., in tests),
and runtime schema checking can be turned on following instructions in [2].
The handler can also be turned into an API description by calling nss->handlers-fn
(or related functions) and then passing in the map of resources, like:
((nss->handlers-fn {\"\" 'my-namespace})
{:ideas-atom (atom {})})
With this API description, you can do many things. Out of the box, there is support
for:
- Turning the full API into a normal Ring handler using fnhouse.routes
- Enabling schema checking and coercion using fnhouse.middleware
(so, e.g., the Long id in uri-args is automatically parsed for you)
- Producing minimal API docs
- Generating model classes and client libraries for ClojureScript and
Objective C using, e.g., coax [4]
For a complete example, see the included 'examples/guesthouse' project.
[1] -clojure
[2]
[3]
[4] "
(:use plumbing.core)
(:require
[clojure.string :as str]
[schema.core :as s]
[plumbing.fnk.pfnk :as pfnk]
[plumbing.fnk.schema :as fnk-schema]
[fnhouse.schemas :as schemas]
[fnhouse.routes :as routes])
(:import [clojure.lang Symbol Var]))
(def ^:dynamic ^String *path-separator*
"The string to be used as a path separator in fnhouse fn names."
"$")
(s/defschema Resources
"A map of external resoures to be injected into a fnhouse handler"
schemas/KeywordMap)
(s/defschema AnnotatedProtoHandler
"A bundle of a raw fnhouse handler with its HandlerInfo"
{:info schemas/HandlerInfo
:proto-handler (s/=> schemas/Response
{:request schemas/Request
:resources Resources})})
(defn ^:private ensure-leading-slash [^String s]
(if (.startsWith s "/")
s
(str "/" s)))
(s/defn ^:private path-and-method
"Extract the path and method from the var name, or :path & :method
overrides in metadata. (You must pass both overrides, or neither)."
[var :- Var]
(or (not-empty (select-keys (meta var) [:path :method]))
(let [var-name (-> var meta (safe-get :name) name)
last-idx (.lastIndexOf var-name *path-separator*)]
{:path (-> var-name (subs 0 last-idx) (str/replace *path-separator* "/") ensure-leading-slash)
:method (-> var-name (subs (inc last-idx)) str/lower-case keyword)})))
(s/defn ^:private source-map
[var :- Var]
(-> (meta var)
(select-keys [:line :column :file :ns :name])
(update-in [:name] name)
(update-in [:ns] str)))
(defnk source-map->str [ns name file line]
(format "%s/%s (%s:%s)" ns name file line))
(defn ^:private default-map-schema [schema]
(if (= schema s/Any) {s/Keyword String} schema))
(s/defn var->handler-info :- schemas/HandlerInfo
"Extract the handler info for the function referred to by the specified var."
[var :- Var
extra-info-fn]
(letk [[method path] (path-and-method var)
[{doc ""} {responses {}}] (meta var)
[{resources {}} {request {}}] (pfnk/input-schema @var)
[{uri-args s/Any} {query-params s/Any} {body nil}] request]
(let [source-map (source-map var)
explicit-uri-args (dissoc (default-map-schema uri-args) s/Keyword)
raw-declared-args (routes/uri-arg-ks path)
declared-args (set raw-declared-args)
undeclared-args (remove declared-args (keys explicit-uri-args))
info {:path path
:method method
:description doc
:request {:query-params (default-map-schema query-params)
:body body
:uri-args (merge
(map-from-keys (constantly String) declared-args)
explicit-uri-args)}
:responses responses
:resources resources
:source-map source-map
:annotations (extra-info-fn var)}]
(when-let [error (s/check schemas/HandlerInfo info)]
(throw (IllegalArgumentException. (format "%s in %s" error (source-map->str source-map)))))
(fnk-schema/assert-iae
(empty? undeclared-args)
"Undeclared args %s in %s" (vec undeclared-args) (source-map->str source-map))
(fnk-schema/assert-iae
(or (not (boolean body)) (boolean (#{:post :put :patch} method)))
"Body only allowed in post or put method in %s" (source-map->str source-map))
(fnk-schema/assert-iae
(every? #{:resources :request s/Keyword} (keys (pfnk/input-schema @var)))
"Disallowed non- :request or :resources bindings in %s: %s"
(source-map->str source-map) (keys (pfnk/input-schema @var)))
(fnk-schema/assert-iae
(apply distinct? ::sentinel raw-declared-args) "Duplicate uri-args %s in %s"
(vec raw-declared-args) (source-map->str source-map))
info)))
(s/defn var->annotated-handler :- AnnotatedProtoHandler
"Take a Var corresponding to a fnhouse handler and return an AnnotatedProtoHandler."
[var :- Var
extra-info-fn]
{:info (var->handler-info var extra-info-fn)
:proto-handler (pfnk/fn->fnk (fn redefable [m] (@var m))
(pfnk/io-schemata @var))})
Extracting ProtoHandlers and injecting resources to make them Handlers
(s/defn curry-resources :- (s/=> schemas/API Resources)
"Take a sequence of AnnotatedProtoHandlers and return a function from resources
to a set of normal AnnotatedHandlers with appropriate resources injected.
Each handler only gets the specific top-level resources it asks for in its
schema."
[proto-handlers :- [AnnotatedProtoHandler]]
(pfnk/fn->fnk
(fn [all-resources]
(for [proto-handler proto-handlers]
(letk [[proto-handler info] proto-handler]
(let [resources (select-keys all-resources (keys (:resources (pfnk/input-schema proto-handler))))]
{:info info
:handler (pfnk/fn->fnk
(fn [request] (proto-handler {:request request :resources resources}))
(update-in (pfnk/io-schemata proto-handler) [0] :request {}))}))))
[(->> proto-handlers
(map #(:resources (pfnk/input-schema (:proto-handler %)) {}))
(reduce fnk-schema/union-input-schemata {}))
schemas/API]))
(s/defn ^:private fnhouse-handler? [var :- Var]
(and (fn? @var) (:responses (meta var))))
(s/defn apply-path-prefix :- schemas/HandlerInfo
"Add a prefix to handler-info, which must consist of one or more complete path
segments without URI args."
[handler-info :- schemas/HandlerInfo prefix :- String]
(fnk-schema/assert-iae
(empty? (routes/uri-arg-ks prefix)) "Path prefix %s cannot contain uri args" prefix)
(update-in handler-info [:path] (partial str (ensure-leading-slash prefix))))
(s/defn ns->handler-fns :- [AnnotatedProtoHandler]
"Take a namespace, return a seq of all the AnnotatedProtoHandlers corresponding to
fnhouse handlers in that namespace."
[ns-sym :- Symbol
extra-info-fn]
(for [var (vals (ns-interns ns-sym))
:when (fnhouse-handler? var)]
(var->annotated-handler var extra-info-fn)))
(s/defn nss->proto-handlers :- [AnnotatedProtoHandler]
"Take a map from path prefix string to namespace symbol.
Sucks up all the fnhouse handlers in each namespace, and prefixes each handler's
path with the corresponding path prefix. Finally, returns the resulting set of
handlers."
[prefix->ns-sym :- {(s/named String "path prefix")
(s/named Symbol "namespace")}
& [extra-info-fn :- (s/maybe (s/=> s/Any Var))]]
(->> prefix->ns-sym
(mapcat (fn [[prefix ns-sym]]
(cond->> (ns->handler-fns ns-sym (or extra-info-fn (constantly nil)))
(seq prefix) (map (fn [annotated-handler]
(update-in annotated-handler
[:info] apply-path-prefix prefix))))))))
(s/defn nss->handlers-fn :- (s/=> schemas/API Resources)
"Partially build an API from a map of prefix string to namespace symbols.
Returns a function that takes in the resources needed to construct the API,
and gives a seq of AnnotatedHandlers with the resources partialed in."
[prefix->ns-sym :- {(s/named String "path prefix")
(s/named Symbol "namespace")}
& [extra-info-fn :- (s/=> s/Any Var)]]
(-> prefix->ns-sym
(nss->proto-handlers extra-info-fn)
curry-resources))
|
8942b79175a1093cce3a9c1b23051a8b288438eeba8b3ea27cd2e59957ef1593 | McCLIM/McCLIM | package.lisp | (defpackage #:clim-null
(:use #:clim #:clim-lisp #:clim-backend)
(:import-from #:climi #:maybe-funcall))
| null | https://raw.githubusercontent.com/McCLIM/McCLIM/c079691b0913f8306ceff2620b045b6e24e2f745/Backends/Null/package.lisp | lisp | (defpackage #:clim-null
(:use #:clim #:clim-lisp #:clim-backend)
(:import-from #:climi #:maybe-funcall))
|
|
0acd119a3d1fae0e8bb517f07d28d35257523a2542ca80b5e583c8da1637fc3d | Rotaerk/vulkanTest | Framebuffer.hs | module Graphics.VulkanAux.Framebuffer where
import Data.Reflection
import Graphics.Vulkan.Core_1_0
import Graphics.Vulkan.Marshal.Create
import Graphics.VulkanAux.Resource
vkaFramebufferResource :: Given VkDevice => VkaResource VkFramebufferCreateInfo VkFramebuffer
vkaFramebufferResource = vkaSimpleParamResource_ vkCreateFramebuffer vkDestroyFramebuffer "vkCreateFramebuffer" given
initStandardFramebufferCreateInfo :: CreateVkStruct VkFramebufferCreateInfo '["sType", "pNext"] ()
initStandardFramebufferCreateInfo =
set @"sType" VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO &*
set @"pNext" VK_NULL
| null | https://raw.githubusercontent.com/Rotaerk/vulkanTest/beafd3cc27ba60561b9e76cd0058e30949a5affb/sandbox/sandbox/src/Graphics/VulkanAux/Framebuffer.hs | haskell | module Graphics.VulkanAux.Framebuffer where
import Data.Reflection
import Graphics.Vulkan.Core_1_0
import Graphics.Vulkan.Marshal.Create
import Graphics.VulkanAux.Resource
vkaFramebufferResource :: Given VkDevice => VkaResource VkFramebufferCreateInfo VkFramebuffer
vkaFramebufferResource = vkaSimpleParamResource_ vkCreateFramebuffer vkDestroyFramebuffer "vkCreateFramebuffer" given
initStandardFramebufferCreateInfo :: CreateVkStruct VkFramebufferCreateInfo '["sType", "pNext"] ()
initStandardFramebufferCreateInfo =
set @"sType" VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO &*
set @"pNext" VK_NULL
|
|
ce542a71d3cd33541078622b7c2c3d4611e1996595de45eadfb61b3eb385eed1 | BinaryAnalysisPlatform/bap-frames | frame_enum.mli | open Core_kernel
module type Enumerated = sig
type t
val rank : t -> int
val all : t list
end
(** Replaces [@@deriving enum] interface from ppx_deriving, that
treats variants with argument-less constructors as
enumerations with an integer value assigned to every constructor. *)
module type Enumerable = sig
type t
val to_enum : t -> int
val of_enum : int -> t option
val max : int
val min : int
end
module type Substitution = sig
include Enumerated
(** [subs] is a list of substitutions [(t, ind); ...], where
an explicit index [ind] is set to a particular variant [t]. *)
val subs : (t * int) list
end
module Make(A : Enumerated) : Enumerable with type t := A.t
module Make_substitute(S : Substitution) : Enumerable with type t := S.t
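(* Illustrative sketch (added; not part of the original interface): a plain
   variant type can be given the Enumerable interface via Make, e.g.

     type color = Red | Green | Blue
     module Color = struct
       type t = color
       let rank = function Red -> 0 | Green -> 1 | Blue -> 2
       let all = [Red; Green; Blue]
     end
     module Color_enum = Make(Color)

   so that Color_enum.of_enum 1 is expected to yield Some Green. *)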
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap-frames/c9f957492c13106171b04914dab4badc9ff5bcad/lib/frame_enum.mli | ocaml | * Replaces [@@deriving enum] interface from ppx_deriving, that
treats variants with argument-less constructors as
enumerations with an integer value assigned to every constructor. | open Core_kernel
module type Enumerated = sig
type t
val rank : t -> int
val all : t list
end
module type Enumerable = sig
type t
val to_enum : t -> int
val of_enum : int -> t option
val max : int
val min : int
end
module type Substitution = sig
include Enumerated
* [ subs ] is a list of substitions [ ( t , ) ; ... ] , where
an explicit index [ ind ] is set to a particular variant [ t ] .
an explicit index [ind] is set to a particular variant [t]. *)
val subs : (t * int) list
end
module Make(A : Enumerated) : Enumerable with type t := A.t
module Make_substitute(S : Substitution) : Enumerable with type t := S.t
|
da0f7e9989efed11bf272fa599faee11c4b2e42c396d3b1972bb29d7b5913cf5 | overtone/overtone | fx.clj | (ns
^{:doc "Audio effects library"
:author "Jeff Rose"}
overtone.studio.fx
(:use [overtone.libs.event]
[overtone.sc.synth]
[overtone.sc.ugens]))
(def BITS 32)
(defsynth fx-noise-gate
"A noise gate only lets audio above a certain amplitude threshold through. Often used to filter out hardware circuit noise or unwanted background noise."
[bus 0 threshold 0.4
slope-below 1 slope-above 0.1
clamp-time 0.01 relax-time 0.1]
(let [source (in bus)
gated (compander source source threshold
slope-below slope-above
clamp-time relax-time)]
(replace-out bus gated)))
(defsynth fx-compressor
"A compressor clamps audio signals above an amplitude threshold down, compressing the dynamic range. Used to normalize a poppy sound so that the amplitude is more consistent, or as a sound warping effect. The clamp time determines the delay from when the signal is detected as going over the threshold to when clamping begins, and the slope determines the rate at which the clamp occurs."
[bus 0 threshold 0.2
slope-below 1 slope-above 0.5
clamp-time 0.01 relax-time 0.01]
(let [source (in bus)]
(replace-out bus
(compander source source threshold
slope-below slope-above
clamp-time relax-time))))
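;; Note added for illustration: with the defaults above, slope-above 0.5 gives
;; roughly 2:1 compression once the signal rises past threshold 0.2, while
;; slope-below 1 leaves quieter material untouched; clamp-time and relax-time
;; (10 ms each) control how quickly the gain reduction engages and releases.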
(defsynth fx-limiter
"A limiter sets a maximum threshold for the audio amplitude, and anything above this threshold is quickly clamped down to within it."
[bus 0 threshold 0.2
slope-below 1 slope-above 0.1
clamp-time 0.01 relax-time 0.01]
(let [source (in bus)]
(replace-out bus
(compander source source threshold
slope-below slope-above
clamp-time relax-time))))
(defsynth fx-sustainer
[bus 0 threshold 0.2
slope-below 1 slope-above 0.5
clamp-time 0.01 relax-time 0.01]
(let [source (in bus)]
(replace-out bus
(compander source source threshold
slope-below slope-above
clamp-time relax-time))))
(defsynth fx-freeverb
"Uses the free-verb ugen."
[bus 0 wet-dry 0.5 room-size 0.5 dampening 0.5]
(let [source (in bus)
verbed (free-verb source wet-dry room-size dampening)]
(replace-out bus (* 1.4 verbed))))
(defsynth fx-reverb
"Implements Schroeder reverb using delays."
[bus 0]
(let [input (in bus)
delrd (local-in 4)
output (+ input [(first delrd) (second delrd)])
sig [(+ (first output) (second output)) (- (first output) (second output))
(+ (nth delrd 2) (nth delrd 3)) (- (nth delrd 2) (nth delrd 3))]
sig [(+ (nth sig 0) (nth sig 2)) (+ (nth sig 1) (nth sig 3))
(- (nth sig 0) (nth sig 2)) (- (nth sig 0) (nth sig 2))]
sig (* sig [0.4 0.37 0.333 0.3])
deltimes (- (* [101 143 165 177] 0.001) (control-dur))
lout (local-out (delay-c sig deltimes deltimes))]
(replace-out bus output)))
(defsynth fx-echo
[bus 0 max-delay 1.0 delay-time 0.4 decay-time 2.0]
(let [source (in bus)
echo (comb-n source max-delay delay-time decay-time)]
(replace-out bus (pan2 (+ echo source) 0))))
(defsynth fx-chorus
[bus 0 rate 0.002 depth 0.01]
(let [src (in bus)
dub-depth (* 2 depth)
rates [rate (+ rate 0.001)]
osc (+ dub-depth (* dub-depth (sin-osc:kr rates)))
dly-a (delay-l src 0.3 osc)
sig (apply + src dly-a)]
(replace-out bus (* 0.3 sig))))
(defsynth fx-distortion
[bus 0 boost 4 level 0.01]
(let [src (in bus)]
(replace-out bus (distort (* boost (clip2 src level))))))
; Equation for distortion:
; k = 2*amount/(1-amount)
; f(x) = (1+k)*x/(1+k*abs(x))
(defsynth fx-distortion2
[bus 0 amount 0.5]
(let [src (in bus)
k (/ (* 2 amount) (- 1 amount))
snd (/ (* src (+ 1 k)) (+ 1 (* k (abs src))))]
(replace-out bus snd)))
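;; Worked example (added for illustration) of the curve above: with amount 0.5,
;; k = (* 2 0.5) / (- 1 0.5) = 2, so an input sample of 0.25 maps to
;; (3 * 0.25) / (1 + 2 * 0.25) = 0.5 while a full-scale 1.0 maps to 3/3 = 1.0:
;; quiet material is pushed up, peaks stay bounded.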
(defsynth fx-bitcrusher
[in-bus 0]
(let [src (in in-bus)
resolution (/ (Math/pow 2 (dec BITS)) 2)
crushed (floor (/ (+ 0.5 (* src resolution)) resolution))]
(replace-out in-bus crushed)))
(defsynth fx-distortion-tubescreamer
[bus 0 hi-freq 720.484 low-freq 723.431 hi-freq2 1 gain 4 threshold 0.4]
(let [src (in bus)
f1 (* (hpf src hi-freq) gain)
f2 (lpf (clip2 f1 threshold) low-freq)
f3 (hpf f2 hi-freq2)]
(replace-out bus f3)))
(defsynth fx-rlpf
[bus 0 cutoff 20000 res 0.6]
(let [src (in bus)]
(replace-out bus (rlpf src cutoff res))))
(defsynth fx-rhpf
[bus 0 cutoff 2 res 0.6]
(let [src (in bus)]
(replace-out bus (rhpf src cutoff res))))
(def MAX-DELAY 4)
(defsynth fx-feedback
[bus 0 delay-t 0.5 decay 0.5]
(let [input (in bus)
fb-in (local-in 1)
snd (* decay (leak-dc (delay-n fb-in MAX-DELAY (min MAX-DELAY delay-t))))
snd (+ input snd)
fb-out (local-out snd)
snd (limiter snd 0.8)]
(replace-out bus snd)))
(defsynth fx-feedback-distortion
[bus 0 delay-t 0.5 noise-rate 0.5 boost 1.1 decay 0.8]
(let [noiz (mul-add (lf-noise0:kr noise-rate) 2 2.05)
input (in bus)
fb-in (local-in 1)
snd (* boost (delay-n fb-in MAX-DELAY noiz))
snd (+ input (leak-dc snd))
snd (clip:ar (distort snd) 0 0.9)
fb-out (local-out (* decay snd))]
(replace-out bus snd)))
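;; Note added for illustration: the two feedback effects above rely on the
;; local-in/local-out pair, which feeds the delayed signal of one block back
;; into the next; MAX-DELAY (4 s) sizes the delay-n buffer, and fx-feedback
;; additionally clamps delay-t with (min MAX-DELAY delay-t) so requests longer
;; than the buffer cannot overrun it.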
| null | https://raw.githubusercontent.com/overtone/overtone/9afb513297662716860a4010bc76a0e73c65ca37/src/overtone/studio/fx.clj | clojure | Equation for distortion:
k = 2*amount/(1-amount) | (ns
^{:doc "Audio effects library"
:author "Jeff Rose"}
overtone.studio.fx
(:use [overtone.libs.event]
[overtone.sc.synth]
[overtone.sc.ugens]))
(def BITS 32)
(defsynth fx-noise-gate
"A noise gate only lets audio above a certain amplitude threshold through. Often used to filter out hardware circuit noise or unwanted background noise."
[bus 0 threshold 0.4
slope-below 1 slope-above 0.1
clamp-time 0.01 relax-time 0.1]
(let [source (in bus)
gated (compander source source threshold
slope-below slope-above
clamp-time relax-time)]
(replace-out bus gated)))
(defsynth fx-compressor
"A compressor clamps audio signals above an amplitude threshold down, compressing the dynamic range. Used to normalize a poppy sound so that the amplitude is more consistent, or as a sound warping effect. The clamp time determines the delay from when the signal is detected as going over the threshold to when clamping begins, and the slope determines the rate at which the clamp occurs."
[bus 0 threshold 0.2
slope-below 1 slope-above 0.5
clamp-time 0.01 relax-time 0.01]
(let [source (in bus)]
(replace-out bus
(compander source source threshold
slope-below slope-above
clamp-time relax-time))))
(defsynth fx-limiter
"A limiter sets a maximum threshold for the audio amplitude, and anything above this threshold is quickly clamped down to within it."
[bus 0 threshold 0.2
slope-below 1 slope-above 0.1
clamp-time 0.01 relax-time 0.01]
(let [source (in bus)]
(replace-out bus
(compander source source threshold
slope-below slope-above
clamp-time relax-time))))
(defsynth fx-sustainer
[bus 0 threshold 0.2
slope-below 1 slope-above 0.5
clamp-time 0.01 relax-time 0.01]
(let [source (in bus)]
(replace-out bus
(compander source source threshold
slope-below slope-above
clamp-time relax-time))))
(defsynth fx-freeverb
"Uses the free-verb ugen."
[bus 0 wet-dry 0.5 room-size 0.5 dampening 0.5]
(let [source (in bus)
verbed (free-verb source wet-dry room-size dampening)]
(replace-out bus (* 1.4 verbed))))
(defsynth fx-reverb
"Implements Schroeder reverb using delays."
[bus 0]
(let [input (in bus)
delrd (local-in 4)
output (+ input [(first delrd) (second delrd)])
sig [(+ (first output) (second output)) (- (first output) (second output))
(+ (nth delrd 2) (nth delrd 3)) (- (nth delrd 2) (nth delrd 3))]
sig [(+ (nth sig 0) (nth sig 2)) (+ (nth sig 1) (nth sig 3))
(- (nth sig 0) (nth sig 2)) (- (nth sig 0) (nth sig 2))]
sig (* sig [0.4 0.37 0.333 0.3])
deltimes (- (* [101 143 165 177] 0.001) (control-dur))
lout (local-out (delay-c sig deltimes deltimes))]
(replace-out bus output)))
(defsynth fx-echo
[bus 0 max-delay 1.0 delay-time 0.4 decay-time 2.0]
(let [source (in bus)
echo (comb-n source max-delay delay-time decay-time)]
(replace-out bus (pan2 (+ echo source) 0))))
(defsynth fx-chorus
[bus 0 rate 0.002 depth 0.01]
(let [src (in bus)
dub-depth (* 2 depth)
rates [rate (+ rate 0.001)]
osc (+ dub-depth (* dub-depth (sin-osc:kr rates)))
dly-a (delay-l src 0.3 osc)
sig (apply + src dly-a)]
(replace-out bus (* 0.3 sig))))
(defsynth fx-distortion
[bus 0 boost 4 level 0.01]
(let [src (in bus)]
(replace-out bus (distort (* boost (clip2 src level))))))
; f(x) = (1+k)*x/(1+k*abs(x))
(defsynth fx-distortion2
[bus 0 amount 0.5]
(let [src (in bus)
k (/ (* 2 amount) (- 1 amount))
snd (/ (* src (+ 1 k)) (+ 1 (* k (abs src))))]
(replace-out bus snd)))
(defsynth fx-bitcrusher
[in-bus 0]
(let [src (in in-bus)
resolution (/ (Math/pow 2 (dec BITS)) 2)
crushed (floor (/ (+ 0.5 (* src resolution)) resolution))]
(replace-out in-bus crushed)))
(defsynth fx-distortion-tubescreamer
[bus 0 hi-freq 720.484 low-freq 723.431 hi-freq2 1 gain 4 threshold 0.4]
(let [src (in bus)
f1 (* (hpf src hi-freq) gain)
f2 (lpf (clip2 f1 threshold) low-freq)
f3 (hpf f2 hi-freq2)]
(replace-out bus f3)))
(defsynth fx-rlpf
[bus 0 cutoff 20000 res 0.6]
(let [src (in bus)]
(replace-out bus (rlpf src cutoff res))))
(defsynth fx-rhpf
[bus 0 cutoff 2 res 0.6]
(let [src (in bus)]
(replace-out bus (rhpf src cutoff res))))
(def MAX-DELAY 4)
(defsynth fx-feedback
[bus 0 delay-t 0.5 decay 0.5]
(let [input (in bus)
fb-in (local-in 1)
snd (* decay (leak-dc (delay-n fb-in MAX-DELAY (min MAX-DELAY delay-t))))
snd (+ input snd)
fb-out (local-out snd)
snd (limiter snd 0.8)]
(replace-out bus snd)))
(defsynth fx-feedback-distortion
[bus 0 delay-t 0.5 noise-rate 0.5 boost 1.1 decay 0.8]
(let [noiz (mul-add (lf-noise0:kr noise-rate) 2 2.05)
input (in bus)
fb-in (local-in 1)
snd (* boost (delay-n fb-in MAX-DELAY noiz))
snd (+ input (leak-dc snd))
snd (clip:ar (distort snd) 0 0.9)
fb-out (local-out (* decay snd))]
(replace-out bus snd)))
|
10cc8884e6d6578c607e111003c27058bf1a8a7b4a6a0fe38da10d60b96543af | ranjitjhala/haddock-annot | Builtin.hs | -----------------------------------------------------------------------------
-- |
-- Module      :  Distribution.Simple.Program.Builtin
-- Copyright   :  2006, 2007-2009
--
-- Maintainer :
-- Portability : portable
--
-- The module defines all the known built-in 'Program's.
--
-- Where possible we try to find their version numbers.
--
module Distribution.Simple.Program.Builtin (
    -- * The collection of unconfigured and configured programs
builtinPrograms,
    -- * Programs that Cabal knows about
ghcProgram,
ghcPkgProgram,
lhcProgram,
lhcPkgProgram,
nhcProgram,
hmakeProgram,
jhcProgram,
hugsProgram,
ffihugsProgram,
uhcProgram,
gccProgram,
ranlibProgram,
arProgram,
stripProgram,
happyProgram,
alexProgram,
hsc2hsProgram,
c2hsProgram,
cpphsProgram,
hscolourProgram,
haddockProgram,
greencardProgram,
ldProgram,
tarProgram,
cppProgram,
pkgConfigProgram,
) where
import Distribution.Simple.Program.Types
( Program(..), simpleProgram )
import Distribution.Simple.Utils
( findProgramLocation, findProgramVersion )
-- ------------------------------------------------------------
-- * Known programs
-- ------------------------------------------------------------
-- | The default list of programs.
-- These programs are typically used internally to Cabal.
builtinPrograms :: [Program]
builtinPrograms =
[
-- compilers and related progs
ghcProgram
, ghcPkgProgram
, hugsProgram
, ffihugsProgram
, nhcProgram
, hmakeProgram
, jhcProgram
, lhcProgram
, lhcPkgProgram
, uhcProgram
-- preprocessors
, hscolourProgram
, haddockProgram
, happyProgram
, alexProgram
, hsc2hsProgram
, c2hsProgram
, cpphsProgram
, greencardProgram
-- platform toolchain
, gccProgram
, ranlibProgram
, arProgram
, stripProgram
, ldProgram
, tarProgram
-- configuration tools
, pkgConfigProgram
]
ghcProgram :: Program
ghcProgram = (simpleProgram "ghc") {
programFindVersion = findProgramVersion "--numeric-version" id
}
ghcPkgProgram :: Program
ghcPkgProgram = (simpleProgram "ghc-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "ghc-pkg --version" gives a string like
" GHC package manager version 6.4.1 "
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
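-- Worked example (comment added for illustration) of the parse above:
--   words "GHC package manager version 6.4.1"
--     == ["GHC","package","manager","version","6.4.1"]
-- so the pattern (_:_:_:_:ver:_) binds ver to "6.4.1"; any output with fewer
-- than five words falls through to "".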
lhcProgram :: Program
lhcProgram = (simpleProgram "lhc") {
programFindVersion = findProgramVersion "--numeric-version" id
}
lhcPkgProgram :: Program
lhcPkgProgram = (simpleProgram "lhc-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "lhc-pkg --version" gives a string like
" LHC package manager version 0.7 "
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
nhcProgram :: Program
nhcProgram = (simpleProgram "nhc98") {
programFindVersion = findProgramVersion "--version" $ \str ->
Invoking " nhc98 --version " gives a string like
" /usr / local / bin / nhc98 : v1.20 ( 2007 - 11 - 22 ) "
case words str of
(_:('v':ver):_) -> ver
_ -> ""
}
hmakeProgram :: Program
hmakeProgram = (simpleProgram "hmake") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "hmake --version" gives a string line
" /usr / local / bin / : 3.13 ( 2006 - 11 - 01 ) "
case words str of
(_:ver:_) -> ver
_ -> ""
}
jhcProgram :: Program
jhcProgram = (simpleProgram "jhc") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- invoking "jhc --version" gives a string like
-- "jhc 0.3.20080208 (wubgipkamcep-2)
      --  compiled by ghc-6.8 on a x86_64 running linux"
case words str of
(_:ver:_) -> ver
_ -> ""
}
uhcProgram :: Program
uhcProgram = (simpleProgram "uhc") {
programFindVersion = findProgramVersion "--version-dotted" id
}
-- AArgh! Finding the version of hugs or ffihugs is almost impossible.
hugsProgram :: Program
hugsProgram = simpleProgram "hugs"
ffihugsProgram :: Program
ffihugsProgram = simpleProgram "ffihugs"
happyProgram :: Program
happyProgram = (simpleProgram "happy") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "happy --version" gives a string like
-- "Happy Version 1.16 Copyright (c) ...."
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
alexProgram :: Program
alexProgram = (simpleProgram "alex") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "alex --version" gives a string like
" Alex version 2.1.0 , ( c ) 2003 and "
case words str of
(_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver
_ -> ""
}
gccProgram :: Program
gccProgram = (simpleProgram "gcc") {
programFindVersion = findProgramVersion "-dumpversion" id
}
ranlibProgram :: Program
ranlibProgram = simpleProgram "ranlib"
arProgram :: Program
arProgram = simpleProgram "ar"
stripProgram :: Program
stripProgram = simpleProgram "strip"
hsc2hsProgram :: Program
hsc2hsProgram = (simpleProgram "hsc2hs") {
programFindVersion =
findProgramVersion "--version" $ \str ->
-- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66"
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
c2hsProgram :: Program
c2hsProgram = (simpleProgram "c2hs") {
programFindVersion = findProgramVersion "--numeric-version" id
}
cpphsProgram :: Program
cpphsProgram = (simpleProgram "cpphs") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "cpphs --version" gives a string like "cpphs 1.3"
case words str of
(_:ver:_) -> ver
_ -> ""
}
hscolourProgram :: Program
hscolourProgram = (simpleProgram "hscolour") {
programFindLocation = \v -> findProgramLocation v "HsColour",
programFindVersion = findProgramVersion "-version" $ \str ->
-- Invoking "HsColour -version" gives a string like "HsColour 1.7"
case words str of
(_:ver:_) -> ver
_ -> ""
}
haddockProgram :: Program
haddockProgram = (simpleProgram "haddock") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "haddock --version" gives a string like
" version 0.8 , ( c ) 2006 "
case words str of
(_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver
_ -> ""
}
greencardProgram :: Program
greencardProgram = simpleProgram "greencard"
ldProgram :: Program
ldProgram = simpleProgram "ld"
tarProgram :: Program
tarProgram = simpleProgram "tar"
cppProgram :: Program
cppProgram = simpleProgram "cpp"
pkgConfigProgram :: Program
pkgConfigProgram = (simpleProgram "pkg-config") {
programFindVersion = findProgramVersion "--version" id
}
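-- Illustrative sketch (added; not part of the original module): a further tool
-- would be registered with the same simpleProgram / findProgramVersion
-- pattern, with the parser matched to that tool's "--version" output.
-- The name "mytool" and its output shape "mytool 1.2.3" are hypothetical.
mytoolProgram :: Program
mytoolProgram = (simpleProgram "mytool") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      case words str of
        (_:ver:_) -> ver
        _         -> ""
  }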
| null | https://raw.githubusercontent.com/ranjitjhala/haddock-annot/ffaa182b17c3047887ff43dbe358c246011903f6/Cabal-1.10.1.1/Distribution/Simple/Program/Builtin.hs | haskell | ---------------------------------------------------------------------------
|
Maintainer :
Portability : portable
Where possible we try to find their version numbers.
* The collection of unconfigured and configured progams
------------------------------------------------------------
* Known programs
------------------------------------------------------------
| The default list of programs.
compilers and related progs
preprocessors
platform toolchain
configuration tools
Invoking "ghc-pkg --version" gives a string like
Invoking "lhc-pkg --version" gives a string like
Invoking "hmake --version" gives a string line
invoking "jhc --version" gives a string like
"jhc 0.3.20080208 (wubgipkamcep-2)
Invoking "happy --version" gives a string like
"Happy Version 1.16 Copyright (c) ...."
Invoking "alex --version" gives a string like
Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66"
Invoking "cpphs --version" gives a string like "cpphs 1.3"
Invoking "HsColour -version" gives a string like "HsColour 1.7"
Invoking "haddock --version" gives a string like | Module : Distribution . Simple . Program . Builtin
Copyright : 2006 , 2007 - 2009
The module defines all the known built - in ' Program 's .
module Distribution.Simple.Program.Builtin (
builtinPrograms,
* Programs that knows about
ghcProgram,
ghcPkgProgram,
lhcProgram,
lhcPkgProgram,
nhcProgram,
hmakeProgram,
jhcProgram,
hugsProgram,
ffihugsProgram,
uhcProgram,
gccProgram,
ranlibProgram,
arProgram,
stripProgram,
happyProgram,
alexProgram,
hsc2hsProgram,
c2hsProgram,
cpphsProgram,
hscolourProgram,
haddockProgram,
greencardProgram,
ldProgram,
tarProgram,
cppProgram,
pkgConfigProgram,
) where
import Distribution.Simple.Program.Types
( Program(..), simpleProgram )
import Distribution.Simple.Utils
( findProgramLocation, findProgramVersion )
-- These programs are typically used internally to Cabal.
builtinPrograms :: [Program]
builtinPrograms =
[
ghcProgram
, ghcPkgProgram
, hugsProgram
, ffihugsProgram
, nhcProgram
, hmakeProgram
, jhcProgram
, lhcProgram
, lhcPkgProgram
, uhcProgram
, hscolourProgram
, haddockProgram
, happyProgram
, alexProgram
, hsc2hsProgram
, c2hsProgram
, cpphsProgram
, greencardProgram
, gccProgram
, ranlibProgram
, arProgram
, stripProgram
, ldProgram
, tarProgram
, pkgConfigProgram
]
ghcProgram :: Program
ghcProgram = (simpleProgram "ghc") {
programFindVersion = findProgramVersion "--numeric-version" id
}
ghcPkgProgram :: Program
ghcPkgProgram = (simpleProgram "ghc-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
" GHC package manager version 6.4.1 "
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
lhcProgram :: Program
lhcProgram = (simpleProgram "lhc") {
programFindVersion = findProgramVersion "--numeric-version" id
}
lhcPkgProgram :: Program
lhcPkgProgram = (simpleProgram "lhc-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
" LHC package manager version 0.7 "
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
nhcProgram :: Program
nhcProgram = (simpleProgram "nhc98") {
programFindVersion = findProgramVersion "--version" $ \str ->
Invoking " nhc98 --version " gives a string like
" /usr / local / bin / nhc98 : v1.20 ( 2007 - 11 - 22 ) "
case words str of
(_:('v':ver):_) -> ver
_ -> ""
}
hmakeProgram :: Program
hmakeProgram = (simpleProgram "hmake") {
programFindVersion = findProgramVersion "--version" $ \str ->
" /usr / local / bin / : 3.13 ( 2006 - 11 - 01 ) "
case words str of
(_:ver:_) -> ver
_ -> ""
}
jhcProgram :: Program
jhcProgram = (simpleProgram "jhc") {
programFindVersion = findProgramVersion "--version" $ \str ->
      --  compiled by ghc-6.8 on a x86_64 running linux"
case words str of
(_:ver:_) -> ver
_ -> ""
}
uhcProgram :: Program
uhcProgram = (simpleProgram "uhc") {
programFindVersion = findProgramVersion "--version-dotted" id
}
-- AArgh! Finding the version of hugs or ffihugs is almost impossible.
hugsProgram :: Program
hugsProgram = simpleProgram "hugs"
ffihugsProgram :: Program
ffihugsProgram = simpleProgram "ffihugs"
happyProgram :: Program
happyProgram = (simpleProgram "happy") {
programFindVersion = findProgramVersion "--version" $ \str ->
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
alexProgram :: Program
alexProgram = (simpleProgram "alex") {
programFindVersion = findProgramVersion "--version" $ \str ->
" Alex version 2.1.0 , ( c ) 2003 and "
case words str of
(_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver
_ -> ""
}
gccProgram :: Program
gccProgram = (simpleProgram "gcc") {
programFindVersion = findProgramVersion "-dumpversion" id
}
ranlibProgram :: Program
ranlibProgram = simpleProgram "ranlib"
arProgram :: Program
arProgram = simpleProgram "ar"
stripProgram :: Program
stripProgram = simpleProgram "strip"
hsc2hsProgram :: Program
hsc2hsProgram = (simpleProgram "hsc2hs") {
programFindVersion =
findProgramVersion "--version" $ \str ->
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
c2hsProgram :: Program
c2hsProgram = (simpleProgram "c2hs") {
programFindVersion = findProgramVersion "--numeric-version" id
}
cpphsProgram :: Program
cpphsProgram = (simpleProgram "cpphs") {
programFindVersion = findProgramVersion "--version" $ \str ->
case words str of
(_:ver:_) -> ver
_ -> ""
}
hscolourProgram :: Program
hscolourProgram = (simpleProgram "hscolour") {
programFindLocation = \v -> findProgramLocation v "HsColour",
programFindVersion = findProgramVersion "-version" $ \str ->
case words str of
(_:ver:_) -> ver
_ -> ""
}
haddockProgram :: Program
haddockProgram = (simpleProgram "haddock") {
programFindVersion = findProgramVersion "--version" $ \str ->
" version 0.8 , ( c ) 2006 "
case words str of
(_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver
_ -> ""
}
greencardProgram :: Program
greencardProgram = simpleProgram "greencard"
ldProgram :: Program
ldProgram = simpleProgram "ld"
tarProgram :: Program
tarProgram = simpleProgram "tar"
cppProgram :: Program
cppProgram = simpleProgram "cpp"
pkgConfigProgram :: Program
pkgConfigProgram = (simpleProgram "pkg-config") {
programFindVersion = findProgramVersion "--version" id
}
|
53f34e68a65585568378f1333710b5437ecc4ae474faa5247c9de47fa5de9364 | simmone/racket-simple-xlsx | plot-vis-only-test.rkt | #lang racket
(require simple-xml)
(require rackunit/text-ui rackunit)
(require "../../../../xlsx/xlsx.rkt")
(require "../../../../sheet/sheet.rkt")
(require "../../../../lib/lib.rkt")
(require"../../../../xl/charts/lib.rkt")
(require racket/runtime-path)
(define-runtime-path plot_vis_only_file "plot_vis_only.xml")
(define test-plot-vis-only
(test-suite
"test-plot-vis-only"
(test-case
"test-plot-vis-only"
(with-xlsx
(lambda ()
(add-data-sheet "DataSheet"
'(("month1" "201601" "201602" "201603" "real") (201601 100 300 200 6.9)))
(add-data-sheet "Sheet2" '((1)))
(add-data-sheet "Sheet3" '((1)))
(add-chart-sheet "Chart1" 'LINE "Chart1" '())
(add-chart-sheet "Chart2" 'LINE "Chart2" '())
(add-chart-sheet "Chart3" 'LINE "Chart3" '())
(with-sheet
(lambda ()
(call-with-input-file plot_vis_only_file
(lambda (expected)
(call-with-input-string
(lists->xml_content (plot-vis-only))
(lambda (actual)
(check-lines? expected actual))))))))))
))
(run-tests test-plot-vis-only)
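;; Note added for illustration: the test assembles a workbook in memory with
;; with-xlsx, renders (plot-vis-only) through lists->xml_content, and compares
;; the result line by line (check-lines?) against the stored fixture
;; plot_vis_only.xml, so a change in the expected markup means regenerating
;; that fixture file.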
| null | https://raw.githubusercontent.com/simmone/racket-simple-xlsx/e0ac3190b6700b0ee1dd80ed91a8f4318533d012/simple-xlsx/tests/xl/charts/lib/plot-vis-only-test.rkt | racket | #lang racket
(require simple-xml)
(require rackunit/text-ui rackunit)
(require "../../../../xlsx/xlsx.rkt")
(require "../../../../sheet/sheet.rkt")
(require "../../../../lib/lib.rkt")
(require"../../../../xl/charts/lib.rkt")
(require racket/runtime-path)
(define-runtime-path plot_vis_only_file "plot_vis_only.xml")
(define test-plot-vis-only
(test-suite
"test-plot-vis-only"
(test-case
"test-plot-vis-only"
(with-xlsx
(lambda ()
(add-data-sheet "DataSheet"
'(("month1" "201601" "201602" "201603" "real") (201601 100 300 200 6.9)))
(add-data-sheet "Sheet2" '((1)))
(add-data-sheet "Sheet3" '((1)))
(add-chart-sheet "Chart1" 'LINE "Chart1" '())
(add-chart-sheet "Chart2" 'LINE "Chart2" '())
(add-chart-sheet "Chart3" 'LINE "Chart3" '())
(with-sheet
(lambda ()
(call-with-input-file plot_vis_only_file
(lambda (expected)
(call-with-input-string
(lists->xml_content (plot-vis-only))
(lambda (actual)
(check-lines? expected actual))))))))))
))
(run-tests test-plot-vis-only)
|
|
098b42aa9923f933684bcecd6242ae124cb0dbaa0debda54b8b3f2a00570ba2c | coccinelle/herodotos | diff_type.mli | type difftype =
GNUDiff of string
| Gumtree of string
| Hybrid of string
| null | https://raw.githubusercontent.com/coccinelle/herodotos/5da230a18962ca445ed2368bc21abe0a8402e00f/herodotos/diff/diff_type.mli | ocaml | type difftype =
GNUDiff of string
| Gumtree of string
| Hybrid of string
|
|
d3d1aea6417da93bedc5a02a0bff09fdfc50b4c832793c008487aa7f05239d9e | gwathlobal/CotD | init-terrain-types.lisp | (in-package :cotd)
;;--------------------
;; TERRAIN-TEMPLATE Declarations
;;--------------------
;;--------------------
;; Borders
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-floor+ :name "dirt"
:glyph-idx 95 :glyph-color (sdl:color :r 205 :g 103 :b 63) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-blocks-sound t :trait-blocks-sound-floor t :trait-not-climable t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-floor-snow+ :name "snow"
:glyph-idx 95 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-not-climable t :trait-blocks-sound t :trait-blocks-sound-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-water+ :name "water"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*blue* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-blocks-sound 10 :trait-blocks-sound-floor 10 :trait-not-climable t :trait-water t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-grass+ :name "grass"
:glyph-idx 95 :glyph-color (sdl:color :r 0 :g 100 :b 0) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-not-climable t :trait-blocks-sound t :trait-blocks-sound-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-air+ :name "air"
:glyph-idx 96 :glyph-color sdl:*cyan* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-not-climable t :trait-blocks-sound t :trait-blocks-sound-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-creep+ :name "creep"
:glyph-idx 95 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-blocks-sound t :trait-blocks-sound-floor t :trait-not-climable t))
;;--------------------
;; Floors
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-stone+ :name "stone floor"
:glyph-idx +glyph-id-solid-floor+ :glyph-color (sdl:color :r 200 :g 200 :b 200) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-glass+ :name "transparent floor"
:glyph-idx +glyph-id-solid-floor+ :glyph-color sdl:*cyan* :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-grass+ :name "grass"
:glyph-idx 95 :glyph-color (sdl:color :r 0 :g 100 :b 0) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 3))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-dirt+ :name "dirt"
:glyph-idx 95 :glyph-color (sdl:color :r 205 :g 103 :b 63) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-dirt-bright+ :name "dirt"
:glyph-idx 95 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-snow+ :name "snow"
:glyph-idx 95 :glyph-color sdl:*white* :back-color sdl:*black*
:on-step #'(lambda (mob x y z)
(when (not (mob-ability-p mob +mob-abil-float+))
(set-terrain-* (level *world*) x y z +terrain-floor-snow-prints+)))
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-snow-prints+ :name "snow"
:glyph-idx 95 :glyph-color (sdl:color :r 80 :g 80 :b 155) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-bridge+ :name "bridge"
:glyph-idx 96 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-pier+ :name "pier"
:glyph-idx 96 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-ash+ :name "ash"
:glyph-idx 95 :glyph-color (sdl:color :r 70 :g 70 :b 70) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-church-catholic+ :name "sign \"The Catholic Church of the One\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-church-orthodox+ :name "sign \"The Orthodox Church of the One\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-library+ :name "sign \"The Library of His Imperial Majesty\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-prison+ :name "sign \"City Prison\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-bank+ :name "sign \"Bank of Morozov and Sons\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep+ :name "creep"
:glyph-idx 95 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-bright+ :name "creep"
:glyph-idx 95 :glyph-color (sdl:color :r 255 :g 0 :b 255) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-dreadtubes+ :name "dreadtubes"
:glyph-idx 129 :glyph-color (sdl:color :r 105 :g 50 :b 255) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(when (< (random 100) 20)
(generate-sound mob (x mob) (y mob) (z mob) 100 #'(lambda (str)
(format nil "You hear an eerie howl~A. " str)))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Dreadtubes give off an eerie howl under ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(let ((mob))
(check-surroundings x y t #'(lambda (dx dy)
(when (and (>= dx 0)
(>= dy 0)
(< dx (array-dimension (terrain (level *world*)) 0))
(< dy (array-dimension (terrain (level *world*)) 1))
(get-mob-* (level *world*) dx dy z))
(setf mob (get-mob-* (level *world*) dx dy z))
(if (> (random (+ (strength mob) 5)) (strength mob))
(progn
(set-mob-effect mob :effect-type-id +mob-effect-fear+ :actor-id (id mob) :cd 4)
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "~A is feared. " (capitalize-name (prepend-article +article-the+ (visible-name mob))))
:observed-mob mob
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind))))
(progn
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "~A resists fear. " (capitalize-name (prepend-article +article-the+ (visible-name mob))))
:observed-mob mob
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind))))))
)))
))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-spores+ :name "sludgeshrooms"
:glyph-idx 130 :glyph-color (sdl:color :r 155 :g 50 :b 0) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(when (< (random 100) 20)
(generate-sound mob (x mob) (y mob) (z mob) 100 #'(lambda (str)
(format nil "You hear a hissing sound~A. " str)))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Sludgeshrooms release spores under ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(check-surroundings x y t #'(lambda (dx dy)
(when (and (>= dx 0)
(>= dy 0)
(< dx (array-dimension (terrain (level *world*)) 0))
(< dy (array-dimension (terrain (level *world*)) 1))
(and (not (get-terrain-type-trait (get-terrain-* (level *world*) dx dy z) +terrain-trait-blocks-move+))
(not (get-terrain-type-trait (get-terrain-* (level *world*) dx dy z) +terrain-trait-blocks-projectiles+))))
(add-feature-to-level-list (level *world*) (make-instance 'feature :feature-type +feature-corrupted-spores+ :x dx :y dy :z z
:counter 2))
)
))))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-irradiated+ :name "glowing creep"
:glyph-idx 95 :glyph-color (sdl:color :r 200 :g 50 :b 100) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Sinister glow irradiates ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(if (mob-effect-p mob +mob-effect-irradiated+)
(progn
(let ((effect (get-effect-by-id (mob-effect-p mob +mob-effect-irradiated+))))
(when (<= (param1 effect) 5)
(incf (param1 effect) (+ 2 (random 2))))))
(progn
(set-mob-effect mob :effect-type-id +mob-effect-irradiated+ :actor-id nil :cd t :param1 (+ 2 (random 3))))))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-slime+ :name "slime"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*yellow* :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Slime envelops ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(set-mob-effect mob :effect-type-id +mob-effect-reduce-resitances+ :actor-id nil :cd 6))))
;;--------------------
;; Walls
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-stone+ :name "stone wall"
:glyph-idx +glyph-id-wall+ :glyph-color sdl:*white* :back-color sdl:*white*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 25 :trait-blocks-sound-floor 20 :trait-blocks-move-floor t
:trait-can-have-rune t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-barricade+ :name "barricade"
:glyph-idx +glyph-id-hash+ :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-earth+ :name "earth"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color (sdl:color :r 185 :g 83 :b 43)
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 40 :trait-blocks-sound-floor 40))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-compressed-bones+ :name "compressed bones"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 189 :g 183 :b 107) :back-color (sdl:color :r 189 :g 183 :b 107)
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 40 :trait-blocks-sound-floor 40))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-raw-flesh+ :name "raw flesh"
:glyph-idx +glyph-id-percent+ :glyph-color sdl:*magenta* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 20 :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-bush+ :name "bush"
:glyph-idx +glyph-id-hash+ :glyph-color sdl:*green* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 3 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-grave+ :name "grave"
:glyph-idx 121 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-gloomtwigs+ :name "gloomtwigs"
:glyph-idx +glyph-id-hash+ :glyph-color (sdl:color :r 255 :g 0 :b 255) :back-color sdl:*black*
:trait-blocks-vision 60 :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-corrupted+ :name "bone wall"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 189 :g 183 :b 107) :back-color (sdl:color :r 189 :g 183 :b 107)
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 25 :trait-blocks-sound-floor 20 :trait-can-have-rune t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-razorthorns+ :name "razorthorns"
:glyph-idx +glyph-id-hash+ :glyph-color (sdl:color :r 100 :g 0 :b 0) :back-color sdl:*black*
:trait-blocks-vision 60 :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(inflict-damage mob :min-dmg 1 :max-dmg 1 :dmg-type +weapon-dmg-acid+
:att-spd nil :weapon-aux () :acc 100 :add-blood t :no-dodge t
:actor nil :no-hit-message t
:specific-hit-string-func #'(lambda (cur-dmg)
(format nil "~A takes ~A damage from razorthorns. " (capitalize-name (name mob)) cur-dmg))
:specific-no-dmg-string-func #'(lambda ()
(format nil "~A takes no damage from razorthorns. " (capitalize-name (name mob)))))
(when (check-dead mob)
(when (eq mob *player*)
(setf (killed-by *player*) "razorthorns"))))))
;;--------------------
;; Trees
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-birch+ :name "young birch tree"
:glyph-idx 52 :glyph-color sdl:*green* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-birch-snow+ :name "snow-covered birch tree"
:glyph-idx 52 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-branches+ :name "tree branch"
:glyph-idx 3 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-leaves+ :name "tree leaves"
:glyph-idx 3 :glyph-color sdl:*green* :back-color sdl:*black*
:trait-blocks-vision 60))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-leaves-snow+ :name "snow-covered tree leaves"
:glyph-idx 3 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-vision 60))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-birch-trunk+ :name "mature birch"
:glyph-idx 16 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 10 :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-nw+ :name "mature oak"
:glyph-idx 104 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-ne+ :name "mature oak"
:glyph-idx 105 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-se+ :name "mature oak"
:glyph-idx 106 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-sw+ :name "mature oak"
:glyph-idx 107 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-twintube+ :name "young twintube"
:glyph-idx 57 :glyph-color (sdl:color :r 255 :g 0 :b 255) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20 :trait-blocks-move-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-twintube-trunk+ :name "mature twintube"
:glyph-idx 16 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound 10 :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-twintube-branches+ :name "twintube branch"
:glyph-idx 3 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t))
;;--------------------
;; Furniture
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-chair+ :name "chair"
:glyph-idx 100 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-table+ :name "table"
:glyph-idx 101 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 10 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-bed+ :name "bed"
:glyph-idx 102 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 12 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-cabinet+ :name "cabinet"
:glyph-idx 103 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 8))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-crate+ :name "crate"
:glyph-idx 103 :glyph-color (sdl:color :r 112 :g 128 :b 144) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-bookshelf+ :name "bookshelf"
:glyph-idx 103 :glyph-color (sdl:color :r 165 :g 42 :b 42) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20 :trait-flammable 8))
;;--------------------
;; Doors & Windows
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-window+ :name "window"
:glyph-idx 13 :glyph-color (sdl:color :r 0 :g 0 :b 200) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t :trait-blocks-vision 30 :trait-blocks-vision-floor t
:trait-blocks-sound 20 :trait-blocks-sound-floor 20 :trait-openable-window t
:on-use #'(lambda (mob x y z)
                                 ;; TODO: add connections change for size 3
(set-terrain-* (level *world*) x y z +terrain-wall-window-opened+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-walk+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-climb+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-fly+))
)
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-open-close-window+)
(can-invoke-ability mob mob +mob-abil-open-close-window+)
(= (get-terrain-* (level *world*) x y z) +terrain-wall-window+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-open-close-window+)
t)
nil))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-window-opened+ :name "opened window"
:glyph-idx 15 :glyph-color (sdl:color :r 0 :g 0 :b 200) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10 :trait-openable-window t
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-wall-window+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
+connect-room-none+)
)))
(set-terrain-type (make-instance 'terrain-type :id +terrain-door-open+ :name "open door"
:glyph-idx 7 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10 :trait-openable-door t
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-door-closed+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
+connect-room-none+)
(let ((func #'(lambda (&key map-size move-mode)
(let ((room-id-list nil))
(check-surroundings x y nil #'(lambda (dx dy)
(when (/= (get-level-connect-map-value (level *world*) dx dy z map-size move-mode) +connect-room-none+)
(pushnew (get-level-connect-map-value (level *world*) dx dy z map-size move-mode)
room-id-list))))
(loop for room-id-start in room-id-list do
(loop for room-id-end in room-id-list do
(when (/= room-id-start room-id-end)
(set-aux-map-connection (level *world*) room-id-start room-id-end map-size move-mode :delta-potential 0 :delta-actual -1))))))))
(funcall func :map-size 1 :move-mode +connect-map-move-walk+)
(funcall func :map-size 1 :move-mode +connect-map-move-climb+)
(funcall func :map-size 1 :move-mode +connect-map-move-fly+)
)
)))
(set-terrain-type (make-instance 'terrain-type :id +terrain-door-closed+ :name "closed door"
:glyph-idx 11 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-vision t :trait-blocks-projectiles-floor t :trait-blocks-projectiles t
:trait-blocks-sound 15 :trait-blocks-sound-floor 20 :trait-openable-door t
:on-use #'(lambda (mob x y z)
                                 ;; TODO: add connections change for size 3
(set-terrain-* (level *world*) x y z +terrain-door-open+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-walk+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-climb+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-fly+))
(let ((func #'(lambda (&key map-size move-mode)
(let ((room-id-list nil))
(check-surroundings x y nil #'(lambda (dx dy)
(when (/= (get-level-connect-map-value (level *world*) dx dy z map-size move-mode) +connect-room-none+)
(pushnew (get-level-connect-map-value (level *world*) dx dy z map-size move-mode)
room-id-list))))
(loop for room-id-start in room-id-list do
(loop for room-id-end in room-id-list do
(when (/= room-id-start room-id-end)
(set-aux-map-connection (level *world*) room-id-start room-id-end map-size move-mode :delta-potential 0 :delta-actual 1))))))))
(funcall func :map-size 1 :move-mode +connect-map-move-walk+)
(funcall func :map-size 1 :move-mode +connect-map-move-climb+)
(funcall func :map-size 1 :move-mode +connect-map-move-fly+)
)
)
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-open-close-door+)
(can-invoke-ability mob mob +mob-abil-open-close-door+)
(= (get-terrain-* (level *world*) x y z) +terrain-door-closed+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-open-close-door+)
t)
nil))))
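;; Note added for illustration: on-bump-terrain callbacks follow the protocol
;; used above - they receive (mob x y z), return t when the bump was consumed
;; (here by invoking the door-opening ability), and nil to let the normal
;; movement handling proceed.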
;;--------------------
;; Water & Ice
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-water-liquid+ :name "water"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*blue* :back-color sdl:*black*
:trait-not-climable t :trait-blocks-sound-floor 10 :trait-blocks-sound 10 :trait-water t :trait-move-cost-factor *water-move-factor*
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(set-mob-effect mob :effect-type-id +mob-effect-wet+ :actor-id (id mob) :cd 4))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-water-ice+ :name "ice"
:glyph-idx +glyph-id-tilda+ :glyph-color (sdl:color :r 0 :g 150 :b 255) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-water-liquid-nofreeze+ :name "water"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*blue* :back-color sdl:*black*
:trait-blocks-sound-floor 10 :trait-blocks-sound 10 :trait-water t :trait-move-cost-factor *water-move-factor*
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(set-mob-effect mob :effect-type-id +mob-effect-wet+ :actor-id (id mob) :cd 4))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-ice+ :name "ice"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 0 :g 150 :b 255) :back-color (sdl:color :r 0 :g 150 :b 255)
:trait-blocks-move t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 25 :trait-blocks-sound-floor 20))
;;--------------------
;; Air
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-air+ :name "air"
:glyph-idx 96 :glyph-color sdl:*cyan* :back-color sdl:*black* ))
;;--------------------
;; Slopes
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-slope-stone-up+ :name "slope up"
:glyph-idx 118 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-slope-up t :trait-blocks-sound-floor 10))
(set-terrain-type (make-instance 'terrain-type :id +terrain-slope-stone-down+ :name "slope down"
:glyph-idx 119 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-slope-down t))
;;--------------------
;; Light sources
;;--------------------
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-lantern+ :name "lantern"
:glyph-idx 92 :glyph-color sdl:*yellow* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-light-source 6 :trait-can-switch-light t :trait-blocks-sound 20 :trait-blocks-sound-floor 20
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-wall-lantern-off+)
(loop for (nx ny nz light-radius) in (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))
for i from 0 below (length (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10)))
when (and (= x nx) (= y ny) (= z nz))
do
(setf (fourth (nth i (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))))
(get-terrain-type-trait +terrain-wall-lantern-off+ +terrain-trait-light-source+))
(loop-finish))
)
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-toggle-light+)
(can-invoke-ability mob mob +mob-abil-toggle-light+)
(= (get-terrain-* (level *world*) x y z) +terrain-wall-lantern+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-toggle-light+)
t)
nil))))
;; light sources that are off, but can be toggled on - should have the +terrain-trait-light-source+ set to 0, as opposed to non-light-sources, where it is set to nil
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-lantern-off+ :name "lantern (off)"
:glyph-idx 92 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-light-source 0 :trait-can-switch-light t :trait-blocks-sound 20 :trait-blocks-sound-floor 20
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-wall-lantern+)
(loop for (nx ny nz light-radius) in (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))
for i from 0 below (length (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10)))
when (and (= x nx) (= y ny) (= z nz))
do
(setf (fourth (nth i (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))))
(get-terrain-type-trait +terrain-wall-lantern+ +terrain-trait-light-source+))
(loop-finish)))
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-toggle-light+)
(can-invoke-ability mob mob +mob-abil-toggle-light+)
(= (get-terrain-* (level *world*) x y z) +terrain-wall-lantern-off+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-toggle-light+)
t)
nil))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-glowshroom+ :name "glowshroom"
:glyph-idx 130 :glyph-color sdl:*yellow* :back-color sdl:*black*
:trait-blocks-move nil :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-light-source 4 :trait-can-switch-light nil :trait-blocks-sound-floor 20))
| null | https://raw.githubusercontent.com/gwathlobal/CotD/d01ef486cc1d3b21d2ad670ebdb443e957290aa2/src/init-terrain-types.lisp | lisp | --------------------
--------------------
--------------------
--------------------
--------------------
Floors
--------------------
--------------------
Walls
--------------------
--------------------
Trees
--------------------
--------------------
Furniture
--------------------
--------------------
--------------------
--------------------
Water & Ice
--------------------
--------------------
--------------------
--------------------
--------------------
--------------------
Light sources
--------------------
| (in-package :cotd)
;; TERRAIN-TEMPLATE Declarations
;; Borders
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-floor+ :name "dirt"
:glyph-idx 95 :glyph-color (sdl:color :r 205 :g 103 :b 63) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-blocks-sound t :trait-blocks-sound-floor t :trait-not-climable t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-floor-snow+ :name "snow"
:glyph-idx 95 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-not-climable t :trait-blocks-sound t :trait-blocks-sound-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-water+ :name "water"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*blue* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-blocks-sound 10 :trait-blocks-sound-floor 10 :trait-not-climable t :trait-water t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-grass+ :name "grass"
:glyph-idx 95 :glyph-color (sdl:color :r 0 :g 100 :b 0) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-not-climable t :trait-blocks-sound t :trait-blocks-sound-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-air+ :name "air"
:glyph-idx 96 :glyph-color sdl:*cyan* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-not-climable t :trait-blocks-sound t :trait-blocks-sound-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-border-creep+ :name "creep"
:glyph-idx 95 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision t :trait-blocks-vision-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t
:trait-blocks-sound t :trait-blocks-sound-floor t :trait-not-climable t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-stone+ :name "stone floor"
:glyph-idx +glyph-id-solid-floor+ :glyph-color (sdl:color :r 200 :g 200 :b 200) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-glass+ :name "transparent floor"
:glyph-idx +glyph-id-solid-floor+ :glyph-color sdl:*cyan* :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-grass+ :name "grass"
:glyph-idx 95 :glyph-color (sdl:color :r 0 :g 100 :b 0) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 3))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-dirt+ :name "dirt"
:glyph-idx 95 :glyph-color (sdl:color :r 205 :g 103 :b 63) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-dirt-bright+ :name "dirt"
:glyph-idx 95 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-snow+ :name "snow"
:glyph-idx 95 :glyph-color sdl:*white* :back-color sdl:*black*
:on-step #'(lambda (mob x y z)
(when (not (mob-ability-p mob +mob-abil-float+))
(set-terrain-* (level *world*) x y z +terrain-floor-snow-prints+)))
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-snow-prints+ :name "snow"
:glyph-idx 95 :glyph-color (sdl:color :r 80 :g 80 :b 155) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-bridge+ :name "bridge"
:glyph-idx 96 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-pier+ :name "pier"
:glyph-idx 96 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-ash+ :name "ash"
:glyph-idx 95 :glyph-color (sdl:color :r 70 :g 70 :b 70) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-church-catholic+ :name "sign \"The Catholic Church of the One\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-church-orthodox+ :name "sign \"The Orthodox Church of the One\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-library+ :name "sign \"The Library of His Imperial Majesty\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-prison+ :name "sign \"City Prison\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-sign-bank+ :name "sign \"Bank of Morozov and Sons\""
:glyph-idx 122 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep+ :name "creep"
:glyph-idx 95 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-bright+ :name "creep"
:glyph-idx 95 :glyph-color (sdl:color :r 255 :g 0 :b 255) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
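  ;; The corrupted-creep tiles in this section add :on-step hazards. Dreadtubes have a 20%
  ;; chance per step to howl: each adjacent mob then rolls (random (+ strength 5)) against
  ;; its own strength, so it is feared in 4 out of (strength + 5) cases and stronger mobs
  ;; resist more often. Sludgeshrooms use the same 20% roll but instead add
  ;; +feature-corrupted-spores+ features (:counter 2) to every adjacent tile that does not
  ;; block movement or projectiles.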
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-dreadtubes+ :name "dreadtubes"
:glyph-idx 129 :glyph-color (sdl:color :r 105 :g 50 :b 255) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(when (< (random 100) 20)
(generate-sound mob (x mob) (y mob) (z mob) 100 #'(lambda (str)
(format nil "You hear an eerie howl~A. " str)))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Dreadtubes give off an eerie howl under ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(let ((mob))
(check-surroundings x y t #'(lambda (dx dy)
(when (and (>= dx 0)
(>= dy 0)
(< dx (array-dimension (terrain (level *world*)) 0))
(< dy (array-dimension (terrain (level *world*)) 1))
(get-mob-* (level *world*) dx dy z))
(setf mob (get-mob-* (level *world*) dx dy z))
(if (> (random (+ (strength mob) 5)) (strength mob))
(progn
(set-mob-effect mob :effect-type-id +mob-effect-fear+ :actor-id (id mob) :cd 4)
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "~A is feared. " (capitalize-name (prepend-article +article-the+ (visible-name mob))))
:observed-mob mob
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind))))
(progn
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "~A resists fear. " (capitalize-name (prepend-article +article-the+ (visible-name mob))))
:observed-mob mob
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind))))))
)))
))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-spores+ :name "sludgeshrooms"
:glyph-idx 130 :glyph-color (sdl:color :r 155 :g 50 :b 0) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(when (< (random 100) 20)
(generate-sound mob (x mob) (y mob) (z mob) 100 #'(lambda (str)
(format nil "You hear a hissing sound~A. " str)))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Sludgeshrooms release spores under ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(check-surroundings x y t #'(lambda (dx dy)
(when (and (>= dx 0)
(>= dy 0)
(< dx (array-dimension (terrain (level *world*)) 0))
(< dy (array-dimension (terrain (level *world*)) 1))
(and (not (get-terrain-type-trait (get-terrain-* (level *world*) dx dy z) +terrain-trait-blocks-move+))
(not (get-terrain-type-trait (get-terrain-* (level *world*) dx dy z) +terrain-trait-blocks-projectiles+))))
(add-feature-to-level-list (level *world*) (make-instance 'feature :feature-type +feature-corrupted-spores+ :x dx :y dy :z z
:counter 2))
)
))))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-irradiated+ :name "glowing creep"
:glyph-idx 95 :glyph-color (sdl:color :r 200 :g 50 :b 100) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Sinister glow irradiates ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(if (mob-effect-p mob +mob-effect-irradiated+)
(progn
(let ((effect (get-effect-by-id (mob-effect-p mob +mob-effect-irradiated+))))
(when (<= (param1 effect) 5)
(incf (param1 effect) (+ 2 (random 2))))))
(progn
(set-mob-effect mob :effect-type-id +mob-effect-irradiated+ :actor-id nil :cd t :param1 (+ 2 (random 3))))))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-slime+ :name "slime"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*yellow* :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(print-visible-message (x mob) (y mob) (z mob) (level *world*)
(format nil "Slime envelops ~A. " (prepend-article +article-the+ (visible-name mob)))
:color sdl:*white*
:tags (list (when (if-cur-mob-seen-through-shared-vision *player*)
:singlemind)))
(set-mob-effect mob :effect-type-id +mob-effect-reduce-resitances+ :actor-id nil :cd 6))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-stone+ :name "stone wall"
:glyph-idx +glyph-id-wall+ :glyph-color sdl:*white* :back-color sdl:*white*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 25 :trait-blocks-sound-floor 20 :trait-blocks-move-floor t
:trait-can-have-rune t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-barricade+ :name "barricade"
:glyph-idx +glyph-id-hash+ :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-earth+ :name "earth"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color (sdl:color :r 185 :g 83 :b 43)
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 40 :trait-blocks-sound-floor 40))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-compressed-bones+ :name "compressed bones"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 189 :g 183 :b 107) :back-color (sdl:color :r 189 :g 183 :b 107)
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 40 :trait-blocks-sound-floor 40))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-raw-flesh+ :name "raw flesh"
:glyph-idx +glyph-id-percent+ :glyph-color sdl:*magenta* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 20 :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-bush+ :name "bush"
:glyph-idx +glyph-id-hash+ :glyph-color sdl:*green* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 3 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-grave+ :name "grave"
:glyph-idx 121 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-gloomtwigs+ :name "gloomtwigs"
:glyph-idx +glyph-id-hash+ :glyph-color (sdl:color :r 255 :g 0 :b 255) :back-color sdl:*black*
:trait-blocks-vision 60 :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-corrupted+ :name "bone wall"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 189 :g 183 :b 107) :back-color (sdl:color :r 189 :g 183 :b 107)
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 25 :trait-blocks-sound-floor 20 :trait-can-have-rune t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-razorthorns+ :name "razorthorns"
:glyph-idx +glyph-id-hash+ :glyph-color (sdl:color :r 100 :g 0 :b 0) :back-color sdl:*black*
:trait-blocks-vision 60 :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(inflict-damage mob :min-dmg 1 :max-dmg 1 :dmg-type +weapon-dmg-acid+
:att-spd nil :weapon-aux () :acc 100 :add-blood t :no-dodge t
:actor nil :no-hit-message t
:specific-hit-string-func #'(lambda (cur-dmg)
(format nil "~A takes ~A damage from razorthorns. " (capitalize-name (name mob)) cur-dmg))
:specific-no-dmg-string-func #'(lambda ()
(format nil "~A takes no damage from razorthorns. " (capitalize-name (name mob)))))
(when (check-dead mob)
(when (eq mob *player*)
(setf (killed-by *player*) "razorthorns"))))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-birch+ :name "young birch tree"
:glyph-idx 52 :glyph-color sdl:*green* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-birch-snow+ :name "snow-covered birch tree"
:glyph-idx 52 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-branches+ :name "tree branch"
:glyph-idx 3 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-leaves+ :name "tree leaves"
:glyph-idx 3 :glyph-color sdl:*green* :back-color sdl:*black*
:trait-blocks-vision 60))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-leaves-snow+ :name "snow-covered tree leaves"
:glyph-idx 3 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-vision 60))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-birch-trunk+ :name "mature birch"
:glyph-idx 16 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 10 :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-nw+ :name "mature oak"
:glyph-idx 104 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-ne+ :name "mature oak"
:glyph-idx 105 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-se+ :name "mature oak"
:glyph-idx 106 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-oak-trunk-sw+ :name "mature oak"
:glyph-idx 107 :glyph-color (sdl:color :r 185 :g 83 :b 43) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 15 :trait-blocks-sound-floor 15))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-twintube+ :name "young twintube"
:glyph-idx 57 :glyph-color (sdl:color :r 255 :g 0 :b 255) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20 :trait-blocks-move-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-tree-twintube-trunk+ :name "mature twintube"
:glyph-idx 16 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound 10 :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-twintube-branches+ :name "twintube branch"
:glyph-idx 3 :glyph-color (sdl:color :r 100 :g 0 :b 100) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-chair+ :name "chair"
:glyph-idx 100 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 6))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-table+ :name "table"
:glyph-idx 101 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 10 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-bed+ :name "bed"
:glyph-idx 102 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 12 :trait-can-jump-over t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-cabinet+ :name "cabinet"
:glyph-idx 103 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20 :trait-flammable 8))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-crate+ :name "crate"
:glyph-idx 103 :glyph-color (sdl:color :r 112 :g 128 :b 144) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-bookshelf+ :name "bookshelf"
:glyph-idx 103 :glyph-color (sdl:color :r 165 :g 42 :b 42) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-vision t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound-floor 20 :trait-flammable 8))
  ;; Doors & Windows
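  ;; Doors and windows are paired terrain types (closed/open). Their :on-use handlers swap
  ;; the tile and then update the cached pathfinding data: the size-1 connect-map cell is
  ;; copied from the actor's own cell when opening, or reset to +connect-room-none+ when
  ;; closing, and for doors the room-to-room counters are adjusted via
  ;; set-aux-map-connection. The TODO notes below record that the size-3 connect maps are
  ;; not updated yet.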
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-window+ :name "window"
:glyph-idx 13 :glyph-color (sdl:color :r 0 :g 0 :b 200) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-projectiles t :trait-blocks-projectiles-floor t :trait-blocks-vision 30 :trait-blocks-vision-floor t
:trait-blocks-sound 20 :trait-blocks-sound-floor 20 :trait-openable-window t
:on-use #'(lambda (mob x y z)
  ;; TODO: add connections change for size 3
(set-terrain-* (level *world*) x y z +terrain-wall-window-opened+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-walk+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-climb+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-fly+))
)
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-open-close-window+)
(can-invoke-ability mob mob +mob-abil-open-close-window+)
(= (get-terrain-* (level *world*) x y z) +terrain-wall-window+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-open-close-window+)
t)
nil))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-window-opened+ :name "opened window"
:glyph-idx 15 :glyph-color (sdl:color :r 0 :g 0 :b 200) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10 :trait-openable-window t
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-wall-window+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
+connect-room-none+)
)))
(set-terrain-type (make-instance 'terrain-type :id +terrain-door-open+ :name "open door"
:glyph-idx 7 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 10 :trait-openable-door t
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-door-closed+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
+connect-room-none+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
+connect-room-none+)
(let ((func #'(lambda (&key map-size move-mode)
(let ((room-id-list nil))
(check-surroundings x y nil #'(lambda (dx dy)
(when (/= (get-level-connect-map-value (level *world*) dx dy z map-size move-mode) +connect-room-none+)
(pushnew (get-level-connect-map-value (level *world*) dx dy z map-size move-mode)
room-id-list))))
(loop for room-id-start in room-id-list do
(loop for room-id-end in room-id-list do
(when (/= room-id-start room-id-end)
(set-aux-map-connection (level *world*) room-id-start room-id-end map-size move-mode :delta-potential 0 :delta-actual -1))))))))
(funcall func :map-size 1 :move-mode +connect-map-move-walk+)
(funcall func :map-size 1 :move-mode +connect-map-move-climb+)
(funcall func :map-size 1 :move-mode +connect-map-move-fly+)
)
)))
(set-terrain-type (make-instance 'terrain-type :id +terrain-door-closed+ :name "closed door"
:glyph-idx 11 :glyph-color (sdl:color :r 139 :g 69 :b 19) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-vision t :trait-blocks-projectiles-floor t :trait-blocks-projectiles t
:trait-blocks-sound 15 :trait-blocks-sound-floor 20 :trait-openable-door t
:on-use #'(lambda (mob x y z)
  ;; TODO: add connections change for size 3
(set-terrain-* (level *world*) x y z +terrain-door-open+)
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-walk+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-walk+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-climb+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-climb+))
(set-connect-map-value (aref (connect-map (level *world*)) 1) x y z +connect-map-move-fly+
(get-connect-map-value (aref (connect-map (level *world*)) 1) (x mob) (y mob) (z mob) +connect-map-move-fly+))
(let ((func #'(lambda (&key map-size move-mode)
(let ((room-id-list nil))
(check-surroundings x y nil #'(lambda (dx dy)
(when (/= (get-level-connect-map-value (level *world*) dx dy z map-size move-mode) +connect-room-none+)
(pushnew (get-level-connect-map-value (level *world*) dx dy z map-size move-mode)
room-id-list))))
(loop for room-id-start in room-id-list do
(loop for room-id-end in room-id-list do
(when (/= room-id-start room-id-end)
(set-aux-map-connection (level *world*) room-id-start room-id-end map-size move-mode :delta-potential 0 :delta-actual 1))))))))
(funcall func :map-size 1 :move-mode +connect-map-move-walk+)
(funcall func :map-size 1 :move-mode +connect-map-move-climb+)
(funcall func :map-size 1 :move-mode +connect-map-move-fly+)
)
)
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-open-close-door+)
(can-invoke-ability mob mob +mob-abil-open-close-door+)
(= (get-terrain-* (level *world*) x y z) +terrain-door-closed+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-open-close-door+)
t)
nil))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-water-liquid+ :name "water"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*blue* :back-color sdl:*black*
:trait-not-climable t :trait-blocks-sound-floor 10 :trait-blocks-sound 10 :trait-water t :trait-move-cost-factor *water-move-factor*
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(set-mob-effect mob :effect-type-id +mob-effect-wet+ :actor-id (id mob) :cd 4))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-water-ice+ :name "ice"
:glyph-idx +glyph-id-tilda+ :glyph-color (sdl:color :r 0 :g 150 :b 255) :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-projectiles-floor t :trait-blocks-sound-floor 20))
(set-terrain-type (make-instance 'terrain-type :id +terrain-water-liquid-nofreeze+ :name "water"
:glyph-idx +glyph-id-tilda+ :glyph-color sdl:*blue* :back-color sdl:*black*
:trait-blocks-sound-floor 10 :trait-blocks-sound 10 :trait-water t :trait-move-cost-factor *water-move-factor*
:on-step #'(lambda (mob x y z)
(declare (ignore x y z))
(set-mob-effect mob :effect-type-id +mob-effect-wet+ :actor-id (id mob) :cd 4))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-ice+ :name "ice"
:glyph-idx +glyph-id-wall+ :glyph-color (sdl:color :r 0 :g 150 :b 255) :back-color (sdl:color :r 0 :g 150 :b 255)
:trait-blocks-move t :trait-blocks-projectiles t :trait-blocks-move-floor t :trait-blocks-projectiles-floor t
:trait-blocks-sound 25 :trait-blocks-sound-floor 20))
  ;; Air
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-air+ :name "air"
:glyph-idx 96 :glyph-color sdl:*cyan* :back-color sdl:*black* ))
  ;; Slopes
(set-terrain-type (make-instance 'terrain-type :id +terrain-slope-stone-up+ :name "slope up"
:glyph-idx 118 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-slope-up t :trait-blocks-sound-floor 10))
(set-terrain-type (make-instance 'terrain-type :id +terrain-slope-stone-down+ :name "slope down"
:glyph-idx 119 :glyph-color sdl:*white* :back-color sdl:*black*
:trait-slope-down t))
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-lantern+ :name "lantern"
:glyph-idx 92 :glyph-color sdl:*yellow* :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-light-source 6 :trait-can-switch-light t :trait-blocks-sound 20 :trait-blocks-sound-floor 20
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-wall-lantern-off+)
(loop for (nx ny nz light-radius) in (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))
for i from 0 below (length (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10)))
when (and (= x nx) (= y ny) (= z nz))
do
(setf (fourth (nth i (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))))
(get-terrain-type-trait +terrain-wall-lantern-off+ +terrain-trait-light-source+))
(loop-finish))
)
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-toggle-light+)
(can-invoke-ability mob mob +mob-abil-toggle-light+)
(= (get-terrain-* (level *world*) x y z) +terrain-wall-lantern+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-toggle-light+)
t)
nil))))
  ;; Light sources that are off, but can be toggled on, should have the +terrain-trait-light-source+ trait set to 0, as opposed to non-light-sources, where it is set to nil.
(set-terrain-type (make-instance 'terrain-type :id +terrain-wall-lantern-off+ :name "lantern (off)"
:glyph-idx 92 :glyph-color (sdl:color :r 150 :g 150 :b 150) :back-color sdl:*black*
:trait-blocks-move t :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t
:trait-light-source 0 :trait-can-switch-light t :trait-blocks-sound 20 :trait-blocks-sound-floor 20
:on-use #'(lambda (mob x y z)
(declare (ignore mob))
(set-terrain-* (level *world*) x y z +terrain-wall-lantern+)
(loop for (nx ny nz light-radius) in (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))
for i from 0 below (length (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10)))
when (and (= x nx) (= y ny) (= z nz))
do
(setf (fourth (nth i (aref (light-quadrant-map (level *world*)) (truncate x 10) (truncate y 10))))
(get-terrain-type-trait +terrain-wall-lantern+ +terrain-trait-light-source+))
(loop-finish)))
:on-bump-terrain #'(lambda (mob x y z)
(if (and (mob-ability-p mob +mob-abil-toggle-light+)
(can-invoke-ability mob mob +mob-abil-toggle-light+)
(= (get-terrain-* (level *world*) x y z) +terrain-wall-lantern-off+))
(progn
(mob-invoke-ability mob (list x y z) +mob-abil-toggle-light+)
t)
nil))))
(set-terrain-type (make-instance 'terrain-type :id +terrain-floor-creep-glowshroom+ :name "glowshroom"
:glyph-idx 130 :glyph-color sdl:*yellow* :back-color sdl:*black*
:trait-blocks-move nil :trait-blocks-move-floor t :trait-blocks-vision-floor t :trait-blocks-projectiles-floor t :trait-light-source 4 :trait-can-switch-light nil :trait-blocks-sound-floor 20))
|
ba0604e7e767a986dd2d2479304998ec7909bd7308e12b351cf30eab457ba383 | footprintanalytics/footprint-web | slack.clj | (ns metabase.integrations.slack
(:require [cheshire.core :as json]
[clj-http.client :as http]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[java-time :as t]
[medley.core :as m]
[metabase.email.messages :as messages]
[metabase.models.setting :as setting :refer [defsetting]]
[metabase.util :as u]
[metabase.util.date-2 :as u.date]
[metabase.util.i18n :refer [deferred-tru trs tru]]
[metabase.util.schema :as su]
[schema.core :as s]))
(defsetting slack-token
(deferred-tru
(str "Deprecated Slack API token for connecting the Metabase Slack bot. "
"Please use a new Slack app integration instead."))
:deprecated "0.42.0"
:doc false)
(defsetting slack-app-token
(deferred-tru
(str "Bot user OAuth token for connecting the Metabase Slack app. "
"This should be used for all new Slack integrations starting in Metabase v0.42.0.")))
(defsetting slack-token-valid?
(deferred-tru
(str "Whether the current Slack app token, if set, is valid. "
"Set to 'false' if a Slack API request returns an auth error."))
:type :boolean
:doc false)
(defn process-files-channel-name
"Converts empty strings to `nil`, and removes leading `#` from the channel name if present."
[channel-name]
(when-not (str/blank? channel-name)
(if (str/starts-with? channel-name "#") (subs channel-name 1) channel-name)))
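;; Illustrative only (not part of the original source): expected behaviour of
;; process-files-channel-name, following directly from the definition above.
(comment
  (process-files-channel-name "#metabase_files") ;; => "metabase_files"
  (process-files-channel-name "metabase_files")  ;; => "metabase_files"
  (process-files-channel-name "")                ;; => nil
  (process-files-channel-name nil))              ;; => nil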
(defsetting slack-cached-channels-and-usernames
"A cache shared between instances for storing an instance's slack channels and users."
:visibility :internal
:type :json
:doc false)
(def ^:private zoned-time-epoch (t/zoned-date-time 1970 1 1 0))
(defsetting slack-channels-and-usernames-last-updated
"The updated-at time for the [[slack-cached-channels-and-usernames]] setting."
:visibility :internal
:cache? false
:type :timestamp
:default zoned-time-epoch
:doc false)
(defsetting slack-files-channel
(deferred-tru "The name of the channel to which Metabase files should be initially uploaded")
:default "metabase_files"
:setter (fn [channel-name]
(setting/set-value-of-type! :string :slack-files-channel (process-files-channel-name channel-name))))
(defn slack-configured?
"Is Slack integration configured?"
[]
(boolean (or (seq (slack-app-token)) (seq (slack-token)))))
(def ^:private slack-token-error-codes
"List of error codes that indicate an invalid or revoked Slack token."
  ;; If any of these error codes are received from the Slack API, we send an email to all admins indicating that the
;; Slack integration is broken. In practice, the "account_inactive" error code is the one that is most likely to be
  ;; received. This would happen if access to the Slack workspace is manually revoked via the Slack UI.
#{"invalid_auth", "account_inactive", "token_revoked", "token_expired"})
(def ^:private ^:dynamic *send-token-error-emails?*
"Whether to send an email to all admins when an invalid or revoked token error is received in response to a Slack
API call. Should be set to false when checking if an unsaved token is valid. (Default: `true`)"
true)
(defn- handle-error [body]
(let [invalid-token? (slack-token-error-codes (:error body))
message (if invalid-token?
(trs "Invalid token")
(trs "Slack API error: {0}" (:error body)))
error (if invalid-token?
{:error-code (:error body)
:errors {:slack-token message}}
{:error-code (:error body)
:message message
:response body})]
(when (and invalid-token? *send-token-error-emails?*)
;; Check `slack-token-valid?` before sending emails to avoid sending repeat emails for the same invalid token.
;; We should send an email if `slack-token-valid?` is `true` or `nil` (i.e. a pre-existing bot integration is
;; being used)
(when (slack-token-valid?) (messages/send-slack-token-error-emails!))
(slack-token-valid?! false))
(when invalid-token?
(log/warn (u/pprint-to-str 'red (trs "🔒 Your Slack authorization token is invalid or has been revoked. Please update your integration in Admin Settings -> Slack."))))
(throw (ex-info message error))))
(defn- handle-response [{:keys [status body]}]
(with-open [reader (io/reader body)]
(let [body (json/parse-stream reader true)]
(if (and (= 200 status) (:ok body))
body
(handle-error body)))))
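;; Token resolution order for the request helper below: an explicit :token in the
;; query-params or form-params wins, then the app token, then the legacy bot token.
;; When no token is configured at all, do-slack-request returns nil without calling
;; the Slack API.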
(defn- do-slack-request [request-fn endpoint request]
(let [token (or (get-in request [:query-params :token])
(get-in request [:form-params :token])
(slack-app-token)
(slack-token))]
(when token
(let [url (str "/" (name endpoint))
_ (log/trace "Slack API request: %s %s" (pr-str url) (pr-str request))
request (m/deep-merge
{:headers {:authorization (str "Bearer\n" token)}
:as :stream
                      ;; use a relatively long connection timeout (10 seconds) in cases where we're fetching big
                      ;; amounts of data -- see #11735
:conn-timeout 10000
:socket-timeout 10000}
(m/dissoc-in request [:query-params :token]))]
(try
(handle-response (request-fn url request))
(catch Throwable e
(throw (ex-info (.getMessage e) (merge (ex-data e) {:url url}) e))))))))
(defn- GET
"Make a GET request to the Slack API."
[endpoint & {:as query-params}]
(do-slack-request http/get endpoint {:query-params query-params}))
(defn- POST
"Make a POST request to the Slack API."
[endpoint body]
(do-slack-request http/post endpoint body))
(defn- next-cursor
"Get a cursor for the next page of results in a Slack API response, if one exists."
[response]
(not-empty (get-in response [:response_metadata :next_cursor])))
(def ^:private max-list-results
"Absolute maximum number of results to fetch from Slack API list endpoints. To prevent unbounded pagination of
results. Don't set this too low -- some orgs have many thousands of channels (see #12978)"
10000)
(defn- paged-list-request
"Make a GET request to a Slack API list `endpoint`, returning a sequence of objects returned by the top level
`results-key` in the response. If additional pages of results exist, fetches those lazily, up to a total of
`max-list-results`."
[endpoint response->data params]
  ;; use default limit (page size) of 1000 instead of 100 so we don't end up making a hundred API requests for orgs
;; with a huge number of channels or users.
(let [default-params {:limit 1000}
response (m/mapply GET endpoint (merge default-params params))
data (response->data response)]
(when (seq response)
(take
max-list-results
(concat
data
(when-let [next-cursor (next-cursor response)]
(lazy-seq
(paged-list-request endpoint response->data (assoc params :cursor next-cursor)))))))))
(defn channel-transform
"Transformation from slack's api representation of a channel to our own."
[channel]
{:display-name (str \# (:name channel))
:name (:name channel)
:id (:id channel)
:type "channel"})
(defn conversations-list
"Calls Slack API `conversations.list` and returns list of available 'conversations' (channels and direct messages).
By default only fetches channels, and returns them with their # prefix. Note the call to [[paged-list-request]] will
only fetch the first [[max-list-results]] items."
[& {:as query-parameters}]
(let [params (merge {:exclude_archived true, :types "public_channel"} query-parameters)]
(paged-list-request "conversations.list"
;; response -> channel names
#(->> % :channels (map channel-transform))
params)))
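;; Note: the cached :channels entries carry both :name and :id, and channel-exists?
;; below builds its lookup set with (juxt :name :id), so the check succeeds whether it
;; is given a channel/user name or a raw Slack id.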
(defn channel-exists?
"Returns true if the channel it exists."
[channel-name]
(let [channel-names (into #{} (comp (map (juxt :name :id))
cat)
(:channels (slack-cached-channels-and-usernames)))]
(and channel-name (contains? channel-names channel-name))))
(s/defn valid-token?
"Check whether a Slack token is valid by checking if the `conversations.list` Slack api accepts it."
[token :- su/NonBlankString]
(try
(binding [*send-token-error-emails?* false]
(boolean (take 1 (:channels (GET "conversations.list" :limit 1, :token token)))))
(catch Throwable e
(if (slack-token-error-codes (:error-code (ex-data e)))
false
(throw e)))))
(defn user-transform
"Tranformation from slack api user to our own internal representation."
[member]
{:display-name (str \@ (:name member))
:type "user"
:name (:name member)
:id (:id member)})
(defn users-list
"Calls Slack API `users.list` endpoint and returns the list of available users with their @ prefix. Note the call
to [[paged-list-request]] will only fetch the first [[max-list-results]] items."
[& {:as query-parameters}]
(->> (paged-list-request "users.list"
;; response -> user names
#(->> % :members (map user-transform))
query-parameters)
       ;; remove deleted users and bots. At the time of this writing there's no way to do this in the Slack API
;; itself so we need to do it after the fact.
(remove :deleted)
(remove :is_bot)))
(defonce ^:private refresh-lock (Object.))
(defn- needs-refresh? []
(u.date/older-than?
(slack-channels-and-usernames-last-updated)
(t/minutes 10)))
(defn clear-channel-cache!
"Clear the Slack channels cache, and reset its last-updated timestamp to its default value (the Unix epoch)."
[]
(slack-channels-and-usernames-last-updated! zoned-time-epoch)
(slack-cached-channels-and-usernames! {:channels []}))
(defn refresh-channels-and-usernames!
"Refreshes users and conversations in slack-cache. finds both in parallel, sets
[[slack-cached-channels-and-usernames]], and resets the [[slack-channels-and-usernames-last-updated]] time."
[]
(when (slack-configured?)
(log/info "Refreshing slack channels and usernames.")
(let [users (future (vec (users-list)))
conversations (future (vec (conversations-list)))]
(slack-cached-channels-and-usernames! {:channels (concat @conversations @users)})
(slack-channels-and-usernames-last-updated! (t/zoned-date-time)))))
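;; The -when-needed! variant below uses a double-checked pattern: test the 10-minute
;; staleness window, take the per-instance refresh-lock, then re-test before doing the
;; (relatively slow) refresh, so concurrent callers don't all hit the Slack API at once.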
(defn refresh-channels-and-usernames-when-needed!
"Refreshes users and conversations in slack-cache on a per-instance lock."
[]
(when (needs-refresh?)
(locking refresh-lock
(when (needs-refresh?)
(refresh-channels-and-usernames!)))))
(defn files-channel
"Looks in [[slack-cached-channels-and-usernames]] to check whether a channel exists with the expected name from the
[[slack-files-channel]] setting with an # prefix. If it does, returns the channel details as a map. If it doesn't,
throws an error that advices an admin to create it."
[]
(let [channel-name (slack-files-channel)]
(if (channel-exists? channel-name)
channel-name
(let [message (str (tru "Slack channel named `{0}` is missing!" channel-name)
" "
(tru "Please create or unarchive the channel in order to complete the Slack integration.")
" "
(tru "The channel is used for storing images that are included in dashboard subscriptions."))]
(log/error (u/format-color 'red message))
(throw (ex-info message {:status-code 400}))))))
(def ^:private NonEmptyByteArray
(s/constrained
(Class/forName "[B")
not-empty
"Non-empty byte array"))
(s/defn join-channel!
"Given a channel ID, calls Slack API `conversations.join` endpoint to join the channel as the Metabase Slack app.
This must be done before uploading a file to the channel, if using a Slack app integration."
[channel-id :- su/NonBlankString]
(POST "conversations.join" {:form-params {:channel channel-id}}))
(defn- maybe-lookup-id
"Slack requires the slack app to be in the channel that we post all of our attachments to. Slack changed (around June
2022 #23229) the \"conversations.join\" api to require the internal slack id rather than the common name. This makes
a lot of sense to ensure we continue to operate despite channel renames. Attempt to look up the channel-id in the
list of channels to obtain the internal id. Fallback to using the current channel-id."
[channel-id cached-channels]
(let [name->id (into {} (comp (filter (comp #{"channel"} :type))
(map (juxt :name :id)))
(:channels cached-channels))
channel-id' (get name->id channel-id channel-id)]
channel-id'))
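;; Illustrative only, with a made-up cache value: maybe-lookup-id resolves a channel
;; name to its id and falls back to the argument itself when no match is found.
(comment
  (let [cache {:channels [{:name "general" :id "C024BE91L" :type "channel"}]}]
    [(maybe-lookup-id "general" cache)        ;; => "C024BE91L"
     (maybe-lookup-id "C024BE91L" cache)      ;; => "C024BE91L"
     (maybe-lookup-id "no-such-name" cache)])) ;; last call returns "no-such-name" unchanged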
(s/defn upload-file!
"Calls Slack API `files.upload` endpoint and returns the URL of the uploaded file."
[file :- NonEmptyByteArray, filename :- su/NonBlankString, channel-id :- su/NonBlankString]
{:pre [(slack-configured?)]}
(let [request {:multipart [{:name "file", :content file}
{:name "filename", :content filename}
{:name "channels", :content channel-id}]}
response (try
(POST "files.upload" request)
(catch Throwable e
;; If file upload fails with a "not_in_channel" error, we join the channel and try again.
                       ;; This is expected to happen the first time a Slack subscription is sent.
(if (= "not_in_channel" (:error-code (ex-data e)))
(do (-> channel-id
(maybe-lookup-id (slack-cached-channels-and-usernames))
join-channel!)
(POST "files.upload" request))
(throw e))))]
(u/prog1 (get-in response [:file :url_private])
(log/debug (trs "Uploaded image") <>))))
(s/defn post-chat-message!
"Calls Slack API `chat.postMessage` endpoint and posts a message to a channel. `attachments` should be serialized
JSON."
[channel-id :- su/NonBlankString, text-or-nil :- (s/maybe s/Str) & [attachments]]
;; TODO: it would be nice to have an emoji or icon image to use here
(POST "chat.postMessage"
{:form-params
{:channel channel-id
:username "MetaBot"
:icon_url ""
:text text-or-nil
:attachments (when (seq attachments)
(json/generate-string attachments))}}))
| null | https://raw.githubusercontent.com/footprintanalytics/footprint-web/d3090d943dd9fcea493c236f79e7ef8a36ae17fc/src/metabase/integrations/slack.clj | clojure | Slack integration is broken. In practice, the "account_inactive" error code is the one that is most likely to be
Check `slack-token-valid?` before sending emails to avoid sending repeat emails for the same invalid token.
We should send an email if `slack-token-valid?` is `true` or `nil` (i.e. a pre-existing bot integration is
being used)
with a huge number of channels or users.
response -> channel names
response -> user names
itself so we need to do it after the fact.
If file upload fails with a "not_in_channel" error, we join the channel and try again.
TODO: it would be nice to have an emoji or icon image to use here | (ns metabase.integrations.slack
(:require [cheshire.core :as json]
[clj-http.client :as http]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[java-time :as t]
[medley.core :as m]
[metabase.email.messages :as messages]
[metabase.models.setting :as setting :refer [defsetting]]
[metabase.util :as u]
[metabase.util.date-2 :as u.date]
[metabase.util.i18n :refer [deferred-tru trs tru]]
[metabase.util.schema :as su]
[schema.core :as s]))
(defsetting slack-token
(deferred-tru
(str "Deprecated Slack API token for connecting the Metabase Slack bot. "
"Please use a new Slack app integration instead."))
:deprecated "0.42.0"
:doc false)
(defsetting slack-app-token
(deferred-tru
(str "Bot user OAuth token for connecting the Metabase Slack app. "
"This should be used for all new Slack integrations starting in Metabase v0.42.0.")))
(defsetting slack-token-valid?
(deferred-tru
(str "Whether the current Slack app token, if set, is valid. "
"Set to 'false' if a Slack API request returns an auth error."))
:type :boolean
:doc false)
(defn process-files-channel-name
"Converts empty strings to `nil`, and removes leading `#` from the channel name if present."
[channel-name]
(when-not (str/blank? channel-name)
(if (str/starts-with? channel-name "#") (subs channel-name 1) channel-name)))
(defsetting slack-cached-channels-and-usernames
"A cache shared between instances for storing an instance's slack channels and users."
:visibility :internal
:type :json
:doc false)
(def ^:private zoned-time-epoch (t/zoned-date-time 1970 1 1 0))
(defsetting slack-channels-and-usernames-last-updated
"The updated-at time for the [[slack-cached-channels-and-usernames]] setting."
:visibility :internal
:cache? false
:type :timestamp
:default zoned-time-epoch
:doc false)
(defsetting slack-files-channel
(deferred-tru "The name of the channel to which Metabase files should be initially uploaded")
:default "metabase_files"
:setter (fn [channel-name]
(setting/set-value-of-type! :string :slack-files-channel (process-files-channel-name channel-name))))
(defn slack-configured?
"Is Slack integration configured?"
[]
(boolean (or (seq (slack-app-token)) (seq (slack-token)))))
(def ^:private slack-token-error-codes
"List of error codes that indicate an invalid or revoked Slack token."
  ;; If any of these error codes are received from the Slack API, we send an email to all admins indicating that the
  ;; Slack integration is broken. In practice, the "account_inactive" error code is the one that is most likely to be
  ;; received. This would happen if access to the Slack workspace is manually revoked via the Slack UI.
#{"invalid_auth", "account_inactive", "token_revoked", "token_expired"})
(def ^:private ^:dynamic *send-token-error-emails?*
"Whether to send an email to all admins when an invalid or revoked token error is received in response to a Slack
API call. Should be set to false when checking if an unsaved token is valid. (Default: `true`)"
true)
(defn- handle-error [body]
(let [invalid-token? (slack-token-error-codes (:error body))
message (if invalid-token?
(trs "Invalid token")
(trs "Slack API error: {0}" (:error body)))
error (if invalid-token?
{:error-code (:error body)
:errors {:slack-token message}}
{:error-code (:error body)
:message message
:response body})]
(when (and invalid-token? *send-token-error-emails?*)
(when (slack-token-valid?) (messages/send-slack-token-error-emails!))
(slack-token-valid?! false))
(when invalid-token?
(log/warn (u/pprint-to-str 'red (trs "🔒 Your Slack authorization token is invalid or has been revoked. Please update your integration in Admin Settings -> Slack."))))
(throw (ex-info message error))))
(defn- handle-response [{:keys [status body]}]
(with-open [reader (io/reader body)]
(let [body (json/parse-stream reader true)]
(if (and (= 200 status) (:ok body))
body
(handle-error body)))))
(defn- do-slack-request [request-fn endpoint request]
(let [token (or (get-in request [:query-params :token])
(get-in request [:form-params :token])
(slack-app-token)
(slack-token))]
(when token
(let [url (str "/" (name endpoint))
_ (log/trace "Slack API request: %s %s" (pr-str url) (pr-str request))
request (m/deep-merge
{:headers {:authorization (str "Bearer\n" token)}
:as :stream
                      ;; use a relatively long connection timeout (10 seconds) in cases where we're fetching big
                      ;; amounts of data -- see #11735
:conn-timeout 10000
:socket-timeout 10000}
(m/dissoc-in request [:query-params :token]))]
(try
(handle-response (request-fn url request))
(catch Throwable e
(throw (ex-info (.getMessage e) (merge (ex-data e) {:url url}) e))))))))
(defn- GET
"Make a GET request to the Slack API."
[endpoint & {:as query-params}]
(do-slack-request http/get endpoint {:query-params query-params}))
(defn- POST
"Make a POST request to the Slack API."
[endpoint body]
(do-slack-request http/post endpoint body))
(defn- next-cursor
"Get a cursor for the next page of results in a Slack API response, if one exists."
[response]
(not-empty (get-in response [:response_metadata :next_cursor])))
(def ^:private max-list-results
"Absolute maximum number of results to fetch from Slack API list endpoints. To prevent unbounded pagination of
results. Don't set this too low -- some orgs have many thousands of channels (see #12978)"
10000)
(defn- paged-list-request
"Make a GET request to a Slack API list `endpoint`, returning a sequence of objects returned by the top level
`results-key` in the response. If additional pages of results exist, fetches those lazily, up to a total of
`max-list-results`."
[endpoint response->data params]
  ;; use default limit (page size) of 1000 instead of 100 so we don't end up making a hundred API requests for orgs
  ;; with a huge number of channels or users.
(let [default-params {:limit 1000}
response (m/mapply GET endpoint (merge default-params params))
data (response->data response)]
(when (seq response)
(take
max-list-results
(concat
data
(when-let [next-cursor (next-cursor response)]
(lazy-seq
(paged-list-request endpoint response->data (assoc params :cursor next-cursor)))))))))
(defn channel-transform
"Transformation from slack's api representation of a channel to our own."
[channel]
{:display-name (str \# (:name channel))
:name (:name channel)
:id (:id channel)
:type "channel"})
(defn conversations-list
"Calls Slack API `conversations.list` and returns list of available 'conversations' (channels and direct messages).
By default only fetches channels, and returns them with their # prefix. Note the call to [[paged-list-request]] will
only fetch the first [[max-list-results]] items."
[& {:as query-parameters}]
(let [params (merge {:exclude_archived true, :types "public_channel"} query-parameters)]
(paged-list-request "conversations.list"
#(->> % :channels (map channel-transform))
params)))
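;; Illustrative sketch (not part of the original source): the shape of the data returned
;; by [[conversations-list]] after [[channel-transform]]; the ids and names are invented.
(comment
  (conversations-list :limit 2)
  ;; => ({:display-name "#general" :name "general" :id "C0001" :type "channel"}
  ;;     {:display-name "#random" :name "random" :id "C0002" :type "channel"})
  )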
(defn channel-exists?
"Returns true if the channel it exists."
[channel-name]
(let [channel-names (into #{} (comp (map (juxt :name :id))
cat)
(:channels (slack-cached-channels-and-usernames)))]
(and channel-name (contains? channel-names channel-name))))
(s/defn valid-token?
"Check whether a Slack token is valid by checking if the `conversations.list` Slack api accepts it."
[token :- su/NonBlankString]
(try
(binding [*send-token-error-emails?* false]
(boolean (take 1 (:channels (GET "conversations.list" :limit 1, :token token)))))
(catch Throwable e
(if (slack-token-error-codes (:error-code (ex-data e)))
false
(throw e)))))
(defn user-transform
"Tranformation from slack api user to our own internal representation."
[member]
{:display-name (str \@ (:name member))
:type "user"
:name (:name member)
:id (:id member)})
(defn users-list
"Calls Slack API `users.list` endpoint and returns the list of available users with their @ prefix. Note the call
to [[paged-list-request]] will only fetch the first [[max-list-results]] items."
[& {:as query-parameters}]
(->> (paged-list-request "users.list"
#(->> % :members (map user-transform))
query-parameters)
;; remove deleted users and bots. At the time of this writing there's no way to do this in the Slack API
(remove :deleted)
(remove :is_bot)))
(defonce ^:private refresh-lock (Object.))
(defn- needs-refresh? []
(u.date/older-than?
(slack-channels-and-usernames-last-updated)
(t/minutes 10)))
(defn clear-channel-cache!
"Clear the Slack channels cache, and reset its last-updated timestamp to its default value (the Unix epoch)."
[]
(slack-channels-and-usernames-last-updated! zoned-time-epoch)
(slack-cached-channels-and-usernames! {:channels []}))
(defn refresh-channels-and-usernames!
"Refreshes users and conversations in slack-cache. finds both in parallel, sets
[[slack-cached-channels-and-usernames]], and resets the [[slack-channels-and-usernames-last-updated]] time."
[]
(when (slack-configured?)
(log/info "Refreshing slack channels and usernames.")
(let [users (future (vec (users-list)))
conversations (future (vec (conversations-list)))]
(slack-cached-channels-and-usernames! {:channels (concat @conversations @users)})
(slack-channels-and-usernames-last-updated! (t/zoned-date-time)))))
(defn refresh-channels-and-usernames-when-needed!
"Refreshes users and conversations in slack-cache on a per-instance lock."
[]
(when (needs-refresh?)
(locking refresh-lock
(when (needs-refresh?)
(refresh-channels-and-usernames!)))))
(defn files-channel
"Looks in [[slack-cached-channels-and-usernames]] to check whether a channel exists with the expected name from the
[[slack-files-channel]] setting with a # prefix. If it does, returns the channel details as a map. If it doesn't,
throws an error that advises an admin to create it."
[]
(let [channel-name (slack-files-channel)]
(if (channel-exists? channel-name)
channel-name
(let [message (str (tru "Slack channel named `{0}` is missing!" channel-name)
" "
(tru "Please create or unarchive the channel in order to complete the Slack integration.")
" "
(tru "The channel is used for storing images that are included in dashboard subscriptions."))]
(log/error (u/format-color 'red message))
(throw (ex-info message {:status-code 400}))))))
(def ^:private NonEmptyByteArray
(s/constrained
(Class/forName "[B")
not-empty
"Non-empty byte array"))
(s/defn join-channel!
"Given a channel ID, calls Slack API `conversations.join` endpoint to join the channel as the Metabase Slack app.
This must be done before uploading a file to the channel, if using a Slack app integration."
[channel-id :- su/NonBlankString]
(POST "conversations.join" {:form-params {:channel channel-id}}))
(defn- maybe-lookup-id
"Slack requires the slack app to be in the channel that we post all of our attachments to. Slack changed (around June
2022 #23229) the \"conversations.join\" api to require the internal slack id rather than the common name. This makes
a lot of sense to ensure we continue to operate despite channel renames. Attempt to look up the channel-id in the
list of channels to obtain the internal id. Fallback to using the current channel-id."
[channel-id cached-channels]
(let [name->id (into {} (comp (filter (comp #{"channel"} :type))
(map (juxt :name :id)))
(:channels cached-channels))
channel-id' (get name->id channel-id channel-id)]
channel-id'))
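;; Illustrative sketch (not part of the original source) of the name -> id fallback
;; behaviour; the cache contents below are invented.
(comment
  (maybe-lookup-id "metabase_files"
                   {:channels [{:type "channel" :name "metabase_files" :id "C42"}]})
  ;; => "C42"
  (maybe-lookup-id "C99" {:channels []})
  ;; => "C99" (falls back to the id passed in)
  )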
(s/defn upload-file!
"Calls Slack API `files.upload` endpoint and returns the URL of the uploaded file."
[file :- NonEmptyByteArray, filename :- su/NonBlankString, channel-id :- su/NonBlankString]
{:pre [(slack-configured?)]}
(let [request {:multipart [{:name "file", :content file}
{:name "filename", :content filename}
{:name "channels", :content channel-id}]}
response (try
(POST "files.upload" request)
(catch Throwable e
;; This is expected to happen the first time a Slack subscription is sent.
(if (= "not_in_channel" (:error-code (ex-data e)))
(do (-> channel-id
(maybe-lookup-id (slack-cached-channels-and-usernames))
join-channel!)
(POST "files.upload" request))
(throw e))))]
(u/prog1 (get-in response [:file :url_private])
(log/debug (trs "Uploaded image") <>))))
(s/defn post-chat-message!
"Calls Slack API `chat.postMessage` endpoint and posts a message to a channel. `attachments` should be serialized
JSON."
[channel-id :- su/NonBlankString, text-or-nil :- (s/maybe s/Str) & [attachments]]
(POST "chat.postMessage"
{:form-params
{:channel channel-id
:username "MetaBot"
:icon_url ""
:text text-or-nil
:attachments (when (seq attachments)
(json/generate-string attachments))}}))
|
8660771360c49fa58a7d1580c7ae07239ca0977a5a9c98faf14a4c6c203a6acc | FundingCircle/md2c8e | cli.clj | (ns md2c8e.cli
(:require [cli-matic.core :refer [run-cmd]]
[clojure.java.io :as io :refer [file]]
[clojure.main :as cm]
[cognitect.anomalies :as anom]
[md2c8e.anomalies :refer [anom]]
[md2c8e.confluence :as c8e :refer [make-client]]
[md2c8e.core :refer [dir->page-tree publish]]
[md2c8e.links :refer [replace-links]]
[md2c8e.markdown :as md]
[md2c8e.paths :as paths]))
(defn- summarize
[results source-dir]
(let [{:keys [:created :updated :failed :skipped]}
(group-by #(cond (anom %) :failed
(::c8e/page %) (keyword (str (name (::c8e/operation %)) "d"))
:else :skipped)
results)]
(println (format (str "-------------------\n"
"✅ Created: %s\n"
"✅ Updated: %s\n"
"⚠️ Skipped: %s\n"
"🔥 Failed: %s")
(count created)
(count updated)
(count skipped)
(count failed)))
(doseq [{:keys [::c8e/page ::anom/message]} failed
:let [sfrp ;; source-file-relative-path
(paths/relative-path source-dir (get-in page [::md/source ::md/fp]))]]
(println " 🚨" (str sfrp) "\n"
" " message "\n"))))
(defn- publish-cmd
[{:keys [source-dir root-page-id site-root-url username password]}]
(let [client (make-client site-root-url username password)
threads 10] ;; TODO: Make threads a command-line option
(as-> (dir->page-tree (file source-dir) root-page-id) pt
(replace-links pt source-dir)
; (validate pt)
(publish pt client threads)
(try
(summarize pt source-dir)
; I think it’s sensible to consider `summarize` semi-optional, to the degree that if it
; fails, the program's exit code should probably still be zero. That said, I'd still
; like to fix this specific IllegalArgumentException that I saw "in the wild" today:
;
(catch IllegalArgumentException e
(println "Publishing succeeded, but an error occurred while summarizing the results")
(println (cm/err->msg e)))))))
(def config
;; The spec for this is here:
;; :default :present means required ¯\_(ツ)_/¯
{:app {:command "md2c8e"
:description "“Markdown to Confluence” — A tool for publishing sets of Markdown documents to Confluence"
:version "TBD"}
:commands [{:command "publish"
:description "Publish the specified docset to the specified Confluence site."
:opts [{:option "source-dir"
:as "The path to the Markdown docset to publish"
:type :string
:default :present}
{:option "root-page-id"
:as "The ID of the page under which the docset should be published"
:type :int
:default :present}
{:option "site-root-url"
:as "The root URL of the Confluence site to which the docset should be published"
:type :string
:default :present}
{:option "username"
:short "u"
:type :string
:default :present}
{:option "password"
:short "p"
:type :string
:default :present}]
:runs publish-cmd}]})
(defn -main
[& args]
(run-cmd args config))
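;; Illustrative usage sketch (not part of the original file). The directory, page id,
;; site URL and credentials below are invented; cli-matic maps each --option onto the
;; keys declared in `config` and then calls `publish-cmd`.
(comment
  ;; clojure -m md2c8e.cli publish --source-dir docs --root-page-id 12345
  ;;         --site-root-url https://wiki.example.com -u someone -p secret
  )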
| null | https://raw.githubusercontent.com/FundingCircle/md2c8e/6c93eba8676c8c80af371a44c173704fe0461232/src/md2c8e/cli.clj | clojure | source-file-relative-path
TODO: Make threads a command-line option
(validate pt)
I think it’s sensible to consider `summarize` semi-optional, to the degree that if it
:default :present means required ¯\_(ツ)_/¯ | (ns md2c8e.cli
(:require [cli-matic.core :refer [run-cmd]]
[clojure.java.io :as io :refer [file]]
[clojure.main :as cm]
[cognitect.anomalies :as anom]
[md2c8e.anomalies :refer [anom]]
[md2c8e.confluence :as c8e :refer [make-client]]
[md2c8e.core :refer [dir->page-tree publish]]
[md2c8e.links :refer [replace-links]]
[md2c8e.markdown :as md]
[md2c8e.paths :as paths]))
(defn- summarize
[results source-dir]
(let [{:keys [:created :updated :failed :skipped]}
(group-by #(cond (anom %) :failed
(::c8e/page %) (keyword (str (name (::c8e/operation %)) "d"))
:else :skipped)
results)]
(println (format (str "-------------------\n"
"✅ Created: %s\n"
"✅ Updated: %s\n"
"⚠️ Skipped: %s\n"
"🔥 Failed: %s")
(count created)
(count updated)
(count skipped)
(count failed)))
(doseq [{:keys [::c8e/page ::anom/message]} failed
:let [sfrp (paths/relative-path source-dir (get-in page [::md/source ::md/fp]))]]
(println " 🚨" (str sfrp) "\n"
" " message "\n"))))
(defn- publish-cmd
[{:keys [source-dir root-page-id site-root-url username password]}]
(let [client (make-client site-root-url username password)
      threads 10]
(as-> (dir->page-tree (file source-dir) root-page-id) pt
(replace-links pt source-dir)
(publish pt client threads)
(try
(summarize pt source-dir)
; fails, the program's exit code should probably still be zero. That said, I'd still
; like to fix this specific IllegalArgumentException that I saw "in the wild" today:
(catch IllegalArgumentException e
(println "Publishing succeeded, but an error occurred while summarizing the results")
(println (cm/err->msg e)))))))
(def config
;; The spec for this is here:
{:app {:command "md2c8e"
:description "“Markdown to Confluence” — A tool for publishing sets of Markdown documents to Confluence"
:version "TBD"}
:commands [{:command "publish"
:description "Publish the specified docset to the specified Confluence site."
:opts [{:option "source-dir"
:as "The path to the Markdown docset to publish"
:type :string
:default :present}
{:option "root-page-id"
:as "The ID of the page under which the docset should be published"
:type :int
:default :present}
{:option "site-root-url"
:as "The root URL of the Confluence site to which the docset should be published"
:type :string
:default :present}
{:option "username"
:short "u"
:type :string
:default :present}
{:option "password"
:short "p"
:type :string
:default :present}]
:runs publish-cmd}]})
(defn -main
[& args]
(run-cmd args config))
|
58d7d2275b17811e44cdf64186056e59639578a3b1cc45880be82e074306f2c3 | Mercerenies/net-game | numbernoun.lisp | (in-package #:net-game)
(defparameter *numerical* nil)
(defun assign-numbers (&rest lists)
"Assigns numbers to every element in each of the lists. Sets the
global *numerical* variable to be an alist of numbers associated
with objects."
(check-type *numerical* list "an associative list")
(setf *numerical*
(loop for elem in (apply #'concatenate 'list lists)
for i upfrom 1
collect (cons i elem))))
(defun get-number (obj &key ((:numerical *numerical*) *numerical*))
"Looks up the number for the given object in *numerical*, or the
keyword argument given."
(car (rassoc obj *numerical*)))
(defun get-formatted-number (obj &key ((:numerical *numerical*) *numerical*))
"Formats a string consisting of the corresponding number for the given object,
padded to a length to match the longest number in *numerical*."
(let ((len (loop for (n . o) in *numerical*
maximize (length (write-to-string n)))))
(and (get-number obj)
(format nil "~v,'0D" len (get-number obj)))))
(defun get-formatted-numbers (&key ((:numerical *numerical*) *numerical*))
"Gets a list of formatted numbers for each object in *numerical*."
(let ((len (loop for (n . o) in *numerical*
maximize (length (write-to-string n)))))
(loop for (n . o) in *numerical*
collect (format nil "~v,'0D" len n))))
(defun get-numerical-object (num &key ((:numerical *numerical*) *numerical*))
"Given a number, get the object associated with the number in *numerical*."
(when (stringp num)
(setf num (parse-integer num :junk-allowed t)))
(cdr (assoc num *numerical*)))
(defun get-numbered-name (obj &key ((:numerical *numerical*) *numerical*))
"Format a line of text containing the number associated with the object and
the object's name. The object given should be a named object."
(format nil "~A~@[ (~A)~]"
(if (typep obj 'location)
(location-short-name obj)
(get-name obj))
(get-formatted-number obj)))
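;; Illustrative usage sketch (not part of the original file); the item symbols are
;; invented, and GET-NUMBERED-NAME additionally assumes the object answers GET-NAME.
;; (assign-numbers '(sword shield) '(potion)) ; => ((1 . SWORD) (2 . SHIELD) (3 . POTION))
;; (get-number 'shield)                       ; => 2
;; (get-formatted-number 'shield)             ; => "2" (zero-padded to the widest number)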
| null | https://raw.githubusercontent.com/Mercerenies/net-game/da30ca36a7f468e6f9c062fd7bc5e4fed4ee95f9/lisp/numbernoun.lisp | lisp | (in-package #:net-game)
(defparameter *numerical* nil)
(defun assign-numbers (&rest lists)
"Assigns numbers to every element in each of the lists. Sets the
global *numerical* variable to be an alist of numbers associated
with objects."
(check-type *numerical* list "an associative list")
(setf *numerical*
(loop for elem in (apply #'concatenate 'list lists)
for i upfrom 1
collect (cons i elem))))
(defun get-number (obj &key ((:numerical *numerical*) *numerical*))
"Looks up the number for the given object in *numerical*, or the
keyword argument given."
(car (rassoc obj *numerical*)))
(defun get-formatted-number (obj &key ((:numerical *numerical*) *numerical*))
"Formats a string consisting of the corresponding number for the given object,
padded to a length to match the longest number in *numerical*."
(let ((len (loop for (n . o) in *numerical*
maximize (length (write-to-string n)))))
(and (get-number obj)
(format nil "~v,'0D" len (get-number obj)))))
(defun get-formatted-numbers (&key ((:numerical *numerical*) *numerical*))
"Gets a list of formatted numbers for each object in *numerical*."
(let ((len (loop for (n . o) in *numerical*
maximize (length (write-to-string n)))))
(loop for (n . o) in *numerical*
collect (format nil "~v,'0D" len n))))
(defun get-numerical-object (num &key ((:numerical *numerical*) *numerical*))
"Given a number, get the object associated with the number in *numerical*."
(when (stringp num)
(setf num (parse-integer num :junk-allowed t)))
(cdr (assoc num *numerical*)))
(defun get-numbered-name (obj &key ((:numerical *numerical*) *numerical*))
"Format a line of text containing the number associated with the object and
the object's name. The object given should be a named object."
(format nil "~A~@[ (~A)~]"
(if (typep obj 'location)
(location-short-name obj)
(get-name obj))
(get-formatted-number obj)))
|
|
f1a593539f3a78c88ffe827826da07b25d36086280dcfb236b04ed52731223b6 | mirage/bechamel | ext.mli | module Make (Functor : S.FUNCTOR) : sig
(* XXX(dinosaure): only on [>= 4.06.0] *)
type t = private ..
module type Extension = sig
type x
type t += T of x
end
type 'a extension = (module Extension with type x = 'a)
type instance = V : 'a * 'a Functor.t -> instance
module Injection (X : sig
type t
val instance : t Functor.t
end) : Extension with type x = X.t
val inj : 'a Functor.t -> 'a extension
val prj : t -> instance
end
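(* Illustrative only (not part of the original interface): the intended round-trip,
   assuming some functor instance [int_impl : int Functor.t] is available elsewhere.
     let ext = inj int_impl in
     let module E = (val ext : Extension with type x = int) in
     match prj (E.T 42) with
     | V (x, impl) -> (* [x] is the packed value, [impl] its functor instance *) ...
*)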
| null | https://raw.githubusercontent.com/mirage/bechamel/7a0aebef3c2ec266db97385264be74274bdc4765/lib/ext.mli | ocaml | module Make (Functor : S.FUNCTOR) : sig
(* XXX(dinosaure): only on [>= 4.06.0] *)
type t = private ..
module type Extension = sig
type x
type t += T of x
end
type 'a extension = (module Extension with type x = 'a)
type instance = V : 'a * 'a Functor.t -> instance
module Injection (X : sig
type t
val instance : t Functor.t
end) : Extension with type x = X.t
val inj : 'a Functor.t -> 'a extension
val prj : t -> instance
end
|
|
bd8f76d6eb17cc0cf10fe191acf8dd7cca2f6af6834d6dca35ca01e9dc7f1532 | kztk-m/sparcl | Multiplicity.hs | module Language.Sparcl.Multiplicity where
import Language.Sparcl.Pretty
data Multiplicity = One | Omega
deriving (Eq, Ord, Show)
instance Pretty Multiplicity where
ppr One = text "One"
ppr Omega = text "Omega"
instance Bounded Multiplicity where
minBound = One
maxBound = Omega
class MultiplicityLike a where
one :: a
omega :: a
fromMultiplicity :: Multiplicity -> a
instance MultiplicityLike Multiplicity where
{-# INLINE one #-}
one = One
{-# INLINE omega #-}
omega = Omega
{-# INLINE fromMultiplicity #-}
fromMultiplicity = id
class Lub a where
lub :: a -> a -> a
instance Lub Multiplicity where
lub One t = t
lub Omega _ = Omega
| null | https://raw.githubusercontent.com/kztk-m/sparcl/f52d333ce50e0aa6cb307da08811719f8c684f7d/src/Language/Sparcl/Multiplicity.hs | haskell | module Language.Sparcl.Multiplicity where
import Language.Sparcl.Pretty
data Multiplicity = One | Omega
deriving (Eq, Ord, Show)
instance Pretty Multiplicity where
ppr One = text "One"
ppr Omega = text "Omega"
instance Bounded Multiplicity where
minBound = One
maxBound = Omega
class MultiplicityLike a where
one :: a
omega :: a
fromMultiplicity :: Multiplicity -> a
instance MultiplicityLike Multiplicity where
{-# INLINE one #-}
one = One
{-# INLINE omega #-}
omega = Omega
{-# INLINE fromMultiplicity #-}
fromMultiplicity = id
class Lub a where
lub :: a -> a -> a
instance Lub Multiplicity where
lub One t = t
lub Omega _ = Omega
|
|
9936d745c6ba8d5a52748ae8a05417cdb63ef23d2e361ebf87d76fd4294dbad1 | carocad/clemence | edit_test.clj | (ns clemence.edit-test
(:require [clojure.test :refer :all]
[clemence.core :as clemence]
[criterium.core :as crit]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.test.check.clojure-test :refer [defspec]]))
(def min-words 2)
(def max-words 100)
(def word (gen/not-empty gen/string))
(def dictionary (gen/vector word min-words max-words))
; -------------------------------------------------------------------
; The levenshtein distance is symmetric, thus the order of the comparison
; doesn't matter for any two strings
; leven (P,Q) = leven (Q, P)
(defspec levenshtein-simmetry
100; tries
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
bar (rand-nth words)
foo-bar-dist (first (filter (comp #(= bar (first %)))
(clemence/levenshtein trie foo)))
bar-foo-dist (first (filter (comp #(= foo (first %)))
(clemence/levenshtein trie bar)))]
(= (second foo-bar-dist) (second bar-foo-dist)))))
; (tc/quick-check 100 levenshtein-simmetry)
; -------------------------------------------------------------------
; The levenshtein distance is a true metric, thus the triangle inequality
; holds for any 3 strings
; Ddf(P,Q) <= Ddf(P,R) + Ddf(R,Q)
(defspec levenshtein-triangle-innequality
100; tries
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
bar (rand-nth words)
baz (rand-nth words)
foo-bar-dist (first (filter (comp #(= bar (first %)))
(clemence/levenshtein trie foo)))
foo-baz-dist (first (filter (comp #(= baz (first %)))
(clemence/levenshtein trie foo)))
bar-baz-dist (first (filter (comp #(= bar (first %)))
(clemence/levenshtein trie baz)))]
(<= (second foo-bar-dist) (+ (second foo-baz-dist) (second bar-baz-dist))))))
; (tc/quick-check 100 levenshtein-triangle-innequality)
(defspec levenshtein-result-type
100; tries
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
res (clemence/levenshtein trie foo)]
(and (every? vector? res)
(every? (comp string? first) res)
(every? (comp integer? second) res)))))
; (tc/quick-check 100 levenshtein-result-type)
; -------------------------------------------------------------------
; If the distance of two strings is 0, then the two strings are the same
; Ddf(P,Q) = 0 if P = Q
(defspec levenshtein-equality
100; tries
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
foo-foo-dist (first (filter (comp #(= foo (first %)))
(clemence/levenshtein trie foo)))]
(= 0 (second foo-foo-dist)))))
; (tc/quick-check 100 equality-property)
; d(a,b) > 0 when a ≠ b, since this would require at least one operation at non-zero cost.
(defspec levenshtein-positive-distance
100; tries
(prop/for-all [foo word
words dictionary]
(let [trie (clemence/build-trie words)
foo-dist (clemence/levenshtein trie foo)]
(if (get (set words) foo)
(= 0 (second (filter #(= foo %) foo-dist)))
(< 0 (apply min (map second foo-dist)))))))
;; TODO: add random words and check that the number of words in the trie is
;; equal to the number of input words
| null | https://raw.githubusercontent.com/carocad/clemence/ee6504d05717aa695e5c5047d01bd3fef7ea8325/test/clemence/edit_test.clj | clojure | -------------------------------------------------------------------
The levenshtein distance is simmetric, thus the order of the comparison
leven (P,Q) = leven (Q, P)
tries
-------------------------------------------------------------------
The levenshtein distance is a true metric, thus the triangle-innequality
tries
tries
-------------------------------------------------------------------
Ddf(P,Q) = 0 if P = Q
tries
tries
TODO: add random words and check that the number of words in the trie is
equal to the number of input words | (ns clemence.edit-test
(:require [clojure.test :refer :all]
[clemence.core :as clemence]
[criterium.core :as crit]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.test.check.clojure-test :refer [defspec]]))
(def min-words 2)
(def max-words 100)
(def word (gen/not-empty gen/string))
(def dictionary (gen/vector word min-words max-words))
; doesn't matter for any two strings
(defspec levenshtein-simmetry
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
bar (rand-nth words)
foo-bar-dist (first (filter (comp #(= bar (first %)))
(clemence/levenshtein trie foo)))
bar-foo-dist (first (filter (comp #(= foo (first %)))
(clemence/levenshtein trie bar)))]
(= (second foo-bar-dist) (second bar-foo-dist)))))
; (tc/quick-check 100 levenshtein-simmetry)
; holds for any 3 strings
; Ddf(P,Q) <= Ddf(P,R) + Ddf(R,Q)
(defspec levenshtein-triangle-innequality
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
bar (rand-nth words)
baz (rand-nth words)
foo-bar-dist (first (filter (comp #(= bar (first %)))
(clemence/levenshtein trie foo)))
foo-baz-dist (first (filter (comp #(= baz (first %)))
(clemence/levenshtein trie foo)))
bar-baz-dist (first (filter (comp #(= bar (first %)))
(clemence/levenshtein trie baz)))]
(<= (second foo-bar-dist) (+ (second foo-baz-dist) (second bar-baz-dist))))))
; (tc/quick-check 100 levenshtein-triangle-innequality)
(defspec levenshtein-result-type
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
res (clemence/levenshtein trie foo)]
(and (every? vector? res)
(every? (comp string? first) res)
(every? (comp integer? second) res)))))
; (tc/quick-check 100 levenshtein-result-type)
; If the distance of two strings is 0, then the two strings are the same
(defspec levenshtein-equality
(prop/for-all [words dictionary]
(let [trie (clemence/build-trie words)
foo (rand-nth words)
foo-foo-dist (first (filter (comp #(= foo (first %)))
(clemence/levenshtein trie foo)))]
(= 0 (second foo-foo-dist)))))
; (tc/quick-check 100 equality-property)
; d(a,b) > 0 when a ≠ b, since this would require at least one operation at non-zero cost.
(defspec levenshtein-positive-distance
(prop/for-all [foo word
words dictionary]
(let [trie (clemence/build-trie words)
foo-dist (clemence/levenshtein trie foo)]
(if (get (set words) foo)
(= 0 (second (filter #(= foo %) foo-dist)))
(< 0 (apply min (map second foo-dist)))))))
|
ac30a554ec0d63d6e9952bc34b818dd50bb793805db3743e7fb19e1f1f0fe0a2 | dizengrong/erlang_game | chat_dict.erl | @author dzR < >
%% @doc Dictionary data for chat channels
-module (chat_dict).
-include ("chat.hrl").
-export ([init/1]).
-export ([get_chat_room_rec/2, set_chat_room_rec/2, delete_chat_room_rec/2]).
-export ([get_chat_entity_rec/2, set_chat_entity_rec/2, delete_chat_entity_rec/2]).
-export ([get_world_chat_member/1,
add_world_chat_member/2, delete_world_chat_member/2]).
init(_Channel) ->
ok.
%% @doc Interface for operating on chat_room records
get_chat_room_rec(Channel, RoomKey) -> erlang:get({Channel, RoomKey}).
delete_chat_room_rec(Channel, RoomKey) -> erlang:erase({Channel, RoomKey}).
set_chat_room_rec(Channel, ChatRoomRec) ->
erlang:put({Channel, ChatRoomRec#r_chat_room.room_key}, ChatRoomRec).
%% @doc Interface for operating on private-chat entities
get_chat_entity_rec(Channel, EntityId) -> erlang:get({Channel, EntityId}).
set_chat_entity_rec(Channel, ChatEntityRec) ->
erlang:put({Channel, ChatEntityRec#r_chat_entity.id}, ChatEntityRec).
delete_chat_entity_rec(Channel, EntityId) -> erlang:erase({Channel, EntityId}).
%% @doc Interface for operating on world-chat members
get_world_chat_member(Channel) -> erlang:get({Channel, world_members}).
set_world_chat_member(Channel, Members) ->
erlang:put({Channel, world_members}, Members).
add_world_chat_member(Channel, EntityId) ->
Members = get_world_chat_member(Channel),
case lists:member(EntityId, Members) of
true -> ok;
false -> set_world_chat_member(Channel, [EntityId | Members])
end,
ok.
delete_world_chat_member(Channel, EntityId) ->
Members = get_world_chat_member(Channel),
set_world_chat_member(Channel, lists:delete(EntityId, Members)),
ok.
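%% Illustrative usage sketch (not part of the original module); the channel name and
%% entity id are invented, and the member list is assumed to have been initialised first:
%% set_world_chat_member(world, []),
%% add_world_chat_member(world, EntityId),
%% true = lists:member(EntityId, get_world_chat_member(world)),
%% delete_world_chat_member(world, EntityId).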
| null | https://raw.githubusercontent.com/dizengrong/erlang_game/4598f97daa9ca5eecff292ac401dd8f903eea867/gerl/src/chat/chat_dict.erl | erlang | @doc 聊天频道的字典数据
@doc 操作chat_room的接口
@doc 私聊的实体操作接口
@doc 操作世界聊天成员的接口 | @author dzR < >
-module (chat_dict).
-include ("chat.hrl").
-export ([init/1]).
-export ([get_chat_room_rec/2, set_chat_room_rec/2, delete_chat_room_rec/2]).
-export ([get_chat_entity_rec/2, set_chat_entity_rec/2, delete_chat_entity_rec/2]).
-export ([get_world_chat_member/1,
add_world_chat_member/2, delete_world_chat_member/2]).
init(_Channel) ->
ok.
get_chat_room_rec(Channel, RoomKey) -> erlang:get({Channel, RoomKey}).
delete_chat_room_rec(Channel, RoomKey) -> erlang:erase({Channel, RoomKey}).
set_chat_room_rec(Channel, ChatRoomRec) ->
erlang:put({Channel, ChatRoomRec#r_chat_room.room_key}, ChatRoomRec).
get_chat_entity_rec(Channel, EntityId) -> erlang:get({Channel, EntityId}).
set_chat_entity_rec(Channel, ChatEntityRec) ->
erlang:put({Channel, ChatEntityRec#r_chat_entity.id}, ChatEntityRec).
delete_chat_entity_rec(Channel, EntityId) -> erlang:erase({Channel, EntityId}).
get_world_chat_member(Channel) -> erlang:get({Channel, world_members}).
set_world_chat_member(Channel, Members) ->
erlang:put({Channel, world_members}, Members).
add_world_chat_member(Channel, EntityId) ->
Members = get_world_chat_member(Channel),
case lists:member(EntityId, Members) of
true -> ok;
false -> set_world_chat_member(Channel, [EntityId | Members])
end,
ok.
delete_world_chat_member(Channel, EntityId) ->
Members = get_world_chat_member(Channel),
set_world_chat_member(Channel, lists:delete(EntityId, Members)),
ok.
|
87f13860055d65d0485a9348f74dd14ac48acb808f145624f8392bec78e49931 | homebaseio/datalog-console | main.cljs | (ns datalog-console.chrome.extension.devtool.main
{:no-doc true}
(:require [clojure.edn]
[cljs.reader]
[reagent.dom :as rdom]
[datalog-console.client :as console]))
(println ::loaded)
(defn mount! []
(rdom/render [console/root] (js/document.getElementById "root")))
(defn init! []
(mount!))
(defn ^:dev/after-load remount!
"Remounts the whole UI on every save. Def state you want to persist between remounts with defonce."
[]
(mount!))
(mount!) | null | https://raw.githubusercontent.com/homebaseio/datalog-console/21b2c88b88b1964ef0135f36a257d99da43cce87/src/main/datalog_console/chrome/extension/devtool/main.cljs | clojure | (ns datalog-console.chrome.extension.devtool.main
{:no-doc true}
(:require [clojure.edn]
[cljs.reader]
[reagent.dom :as rdom]
[datalog-console.client :as console]))
(println ::loaded)
(defn mount! []
(rdom/render [console/root] (js/document.getElementById "root")))
(defn init! []
(mount!))
(defn ^:dev/after-load remount!
"Remounts the whole UI on every save. Def state you want to persist between remounts with defonce."
[]
(mount!))
(mount!) |
|
c9304c0c7f3ba8957a29364941752908ee3a7873c4b5782ced29b3d8d22af264 | hgoes/smtlib2 | Verify.hs | module Language.SMTLib2.Internals.Proof.Verify where
import qualified Language.SMTLib2.Internals.Backend as B
import Language.SMTLib2.Internals.Monad
import Language.SMTLib2.Internals.Embed
import Language.SMTLib2.Internals.Proof
import Language.SMTLib2
import qualified Language.SMTLib2.Internals.Expression as E
import Data.GADT.Compare
import Data.GADT.Show
import Control.Monad.State
import Control.Monad.Except
import qualified Data.Map as Map
verifyZ3Proof :: B.Backend b => B.Proof b -> SMT b ()
verifyZ3Proof pr = do
res <- runExceptT (evalStateT (verifyProof analyzeProof (\name args res -> do
b <- gets backend
verifyZ3Rule (BackendInfo b) name args res) pr) Map.empty)
case res of
Right _ -> return ()
Left err -> error $ "Error in proof: "++err
verifyZ3Rule :: (GetType e,Extract i e,GEq e,Monad m,GShow e)
=> i -> String -> [ProofResult e] -> ProofResult e -> ExceptT String m ()
verifyZ3Rule _ "asserted" [] q = return ()
verifyZ3Rule i "mp" [p,impl] q = case p of
ProofExpr p' -> case q of
ProofExpr q' -> case impl of
ProofExpr (extract i -> Just (Implies (rp ::: rq ::: Nil)))
-> case geq p' rp of
Just Refl -> case geq q' rq of
Just Refl -> return ()
Nothing -> throwError "right hand side of implication doesn't match result"
Nothing -> throwError "left hand side of implication doesn't match argument"
ProofExpr (extract i -> Just (Eq (rp ::: rq ::: Nil)))
-> case geq p' rp of
Just Refl -> case geq q' rq of
Just Refl -> return ()
Nothing -> throwError "right hand side of implication doesn't match result"
Nothing -> throwError "left hand side of implication doesn't match argument"
_ -> throwError "second argument isn't an implication"
_ -> throwError "result type can't be equisatisfiable equality"
_ -> throwError "first argument can't be equisatisfiable equality"
verifyZ3Rule i "reflexivity" [] res = case res of
EquivSat e1 e2 -> case geq e1 e2 of
Just Refl -> return ()
Nothing -> throwError "arguments must be the same"
ProofExpr (extract i -> Just (Eq (x ::: y ::: Nil)))
-> case geq x y of
Just Refl -> return ()
Nothing -> throwError "arguments must be the same"
_ -> throwError "result must be equality"
verifyZ3Rule i "symmetry" [rel] res = case rel of
EquivSat x y -> case res of
EquivSat y' x' -> case geq x x' of
Just Refl -> case geq y y' of
Just Refl -> return ()
Nothing -> throwError "argument mismatch"
Nothing -> throwError "argument mismatch"
_ -> throwError "argument mismatch"
ProofExpr (extract i -> Just (E.App r1 (x ::: y ::: Nil)))
-> case res of
ProofExpr (extract i -> Just (E.App r2 (ry ::: rx ::: Nil)))
-> case geq x rx of
Just Refl -> case geq y ry of
Just Refl -> case geq r1 r2 of
Just Refl -> case r1 of
E.Eq _ _ -> return ()
E.Logic E.And _ -> return ()
E.Logic E.Or _ -> return ()
E.Logic E.XOr _ -> return ()
_ -> throwError "relation is not symmetric"
_ -> throwError "result must be the same relation"
_ -> throwError "argument mismatch"
_ -> throwError "argument mismatch"
_ -> throwError "result must be a relation"
_ -> throwError "argument must be a relation"
--verifyZ3Rule i "transitivity"
verifyZ3Rule i name args res = error $ "Cannot verify rule "++show name++" "++show args++" => "++show res
| null | https://raw.githubusercontent.com/hgoes/smtlib2/c35747f2a5a9ec88dc7b1db41a5aab6e98c0458d/Language/SMTLib2/Internals/Proof/Verify.hs | haskell | verifyZ3Rule i "transitivity" | module Language.SMTLib2.Internals.Proof.Verify where
import qualified Language.SMTLib2.Internals.Backend as B
import Language.SMTLib2.Internals.Monad
import Language.SMTLib2.Internals.Embed
import Language.SMTLib2.Internals.Proof
import Language.SMTLib2
import qualified Language.SMTLib2.Internals.Expression as E
import Data.GADT.Compare
import Data.GADT.Show
import Control.Monad.State
import Control.Monad.Except
import qualified Data.Map as Map
verifyZ3Proof :: B.Backend b => B.Proof b -> SMT b ()
verifyZ3Proof pr = do
res <- runExceptT (evalStateT (verifyProof analyzeProof (\name args res -> do
b <- gets backend
verifyZ3Rule (BackendInfo b) name args res) pr) Map.empty)
case res of
Right _ -> return ()
Left err -> error $ "Error in proof: "++err
verifyZ3Rule :: (GetType e,Extract i e,GEq e,Monad m,GShow e)
=> i -> String -> [ProofResult e] -> ProofResult e -> ExceptT String m ()
verifyZ3Rule _ "asserted" [] q = return ()
verifyZ3Rule i "mp" [p,impl] q = case p of
ProofExpr p' -> case q of
ProofExpr q' -> case impl of
ProofExpr (extract i -> Just (Implies (rp ::: rq ::: Nil)))
-> case geq p' rp of
Just Refl -> case geq q' rq of
Just Refl -> return ()
Nothing -> throwError "right hand side of implication doesn't match result"
Nothing -> throwError "left hand side of implication doesn't match argument"
ProofExpr (extract i -> Just (Eq (rp ::: rq ::: Nil)))
-> case geq p' rp of
Just Refl -> case geq q' rq of
Just Refl -> return ()
Nothing -> throwError "right hand side of implication doesn't match result"
Nothing -> throwError "left hand side of implication doesn't match argument"
_ -> throwError "second argument isn't an implication"
_ -> throwError "result type can't be equisatisfiable equality"
_ -> throwError "first argument can't be equisatisfiable equality"
verifyZ3Rule i "reflexivity" [] res = case res of
EquivSat e1 e2 -> case geq e1 e2 of
Just Refl -> return ()
Nothing -> throwError "arguments must be the same"
ProofExpr (extract i -> Just (Eq (x ::: y ::: Nil)))
-> case geq x y of
Just Refl -> return ()
Nothing -> throwError "arguments must be the same"
_ -> throwError "result must be equality"
verifyZ3Rule i "symmetry" [rel] res = case rel of
EquivSat x y -> case res of
EquivSat y' x' -> case geq x x' of
Just Refl -> case geq y y' of
Just Refl -> return ()
Nothing -> throwError "argument mismatch"
Nothing -> throwError "argument mismatch"
_ -> throwError "argument mismatch"
ProofExpr (extract i -> Just (E.App r1 (x ::: y ::: Nil)))
-> case res of
ProofExpr (extract i -> Just (E.App r2 (ry ::: rx ::: Nil)))
-> case geq x rx of
Just Refl -> case geq y ry of
Just Refl -> case geq r1 r2 of
Just Refl -> case r1 of
E.Eq _ _ -> return ()
E.Logic E.And _ -> return ()
E.Logic E.Or _ -> return ()
E.Logic E.XOr _ -> return ()
_ -> throwError "relation is not symmetric"
_ -> throwError "result must be the same relation"
_ -> throwError "argument mismatch"
_ -> throwError "argument mismatch"
_ -> throwError "result must be a relation"
_ -> throwError "argument must be a relation"
verifyZ3Rule i name args res = error $ "Cannot verify rule "++show name++" "++show args++" => "++show res
|
b56290c574cd04912f6fef323e8dc6416b6f7c3c17fc508058d42815a49bfa23 | dreixel/regular | Selector.hs | # LANGUAGE KindSignatures #
-----------------------------------------------------------------------------
-- |
-- Module : Generics.Regular.Selector
-- Copyright : (c) 2008 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : non-portable
--
-- Summary: Representation for record selectors.
-----------------------------------------------------------------------------
module Generics.Regular.Selector (Selector(..)) where
class Selector s where
selName :: t s (f :: * -> *) r -> String
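-- Illustrative note (not part of the original module): for a generically represented
-- record such as @data P = P { name :: String }@, the derived 'Selector' instance makes
-- 'selName' return the field's label, e.g. the string "name".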
| null | https://raw.githubusercontent.com/dreixel/regular/c8460ee827f1eb04dd31b873380ff9626a4a4220/src/Generics/Regular/Selector.hs | haskell | ---------------------------------------------------------------------------
|
Module : Generics.Regular.Selector
License : BSD3
Stability : experimental
Portability : non-portable
Summary: Representation for record selectors.
--------------------------------------------------------------------------- | # LANGUAGE KindSignatures #
Copyright : ( c ) 2008 Universiteit Utrecht
Maintainer :
module Generics.Regular.Selector (Selector(..)) where
class Selector s where
selName :: t s (f :: * -> *) r -> String
|
0b733219b7d065cd72ec2b9b499fe70969b77988b0a1128c1049706e80136c41 | runtimeverification/haskell-backend | Location.hs | |
{- |
Module      : Kore.Attribute.Location
Description : Line/column location attribute
Copyright   : (c) Runtime Verification, 2019-2021
License     : BSD-3-Clause
Maintainer  :
-}
module Kore.Attribute.Location (
Location (..),
LineColumn (..),
) where
import Data.Text qualified as Text
import GHC.Generics qualified as GHC
import Generics.SOP qualified as SOP
import Kore.Attribute.Parser as AttributeParser
import Kore.Debug
import Kore.Error qualified
import Prelude.Kore
import Text.Megaparsec (
Parsec,
parseMaybe,
)
import Text.Megaparsec.Char
import Text.Megaparsec.Char.Lexer (
decimal,
)
data LineColumn = LineColumn
{ line :: !Int
, column :: !Int
}
deriving stock (Eq, Ord, Show)
deriving stock (GHC.Generic)
deriving anyclass (Hashable, NFData)
deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
deriving anyclass (Debug, Diff)
data Location = Location
{ start :: Maybe LineColumn
, end :: Maybe LineColumn
}
deriving stock (Eq, Ord, Show)
deriving stock (GHC.Generic)
deriving anyclass (Hashable, NFData)
deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
deriving anyclass (Debug, Diff)
instance Default Location where
def = Location Nothing Nothing
-- | Kore identifier representing the @location@ attribute symbol.
locationId :: Id
locationId = "org'Stop'kframework'Stop'attributes'Stop'Location"
instance ParseAttributes Location where
parseAttribute = AttributeParser.withApplication locationId parseApplication
where
parseApplication ::
[Sort] ->
[AttributePattern] ->
Location ->
AttributeParser.Parser Location
parseApplication params args l@(Location Nothing Nothing) = do
AttributeParser.getZeroParams params
case args of
[] -> pure l
[_] -> do
arg <- AttributeParser.getOneArgument args
StringLiteral str <- AttributeParser.getStringLiteral arg
pure
. fromMaybe def
. parseMaybe locationParser
$ Text.unpack str
_ ->
Kore.Error.koreFail
("expected one argument, found " ++ show (length args))
parseApplication _ _ _ =
AttributeParser.failDuplicate locationId
instance From Location Attributes where
-- TODO (thomas.tuegel): Implement
from = def
{- | This parser is used to parse the inner representation of the attribute.
The expected format is "Location(sl,sc,el,ec)" where sc, sc, el, and ec are
all numbers.
-}
type StringParser = Parsec String String
locationParser :: StringParser Location
locationParser =
Location
<$> (Just <$> parseStart)
<*> (Just <$> parseEnd)
where
parseStart :: StringParser LineColumn
parseStart =
LineColumn
<$> (string "Location(" *> decimal)
<*> (string "," *> decimal)
parseEnd :: StringParser LineColumn
parseEnd =
LineColumn
<$> (string "," *> decimal)
<*> (string "," *> decimal <* ")")
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/b06757e252ee01fdd5ab8f07de2910711997d845/kore/src/Kore/Attribute/Location.hs | haskell | | Kore identifier representing the @location@ attribute symbol. | |
Module : . Attribute . Location
Description : Line / column location attribute
Copyright : ( c ) Runtime Verification , 2019 - 2021
License : BSD-3 - Clause
Maintainer :
{- |
Module : Kore.Attribute.Location
Description : Line/column location attribute
Copyright : (c) Runtime Verification, 2019-2021
License : BSD-3-Clause
Maintainer :
-}
module Kore.Attribute.Location (
Location (..),
LineColumn (..),
) where
import Data.Text qualified as Text
import GHC.Generics qualified as GHC
import Generics.SOP qualified as SOP
import Kore.Attribute.Parser as AttributeParser
import Kore.Debug
import Kore.Error qualified
import Prelude.Kore
import Text.Megaparsec (
Parsec,
parseMaybe,
)
import Text.Megaparsec.Char
import Text.Megaparsec.Char.Lexer (
decimal,
)
data LineColumn = LineColumn
{ line :: !Int
, column :: !Int
}
deriving stock (Eq, Ord, Show)
deriving stock (GHC.Generic)
deriving anyclass (Hashable, NFData)
deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
deriving anyclass (Debug, Diff)
data Location = Location
{ start :: Maybe LineColumn
, end :: Maybe LineColumn
}
deriving stock (Eq, Ord, Show)
deriving stock (GHC.Generic)
deriving anyclass (Hashable, NFData)
deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
deriving anyclass (Debug, Diff)
instance Default Location where
def = Location Nothing Nothing
locationId :: Id
locationId = "org'Stop'kframework'Stop'attributes'Stop'Location"
instance ParseAttributes Location where
parseAttribute = AttributeParser.withApplication locationId parseApplication
where
parseApplication ::
[Sort] ->
[AttributePattern] ->
Location ->
AttributeParser.Parser Location
parseApplication params args l@(Location Nothing Nothing) = do
AttributeParser.getZeroParams params
case args of
[] -> pure l
[_] -> do
arg <- AttributeParser.getOneArgument args
StringLiteral str <- AttributeParser.getStringLiteral arg
pure
. fromMaybe def
. parseMaybe locationParser
$ Text.unpack str
_ ->
Kore.Error.koreFail
("expected one argument, found " ++ show (length args))
parseApplication _ _ _ =
AttributeParser.failDuplicate locationId
instance From Location Attributes where
-- TODO (thomas.tuegel): Implement
from = def
{- | This parser is used to parse the inner representation of the attribute.
The expected format is "Location(sl,sc,el,ec)" where sc, sc, el, and ec are
all numbers.
-}
type StringParser = Parsec String String
locationParser :: StringParser Location
locationParser =
Location
<$> (Just <$> parseStart)
<*> (Just <$> parseEnd)
where
parseStart :: StringParser LineColumn
parseStart =
LineColumn
<$> (string "Location(" *> decimal)
<*> (string "," *> decimal)
parseEnd :: StringParser LineColumn
parseEnd =
LineColumn
<$> (string "," *> decimal)
<*> (string "," *> decimal <* ")")
|
68410987f04c683e71f406426ffa943620bc6bcae195bee1c391b119606b933e | PEZ/pirate-lang | seven_segmemts.clj | (ns pez.lab.seven-segments
(:require [clojure.string :as string]))
(defn transpose [v]
(->> v
(remove nil?)
(apply mapv vector)))
(def seven-segment-alphabet
{\a ["#### "
"# # "
"#### "
"# # "
"# # "]
\b ["# "
"# "
"#### "
"# # "
"#### "]
\c ["#### "
"# "
"# "
"# "
"#### "]
\d [" # "
" # "
"#### "
"# # "
"#### "]
\e ["#### "
"# "
"#### "
"# "
"#### "]
\f ["#### "
"# "
"#### "
"# "
"# "]
\h ["# # "
"# # "
"#### "
"# # "
"# # "]
\i ["# "
"# "
"# "
"# "
"# "]
\j [" # "
" # "
" # "
" # "
"#### "]
\l ["# "
"# "
"# "
"# "
"#### "]
\n ["#### "
"# # "
"# # "
"# # "
"# # "]
\o [" "
" "
"#### "
"# # "
"#### "]
\p ["#### "
"# # "
"#### "
"# "
"# "]
\q ["#### "
"# # "
"#### "
" # "
" # "]
\r [" "
" "
"#### "
"# "
"# "]
\s ["#### "
"# "
"#### "
" # "
"#### "]
\t ["# "
"# "
"#### "
"# "
"#### "]
\u ["# # "
"# # "
"# # "
"# # "
"#### "]
\y ["# # "
"# # "
"#### "
" # "
"#### "]
\space [" "
" "
" "
" "
" "]})
(defn display [word]
(->> word
(string/lower-case)
(map seven-segment-alphabet)
(transpose)
(map #(apply str %))))
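;; Illustrative sketch (not part of the original file): `display` looks up each letter's
;; 5-row pattern, transposes the per-letter rows, and joins each row into one string, so
;; the result is always a sequence of five printable lines.
(comment
  (count (display "cat")) ;; => 5
  (run! println (display "cat")))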
(def bad-letters-regex #"[gkmqvwxzi]")
(defn good? [word]
(nil?
(re-find bad-letters-regex word)))
(def dictionary-text
(slurp "words-alpha.txt"))
(defn longest []
(->> dictionary-text
(string/split-lines)
(filter good?)
(sort-by count)
(last)))
(comment
(display "Hello World")
(-> (longest)
(display))
(spit "words-alpha.txt" (slurp "-words/master/words_alpha.txt")))
| null | https://raw.githubusercontent.com/PEZ/pirate-lang/a4c8b3431d2ef98ae386ccde822d3e98c30c297f/src/pez/lab/seven_segmemts.clj | clojure | (ns pez.lab.seven-segments
(:require [clojure.string :as string]))
(defn transpose [v]
(->> v
(remove nil?)
(apply mapv vector)))
(def seven-segment-alphabet
{\a ["#### "
"# # "
"#### "
"# # "
"# # "]
\b ["# "
"# "
"#### "
"# # "
"#### "]
\c ["#### "
"# "
"# "
"# "
"#### "]
\d [" # "
" # "
"#### "
"# # "
"#### "]
\e ["#### "
"# "
"#### "
"# "
"#### "]
\f ["#### "
"# "
"#### "
"# "
"# "]
\h ["# # "
"# # "
"#### "
"# # "
"# # "]
\i ["# "
"# "
"# "
"# "
"# "]
\j [" # "
" # "
" # "
" # "
"#### "]
\l ["# "
"# "
"# "
"# "
"#### "]
\n ["#### "
"# # "
"# # "
"# # "
"# # "]
\o [" "
" "
"#### "
"# # "
"#### "]
\p ["#### "
"# # "
"#### "
"# "
"# "]
\q ["#### "
"# # "
"#### "
" # "
" # "]
\r [" "
" "
"#### "
"# "
"# "]
\s ["#### "
"# "
"#### "
" # "
"#### "]
\t ["# "
"# "
"#### "
"# "
"#### "]
\u ["# # "
"# # "
"# # "
"# # "
"#### "]
\y ["# # "
"# # "
"#### "
" # "
"#### "]
\space [" "
" "
" "
" "
" "]})
(defn display [word]
(->> word
(string/lower-case)
(map seven-segment-alphabet)
(transpose)
(map #(apply str %))))
(def bad-letters-regex #"[gkmqvwxzi]")
(defn good? [word]
(nil?
(re-find bad-letters-regex word)))
(def dictionary-text
(slurp "words-alpha.txt"))
(defn longest []
(->> dictionary-text
(string/split-lines)
(filter good?)
(sort-by count)
(last)))
(comment
(display "Hello World")
(-> (longest)
(display))
(spit "words-alpha.txt" (slurp "-words/master/words_alpha.txt")))
|
|
d8fa5ef9feb40f6bcb18b387651d4f18c515b092f94b0227d1f705807c6a88ae | mbutterick/beautiful-racket | test-sources.rkt | #lang at-exp br
(require br/test rackunit)
(check-equal? (run-source "sample.rkt") "one
three
4
")
(check-equal? (run-source "sample-var.rkt") "15
75
")
(check-equal? (run-source "sample-math.rkt") "1
1
1
1
1
1
1
")
(check-equal? (run-source "sample-gosub.rkt") "hello
world
third
hi
")
(check-equal? (run-source "sample-for.rkt") "19
18
17
29
28
27
39
38
37
") | null | https://raw.githubusercontent.com/mbutterick/beautiful-racket/f0e2cb5b325733b3f9cbd554cc7d2bb236af9ee9/beautiful-racket-demo/basic-demo-2/test-sources.rkt | racket | #lang at-exp br
(require br/test rackunit)
(check-equal? (run-source "sample.rkt") "one
three
4
")
(check-equal? (run-source "sample-var.rkt") "15
75
")
(check-equal? (run-source "sample-math.rkt") "1
1
1
1
1
1
1
")
(check-equal? (run-source "sample-gosub.rkt") "hello
world
third
hi
")
(check-equal? (run-source "sample-for.rkt") "19
18
17
29
28
27
39
38
37
") |
|
55d3dcf77f101c494edcab8670ec7e00d22d771a0605f2ad7a7bddf09a5dbe97 | EveryTian/Haskell-Codewars | playing-with-digits.hs | -- -with-digits
module Codewars.Kata.DigPow where
import Data.Char
digpow :: Integer -> Integer -> Integer
digpow n p = let intList = map (\ x -> ord x - ord '0') $ show n
sumAll s _ [] = s
sumAll s curp (x:xs) = sumAll (s + x ^ curp) (curp + 1) xs
sumOfAll = sumAll 0 p intList
in if sumOfAll `mod` fromInteger n == 0 then toInteger $ sumOfAll `div` fromInteger n else -1 | null | https://raw.githubusercontent.com/EveryTian/Haskell-Codewars/dc48d95c676ce1a59f697d07672acb6d4722893b/6kyu/playing-with-digits.hs | haskell | -with-digits |
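-- Illustrative examples (not part of the original file):
-- digpow 89 1 == 1     because 8^1 + 9^2 = 89 = 89 * 1
-- digpow 92 1 == (-1)  because 9^1 + 2^2 = 13 is not a multiple of 92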
module Codewars.Kata.DigPow where
import Data.Char
digpow :: Integer -> Integer -> Integer
digpow n p = let intList = map (\ x -> ord x - ord '0') $ show n
sumAll s _ [] = s
sumAll s curp (x:xs) = sumAll (s + x ^ curp) (curp + 1) xs
sumOfAll = sumAll 0 p intList
in if sumOfAll `mod` fromInteger n == 0 then toInteger $ sumOfAll `div` fromInteger n else -1 |
6634c5de2f2a93e28e037ff7e704a06bd94d556bf6c9daccf8ed46e869f728ac | ndmitchell/weeder | Hi.hs | # LANGUAGE DeriveGeneric , RecordWildCards , , OverloadedStrings #
module Hi(
HiKey(), Hi(..), Ident(..),
hiParseDirectory
) where
import qualified Data.HashSet as Set
import qualified Data.HashMap.Lazy as Map
import System.Console.CmdArgs.Verbosity
import System.FilePath
import System.Directory.Extra
import System.Time.Extra
import GHC.Generics
import Data.Tuple.Extra
import Data.Maybe
import Control.Monad
import Control.Exception
import Control.DeepSeq
import Data.Char
import Data.Hashable
import Data.List.Extra
import Data.Semigroup
import Data.Functor
import Util
import qualified Str as S
import System.IO.Extra
import Prelude
data Ident = Ident {identModule :: ModuleName, identName :: IdentName}
deriving (Show,Eq,Ord,Generic)
instance Hashable Ident
instance NFData Ident
data Hi = Hi
{hiModuleName :: ModuleName
-- ^ Module name
,hiImportPackage :: Set.HashSet PackageName
-- ^ Packages imported by this module
,hiExportIdent :: Set.HashSet Ident
-- ^ Identifiers exported by this module
,hiImportIdent :: Set.HashSet Ident
-- ^ Identifiers used by this module
,hiImportModule :: Set.HashSet ModuleName
-- ^ Modules imported and used by this module
Normally equivalent to , unless a module supplies only instances
,hiImportOrphan :: Set.HashSet ModuleName
-- ^ Orphans that are in scope in this module
,hiImportPackageModule :: Set.HashSet (PackageName, ModuleName)
-- ^ Modules imported from other packages
,hiSignatures :: Map.HashMap IdentName (Set.HashSet Ident)
-- ^ Type signatures of functions defined in this module and the types they refer to
,hiFieldName :: Set.HashSet Ident
-- ^ Things that are field names
} deriving (Show,Eq,Generic)
instance Hashable Hi
instance NFData Hi
instance Semigroup Hi where
x <> y = Hi
{hiModuleName = f (?:) hiModuleName
,hiImportPackage = f (<>) hiImportPackage
,hiExportIdent = f (<>) hiExportIdent
,hiImportIdent = f (<>) hiImportIdent
,hiImportModule = f (<>) hiImportModule
,hiImportPackageModule = f (<>) hiImportPackageModule
,hiImportOrphan = f (<>) hiImportOrphan
,hiSignatures = f (Map.unionWith (<>)) hiSignatures
,hiFieldName = f (<>) hiFieldName
}
where f op sel = sel x `op` sel y
instance Monoid Hi where
mempty = Hi mempty mempty mempty mempty mempty mempty mempty mempty mempty
mappend = (<>)
-- | Don't expose that we're just using the filename internally
newtype HiKey = HiKey FilePathEq deriving (Eq,Ord,Hashable)
hiParseDirectory :: FilePath -> IO (Map.HashMap FilePathEq HiKey, Map.HashMap HiKey Hi)
hiParseDirectory dir = do
whenLoud $ putStrLn $ "Reading hi directory " ++ dir
files <- filter ((==) ".dump-hi" . takeExtension) <$> listFilesRecursive dir
his <- forM files $ \file -> do
let name = drop (length dir + 1) file
whenLoud $ do
putStr $ "Reading hi file " ++ name ++ " ... "
hFlush stdout
(time, (len, res)) <- duration $ do
src <- S.readFileUTF8 file
len <- evaluate $ S.length src
let res = trimSignatures $ hiParseContents src
evaluate $ rnf res
return (len, res)
whenLoud $ putStrLn $ S.showLength len ++ " bytes in " ++ showDuration time
return (filePathEq name, res)
-- here we try and dedupe any identical Hi modules
let keys = Map.fromList $ map (second HiKey . swap) his
mp1 <- evaluate $ Map.fromList $ map (second (keys Map.!)) his
mp2 <- evaluate $ Map.fromList $ map swap $ Map.toList keys
whenLoud $ putStrLn $ "Found " ++ show (Map.size mp1) ++ " files, " ++ show (Map.size mp2) ++ " distinct"
return (mp1, mp2)
-- note that in some cases we may get more/less internal signatures, so first remove them
trimSignatures :: Hi -> Hi
trimSignatures hi@Hi{..} = hi{hiSignatures = Map.filterWithKey (\k _ -> k `Set.member` names) hiSignatures}
where names = Set.fromList [s | Ident m s <- Set.toList hiExportIdent, m == hiModuleName]
hiParseContents :: Str -> Hi
hiParseContents = mconcatMap f . parseHanging2 . S.linesCR
where
f (x,xs)
| Just x <- S.stripPrefix "interface " x = mempty{hiModuleName = parseInterface $ S.toList x}
| Just x <- S.stripPrefix "exports:" x = mconcatMap (parseExports . S.toList) $ unindent2 xs
| Just x <- S.stripPrefix "orphans:" x = mempty{hiImportOrphan = Set.fromList $ map parseInterface $ concatMap (words . S.toList) $ x:xs}
| Just x <- S.stripPrefix "package dependencies:" x = mempty{hiImportPackage = Set.fromList $ map parsePackDep $ concatMap (words . S.toList) $ x:xs}
| Just x <- S.stripPrefix "import " x = case unindent2 xs of
[] | let s = words (S.toList x) !! 1
, (pkg, mod) <- fromMaybe ("", s) $ stripInfix ":" s -> mempty
{hiImportPackageModule = Set.singleton (parsePackDep pkg, mod)}
xs -> let m = words (S.toList x) !! 1 in mempty
{hiImportModule = Set.singleton m
,hiImportIdent = Set.fromList $ map (Ident m . fst . word1 . S.toList) $ dropWhile ("exports:" `S.isPrefixOf`) xs}
| S.length x == S.ugly 32, S.all isHexDigit x,
(y,ys):_ <- parseHanging2 $ map (S.drop $ S.ugly 2) xs,
fun:"::":typ <- concatMap (wordsBy (`elem` (",()[]{} " :: String)) . S.toList) $ y:ys,
not $ "$" `isPrefixOf` fun =
mempty{hiSignatures = Map.singleton fun $ Set.fromList $ map parseIdent typ}
| otherwise = mempty
-- "[email protected]" -> "old-locale"
-- "old-locale-1.0.0.7" -> "old-locale"
parsePackDep = intercalate "-" . takeWhile (any isAlpha) . wordsBy (== '-') . takeWhile (/= '@')
-- "hlint-1.9.41-IPKy9tGF1918X9VRp9DMhp:HSE.All 8002" -> "HSE.All"
-- "HSE.All 8002" -> "HSE.All"
parseInterface = takeWhileEnd (/= ':') . fst . word1
-- "Apply.applyHintFile"
" Language . Haskell . PPHsMode{Language . . caseIndent }
-- Return the identifiers and the fields. Fields are never qualified but everything else is.
parseExports x = mempty
{hiExportIdent = Set.fromList $ y : [Ident (a ?: identModule y) b | Ident a b <- ys]
,hiFieldName = Set.fromList [Ident (identModule y) b | Ident "" b <- ys]
,hiSignatures = Map.fromList [(b, Set.singleton y) | Ident _ b <- ys, b /= identName y]
}
where y:ys = map parseIdent $ wordsBy (`elem` ("{} " :: String)) x
" Language . Haskell . " - > Ident " Language . Haskell " " PPHsMode "
parseIdent x
| isHaskellSymbol $ last x =
let (a,b) = spanEnd isHaskellSymbol x
in if null a then Ident "" b else Ident a $ tail b
| otherwise =
let (a,b) = breakOnEnd "." x
in Ident (if null a then "" else init a) b
| null | https://raw.githubusercontent.com/ndmitchell/weeder/3bc7ee09de6faf34cd60a0f4554aa1baf36f25e8/src/Hi.hs | haskell | ^ Module name
^ Packages imported by this module
^ Identifiers exported by this module
^ Identifiers used by this module
^ Modules imported and used by this module
^ Orphans that are in scope in this module
^ Modules imported from other packages
^ Type signatures of functions defined in this module and the types they refer to
^ Things that are field names
| Don't expose that we're just using the filename internally
"[email protected]" -> "old-locale"
"old-locale-1.0.0.7" -> "old-locale"
"hlint-1.9.41-IPKy9tGF1918X9VRp9DMhp:HSE.All 8002" -> "HSE.All"
"HSE.All 8002" -> "HSE.All"
"Apply.applyHintFile"
Return the identifiers and the fields. Fields are never qualified but everything else is. | # LANGUAGE DeriveGeneric , RecordWildCards , , OverloadedStrings #
module Hi(
HiKey(), Hi(..), Ident(..),
hiParseDirectory
) where
import qualified Data.HashSet as Set
import qualified Data.HashMap.Lazy as Map
import System.Console.CmdArgs.Verbosity
import System.FilePath
import System.Directory.Extra
import System.Time.Extra
import GHC.Generics
import Data.Tuple.Extra
import Data.Maybe
import Control.Monad
import Control.Exception
import Control.DeepSeq
import Data.Char
import Data.Hashable
import Data.List.Extra
import Data.Semigroup
import Data.Functor
import Util
import qualified Str as S
import System.IO.Extra
import Prelude
data Ident = Ident {identModule :: ModuleName, identName :: IdentName}
deriving (Show,Eq,Ord,Generic)
instance Hashable Ident
instance NFData Ident
data Hi = Hi
{hiModuleName :: ModuleName
,hiImportPackage :: Set.HashSet PackageName
,hiExportIdent :: Set.HashSet Ident
,hiImportIdent :: Set.HashSet Ident
,hiImportModule :: Set.HashSet ModuleName
Normally equivalent to , unless a module supplies only instances
,hiImportOrphan :: Set.HashSet ModuleName
,hiImportPackageModule :: Set.HashSet (PackageName, ModuleName)
,hiSignatures :: Map.HashMap IdentName (Set.HashSet Ident)
,hiFieldName :: Set.HashSet Ident
} deriving (Show,Eq,Generic)
instance Hashable Hi
instance NFData Hi
instance Semigroup Hi where
x <> y = Hi
{hiModuleName = f (?:) hiModuleName
,hiImportPackage = f (<>) hiImportPackage
,hiExportIdent = f (<>) hiExportIdent
,hiImportIdent = f (<>) hiImportIdent
,hiImportModule = f (<>) hiImportModule
,hiImportPackageModule = f (<>) hiImportPackageModule
,hiImportOrphan = f (<>) hiImportOrphan
,hiSignatures = f (Map.unionWith (<>)) hiSignatures
,hiFieldName = f (<>) hiFieldName
}
where f op sel = sel x `op` sel y
instance Monoid Hi where
mempty = Hi mempty mempty mempty mempty mempty mempty mempty mempty mempty
mappend = (<>)
newtype HiKey = HiKey FilePathEq deriving (Eq,Ord,Hashable)
hiParseDirectory :: FilePath -> IO (Map.HashMap FilePathEq HiKey, Map.HashMap HiKey Hi)
hiParseDirectory dir = do
whenLoud $ putStrLn $ "Reading hi directory " ++ dir
files <- filter ((==) ".dump-hi" . takeExtension) <$> listFilesRecursive dir
his <- forM files $ \file -> do
let name = drop (length dir + 1) file
whenLoud $ do
putStr $ "Reading hi file " ++ name ++ " ... "
hFlush stdout
(time, (len, res)) <- duration $ do
src <- S.readFileUTF8 file
len <- evaluate $ S.length src
let res = trimSignatures $ hiParseContents src
evaluate $ rnf res
return (len, res)
whenLoud $ putStrLn $ S.showLength len ++ " bytes in " ++ showDuration time
return (filePathEq name, res)
-- here we try and dedupe any identical Hi modules
let keys = Map.fromList $ map (second HiKey . swap) his
mp1 <- evaluate $ Map.fromList $ map (second (keys Map.!)) his
mp2 <- evaluate $ Map.fromList $ map swap $ Map.toList keys
whenLoud $ putStrLn $ "Found " ++ show (Map.size mp1) ++ " files, " ++ show (Map.size mp2) ++ " distinct"
return (mp1, mp2)
-- note that in some cases we may get more/less internal signatures, so first remove them
trimSignatures :: Hi -> Hi
trimSignatures hi@Hi{..} = hi{hiSignatures = Map.filterWithKey (\k _ -> k `Set.member` names) hiSignatures}
where names = Set.fromList [s | Ident m s <- Set.toList hiExportIdent, m == hiModuleName]
hiParseContents :: Str -> Hi
hiParseContents = mconcatMap f . parseHanging2 . S.linesCR
where
f (x,xs)
| Just x <- S.stripPrefix "interface " x = mempty{hiModuleName = parseInterface $ S.toList x}
| Just x <- S.stripPrefix "exports:" x = mconcatMap (parseExports . S.toList) $ unindent2 xs
| Just x <- S.stripPrefix "orphans:" x = mempty{hiImportOrphan = Set.fromList $ map parseInterface $ concatMap (words . S.toList) $ x:xs}
| Just x <- S.stripPrefix "package dependencies:" x = mempty{hiImportPackage = Set.fromList $ map parsePackDep $ concatMap (words . S.toList) $ x:xs}
| Just x <- S.stripPrefix "import " x = case unindent2 xs of
[] | let s = words (S.toList x) !! 1
, (pkg, mod) <- fromMaybe ("", s) $ stripInfix ":" s -> mempty
{hiImportPackageModule = Set.singleton (parsePackDep pkg, mod)}
xs -> let m = words (S.toList x) !! 1 in mempty
{hiImportModule = Set.singleton m
,hiImportIdent = Set.fromList $ map (Ident m . fst . word1 . S.toList) $ dropWhile ("exports:" `S.isPrefixOf`) xs}
| S.length x == S.ugly 32, S.all isHexDigit x,
(y,ys):_ <- parseHanging2 $ map (S.drop $ S.ugly 2) xs,
fun:"::":typ <- concatMap (wordsBy (`elem` (",()[]{} " :: String)) . S.toList) $ y:ys,
not $ "$" `isPrefixOf` fun =
mempty{hiSignatures = Map.singleton fun $ Set.fromList $ map parseIdent typ}
| otherwise = mempty
parsePackDep = intercalate "-" . takeWhile (any isAlpha) . wordsBy (== '-') . takeWhile (/= '@')
parseInterface = takeWhileEnd (/= ':') . fst . word1
" Language . Haskell . PPHsMode{Language . . caseIndent }
parseExports x = mempty
{hiExportIdent = Set.fromList $ y : [Ident (a ?: identModule y) b | Ident a b <- ys]
,hiFieldName = Set.fromList [Ident (identModule y) b | Ident "" b <- ys]
,hiSignatures = Map.fromList [(b, Set.singleton y) | Ident _ b <- ys, b /= identName y]
}
where y:ys = map parseIdent $ wordsBy (`elem` ("{} " :: String)) x
" Language . Haskell . " - > Ident " Language . Haskell " " PPHsMode "
parseIdent x
| isHaskellSymbol $ last x =
let (a,b) = spanEnd isHaskellSymbol x
in if null a then Ident "" b else Ident a $ tail b
| otherwise =
let (a,b) = breakOnEnd "." x
in Ident (if null a then "" else init a) b
|
fe5bdee83a53ce66184a5996f9764c7aee02cf1784af00a54a0abf522fa5dc9b | samrushing/irken-compiler | t17.scm |
(datatype bool (:true) (:false))
(define (random)
(%%cexp (-> int) "random()"))
;; hmmm... think about defining a *boxed* (rather than tagged) type that will
;; hold a pointer. [or maybe better... consider switching to untagged ints
;; and having something like a 'stack map' for the gc that knows the type
;; of everything on the stack]
(define (malloc n)
(%%cexp (int -> int) "(pxll_int)malloc(%0)" n))
(define (free n)
(%%cexp (int -> undefined) "free((void*)%0); IRK_UNDEFINED" n))
(define (write-int p n)
(%%cexp (int int -> undefined) "(*(pxll_int *)(%0)) = %1" p n))
(define (read-int p)
(%%cexp (int -> int) "(*(pxll_int *)(%0))" p))
(define (printn x)
(%%cexp ('a -> undefined) "dump_object (%0, 0); fprintf (stdout, \"\\n\")" x))
(define (sizeof-int)
(%%cexp (-> int) "sizeof(pxll_int)"))
(let ((x 3)
(y (malloc 16)))
(set! x (random))
(printn y)
(write-int y 3141)
(printn (read-int y))
(free y)
(printn (sizeof-int))
#t
)
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/tests/t17.scm | scheme | hmmm... think about defining a *boxed* (rather than tagged) type that will
hold a pointer. [or maybe better... consider switching to untagged ints
and having something like a 'stack map' for the gc that knows the type
of everything on the stack] |
(datatype bool (:true) (:false))
(define (random)
(%%cexp (-> int) "random()"))
(define (malloc n)
(%%cexp (int -> int) "(pxll_int)malloc(%0)" n))
(define (free n)
(%%cexp (int -> undefined) "free((void*)%0); IRK_UNDEFINED" n))
(define (write-int p n)
(%%cexp (int int -> undefined) "(*(pxll_int *)(%0)) = %1" p n))
(define (read-int p)
(%%cexp (int -> int) "(*(pxll_int *)(%0))" p))
(define (printn x)
(%%cexp ('a -> undefined) "dump_object (%0, 0); fprintf (stdout, \"\\n\")" x))
(define (sizeof-int)
(%%cexp (-> int) "sizeof(pxll_int)"))
(let ((x 3)
(y (malloc 16)))
(set! x (random))
(printn y)
(write-int y 3141)
(printn (read-int y))
(free y)
(printn (sizeof-int))
#t
)
|
ff3376dfcce4e78ee29e7d23cd659b728081eb8764d9c7282e1eeb608b023c74 | danlentz/cl-wal | package.lisp | (in-package :common-lisp-user)
;;(require :unit-test)
(defpackage wal
(:use :cl) ;;:unit-test
(:shadow #:open #:close #:write)
(:export #:wal
#:open
#:close
#:write
#:commit
#:rollback
#:recover))
| null | https://raw.githubusercontent.com/danlentz/cl-wal/8b577882488daeeb57efee3971bc26ce91744993/package.lisp | lisp | (require :unit-test)
:unit-test | (in-package :common-lisp-user)
(defpackage wal
(:shadow #:open #:close #:write)
(:export #:wal
#:open
#:close
#:write
#:commit
#:rollback
#:recover))
|
513512327b9d3a73d17f3a28cf6df75415b688c943dff4c13ef1ad64c932c1fb | huangz1990/SICP-answers | 47-selector-for-make-frame-using-cons.scm | ;;; 47-selector-for-make-frame-using-cons.scm
(define (origin-frame frame)
(car frame))
(define (edge1-frame frame)
(cadr frame))
(define (edge2-frame frame)
(cddr frame))
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/old_chp2/code/47-selector-for-make-frame-using-cons.scm | scheme | 47-selector-for-make-frame-using-cons.scm |
(define (origin-frame frame)
(car frame))
(define (edge1-frame frame)
(cadr frame))
(define (edge2-frame frame)
(cddr frame))
|
71a7083b8201f136afdf97afef114032f858348656605d768bc45fe29d782ed0 | ucsd-progsys/liquidhaskell | Misc.hs | # LANGUAGE FlexibleContexts #
module Language.Haskell.Liquid.Bare.Misc
( joinVar
, mkVarExpr
, vmap
, runMapTyVars
, matchKindArgs
, symbolRTyVar
, simpleSymbolVar
, hasBoolResult
, isKind
) where
import Prelude hiding (error)
import Liquid.GHC.API as Ghc hiding (Located, showPpr)
import Control.Monad.Except (MonadError, throwError)
import Control.Monad.State
import qualified Data.Maybe as Mb --(fromMaybe, isNothing)
import qualified Text.PrettyPrint.HughesPJ as PJ
import qualified Data.List as L
import qualified Language.Fixpoint.Types as F
import Liquid.GHC.Misc
import Language.Haskell.Liquid.Types.RefType
import Language.Haskell.Liquid.Types.Types
-- import Language.Haskell.Liquid.Bare.Env
-- import Language.Haskell.Liquid.WiredIn (dcPrefix)
-- TODO: This is where unsorted stuff is for now. Find proper places for what follows.
-- WTF does this function do?
{-
makeSymbols :: (Id -> Bool) -> [Id] -> [F.Symbol] -> BareM [(F.Symbol, Var)]
makeSymbols f vs xs
= do svs <- M.toList <$> gets varEnv
return $ L.nub ([ (x,v') | (x,v) <- svs, x `elem` xs, let (v',_,_) = joinVar vs (v,x,x)]
++ [ (F.symbol v, v) | v <- vs, f v, isDataConId v, hasBasicArgs $ varType v ])
where
-- arguments should be basic so that autogenerated singleton types are well formed
hasBasicArgs (ForAllTy _ t) = hasBasicArgs t
hasBasicArgs (FunTy _ tx t) = isBaseTy tx && hasBasicArgs t
hasBasicArgs _ = True
-}
{- HEAD
freeSymbols :: (F.Reftable r, F.Reftable r1, F.Reftable r2, TyConable c, TyConable c1, TyConable c2)
=> [F.Symbol]
-> [(a1, Located (RType c2 tv2 r2))]
-> [(a, Located (RType c1 tv1 r1))]
-> [Located (RType c tv r)]
-> [LocSymbol]
freeSymbols xs' xts yts ivs = [ lx | lx <- Misc.sortNub $ zs ++ zs' ++ zs'' , not (M.member (val lx) knownM) ]
where
knownM = M.fromList [ (x, ()) | x <- xs' ]
zs = concatMap freeSyms (snd <$> xts)
zs' = concatMap freeSyms (snd <$> yts)
zs'' = concatMap freeSyms ivs
-------------------------------------------------------------------------------
freeSyms :: (F.Reftable r, TyConable c) => Located (RType c tv r) -> [LocSymbol]
-------------------------------------------------------------------------------
freeSyms ty = [ F.atLoc ty x | x <- tySyms ]
where
tySyms = Misc.sortNub $ concat $ efoldReft (\_ _ -> True) False (\_ _ -> []) (const []) (const ()) f (const id) F.emptySEnv [] (val ty)
f γ _ r xs = let F.Reft (v, _) = F.toReft r in
[ x | x <- F.syms r, x /= v, not (x `F.memberSEnv` γ)] : xs
--- ABOVE IS THE T1773 STUFF
--- BELOW IS THE develop-classes STUFF
-- freeSymbols :: (F.Reftable r, F.Reftable r1, F.Reftable r2, TyConable c, TyConable c1, TyConable c2)
-- => [F.Symbol]
-- -> [(a1, Located (RType c2 tv2 r2))]
-- -> [(a, Located (RType c1 tv1 r1))]
-- -> [(Located (RType c tv r))]
-- -> [LocSymbol]
-- freeSymbols xs' xts yts ivs = [ lx | lx <- Misc.sortNub $ zs ++ zs' ++ zs'' , not (M.member (val lx) knownM) ]
-- where
-- knownM = M.fromList [ (x, ()) | x <- xs' ]
-- zs = concatMap freeSyms (snd <$> xts)
-- zs' = concatMap freeSyms (snd <$> yts)
-- zs'' = concatMap freeSyms ivs
-- freeSyms :: (F.Reftable r, TyConable c) => Located (RType c tv r) -> [LocSymbol]
-- freeSyms ty = [ F.atLoc ty x | x <- tySyms ]
-- where
-- tySyms = Misc.sortNub $ concat $ efoldReft (\_ _ -> True) False (\_ _ -> []) (\_ -> []) (const ()) f (const id) F.emptySEnv [] (val ty)
-- f γ _ r xs = let F.Reft (v, _) = F.toReft r in
-- [ x | x <- F.syms r, x /= v, not (x `F.memberSEnv` γ)] : xs
-}
-------------------------------------------------------------------------------
Renaming Type Variables in Haskell Signatures ------------------------------
-------------------------------------------------------------------------------
runMapTyVars :: Bool -> Type -> SpecType -> (PJ.Doc -> PJ.Doc -> Error) -> Either Error MapTyVarST
runMapTyVars allowTC τ t err = execStateT (mapTyVars allowTC τ t) (MTVST [] err)
data MapTyVarST = MTVST
{ vmap :: [(Var, RTyVar)]
, errmsg :: PJ.Doc -> PJ.Doc -> Error
}
mapTyVars :: Bool -> Type -> SpecType -> StateT MapTyVarST (Either Error) ()
mapTyVars allowTC t (RImpF _ _ _ t' _)
= mapTyVars allowTC t t'
mapTyVars allowTC (FunTy { ft_arg = τ, ft_res = τ'}) t
| isErasable τ
= mapTyVars allowTC τ' t
where isErasable = if allowTC then isEmbeddedDictType else isClassPred
mapTyVars allowTC (FunTy { ft_arg = τ, ft_res = τ'}) (RFun _ _ t t' _)
= mapTyVars allowTC τ t >> mapTyVars allowTC τ' t'
mapTyVars allowTC τ (RAllT _ t _)
= mapTyVars allowTC τ t
mapTyVars allowTC (TyConApp _ τs) (RApp _ ts _ _)
= zipWithM_ (mapTyVars allowTC) τs (matchKindArgs' τs ts)
mapTyVars _ (TyVarTy α) (RVar a _)
= do s <- get
s' <- mapTyRVar α a s
put s'
mapTyVars allowTC τ (RAllP _ t)
= mapTyVars allowTC τ t
mapTyVars allowTC τ (RAllE _ _ t)
= mapTyVars allowTC τ t
mapTyVars allowTC τ (RRTy _ _ _ t)
= mapTyVars allowTC τ t
mapTyVars allowTC τ (REx _ _ t)
= mapTyVars allowTC τ t
mapTyVars _ _ (RExprArg _)
= return ()
mapTyVars allowTC (AppTy τ τ') (RAppTy t t' _)
= do mapTyVars allowTC τ t
mapTyVars allowTC τ' t'
mapTyVars _ _ (RHole _)
= return ()
mapTyVars _ k _ | isKind k
= return ()
mapTyVars allowTC (ForAllTy _ τ) t
= mapTyVars allowTC τ t
mapTyVars _ hsT lqT
= do err <- gets errmsg
throwError (err (F.pprint hsT) (F.pprint lqT))
isKind :: Kind -> Bool
TODO : -- typeKind k
mapTyRVar :: MonadError Error m
=> Var -> RTyVar -> MapTyVarST -> m MapTyVarST
mapTyRVar α a s@(MTVST αas err)
= case lookup α αas of
Just a' | a == a' -> return s
| otherwise -> throwError (err (F.pprint a) (F.pprint a'))
Nothing -> return $ MTVST ((α,a):αas) err
matchKindArgs' :: [Type] -> [SpecType] -> [SpecType]
matchKindArgs' ts1' = reverse . go (reverse ts1') . reverse
where
go (_:ts1) (t2:ts2) = t2:go ts1 ts2
go ts [] | all isKind ts
= (ofType <$> ts) :: [SpecType]
go _ ts = ts
matchKindArgs :: [SpecType] -> [SpecType] -> [SpecType]
matchKindArgs ts1' = reverse . go (reverse ts1') . reverse
where
go (_:ts1) (t2:ts2) = t2:go ts1 ts2
go ts [] = ts
go _ ts = ts
mkVarExpr :: Id -> F.Expr
mkVarExpr v
| isFunVar v = F.mkEApp (varFunSymbol v) []
EVar ( symbol v )
varFunSymbol :: Id -> Located F.Symbol
varFunSymbol = dummyLoc . F.symbol . idDataCon
isFunVar :: Id -> Bool
isFunVar v = isDataConId v && not (null αs) && Mb.isNothing tf
where
(αs, t) = splitForAllTyCoVars $ varType v
tf = splitFunTy_maybe t
the Vars we lookup in GHC do n't always have the same tyvars as the Vars
-- we're given, so return the original var when possible.
-- see tests/pos/ResolvePred.hs for an example
joinVar :: [Var] -> (Var, s, t) -> (Var, s, t)
joinVar vs (v,s,t) = case L.find ((== showPpr v) . showPpr) vs of
Just v' -> (v',s,t)
Nothing -> (v,s,t)
simpleSymbolVar :: Var -> F.Symbol
simpleSymbolVar = dropModuleNames . F.symbol . showPpr . getName
hasBoolResult :: Type -> Bool
hasBoolResult (ForAllTy _ t) = hasBoolResult t
hasBoolResult (FunTy { ft_res = t} ) | eqType boolTy t = True
hasBoolResult (FunTy { ft_res = t} ) = hasBoolResult t
hasBoolResult _ = False
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/5e9347ac719e0ca192b05ccde74617d0cbb05a85/src/Language/Haskell/Liquid/Bare/Misc.hs | haskell | (fromMaybe, isNothing)
import Language.Haskell.Liquid.Bare.Env
import Language.Haskell.Liquid.WiredIn (dcPrefix)
TODO: This is where unsorted stuff is for now. Find proper places for what follows.
WTF does this function do ?
arguments should be basic so that autogenerated singleton types are well formed
WTF does this function do?
arguments should be basic so that autogenerated singleton types are well formed
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
- ABOVE IS THE T1773 STUFF
- BELOW IS THE develop - classes STUFF
freeSymbols : : ( , r2 , , , c2 )
= > [ F.Symbol ]
- > [ ( a1 , Located ( RType c2 tv2 r2 ) ) ]
- > [ ( a , Located ( RType c1 tv1 r1 ) ) ]
- > [ ( Located ( RType c tv r ) ) ]
- > [ LocSymbol ]
freeSymbols xs ' xts = [ lx | lx < - Misc.sortNub $ zs + + zs ' + + zs '' , not ( ( lx ) knownM ) ]
where
knownM = M.fromList [ ( x , ( ) ) | x < - xs ' ]
zs = concatMap freeSyms ( snd < $ > xts )
zs ' = concatMap freeSyms ( snd < $ > yts )
zs '' = concatMap freeSyms ivs
freeSyms : : ( F.Reftable r , ) = > Located ( RType c tv r ) - > [ LocSymbol ]
freeSyms ty = [ | x < - tySyms ]
where
tySyms = Misc.sortNub $ concat $ efoldReft ( \ _ _ - > True ) False ( \ _ _ - > [ ] ) ( \ _ - > [ ] ) ( const ( ) ) f ( const i d ) F.emptySEnv [ ] ( )
f γ _ r xs = let F.Reft ( v , _ ) in
[ x | x < - F.syms r , x /= v , not ( x ` F.memberSEnv ` γ ) ] : xs
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
- ABOVE IS THE T1773 STUFF
- BELOW IS THE develop-classes STUFF
freeSymbols :: (F.Reftable r, F.Reftable r1, F.Reftable r2, TyConable c, TyConable c1, TyConable c2)
=> [F.Symbol]
-> [(a1, Located (RType c2 tv2 r2))]
-> [(a, Located (RType c1 tv1 r1))]
-> [(Located (RType c tv r))]
-> [LocSymbol]
freeSymbols xs' xts yts ivs = [ lx | lx <- Misc.sortNub $ zs ++ zs' ++ zs'' , not (M.member (val lx) knownM) ]
where
knownM = M.fromList [ (x, ()) | x <- xs' ]
zs = concatMap freeSyms (snd <$> xts)
zs' = concatMap freeSyms (snd <$> yts)
zs'' = concatMap freeSyms ivs
freeSyms :: (F.Reftable r, TyConable c) => Located (RType c tv r) -> [LocSymbol]
freeSyms ty = [ F.atLoc ty x | x <- tySyms ]
where
tySyms = Misc.sortNub $ concat $ efoldReft (\_ _ -> True) False (\_ _ -> []) (\_ -> []) (const ()) f (const id) F.emptySEnv [] (val ty)
f γ _ r xs = let F.Reft (v, _) = F.toReft r in
[ x | x <- F.syms r, x /= v, not (x `F.memberSEnv` γ)] : xs
-----------------------------------------------------------------------------
----------------------------
-----------------------------------------------------------------------------
typeKind k
we're given, so return the original var when possible.
see tests/pos/ResolvePred.hs for an example | # LANGUAGE FlexibleContexts #
module Language.Haskell.Liquid.Bare.Misc
( joinVar
, mkVarExpr
, vmap
, runMapTyVars
, matchKindArgs
, symbolRTyVar
, simpleSymbolVar
, hasBoolResult
, isKind
) where
import Prelude hiding (error)
import Liquid.GHC.API as Ghc hiding (Located, showPpr)
import Control.Monad.Except (MonadError, throwError)
import Control.Monad.State
import qualified Text.PrettyPrint.HughesPJ as PJ
import qualified Data.List as L
import qualified Language.Fixpoint.Types as F
import Liquid.GHC.Misc
import Language.Haskell.Liquid.Types.RefType
import Language.Haskell.Liquid.Types.Types
makeSymbols : : ( I d - > Bool ) - > [ I d ] - > [ F.Symbol ] - > BareM [ ( , ) ]
makeSymbols f vs xs
= do svs < - M.toList < $ > gets varEnv
return $ L.nub ( [ ( x , v ' ) | ( x , v ) < - svs , x ` elem ` xs , let ( v ' , _ , _ ) = joinVar vs ( v , x , x ) ]
+ + [ ( F.symbol v , v ) | v < - vs , f v , isDataConId v , hasBasicArgs $ varType v ] )
where
hasBasicArgs ( ForAllTy _ t ) = hasBasicArgs t
hasBasicArgs ( FunTy _ tx t ) = isBaseTy tx & & hasBasicArgs t
hasBasicArgs _ = True
makeSymbols :: (Id -> Bool) -> [Id] -> [F.Symbol] -> BareM [(F.Symbol, Var)]
makeSymbols f vs xs
= do svs <- M.toList <$> gets varEnv
return $ L.nub ([ (x,v') | (x,v) <- svs, x `elem` xs, let (v',_,_) = joinVar vs (v,x,x)]
++ [ (F.symbol v, v) | v <- vs, f v, isDataConId v, hasBasicArgs $ varType v ])
where
hasBasicArgs (ForAllTy _ t) = hasBasicArgs t
hasBasicArgs (FunTy _ tx t) = isBaseTy tx && hasBasicArgs t
hasBasicArgs _ = True
-}
HEAD
freeSymbols : : ( , r2 , , , c2 )
= > [ F.Symbol ]
- > [ ( a1 , Located ( RType c2 tv2 r2 ) ) ]
- > [ ( a , Located ( RType c1 tv1 r1 ) ) ]
- > [ Located ( RType c tv r ) ]
- > [ LocSymbol ]
freeSymbols xs ' xts = [ lx | lx < - Misc.sortNub $ zs + + zs ' + + zs '' , not ( ( lx ) knownM ) ]
where
knownM = M.fromList [ ( x , ( ) ) | x < - xs ' ]
zs = concatMap freeSyms ( snd < $ > xts )
zs ' = concatMap freeSyms ( snd < $ > yts )
zs '' = concatMap freeSyms ivs
freeSyms : : ( F.Reftable r , ) = > Located ( RType c tv r ) - > [ LocSymbol ]
= [ | x < - tySyms ]
where
tySyms = Misc.sortNub $ concat $ efoldReft ( \ _ _ - > True ) False ( \ _ _ - > [ ] ) ( const [ ] ) ( const ( ) ) f ( const i d ) F.emptySEnv [ ] ( )
f γ _ r xs = let F.Reft ( v , _ ) r in
[ x | x < - F.syms r , x /= v , not ( x ` F.memberSEnv ` γ ) ] : xs
HEAD
freeSymbols :: (F.Reftable r, F.Reftable r1, F.Reftable r2, TyConable c, TyConable c1, TyConable c2)
=> [F.Symbol]
-> [(a1, Located (RType c2 tv2 r2))]
-> [(a, Located (RType c1 tv1 r1))]
-> [Located (RType c tv r)]
-> [LocSymbol]
freeSymbols xs' xts yts ivs = [ lx | lx <- Misc.sortNub $ zs ++ zs' ++ zs'' , not (M.member (val lx) knownM) ]
where
knownM = M.fromList [ (x, ()) | x <- xs' ]
zs = concatMap freeSyms (snd <$> xts)
zs' = concatMap freeSyms (snd <$> yts)
zs'' = concatMap freeSyms ivs
freeSyms :: (F.Reftable r, TyConable c) => Located (RType c tv r) -> [LocSymbol]
freeSyms ty = [ F.atLoc ty x | x <- tySyms ]
where
tySyms = Misc.sortNub $ concat $ efoldReft (\_ _ -> True) False (\_ _ -> []) (const []) (const ()) f (const id) F.emptySEnv [] (val ty)
f γ _ r xs = let F.Reft (v, _) = F.toReft r in
[ x | x <- F.syms r, x /= v, not (x `F.memberSEnv` γ)] : xs
-}
runMapTyVars :: Bool -> Type -> SpecType -> (PJ.Doc -> PJ.Doc -> Error) -> Either Error MapTyVarST
runMapTyVars allowTC τ t err = execStateT (mapTyVars allowTC τ t) (MTVST [] err)
data MapTyVarST = MTVST
{ vmap :: [(Var, RTyVar)]
, errmsg :: PJ.Doc -> PJ.Doc -> Error
}
mapTyVars :: Bool -> Type -> SpecType -> StateT MapTyVarST (Either Error) ()
mapTyVars allowTC t (RImpF _ _ _ t' _)
= mapTyVars allowTC t t'
mapTyVars allowTC (FunTy { ft_arg = τ, ft_res = τ'}) t
| isErasable τ
= mapTyVars allowTC τ' t
where isErasable = if allowTC then isEmbeddedDictType else isClassPred
mapTyVars allowTC (FunTy { ft_arg = τ, ft_res = τ'}) (RFun _ _ t t' _)
= mapTyVars allowTC τ t >> mapTyVars allowTC τ' t'
mapTyVars allowTC τ (RAllT _ t _)
= mapTyVars allowTC τ t
mapTyVars allowTC (TyConApp _ τs) (RApp _ ts _ _)
= zipWithM_ (mapTyVars allowTC) τs (matchKindArgs' τs ts)
mapTyVars _ (TyVarTy α) (RVar a _)
= do s <- get
s' <- mapTyRVar α a s
put s'
mapTyVars allowTC τ (RAllP _ t)
= mapTyVars allowTC τ t
mapTyVars allowTC τ (RAllE _ _ t)
= mapTyVars allowTC τ t
mapTyVars allowTC τ (RRTy _ _ _ t)
= mapTyVars allowTC τ t
mapTyVars allowTC τ (REx _ _ t)
= mapTyVars allowTC τ t
mapTyVars _ _ (RExprArg _)
= return ()
mapTyVars allowTC (AppTy τ τ') (RAppTy t t' _)
= do mapTyVars allowTC τ t
mapTyVars allowTC τ' t'
mapTyVars _ _ (RHole _)
= return ()
mapTyVars _ k _ | isKind k
= return ()
mapTyVars allowTC (ForAllTy _ τ) t
= mapTyVars allowTC τ t
mapTyVars _ hsT lqT
= do err <- gets errmsg
throwError (err (F.pprint hsT) (F.pprint lqT))
isKind :: Kind -> Bool
mapTyRVar :: MonadError Error m
=> Var -> RTyVar -> MapTyVarST -> m MapTyVarST
mapTyRVar α a s@(MTVST αas err)
= case lookup α αas of
Just a' | a == a' -> return s
| otherwise -> throwError (err (F.pprint a) (F.pprint a'))
Nothing -> return $ MTVST ((α,a):αas) err
matchKindArgs' :: [Type] -> [SpecType] -> [SpecType]
matchKindArgs' ts1' = reverse . go (reverse ts1') . reverse
where
go (_:ts1) (t2:ts2) = t2:go ts1 ts2
go ts [] | all isKind ts
= (ofType <$> ts) :: [SpecType]
go _ ts = ts
matchKindArgs :: [SpecType] -> [SpecType] -> [SpecType]
matchKindArgs ts1' = reverse . go (reverse ts1') . reverse
where
go (_:ts1) (t2:ts2) = t2:go ts1 ts2
go ts [] = ts
go _ ts = ts
mkVarExpr :: Id -> F.Expr
mkVarExpr v
| isFunVar v = F.mkEApp (varFunSymbol v) []
EVar ( symbol v )
varFunSymbol :: Id -> Located F.Symbol
varFunSymbol = dummyLoc . F.symbol . idDataCon
isFunVar :: Id -> Bool
isFunVar v = isDataConId v && not (null αs) && Mb.isNothing tf
where
(αs, t) = splitForAllTyCoVars $ varType v
tf = splitFunTy_maybe t
the Vars we lookup in GHC do n't always have the same tyvars as the Vars
joinVar :: [Var] -> (Var, s, t) -> (Var, s, t)
joinVar vs (v,s,t) = case L.find ((== showPpr v) . showPpr) vs of
Just v' -> (v',s,t)
Nothing -> (v,s,t)
simpleSymbolVar :: Var -> F.Symbol
simpleSymbolVar = dropModuleNames . F.symbol . showPpr . getName
hasBoolResult :: Type -> Bool
hasBoolResult (ForAllTy _ t) = hasBoolResult t
hasBoolResult (FunTy { ft_res = t} ) | eqType boolTy t = True
hasBoolResult (FunTy { ft_res = t} ) = hasBoolResult t
hasBoolResult _ = False
|
7122273d38d7f1a2629fb09de6d40e60826dc75cb16e1f2cd08843996f258dc3 | tolysz/ghcjs-stack | HDC.hs | #if __GLASGOW_HASKELL__ >= 701
# LANGUAGE Trustworthy #
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.Win32.GDI.HDC
Copyright : ( c ) , 1997 - 2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
Maintainer : Vuokko < >
-- Stability : provisional
-- Portability : portable
--
-- A collection of FFI declarations for interfacing with Win32.
--
-----------------------------------------------------------------------------
module Graphics.Win32.GDI.HDC
( module Graphics.Win32.GDI.HDC
) where
#ifndef ghcjs_HOST_OS
import System.Win32.Types
import Graphics.Win32.GDI.Types
import Foreign
#include "windows_cconv.h"
{- Note [Overflow checking and fromIntegral]
   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
   Some windows APIs use the value 0x80000000 to represent failure return
   codes. However, when GHC builds libraries with -XNegativeLiterals
   enabled, it will fail in contexts where the type would suffer from
   signed overflow - such as Int32. (minBound :: Int32 == 0x80000000)
   Technically, the frontend is correct that the literal overflows in the
   context it is used in. So instead, we use fromIntegral to convert the
   literal from a Word32 to the necessary type. This isn't any less
   efficient (fromIntegral is optimized away,) and conveys the idea we
   simply want the same representational value.
-}
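{- [editor's sketch, not part of the original module] The pattern the Note describes,
   written out; the names below are hypothetical:

   invalidReturn :: Int32
   invalidReturn = fromIntegral (0x80000000 :: Word32)   -- same bit pattern, no overflowing literal

   isFailure :: Int32 -> Bool
   isFailure r = r == invalidReturn
-}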
setArcDirection :: HDC -> ArcDirection -> IO ArcDirection
setArcDirection dc dir =
failIfZero "SetArcDirection" $ c_SetArcDirection dc dir
foreign import WINDOWS_CCONV unsafe "windows.h SetArcDirection"
c_SetArcDirection :: HDC -> ArcDirection -> IO ArcDirection
getArcDirection :: HDC -> IO ArcDirection
getArcDirection dc =
failIfZero "GetArcDirection" $ c_GetArcDirection dc
foreign import WINDOWS_CCONV unsafe "windows.h GetArcDirection"
c_GetArcDirection :: HDC -> IO ArcDirection
setPolyFillMode :: HDC -> PolyFillMode -> IO PolyFillMode
setPolyFillMode dc mode =
failIfZero "SetPolyFillMode" $ c_SetPolyFillMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetPolyFillMode"
c_SetPolyFillMode :: HDC -> PolyFillMode -> IO PolyFillMode
getPolyFillMode :: HDC -> IO PolyFillMode
getPolyFillMode dc =
failIfZero "GetPolyFillMode" $ c_GetPolyFillMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetPolyFillMode"
c_GetPolyFillMode :: HDC -> IO PolyFillMode
setGraphicsMode :: HDC -> GraphicsMode -> IO GraphicsMode
setGraphicsMode dc mode =
failIfZero "SetGraphicsMode" $ c_SetGraphicsMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetGraphicsMode"
c_SetGraphicsMode :: HDC -> GraphicsMode -> IO GraphicsMode
getGraphicsMode :: HDC -> IO GraphicsMode
getGraphicsMode dc =
failIfZero "GetGraphicsMode" $ c_GetGraphicsMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetGraphicsMode"
c_GetGraphicsMode :: HDC -> IO GraphicsMode
setStretchBltMode :: HDC -> StretchBltMode -> IO StretchBltMode
setStretchBltMode dc mode =
failIfZero "SetStretchBltMode" $ c_SetStretchBltMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetStretchBltMode"
c_SetStretchBltMode :: HDC -> StretchBltMode -> IO StretchBltMode
getStretchBltMode :: HDC -> IO StretchBltMode
getStretchBltMode dc =
failIfZero "GetStretchBltMode" $ c_GetStretchBltMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetStretchBltMode"
c_GetStretchBltMode :: HDC -> IO StretchBltMode
setBkColor :: HDC -> COLORREF -> IO COLORREF
setBkColor dc color =
failIfZero "SetBkColor" $ c_SetBkColor dc color
foreign import WINDOWS_CCONV unsafe "windows.h SetBkColor"
c_SetBkColor :: HDC -> COLORREF -> IO COLORREF
getBkColor :: HDC -> IO COLORREF
getBkColor dc =
failIfZero "GetBkColor" $ c_GetBkColor dc
foreign import WINDOWS_CCONV unsafe "windows.h GetBkColor"
c_GetBkColor :: HDC -> IO COLORREF
setTextColor :: HDC -> COLORREF -> IO COLORREF
setTextColor dc color =
failIf (== cLR_INVALID) "SetTextColor" $ c_SetTextColor dc color
foreign import WINDOWS_CCONV unsafe "windows.h SetTextColor"
c_SetTextColor :: HDC -> COLORREF -> IO COLORREF
getTextColor :: HDC -> IO COLORREF
getTextColor dc =
failIf (== cLR_INVALID) "GetTextColor" $ c_GetTextColor dc
foreign import WINDOWS_CCONV unsafe "windows.h GetTextColor"
c_GetTextColor :: HDC -> IO COLORREF
setBkMode :: HDC -> BackgroundMode -> IO BackgroundMode
setBkMode dc mode =
failIfZero "SetBkMode" $ c_SetBkMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetBkMode"
c_SetBkMode :: HDC -> BackgroundMode -> IO BackgroundMode
getBkMode :: HDC -> IO BackgroundMode
getBkMode dc =
failIfZero "GetBkMode" $ c_GetBkMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetBkMode"
c_GetBkMode :: HDC -> IO BackgroundMode
setBrushOrgEx :: HDC -> Int -> Int -> IO POINT
setBrushOrgEx dc x y =
allocaPOINT $ \ pt -> do
failIfFalse_ "SetBrushOrgEx" $ c_SetBrushOrgEx dc x y pt
peekPOINT pt
foreign import WINDOWS_CCONV unsafe "windows.h SetBrushOrgEx"
c_SetBrushOrgEx :: HDC -> Int -> Int -> Ptr POINT -> IO Bool
getBrushOrgEx :: HDC -> IO POINT
getBrushOrgEx dc =
allocaPOINT $ \ pt -> do
failIfFalse_ "GetBrushOrgEx" $ c_GetBrushOrgEx dc pt
peekPOINT pt
foreign import WINDOWS_CCONV unsafe "windows.h GetBrushOrgEx"
c_GetBrushOrgEx :: HDC -> Ptr POINT -> IO Bool
setTextAlign :: HDC -> TextAlignment -> IO TextAlignment
setTextAlign dc align =
failIf (== gDI_ERROR) "SetTextAlign" $ c_SetTextAlign dc align
foreign import WINDOWS_CCONV unsafe "windows.h SetTextAlign"
c_SetTextAlign :: HDC -> TextAlignment -> IO TextAlignment
getTextAlign :: HDC -> IO TextAlignment
getTextAlign dc =
failIf (== gDI_ERROR) "GetTextAlign" $ c_GetTextAlign dc
foreign import WINDOWS_CCONV unsafe "windows.h GetTextAlign"
c_GetTextAlign :: HDC -> IO TextAlignment
setTextCharacterExtra :: HDC -> Int -> IO Int
setTextCharacterExtra dc extra =
-- See Note [Overflow checking and fromIntegral]
failIf (== fromIntegral (0x80000000 :: Word32)) "SetTextCharacterExtra" $
c_SetTextCharacterExtra dc extra
foreign import WINDOWS_CCONV unsafe "windows.h SetTextCharacterExtra"
c_SetTextCharacterExtra :: HDC -> Int -> IO Int
getTextCharacterExtra :: HDC -> IO Int
getTextCharacterExtra dc =
-- See Note [Overflow checking and fromIntegral]
failIf (== fromIntegral (0x80000000 :: Word32)) "GetTextCharacterExtra" $ c_GetTextCharacterExtra dc
foreign import WINDOWS_CCONV unsafe "windows.h GetTextCharacterExtra"
c_GetTextCharacterExtra :: HDC -> IO Int
getMiterLimit :: HDC -> IO Float
getMiterLimit dc =
alloca $ \ p_res -> do
failIfFalse_ "GetMiterLimit" $ c_GetMiterLimit dc p_res
peek p_res
foreign import WINDOWS_CCONV unsafe "windows.h GetMiterLimit"
c_GetMiterLimit :: HDC -> Ptr FLOAT -> IO Bool
setMiterLimit :: HDC -> Float -> IO Float
setMiterLimit dc new_limit =
alloca $ \ p_old_limit -> do
failIfFalse_ "SetMiterLimit" $ c_SetMiterLimit dc new_limit p_old_limit
peek p_old_limit
foreign import WINDOWS_CCONV unsafe "windows.h SetMiterLimit"
c_SetMiterLimit :: HDC -> FLOAT -> Ptr FLOAT -> IO Bool
----------------------------------------------------------------
saveDC :: HDC -> IO Int
saveDC dc =
failIfZero "SaveDC" $ c_SaveDC dc
foreign import WINDOWS_CCONV unsafe "windows.h SaveDC"
c_SaveDC :: HDC -> IO Int
restoreDC :: HDC -> Int -> IO ()
restoreDC dc saved =
failIfFalse_ "RestoreDC" $ c_RestoreDC dc saved
foreign import WINDOWS_CCONV unsafe "windows.h RestoreDC"
c_RestoreDC :: HDC -> Int -> IO Bool
----------------------------------------------------------------
getCurrentBitmap :: HDC -> IO HBITMAP
getCurrentBitmap dc =
failIfNull "GetCurrentBitmap" $ c_GetCurrentBitmap dc oBJ_BITMAP
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentBitmap :: HDC -> UINT -> IO HBITMAP
getCurrentBrush :: HDC -> IO HBRUSH
getCurrentBrush dc =
failIfNull "GetCurrentBrush" $ c_GetCurrentBrush dc oBJ_BRUSH
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentBrush :: HDC -> UINT -> IO HBRUSH
getCurrentFont :: HDC -> IO HFONT
getCurrentFont dc =
failIfNull "GetCurrentFont" $ c_GetCurrentFont dc oBJ_FONT
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentFont :: HDC -> UINT -> IO HFONT
getCurrentPalette :: HDC -> IO HPALETTE
getCurrentPalette dc =
failIfNull "GetCurrentPalette" $ c_GetCurrentPalette dc oBJ_PAL
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentPalette :: HDC -> UINT -> IO HPALETTE
getCurrentPen :: HDC -> IO HPEN
getCurrentPen dc =
failIfNull "GetCurrentPen" $ c_GetCurrentPen dc oBJ_PEN
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentPen :: HDC -> UINT -> IO HPEN
selectBitmap :: HDC -> HBITMAP -> IO HBITMAP
selectBitmap dc bitmap =
failIfNull "SelectBitmap" $ c_SelectBitmap dc bitmap
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectBitmap :: HDC -> HBITMAP -> IO HBITMAP
selectBrush :: HDC -> HBRUSH -> IO HBRUSH
selectBrush dc brush =
failIfNull "SelectBrush" $ c_SelectBrush dc brush
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectBrush :: HDC -> HBRUSH -> IO HBRUSH
selectFont :: HDC -> HFONT -> IO HFONT
selectFont dc font =
failIfNull "SelectFont" $ c_SelectFont dc font
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectFont :: HDC -> HFONT -> IO HFONT
selectPen :: HDC -> HPEN -> IO HPEN
selectPen dc pen =
failIfNull "SelectPen" $ c_SelectPen dc pen
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectPen :: HDC -> HPEN -> IO HPEN
----------------------------------------------------------------
--
----------------------------------------------------------------
selectPalette :: HDC -> HPALETTE -> Bool -> IO HPALETTE
selectPalette dc palette force_bg =
failIfNull "SelectPalette" $ c_SelectPalette dc palette force_bg
foreign import WINDOWS_CCONV unsafe "windows.h SelectPalette"
c_SelectPalette :: HDC -> HPALETTE -> Bool -> IO HPALETTE
selectRgn :: HDC -> HRGN -> IO RegionType
selectRgn dc rgn =
withForeignPtr rgn $ \ p_rgn ->
failIf (== gDI_ERROR) "SelectRgn" $ c_SelectRgn dc p_rgn
foreign import ccall unsafe "windows.h SelectObjectInt"
c_SelectRgn :: HDC -> PRGN -> IO RegionType
avoid using ( ) at different types by calling our own
-- wrapper.
selectClipRgn :: HDC -> Maybe HRGN -> IO RegionType
selectClipRgn dc mb_rgn =
maybeWith withForeignPtr mb_rgn $ \ p_rgn ->
failIfZero "SelectClipRgn" $ c_SelectClipRgn dc p_rgn
foreign import WINDOWS_CCONV unsafe "windows.h SelectClipRgn"
c_SelectClipRgn :: HDC -> PRGN -> IO RegionType
extSelectClipRgn :: HDC -> Maybe HRGN -> ClippingMode -> IO RegionType
extSelectClipRgn dc mb_rgn mode =
maybeWith withForeignPtr mb_rgn $ \ p_rgn ->
failIfZero "ExtSelectClipRgn" $ c_ExtSelectClipRgn dc p_rgn mode
foreign import WINDOWS_CCONV unsafe "windows.h ExtSelectClipRgn"
c_ExtSelectClipRgn :: HDC -> PRGN -> ClippingMode -> IO RegionType
selectClipPath :: HDC -> ClippingMode -> IO RegionType
selectClipPath dc mode =
failIfZero "SelectClipPath" $ c_SelectClipPath dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SelectClipPath"
c_SelectClipPath :: HDC -> ClippingMode -> IO RegionType
----------------------------------------------------------------
-- Misc
----------------------------------------------------------------
cancelDC :: HDC -> IO ()
cancelDC dc =
failIfFalse_ "CancelDC" $ c_CancelDC dc
foreign import WINDOWS_CCONV unsafe "windows.h CancelDC"
c_CancelDC :: HDC -> IO Bool
createCompatibleDC :: Maybe HDC -> IO HDC
createCompatibleDC mb_dc =
failIfNull "CreateCompatibleDC" $ c_CreateCompatibleDC (maybePtr mb_dc)
foreign import WINDOWS_CCONV unsafe "windows.h CreateCompatibleDC"
c_CreateCompatibleDC :: HDC -> IO HDC
deleteDC :: HDC -> IO ()
deleteDC dc =
failIfFalse_ "DeleteDC" $ c_DeleteDC dc
foreign import WINDOWS_CCONV unsafe "windows.h DeleteDC"
c_DeleteDC :: HDC -> IO Bool
----------------------------------------------------------------
-- End
----------------------------------------------------------------
#endif
| null | https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/Win32/Graphics/Win32/GDI/HDC.hs | haskell | ---------------------------------------------------------------------------
|
Module : Graphics.Win32.GDI.HDC
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
---------------------------------------------------------------------------
See Note [Overflow checking and fromIntegral]
See Note [Overflow checking and fromIntegral]
--------------------------------------------------------------
--------------------------------------------------------------
--------------------------------------------------------------
--------------------------------------------------------------
wrapper.
--------------------------------------------------------------
Misc
--------------------------------------------------------------
--------------------------------------------------------------
End
-------------------------------------------------------------- | #if __GLASGOW_HASKELL__ >= 701
# LANGUAGE Trustworthy #
#endif
Copyright : ( c ) , 1997 - 2003
Maintainer : Vuokko < >
A collection of FFI declarations for interfacing with Win32 .
module Graphics.Win32.GDI.HDC
( module Graphics.Win32.GDI.HDC
) where
#ifndef ghcjs_HOST_OS
import System.Win32.Types
import Graphics.Win32.GDI.Types
import Foreign
#include "windows_cconv.h"
Note [ Overflow checking and fromIntegral ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some windows APIs use the value 0x80000000 to represent failure return
codes . However , when GHC builds libraries with -XNegativeLiterals
enabled , it will fail in contexts where the type would suffer from
signed overflow - such as Int32 . ( : : Int32 = = 0x80000000 )
Technically , the frontend is correct that the literal overflows in the
context it is used in . So instead , we use fromIntegral to convert the
literal from a to the necessary type . This is n't any less
efficient ( fromIntegral is optimized away , ) and conveys the idea we
simply want the same representational value .
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some windows APIs use the value 0x80000000 to represent failure return
codes. However, when GHC builds libraries with -XNegativeLiterals
enabled, it will fail in contexts where the type would suffer from
signed overflow - such as Int32. (minBound :: Int32 == 0x80000000)
Technically, the frontend is correct that the literal overflows in the
context it is used in. So instead, we use fromIntegral to convert the
literal from a Word32 to the necessary type. This isn't any less
efficient (fromIntegral is optimized away,) and conveys the idea we
simply want the same representational value.
-}
setArcDirection :: HDC -> ArcDirection -> IO ArcDirection
setArcDirection dc dir =
failIfZero "SetArcDirection" $ c_SetArcDirection dc dir
foreign import WINDOWS_CCONV unsafe "windows.h SetArcDirection"
c_SetArcDirection :: HDC -> ArcDirection -> IO ArcDirection
getArcDirection :: HDC -> IO ArcDirection
getArcDirection dc =
failIfZero "GetArcDirection" $ c_GetArcDirection dc
foreign import WINDOWS_CCONV unsafe "windows.h GetArcDirection"
c_GetArcDirection :: HDC -> IO ArcDirection
setPolyFillMode :: HDC -> PolyFillMode -> IO PolyFillMode
setPolyFillMode dc mode =
failIfZero "SetPolyFillMode" $ c_SetPolyFillMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetPolyFillMode"
c_SetPolyFillMode :: HDC -> PolyFillMode -> IO PolyFillMode
getPolyFillMode :: HDC -> IO PolyFillMode
getPolyFillMode dc =
failIfZero "GetPolyFillMode" $ c_GetPolyFillMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetPolyFillMode"
c_GetPolyFillMode :: HDC -> IO PolyFillMode
setGraphicsMode :: HDC -> GraphicsMode -> IO GraphicsMode
setGraphicsMode dc mode =
failIfZero "SetGraphicsMode" $ c_SetGraphicsMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetGraphicsMode"
c_SetGraphicsMode :: HDC -> GraphicsMode -> IO GraphicsMode
getGraphicsMode :: HDC -> IO GraphicsMode
getGraphicsMode dc =
failIfZero "GetGraphicsMode" $ c_GetGraphicsMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetGraphicsMode"
c_GetGraphicsMode :: HDC -> IO GraphicsMode
setStretchBltMode :: HDC -> StretchBltMode -> IO StretchBltMode
setStretchBltMode dc mode =
failIfZero "SetStretchBltMode" $ c_SetStretchBltMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetStretchBltMode"
c_SetStretchBltMode :: HDC -> StretchBltMode -> IO StretchBltMode
getStretchBltMode :: HDC -> IO StretchBltMode
getStretchBltMode dc =
failIfZero "GetStretchBltMode" $ c_GetStretchBltMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetStretchBltMode"
c_GetStretchBltMode :: HDC -> IO StretchBltMode
setBkColor :: HDC -> COLORREF -> IO COLORREF
setBkColor dc color =
failIfZero "SetBkColor" $ c_SetBkColor dc color
foreign import WINDOWS_CCONV unsafe "windows.h SetBkColor"
c_SetBkColor :: HDC -> COLORREF -> IO COLORREF
getBkColor :: HDC -> IO COLORREF
getBkColor dc =
failIfZero "GetBkColor" $ c_GetBkColor dc
foreign import WINDOWS_CCONV unsafe "windows.h GetBkColor"
c_GetBkColor :: HDC -> IO COLORREF
setTextColor :: HDC -> COLORREF -> IO COLORREF
setTextColor dc color =
failIf (== cLR_INVALID) "SetTextColor" $ c_SetTextColor dc color
foreign import WINDOWS_CCONV unsafe "windows.h SetTextColor"
c_SetTextColor :: HDC -> COLORREF -> IO COLORREF
getTextColor :: HDC -> IO COLORREF
getTextColor dc =
failIf (== cLR_INVALID) "GetTextColor" $ c_GetTextColor dc
foreign import WINDOWS_CCONV unsafe "windows.h GetTextColor"
c_GetTextColor :: HDC -> IO COLORREF
setBkMode :: HDC -> BackgroundMode -> IO BackgroundMode
setBkMode dc mode =
failIfZero "SetBkMode" $ c_SetBkMode dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SetBkMode"
c_SetBkMode :: HDC -> BackgroundMode -> IO BackgroundMode
getBkMode :: HDC -> IO BackgroundMode
getBkMode dc =
failIfZero "GetBkMode" $ c_GetBkMode dc
foreign import WINDOWS_CCONV unsafe "windows.h GetBkMode"
c_GetBkMode :: HDC -> IO BackgroundMode
setBrushOrgEx :: HDC -> Int -> Int -> IO POINT
setBrushOrgEx dc x y =
allocaPOINT $ \ pt -> do
failIfFalse_ "SetBrushOrgEx" $ c_SetBrushOrgEx dc x y pt
peekPOINT pt
foreign import WINDOWS_CCONV unsafe "windows.h SetBrushOrgEx"
c_SetBrushOrgEx :: HDC -> Int -> Int -> Ptr POINT -> IO Bool
getBrushOrgEx :: HDC -> IO POINT
getBrushOrgEx dc =
allocaPOINT $ \ pt -> do
failIfFalse_ "GetBrushOrgEx" $ c_GetBrushOrgEx dc pt
peekPOINT pt
foreign import WINDOWS_CCONV unsafe "windows.h GetBrushOrgEx"
c_GetBrushOrgEx :: HDC -> Ptr POINT -> IO Bool
setTextAlign :: HDC -> TextAlignment -> IO TextAlignment
setTextAlign dc align =
failIf (== gDI_ERROR) "SetTextAlign" $ c_SetTextAlign dc align
foreign import WINDOWS_CCONV unsafe "windows.h SetTextAlign"
c_SetTextAlign :: HDC -> TextAlignment -> IO TextAlignment
getTextAlign :: HDC -> IO TextAlignment
getTextAlign dc =
failIf (== gDI_ERROR) "GetTextAlign" $ c_GetTextAlign dc
foreign import WINDOWS_CCONV unsafe "windows.h GetTextAlign"
c_GetTextAlign :: HDC -> IO TextAlignment
setTextCharacterExtra :: HDC -> Int -> IO Int
setTextCharacterExtra dc extra =
failIf (== fromIntegral (0x80000000 :: Word32)) "SetTextCharacterExtra" $
c_SetTextCharacterExtra dc extra
foreign import WINDOWS_CCONV unsafe "windows.h SetTextCharacterExtra"
c_SetTextCharacterExtra :: HDC -> Int -> IO Int
getTextCharacterExtra :: HDC -> IO Int
getTextCharacterExtra dc =
failIf (== fromIntegral (0x80000000 :: Word32)) "GetTextCharacterExtra" $ c_GetTextCharacterExtra dc
foreign import WINDOWS_CCONV unsafe "windows.h GetTextCharacterExtra"
c_GetTextCharacterExtra :: HDC -> IO Int
getMiterLimit :: HDC -> IO Float
getMiterLimit dc =
alloca $ \ p_res -> do
failIfFalse_ "GetMiterLimit" $ c_GetMiterLimit dc p_res
peek p_res
foreign import WINDOWS_CCONV unsafe "windows.h GetMiterLimit"
c_GetMiterLimit :: HDC -> Ptr FLOAT -> IO Bool
setMiterLimit :: HDC -> Float -> IO Float
setMiterLimit dc new_limit =
alloca $ \ p_old_limit -> do
failIfFalse_ "SetMiterLimit" $ c_SetMiterLimit dc new_limit p_old_limit
peek p_old_limit
foreign import WINDOWS_CCONV unsafe "windows.h SetMiterLimit"
c_SetMiterLimit :: HDC -> FLOAT -> Ptr FLOAT -> IO Bool
saveDC :: HDC -> IO Int
saveDC dc =
failIfZero "SaveDC" $ c_SaveDC dc
foreign import WINDOWS_CCONV unsafe "windows.h SaveDC"
c_SaveDC :: HDC -> IO Int
restoreDC :: HDC -> Int -> IO ()
restoreDC dc saved =
failIfFalse_ "RestoreDC" $ c_RestoreDC dc saved
foreign import WINDOWS_CCONV unsafe "windows.h RestoreDC"
c_RestoreDC :: HDC -> Int -> IO Bool
getCurrentBitmap :: HDC -> IO HBITMAP
getCurrentBitmap dc =
failIfNull "GetCurrentBitmap" $ c_GetCurrentBitmap dc oBJ_BITMAP
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentBitmap :: HDC -> UINT -> IO HBITMAP
getCurrentBrush :: HDC -> IO HBRUSH
getCurrentBrush dc =
failIfNull "GetCurrentBrush" $ c_GetCurrentBrush dc oBJ_BRUSH
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentBrush :: HDC -> UINT -> IO HBRUSH
getCurrentFont :: HDC -> IO HFONT
getCurrentFont dc =
failIfNull "GetCurrentFont" $ c_GetCurrentFont dc oBJ_FONT
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentFont :: HDC -> UINT -> IO HFONT
getCurrentPalette :: HDC -> IO HPALETTE
getCurrentPalette dc =
failIfNull "GetCurrentPalette" $ c_GetCurrentPalette dc oBJ_PAL
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentPalette :: HDC -> UINT -> IO HPALETTE
getCurrentPen :: HDC -> IO HPEN
getCurrentPen dc =
failIfNull "GetCurrentPen" $ c_GetCurrentPen dc oBJ_PEN
foreign import WINDOWS_CCONV unsafe "windows.h GetCurrentObject"
c_GetCurrentPen :: HDC -> UINT -> IO HPEN
selectBitmap :: HDC -> HBITMAP -> IO HBITMAP
selectBitmap dc bitmap =
failIfNull "SelectBitmap" $ c_SelectBitmap dc bitmap
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectBitmap :: HDC -> HBITMAP -> IO HBITMAP
selectBrush :: HDC -> HBRUSH -> IO HBRUSH
selectBrush dc brush =
failIfNull "SelectBrush" $ c_SelectBrush dc brush
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectBrush :: HDC -> HBRUSH -> IO HBRUSH
selectFont :: HDC -> HFONT -> IO HFONT
selectFont dc font =
failIfNull "SelectFont" $ c_SelectFont dc font
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectFont :: HDC -> HFONT -> IO HFONT
selectPen :: HDC -> HPEN -> IO HPEN
selectPen dc pen =
failIfNull "SelectPen" $ c_SelectPen dc pen
foreign import WINDOWS_CCONV unsafe "windows.h SelectObject"
c_SelectPen :: HDC -> HPEN -> IO HPEN
selectPalette :: HDC -> HPALETTE -> Bool -> IO HPALETTE
selectPalette dc palette force_bg =
failIfNull "SelectPalette" $ c_SelectPalette dc palette force_bg
foreign import WINDOWS_CCONV unsafe "windows.h SelectPalette"
c_SelectPalette :: HDC -> HPALETTE -> Bool -> IO HPALETTE
selectRgn :: HDC -> HRGN -> IO RegionType
selectRgn dc rgn =
withForeignPtr rgn $ \ p_rgn ->
failIf (== gDI_ERROR) "SelectRgn" $ c_SelectRgn dc p_rgn
foreign import ccall unsafe "windows.h SelectObjectInt"
c_SelectRgn :: HDC -> PRGN -> IO RegionType
avoid using ( ) at different types by calling our own
selectClipRgn :: HDC -> Maybe HRGN -> IO RegionType
selectClipRgn dc mb_rgn =
maybeWith withForeignPtr mb_rgn $ \ p_rgn ->
failIfZero "SelectClipRgn" $ c_SelectClipRgn dc p_rgn
foreign import WINDOWS_CCONV unsafe "windows.h SelectClipRgn"
c_SelectClipRgn :: HDC -> PRGN -> IO RegionType
extSelectClipRgn :: HDC -> Maybe HRGN -> ClippingMode -> IO RegionType
extSelectClipRgn dc mb_rgn mode =
maybeWith withForeignPtr mb_rgn $ \ p_rgn ->
failIfZero "ExtSelectClipRgn" $ c_ExtSelectClipRgn dc p_rgn mode
foreign import WINDOWS_CCONV unsafe "windows.h ExtSelectClipRgn"
c_ExtSelectClipRgn :: HDC -> PRGN -> ClippingMode -> IO RegionType
selectClipPath :: HDC -> ClippingMode -> IO RegionType
selectClipPath dc mode =
failIfZero "SelectClipPath" $ c_SelectClipPath dc mode
foreign import WINDOWS_CCONV unsafe "windows.h SelectClipPath"
c_SelectClipPath :: HDC -> ClippingMode -> IO RegionType
cancelDC :: HDC -> IO ()
cancelDC dc =
failIfFalse_ "CancelDC" $ c_CancelDC dc
foreign import WINDOWS_CCONV unsafe "windows.h CancelDC"
c_CancelDC :: HDC -> IO Bool
createCompatibleDC :: Maybe HDC -> IO HDC
createCompatibleDC mb_dc =
failIfNull "CreateCompatibleDC" $ c_CreateCompatibleDC (maybePtr mb_dc)
foreign import WINDOWS_CCONV unsafe "windows.h CreateCompatibleDC"
c_CreateCompatibleDC :: HDC -> IO HDC
deleteDC :: HDC -> IO ()
deleteDC dc =
failIfFalse_ "DeleteDC" $ c_DeleteDC dc
foreign import WINDOWS_CCONV unsafe "windows.h DeleteDC"
c_DeleteDC :: HDC -> IO Bool
#endif
|
0b3d504174c13a4645163f3511b53bf0d5e38b27abfe58442ff36668893daa8f | metabase/metabase | describe_table.clj | (ns metabase.driver.sql-jdbc.sync.describe-table
"SQL JDBC impl for `describe-table`, `describe-table-fks`, and `describe-nested-field-columns`."
(:require
[cheshire.core :as json]
[clojure.java.jdbc :as jdbc]
[clojure.set :as set]
[clojure.string :as str]
[medley.core :as m]
[metabase.db.metadata-queries :as metadata-queries]
[metabase.driver :as driver]
[metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
[metabase.driver.sql-jdbc.sync.common :as sql-jdbc.sync.common]
[metabase.driver.sql-jdbc.sync.interface :as sql-jdbc.sync.interface]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.mbql.schema :as mbql.s]
[metabase.models.table :as table]
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.log :as log])
(:import
(java.sql Connection DatabaseMetaData ResultSet)))
(set! *warn-on-reflection* true)
(defmethod sql-jdbc.sync.interface/column->semantic-type :sql-jdbc [_ _ _] nil)
(defn pattern-based-database-type->base-type
"Return a `database-type->base-type` function that matches types based on a sequence of pattern / base-type pairs.
`pattern->type` is a map of regex pattern to MBQL type keyword."
[pattern->type]
(fn database-type->base-type [column-type]
(let [column-type (name column-type)]
(some
(fn [[pattern base-type]]
(when (re-find pattern column-type)
base-type))
pattern->type))))
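;; [illustrative sketch, not part of the original namespace] how a driver typically uses
;; `pattern-based-database-type->base-type`; the patterns and types below are hypothetical:
(comment
  (def hypothetical-database-type->base-type
    (pattern-based-database-type->base-type
     [[#"(?i)^varchar"   :type/Text]
      [#"(?i)^int"       :type/Integer]
      [#"(?i)^timestamp" :type/DateTime]]))
  ;; (hypothetical-database-type->base-type :VARCHAR2) ;; => :type/Text
  )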
(defn get-catalogs
"Returns a set of all of the catalogs found via `metadata`"
[^DatabaseMetaData metadata]
(with-open [rs (.getCatalogs metadata)]
(set (map :table_cat (jdbc/metadata-result rs)))))
(defn- database-type->base-type-or-warn
"Given a `database-type` (e.g. `VARCHAR`) return the mapped Metabase type (e.g. `:type/Text`)."
[driver database-type]
(or (sql-jdbc.sync.interface/database-type->base-type driver (keyword database-type))
(do (log/warn (format "Don't know how to map column type '%s' to a Field base_type, falling back to :type/*."
database-type))
:type/*)))
(defn- calculated-semantic-type
"Get an appropriate semantic type for a column with `column-name` of type `database-type`."
[driver ^String column-name ^String database-type]
(when-let [semantic-type (sql-jdbc.sync.interface/column->semantic-type driver database-type column-name)]
(assert (isa? semantic-type :type/*)
(str "Invalid type: " semantic-type))
semantic-type))
(defmethod sql-jdbc.sync.interface/fallback-metadata-query :sql-jdbc
[driver schema table]
{:pre [(string? table)]}
;; Using our SQL compiler here to get portable LIMIT (e.g. `SELECT TOP n ...` for SQL Server/Oracle)
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(let [honeysql {:select [:*]
:from [(sql.qp/maybe-wrap-unaliased-expr (sql.qp/->honeysql driver (hx/identifier :table schema table)))]
:where [:not= (sql.qp/inline-num 1) (sql.qp/inline-num 1)]}
honeysql (sql.qp/apply-top-level-clause driver :limit honeysql {:limit 0})]
(sql.qp/format-honeysql driver honeysql))))
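;; [illustrative, assuming a plain ANSI-SQL backend] the query produced above is roughly
;;   SELECT * FROM "schema"."table" WHERE 1 <> 1 LIMIT 0
;; i.e. it can never return rows, but its ResultSet metadata still describes every column.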
(defn fallback-fields-metadata-from-select-query
"In some rare cases `:column_name` is blank (eg. SQLite's views with group by) fallback to sniffing the type from a
SELECT * query."
[driver ^Connection conn table-schema table-name]
;; some DBs (:sqlite) don't actually return the correct metadata for LIMIT 0 queries
(let [[sql & params] (sql-jdbc.sync.interface/fallback-metadata-query driver table-schema table-name)]
(reify clojure.lang.IReduceInit
(reduce [_ rf init]
(with-open [stmt (sql-jdbc.sync.common/prepare-statement driver conn sql params)
rs (.executeQuery stmt)]
(let [metadata (.getMetaData rs)]
(reduce
((map (fn [^Integer i]
{:name (.getColumnName metadata i)
:database-type (.getColumnTypeName metadata i)})) rf)
init
(range 1 (inc (.getColumnCount metadata))))))))))
(defn- jdbc-fields-metadata
"Reducible metadata about the Fields belonging to a Table, fetching using JDBC DatabaseMetaData methods."
[driver ^Connection conn db-name-or-nil schema table-name]
(sql-jdbc.sync.common/reducible-results
#(.getColumns (.getMetaData conn)
db-name-or-nil
(some->> schema (driver/escape-entity-name-for-metadata driver))
(some->> table-name (driver/escape-entity-name-for-metadata driver))
nil)
(fn [^ResultSet rs]
;; see the JavaDoc for java.sql.DatabaseMetaData#getColumns(String, String, String, String)
#(let [default (.getString rs "COLUMN_DEF")
no-default? (contains? #{nil "NULL" "null"} default)
nullable (.getInt rs "NULLABLE")
not-nullable? (= 0 nullable)
auto-increment (.getString rs "IS_AUTOINCREMENT")
no-auto-increment? (= "NO" auto-increment)
column-name (.getString rs "COLUMN_NAME")
required? (and no-default? not-nullable? no-auto-increment?)]
(merge
{:name column-name
:database-type (.getString rs "TYPE_NAME")
:database-required required?}
(when-let [remarks (.getString rs "REMARKS")]
(when-not (str/blank? remarks)
{:field-comment remarks})))))))
(defn ^:private fields-metadata
[driver ^Connection conn {schema :schema, table-name :name} ^String db-name-or-nil]
{:pre [(instance? Connection conn) (string? table-name)]}
(reify clojure.lang.IReduceInit
(reduce [_ rf init]
;; 1. Return all the Fields that come back from DatabaseMetaData that include type info.
;;
;; 2. Iff there are some Fields that don't have type info, concatenate
;;    `fallback-fields-metadata-from-select-query`, which fetches the same Fields using a different method.
;;
;; 3. Filter out any duplicates between the two methods using `m/distinct-by`.
(let [has-fields-without-type-info? (volatile! false)
jdbc-metadata (eduction
(remove (fn [{:keys [database-type]}]
(when (str/blank? database-type)
(vreset! has-fields-without-type-info? true)
true)))
(jdbc-fields-metadata driver conn db-name-or-nil schema table-name))
fallback-metadata (reify clojure.lang.IReduceInit
(reduce [_ rf init]
(reduce
rf
init
(when @has-fields-without-type-info?
(fallback-fields-metadata-from-select-query driver conn schema table-name)))))]
;; VERY IMPORTANT! DO NOT REWRITE THIS TO BE LAZY! IT ONLY WORKS BECAUSE AS NORMAL-FIELDS GETS REDUCED,
;; HAS-FIELDS-WITHOUT-TYPE-INFO? WILL GET SET TO TRUE IF APPLICABLE AND THEN FALLBACK-FIELDS WILL RUN WHEN
;; IT'S TIME TO START EVALUATING THAT.
(reduce
((comp cat (m/distinct-by :name)) rf)
init
[jdbc-metadata fallback-metadata])))))
(defn describe-table-fields-xf
"Returns a transducer for computing metatdata about the fields in `table`."
[driver table]
(map-indexed (fn [i {:keys [database-type], column-name :name, :as col}]
(let [semantic-type (calculated-semantic-type driver column-name database-type)]
(merge
(u/select-non-nil-keys col [:name :database-type :field-comment :database-required])
{:base-type (database-type->base-type-or-warn driver database-type)
:database-position i}
(when semantic-type
{:semantic-type semantic-type})
(when (and
(isa? semantic-type :type/SerializedJSON)
(driver/database-supports?
driver
:nested-field-columns
(table/database table)))
{:visibility-type :details-only}))))))
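;; Hedged usage sketch (not from the original source): the column maps below are made up;
;; real input comes from `fields-metadata`, and :base-type / :semantic-type depend on the
;; driver's own type mappings.
(comment
  (into #{}
        (describe-table-fields-xf :postgres {:name "venues" :schema "public"})
        [{:name "id"   :database-type "serial" :database-required false}
         {:name "name" :database-type "text"   :database-required false}])
  ;; => #{{:name "id", :database-type "serial", :database-position 0, ...}
  ;;      {:name "name", :database-type "text", :database-position 1, ...}}
  )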
(defmulti describe-table-fields
"Returns a set of column metadata for `table` using JDBC Connection `conn`."
{:added "0.45.0"
:arglists '([driver ^Connection conn table ^String db-name-or-nil])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmethod describe-table-fields :sql-jdbc
[driver conn table db-name-or-nil]
(into
#{}
(describe-table-fields-xf driver table)
(fields-metadata driver conn table db-name-or-nil)))
(defmulti get-table-pks
"Returns a set of primary keys for `table` using a JDBC DatabaseMetaData from JDBC Connection `conn`.
Note: If db-name, schema, and table-name are not passed, this may return _all_ pks that the metadata's connection can access."
{:added "0.45.0"
:arglists '([driver ^Connection conn db-name-or-nil table])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmethod get-table-pks :default
[_driver ^Connection conn db-name-or-nil table]
(let [^DatabaseMetaData metadata (.getMetaData conn)]
(into #{} (sql-jdbc.sync.common/reducible-results
#(.getPrimaryKeys metadata db-name-or-nil (:schema table) (:name table))
(fn [^ResultSet rs] #(.getString rs "COLUMN_NAME"))))))
(defn add-table-pks
"Using `conn`, find any primary keys for `table` (or more, see: [[get-table-pks]]) and finally assoc `:pk?` to true for those columns."
[driver ^Connection conn db-name-or-nil table]
(let [pks (get-table-pks driver conn db-name-or-nil table)]
(update table :fields (fn [fields]
(set (for [field fields]
(if-not (contains? pks (:name field))
field
(assoc field :pk? true))))))))
(defn- describe-table*
([driver ^Connection conn table]
(describe-table* driver conn nil table))
([driver ^Connection conn db-name-or-nil table]
{:pre [(instance? Connection conn)]}
(->> (assoc (select-keys table [:name :schema])
:fields (describe-table-fields driver conn table nil))
;; find PKs and mark them
(add-table-pks driver conn db-name-or-nil))))
(defn describe-table
"Default implementation of `driver/describe-table` for SQL JDBC drivers. Uses JDBC DatabaseMetaData."
[driver db-or-id-or-spec-or-conn table]
(if (instance? Connection db-or-id-or-spec-or-conn)
(describe-table* driver db-or-id-or-spec-or-conn table)
(let [spec (sql-jdbc.conn/db->pooled-connection-spec db-or-id-or-spec-or-conn)]
(with-open [conn (jdbc/get-connection spec)]
(describe-table* driver conn table)))))
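;; Hypothetical REPL usage (not in the original source): assumes a synced Postgres
;; database whose ID is 1 and a Table map for its "venues" table; output shape only,
;; values made up.
(comment
  (describe-table :postgres 1 {:name "venues" :schema "public"})
  ;; => {:name   "venues"
  ;;     :schema "public"
  ;;     :fields #{{:name "id", :database-type "serial", :base-type :type/Integer,
  ;;                :database-position 0, :database-required false, :pk? true}
  ;;               ...}}
  )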
(defn- describe-table-fks*
[_driver ^Connection conn {^String schema :schema, ^String table-name :name} & [^String db-name-or-nil]]
(into
#{}
(sql-jdbc.sync.common/reducible-results #(.getImportedKeys (.getMetaData conn) db-name-or-nil schema table-name)
(fn [^ResultSet rs]
(fn []
{:fk-column-name (.getString rs "FKCOLUMN_NAME")
:dest-table {:name (.getString rs "PKTABLE_NAME")
:schema (.getString rs "PKTABLE_SCHEM")}
:dest-column-name (.getString rs "PKCOLUMN_NAME")})))))
(defn describe-table-fks
"Default implementation of `driver/describe-table-fks` for SQL JDBC drivers. Uses JDBC DatabaseMetaData."
[driver db-or-id-or-spec-or-conn table & [db-name-or-nil]]
(if (instance? Connection db-or-id-or-spec-or-conn)
(describe-table-fks* driver db-or-id-or-spec-or-conn table db-name-or-nil)
(let [spec (sql-jdbc.conn/db->pooled-connection-spec db-or-id-or-spec-or-conn)]
(with-open [conn (jdbc/get-connection spec)]
(describe-table-fks* driver conn table db-name-or-nil)))))
(def ^:dynamic *nested-field-column-max-row-length*
"Max string length for a row for nested field column before we just give up on parsing it.
Marked as mutable because we mutate it for tests."
50000)
(defn- flattened-row [field-name row]
(letfn [(flatten-row [row path]
(lazy-seq
(when-let [[[k v] & xs] (seq row)]
(cond (and (map? v) (not-empty v))
(into (flatten-row v (conj path k))
(flatten-row xs path))
:else
(cons [(conj path k) v]
(flatten-row xs path))))))]
(into {} (flatten-row row [field-name]))))
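;; Added example (not in the original source). `flattened-row` is pure, so it can be
;; evaluated directly; nested keys become path vectors rooted at the column name.
(comment
  (flattened-row "address" {"city" "Paris" "geo" {"lat" 48.8 "lon" 2.3}})
  ;; => {["address" "city"] "Paris"
  ;;     ["address" "geo" "lat"] 48.8
  ;;     ["address" "geo" "lon"] 2.3}
  )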
(defn- type-by-parsing-string
"Mostly just (type member) but with a bit to suss out strings which are ISO8601 and say that they are datetimes"
[member]
(let [member-type (type member)]
(if (and (instance? String member)
(mbql.s/can-parse-datetime? member))
java.time.LocalDateTime
member-type)))
(defn- row->types [row]
(into {} (for [[field-name field-val] row
;; We put top-level array row type semantics on JSON roadmap but skip for now
:when (map? field-val)]
(let [flat-row (flattened-row field-name field-val)]
(into {} (map (fn [[k v]] [k (type-by-parsing-string v)]) flat-row))))))
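;; Added example (not in the original source): only map-valued columns are kept, and each
;; leaf value is mapped to the Java type used for type inference.
(comment
  (row->types {"zip" "01234" "address" {"street" "Main" "number" 12}})
  ;; => {["address" "street"] java.lang.String
  ;;     ["address" "number"] java.lang.Long}
  ;; "zip" is skipped because its value is not a map.
  )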
(defn- describe-json-xform [member]
((comp (map #(for [[k v] %
:when (< (count v) *nested-field-column-max-row-length*)]
[k (json/parse-string v)]))
(map #(into {} %))
(map row->types)) member))
(def ^:const max-nested-field-columns
"Maximum number of nested field columns."
100)
(defn- describe-json-rf
"Reducing function that takes a bunch of maps from row->types,
and gets them to conform to the type hierarchy,
going through and taking the lowest common denominator type at each pass,
ignoring the nils."
([] nil)
([acc-field-type-map] acc-field-type-map)
([acc-field-type-map second-field-type-map]
(into {}
(for [json-column (set/union (set (keys second-field-type-map))
(set (keys acc-field-type-map)))]
(cond
(or (nil? acc-field-type-map)
(nil? (acc-field-type-map json-column))
(= (hash (acc-field-type-map json-column))
(hash (second-field-type-map json-column))))
[json-column (second-field-type-map json-column)]
(or (nil? second-field-type-map)
(nil? (second-field-type-map json-column)))
[json-column (acc-field-type-map json-column)]
(every? #(isa? % Number) [(acc-field-type-map json-column)
(second-field-type-map json-column)])
[json-column java.lang.Number]
(every?
(fn [column-type]
(some (fn [allowed-type]
(isa? column-type allowed-type))
[String Number Boolean java.time.LocalDateTime]))
[(acc-field-type-map json-column) (second-field-type-map json-column)])
[json-column java.lang.String]
:else
[json-column nil])))))
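;; Added example (not in the original source): when two sampled rows disagree on a column's
;; type, the reducing function widens to a common ancestor type (here java.lang.Number).
(comment
  (describe-json-rf {["blob" "count"] java.lang.Long}
                    {["blob" "count"] java.lang.Double})
  ;; => {["blob" "count"] java.lang.Number}
  )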
(def field-type-map
"Map from Java types for deserialized JSON (so small subset of Java types) to MBQL types.
We actually do deserialize the JSON in order to determine types,
so the java / clojure types we get have to be matched to MBQL types"
{java.lang.String :type/Text
;; JSON itself has the single number type, but Java serde of JSON is stricter
java.lang.Long :type/Integer
clojure.lang.BigInt :type/BigInteger
java.math.BigInteger :type/BigInteger
java.lang.Integer :type/Integer
java.lang.Double :type/Float
java.lang.Float :type/Float
java.math.BigDecimal :type/Decimal
java.lang.Number :type/Number
java.lang.Boolean :type/Boolean
java.time.LocalDateTime :type/DateTime
clojure.lang.PersistentVector :type/Array
clojure.lang.PersistentArrayMap :type/Structured
clojure.lang.PersistentHashMap :type/Structured})
(def db-type-map
"Map from MBQL types to database types.
This is the lowest common denominator of types, hopefully,
although as of writing this is just geared towards Postgres types"
{:type/Text "text"
:type/Integer "bigint"
;; You might think that the ordinary 'bigint' type in Postgres and MySQL should be this.
;; However, Bigint in those DB's maxes out at 2^64.
;; JSON, like Javascript itself, will happily represent 1.8 * (10^308),
;; Losing digits merrily along the way.
;; We can't really trust anyone to use MAX_SAFE_INTEGER, in JSON-land..
;; So really without forcing arbitrary precision ('decimal' type),
;; we have too many numerical regimes to test.
;; (#22732) was basically the consequence of missing one.
:type/BigInteger "decimal"
:type/Float "double precision"
:type/Number "double precision"
:type/Decimal "decimal"
:type/Boolean "boolean"
:type/DateTime "timestamp"
:type/Array "text"
:type/Structured "text"})
(defn- field-types->fields [field-types]
(let [valid-fields (for [[field-path field-type] (seq field-types)]
(if (nil? field-type)
nil
(let [curr-type (get field-type-map field-type :type/*)]
{:name (str/join " \u2192 " (map name field-path)) ;; right arrow
:database-type (db-type-map curr-type)
:base-type curr-type
;; Postgres JSONB field, which gets most usage, doesn't maintain JSON object ordering...
:database-position 0
:visibility-type :normal
:nfc-path field-path})))
field-hash (apply hash-set (filter some? valid-fields))]
field-hash))
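;; Added example (not in the original source), showing how an inferred Java type becomes a
;; synthetic nested-field column definition.
(comment
  (field-types->fields {["blob" "count"] java.lang.Long})
  ;; => #{{:name "blob → count", :database-type "bigint", :base-type :type/Integer,
  ;;       :database-position 0, :visibility-type :normal, :nfc-path ["blob" "count"]}}
  )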
;; The name's nested field columns but what the people wanted (issue #708)
;; was JSON so what they're getting is JSON.
(defn describe-nested-field-columns
"Default implementation of [[metabase.driver.sql-jdbc.sync.interface/describe-nested-field-columns]] for SQL JDBC
drivers. Goes and queries the table if there are JSON columns for the nested contents."
[driver spec table]
(with-open [conn (jdbc/get-connection spec)]
(let [table-identifier-info [(:schema table) (:name table)]
table-fields (describe-table-fields driver conn table nil)
json-fields (filter #(= (:semantic-type %) :type/SerializedJSON) table-fields)]
(if (nil? (seq json-fields))
#{}
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(let [json-field-names (mapv #(apply hx/identifier :field (into table-identifier-info [(:name %)])) json-fields)
table-identifier (apply hx/identifier :table table-identifier-info)
sql-args (sql.qp/format-honeysql driver {:select (mapv sql.qp/maybe-wrap-unaliased-expr json-field-names)
:from [(sql.qp/maybe-wrap-unaliased-expr table-identifier)]
:limit metadata-queries/nested-field-sample-limit})
query (jdbc/reducible-query spec sql-args {:identifiers identity})
field-types (transduce describe-json-xform describe-json-rf query)
fields (field-types->fields field-types)]
(if (> (count fields) max-nested-field-columns)
(do
(log/warn
(format
"More nested field columns detected than maximum. Limiting the number of nested field columns to %d."
max-nested-field-columns))
(set (take max-nested-field-columns fields)))
fields)))))))
| null | https://raw.githubusercontent.com/metabase/metabase/b820fa55df837b14064983ef39f82af39d4db59a/src/metabase/driver/sql_jdbc/sync/describe_table.clj | clojure | Using our SQL compiler here to get portable LIMIT (e.g. `SELECT TOP n ...` for SQL Server/Oracle)
#getColumns(java.lang.String,%20java.lang.String,%20java.lang.String,%20java.lang.String)
IT'S TIME TO START EVALUATING THAT.
We put top-level array row type semantics on JSON roadmap but skip for now
Losing digits merrily along the way.
So really without forcing arbitrary precision ('decimal' type),
we have too many numerical regimes to test.
right arrow
Postgres JSONB field, which gets most usage, doesn't maintain JSON object ordering...
was JSON so what they're getting is JSON. | (ns metabase.driver.sql-jdbc.sync.describe-table
"SQL JDBC impl for `describe-table`, `describe-table-fks`, and `describe-nested-field-columns`."
(:require
[cheshire.core :as json]
[clojure.java.jdbc :as jdbc]
[clojure.set :as set]
[clojure.string :as str]
[medley.core :as m]
[metabase.db.metadata-queries :as metadata-queries]
[metabase.driver :as driver]
[metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
[metabase.driver.sql-jdbc.sync.common :as sql-jdbc.sync.common]
[metabase.driver.sql-jdbc.sync.interface :as sql-jdbc.sync.interface]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.mbql.schema :as mbql.s]
[metabase.models.table :as table]
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.log :as log])
(:import
(java.sql Connection DatabaseMetaData ResultSet)))
(set! *warn-on-reflection* true)
(defmethod sql-jdbc.sync.interface/column->semantic-type :sql-jdbc [_ _ _] nil)
(defn pattern-based-database-type->base-type
"Return a `database-type->base-type` function that matches types based on a sequence of pattern / base-type pairs.
`pattern->type` is a map of regex pattern to MBQL type keyword."
[pattern->type]
(fn database-type->base-type [column-type]
(let [column-type (name column-type)]
(some
(fn [[pattern base-type]]
(when (re-find pattern column-type)
base-type))
pattern->type))))
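;; Hedged usage sketch (not in the original source); the patterns and the var name are
;; hypothetical -- drivers typically pass an ordered vector of [regex base-type] pairs.
(comment
  (def ^:private my-database-type->base-type
    (pattern-based-database-type->base-type
     [[#"(?i)^varchar" :type/Text]
      [#"(?i)^int"     :type/Integer]]))
  (my-database-type->base-type :VARCHAR2) ;; => :type/Text
  (my-database-type->base-type "int8")    ;; => :type/Integer
  )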
(defn get-catalogs
"Returns a set of all of the catalogs found via `metadata`"
[^DatabaseMetaData metadata]
(with-open [rs (.getCatalogs metadata)]
(set (map :table_cat (jdbc/metadata-result rs)))))
(defn- database-type->base-type-or-warn
"Given a `database-type` (e.g. `VARCHAR`) return the mapped Metabase type (e.g. `:type/Text`)."
[driver database-type]
(or (sql-jdbc.sync.interface/database-type->base-type driver (keyword database-type))
(do (log/warn (format "Don't know how to map column type '%s' to a Field base_type, falling back to :type/*."
database-type))
:type/*)))
(defn- calculated-semantic-type
"Get an appropriate semantic type for a column with `column-name` of type `database-type`."
[driver ^String column-name ^String database-type]
(when-let [semantic-type (sql-jdbc.sync.interface/column->semantic-type driver database-type column-name)]
(assert (isa? semantic-type :type/*)
(str "Invalid type: " semantic-type))
semantic-type))
(defmethod sql-jdbc.sync.interface/fallback-metadata-query :sql-jdbc
[driver schema table]
{:pre [(string? table)]}
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(let [honeysql {:select [:*]
:from [(sql.qp/maybe-wrap-unaliased-expr (sql.qp/->honeysql driver (hx/identifier :table schema table)))]
:where [:not= (sql.qp/inline-num 1) (sql.qp/inline-num 1)]}
honeysql (sql.qp/apply-top-level-clause driver :limit honeysql {:limit 0})]
(sql.qp/format-honeysql driver honeysql))))
(defn fallback-fields-metadata-from-select-query
"In some rare cases `:column_name` is blank (eg. SQLite's views with group by) fallback to sniffing the type from a
SELECT * query."
[driver ^Connection conn table-schema table-name]
;; some DBs (:sqlite) don't actually return the correct metadata for LIMIT 0 queries
(let [[sql & params] (sql-jdbc.sync.interface/fallback-metadata-query driver table-schema table-name)]
(reify clojure.lang.IReduceInit
(reduce [_ rf init]
(with-open [stmt (sql-jdbc.sync.common/prepare-statement driver conn sql params)
rs (.executeQuery stmt)]
(let [metadata (.getMetaData rs)]
(reduce
((map (fn [^Integer i]
{:name (.getColumnName metadata i)
:database-type (.getColumnTypeName metadata i)})) rf)
init
(range 1 (inc (.getColumnCount metadata))))))))))
(defn- jdbc-fields-metadata
"Reducible metadata about the Fields belonging to a Table, fetching using JDBC DatabaseMetaData methods."
[driver ^Connection conn db-name-or-nil schema table-name]
(sql-jdbc.sync.common/reducible-results
#(.getColumns (.getMetaData conn)
db-name-or-nil
(some->> schema (driver/escape-entity-name-for-metadata driver))
(some->> table-name (driver/escape-entity-name-for-metadata driver))
nil)
(fn [^ResultSet rs]
#(let [default (.getString rs "COLUMN_DEF")
no-default? (contains? #{nil "NULL" "null"} default)
nullable (.getInt rs "NULLABLE")
not-nullable? (= 0 nullable)
auto-increment (.getString rs "IS_AUTOINCREMENT")
no-auto-increment? (= "NO" auto-increment)
column-name (.getString rs "COLUMN_NAME")
required? (and no-default? not-nullable? no-auto-increment?)]
(merge
{:name column-name
:database-type (.getString rs "TYPE_NAME")
:database-required required?}
(when-let [remarks (.getString rs "REMARKS")]
(when-not (str/blank? remarks)
{:field-comment remarks})))))))
(defn ^:private fields-metadata
[driver ^Connection conn {schema :schema, table-name :name} ^String db-name-or-nil]
{:pre [(instance? Connection conn) (string? table-name)]}
(reify clojure.lang.IReduceInit
(reduce [_ rf init]
;; 1. Return all the Fields that come back from DatabaseMetaData that include type info.
;; 2. Iff there are some Fields that don't have type info, concatenate
;;    `fallback-fields-metadata-from-select-query`, which fetches the same Fields using a different method.
;; 3. Filter out any duplicates between the two methods using `m/distinct-by`.
(let [has-fields-without-type-info? (volatile! false)
jdbc-metadata (eduction
(remove (fn [{:keys [database-type]}]
(when (str/blank? database-type)
(vreset! has-fields-without-type-info? true)
true)))
(jdbc-fields-metadata driver conn db-name-or-nil schema table-name))
fallback-metadata (reify clojure.lang.IReduceInit
(reduce [_ rf init]
(reduce
rf
init
(when @has-fields-without-type-info?
(fallback-fields-metadata-from-select-query driver conn schema table-name)))))]
;; VERY IMPORTANT! DO NOT REWRITE THIS TO BE LAZY! IT ONLY WORKS BECAUSE AS NORMAL-FIELDS GETS REDUCED,
;; HAS-FIELDS-WITHOUT-TYPE-INFO? WILL GET SET TO TRUE IF APPLICABLE AND THEN FALLBACK-FIELDS WILL RUN WHEN
(reduce
((comp cat (m/distinct-by :name)) rf)
init
[jdbc-metadata fallback-metadata])))))
(defn describe-table-fields-xf
"Returns a transducer for computing metatdata about the fields in `table`."
[driver table]
(map-indexed (fn [i {:keys [database-type], column-name :name, :as col}]
(let [semantic-type (calculated-semantic-type driver column-name database-type)]
(merge
(u/select-non-nil-keys col [:name :database-type :field-comment :database-required])
{:base-type (database-type->base-type-or-warn driver database-type)
:database-position i}
(when semantic-type
{:semantic-type semantic-type})
(when (and
(isa? semantic-type :type/SerializedJSON)
(driver/database-supports?
driver
:nested-field-columns
(table/database table)))
{:visibility-type :details-only}))))))
(defmulti describe-table-fields
"Returns a set of column metadata for `table` using JDBC Connection `conn`."
{:added "0.45.0"
:arglists '([driver ^Connection conn table ^String db-name-or-nil])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmethod describe-table-fields :sql-jdbc
[driver conn table db-name-or-nil]
(into
#{}
(describe-table-fields-xf driver table)
(fields-metadata driver conn table db-name-or-nil)))
(defmulti get-table-pks
"Returns a set of primary keys for `table` using a JDBC DatabaseMetaData from JDBC Connection `conn`.
Note: If db-name, schema, and table-name are not passed, this may return _all_ pks that the metadata's connection can access."
{:added "0.45.0"
:arglists '([driver ^Connection conn db-name-or-nil table])}
driver/dispatch-on-initialized-driver
:hierarchy #'driver/hierarchy)
(defmethod get-table-pks :default
[_driver ^Connection conn db-name-or-nil table]
(let [^DatabaseMetaData metadata (.getMetaData conn)]
(into #{} (sql-jdbc.sync.common/reducible-results
#(.getPrimaryKeys metadata db-name-or-nil (:schema table) (:name table))
(fn [^ResultSet rs] #(.getString rs "COLUMN_NAME"))))))
(defn add-table-pks
"Using `conn`, find any primary keys for `table` (or more, see: [[get-table-pks]]) and finally assoc `:pk?` to true for those columns."
[driver ^Connection conn db-name-or-nil table]
(let [pks (get-table-pks driver conn db-name-or-nil table)]
(update table :fields (fn [fields]
(set (for [field fields]
(if-not (contains? pks (:name field))
field
(assoc field :pk? true))))))))
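;; Hedged sketch (not in the original source): `conn` stands for an open java.sql.Connection
;; and the table/field names are made up; assuming the live metadata reports "id" as the
;; primary key, that field gets :pk? true.
(comment
  (add-table-pks :postgres conn nil {:name   "venues"
                                     :schema "public"
                                     :fields #{{:name "id"} {:name "name"}}})
  ;; => {:name "venues", :schema "public",
  ;;     :fields #{{:name "id", :pk? true} {:name "name"}}}
  )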
(defn- describe-table*
([driver ^Connection conn table]
(describe-table* driver conn nil table))
([driver ^Connection conn db-name-or-nil table]
{:pre [(instance? Connection conn)]}
(->> (assoc (select-keys table [:name :schema])
:fields (describe-table-fields driver conn table nil))
;; find PKs and mark them
(add-table-pks driver conn db-name-or-nil))))
(defn describe-table
"Default implementation of `driver/describe-table` for SQL JDBC drivers. Uses JDBC DatabaseMetaData."
[driver db-or-id-or-spec-or-conn table]
(if (instance? Connection db-or-id-or-spec-or-conn)
(describe-table* driver db-or-id-or-spec-or-conn table)
(let [spec (sql-jdbc.conn/db->pooled-connection-spec db-or-id-or-spec-or-conn)]
(with-open [conn (jdbc/get-connection spec)]
(describe-table* driver conn table)))))
(defn- describe-table-fks*
[_driver ^Connection conn {^String schema :schema, ^String table-name :name} & [^String db-name-or-nil]]
(into
#{}
(sql-jdbc.sync.common/reducible-results #(.getImportedKeys (.getMetaData conn) db-name-or-nil schema table-name)
(fn [^ResultSet rs]
(fn []
{:fk-column-name (.getString rs "FKCOLUMN_NAME")
:dest-table {:name (.getString rs "PKTABLE_NAME")
:schema (.getString rs "PKTABLE_SCHEM")}
:dest-column-name (.getString rs "PKCOLUMN_NAME")})))))
(defn describe-table-fks
"Default implementation of `driver/describe-table-fks` for SQL JDBC drivers. Uses JDBC DatabaseMetaData."
[driver db-or-id-or-spec-or-conn table & [db-name-or-nil]]
(if (instance? Connection db-or-id-or-spec-or-conn)
(describe-table-fks* driver db-or-id-or-spec-or-conn table db-name-or-nil)
(let [spec (sql-jdbc.conn/db->pooled-connection-spec db-or-id-or-spec-or-conn)]
(with-open [conn (jdbc/get-connection spec)]
(describe-table-fks* driver conn table db-name-or-nil)))))
(def ^:dynamic *nested-field-column-max-row-length*
"Max string length for a row for nested field column before we just give up on parsing it.
Marked as mutable because we mutate it for tests."
50000)
(defn- flattened-row [field-name row]
(letfn [(flatten-row [row path]
(lazy-seq
(when-let [[[k v] & xs] (seq row)]
(cond (and (map? v) (not-empty v))
(into (flatten-row v (conj path k))
(flatten-row xs path))
:else
(cons [(conj path k) v]
(flatten-row xs path))))))]
(into {} (flatten-row row [field-name]))))
(defn- type-by-parsing-string
"Mostly just (type member) but with a bit to suss out strings which are ISO8601 and say that they are datetimes"
[member]
(let [member-type (type member)]
(if (and (instance? String member)
(mbql.s/can-parse-datetime? member))
java.time.LocalDateTime
member-type)))
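;; Added example (not in the original source): ISO8601-looking strings are promoted to
;; java.time.LocalDateTime so they later map to :type/DateTime.
(comment
  (type-by-parsing-string "2019-10-21T13:45:00") ;; => java.time.LocalDateTime
  (type-by-parsing-string "plain old text")      ;; => java.lang.String
  (type-by-parsing-string 3.14)                  ;; => java.lang.Double
  )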
(defn- row->types [row]
(into {} (for [[field-name field-val] row
:when (map? field-val)]
(let [flat-row (flattened-row field-name field-val)]
(into {} (map (fn [[k v]] [k (type-by-parsing-string v)]) flat-row))))))
(defn- describe-json-xform [member]
((comp (map #(for [[k v] %
:when (< (count v) *nested-field-column-max-row-length*)]
[k (json/parse-string v)]))
(map #(into {} %))
(map row->types)) member))
(def ^:const max-nested-field-columns
"Maximum number of nested field columns."
100)
(defn- describe-json-rf
"Reducing function that takes a bunch of maps from row->types,
and gets them to conform to the type hierarchy,
going through and taking the lowest common denominator type at each pass,
ignoring the nils."
([] nil)
([acc-field-type-map] acc-field-type-map)
([acc-field-type-map second-field-type-map]
(into {}
(for [json-column (set/union (set (keys second-field-type-map))
(set (keys acc-field-type-map)))]
(cond
(or (nil? acc-field-type-map)
(nil? (acc-field-type-map json-column))
(= (hash (acc-field-type-map json-column))
(hash (second-field-type-map json-column))))
[json-column (second-field-type-map json-column)]
(or (nil? second-field-type-map)
(nil? (second-field-type-map json-column)))
[json-column (acc-field-type-map json-column)]
(every? #(isa? % Number) [(acc-field-type-map json-column)
(second-field-type-map json-column)])
[json-column java.lang.Number]
(every?
(fn [column-type]
(some (fn [allowed-type]
(isa? column-type allowed-type))
[String Number Boolean java.time.LocalDateTime]))
[(acc-field-type-map json-column) (second-field-type-map json-column)])
[json-column java.lang.String]
:else
[json-column nil])))))
(def field-type-map
"Map from Java types for deserialized JSON (so small subset of Java types) to MBQL types.
We actually do deserialize the JSON in order to determine types,
so the java / clojure types we get have to be matched to MBQL types"
{java.lang.String :type/Text
;; JSON itself has the single number type, but Java serde of JSON is stricter
java.lang.Long :type/Integer
clojure.lang.BigInt :type/BigInteger
java.math.BigInteger :type/BigInteger
java.lang.Integer :type/Integer
java.lang.Double :type/Float
java.lang.Float :type/Float
java.math.BigDecimal :type/Decimal
java.lang.Number :type/Number
java.lang.Boolean :type/Boolean
java.time.LocalDateTime :type/DateTime
clojure.lang.PersistentVector :type/Array
clojure.lang.PersistentArrayMap :type/Structured
clojure.lang.PersistentHashMap :type/Structured})
(def db-type-map
"Map from MBQL types to database types.
This is the lowest common denominator of types, hopefully,
although as of writing this is just geared towards Postgres types"
{:type/Text "text"
:type/Integer "bigint"
;; You might think that the ordinary 'bigint' type in Postgres and MySQL should be this.
;; However, Bigint in those DB's maxes out at 2^64.
;; JSON, like Javascript itself, will happily represent 1.8 * (10^308),
;; Losing digits merrily along the way.
;; We can't really trust anyone to use MAX_SAFE_INTEGER, in JSON-land..
;; So really without forcing arbitrary precision ('decimal' type),
;; we have too many numerical regimes to test.
;; (#22732) was basically the consequence of missing one.
:type/BigInteger "decimal"
:type/Float "double precision"
:type/Number "double precision"
:type/Decimal "decimal"
:type/Boolean "boolean"
:type/DateTime "timestamp"
:type/Array "text"
:type/Structured "text"})
(defn- field-types->fields [field-types]
(let [valid-fields (for [[field-path field-type] (seq field-types)]
(if (nil? field-type)
nil
(let [curr-type (get field-type-map field-type :type/*)]
{:name (str/join " \u2192 " (map name field-path)) ;; right arrow
 :database-type (db-type-map curr-type)
:base-type curr-type
:database-position 0
:visibility-type :normal
:nfc-path field-path})))
field-hash (apply hash-set (filter some? valid-fields))]
field-hash))
;; The name's nested field columns but what the people wanted (issue #708)
(defn describe-nested-field-columns
"Default implementation of [[metabase.driver.sql-jdbc.sync.interface/describe-nested-field-columns]] for SQL JDBC
drivers. Goes and queries the table if there are JSON columns for the nested contents."
[driver spec table]
(with-open [conn (jdbc/get-connection spec)]
(let [table-identifier-info [(:schema table) (:name table)]
table-fields (describe-table-fields driver conn table nil)
json-fields (filter #(= (:semantic-type %) :type/SerializedJSON) table-fields)]
(if (nil? (seq json-fields))
#{}
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(let [json-field-names (mapv #(apply hx/identifier :field (into table-identifier-info [(:name %)])) json-fields)
table-identifier (apply hx/identifier :table table-identifier-info)
sql-args (sql.qp/format-honeysql driver {:select (mapv sql.qp/maybe-wrap-unaliased-expr json-field-names)
:from [(sql.qp/maybe-wrap-unaliased-expr table-identifier)]
:limit metadata-queries/nested-field-sample-limit})
query (jdbc/reducible-query spec sql-args {:identifiers identity})
field-types (transduce describe-json-xform describe-json-rf query)
fields (field-types->fields field-types)]
(if (> (count fields) max-nested-field-columns)
(do
(log/warn
(format
"More nested field columns detected than maximum. Limiting the number of nested field columns to %d."
max-nested-field-columns))
(set (take max-nested-field-columns fields)))
fields)))))))
|
809fdb06e9311232aa3c0c389b3f5459e79d45f3481f377ecf53227f2a04e93a | realworldocaml/examples | ext_list.mli | open Core.Std
(* Include the interface of the list module from Core *)
include (module type of List)
(* Signature of function we're adding *)
val intersperse : 'a list -> 'a -> 'a list
| null | https://raw.githubusercontent.com/realworldocaml/examples/32ea926861a0b728813a29b0e4cf20dd15eb486e/code/files-modules-and-programs/ext_list.mli | ocaml | open Core.Std
Include the interface of the list module from Core
include (module type of List)
Signature of function we're adding
val intersperse : 'a list -> 'a -> 'a list
|
|
b66ff2442bf0473b406f649036b455addca198082f2fe7ca59e7b1cc8de9fe7d | javalib-team/sawja | safe.mli |
(*
* This file is part of SAWJA
* Copyright (c)2009, 2010 Laurent Hubert (CNRS)
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* </>.
*)
(** Defines a fixpoint solver managing domains for different levels of the code
representation (global, class, field, method, program point). [Safe] defines a
structure of variables and the constraints between those variables. It also
defines domains for different levels of the program: global, classes, fields,
methods and program points. {!ReachableMethods} is a very simple use case of
this solver, and {!XTA} is a richer example.*)
(** In order to use this solver:
- Instantiate a variable module using the functor {!Safe.Var.Make}
with the necessary {!Safe.Var.CONTEXT}. You could use
{!Var.EmptyContext} if no context is needed.
- Define a domain module (regarding {!Safe.Domain.S} interface)
for each level of the program (global, class, field, method,
program point). You could use {!Domain.Empty} module for
unnecessary levels in your analysis. Some classic domain
representations are supplied in {!Safe.Domain} module.
- Instantiate a state module using the functor {!Safe.State.Make} with the
previously defined modules.
- Instantiate a constraints module using the functor
{!Safe.Constraints.Make} with the state module.
- Instantiate a solver module using the functor
{!Safe.Solver.Make} with the constraints module.
Once the aforementioned modules have been created:
- Create an initial state {!Safe.State.S.t} using {!Safe.State.S.bot}
and modifying it.
- Compute the constraint list {!Safe.Constraints.S.cst} [list].
- Create the variable list {!Safe.Var.S.t} [list] of entry points
variables.
- And then use the {!Safe.Solver.Make.solve_constraints} function to obtain
the fixpoint.
*)
open! Javalib_pack
module Domain : sig
(** This exception can be used for debugging purpose. If your domain raises such
* an exception, it will stop and raise an State.DebugSt exception containing
* the last state reached before the fail.*)
exception DebugDom
(** This may be used to combine analyzes. *)
module type TRADUCTOR_ANALYSIS =
sig
type localID
type localDomain
type globalID
type globalDomain
val loc2gloID : localID -> globalID
val loc2gloDomain : localDomain -> globalDomain
val glo2locID : globalID -> localID
val glo2locDomain : globalDomain -> localDomain
end
(** Used when there is only one analysis. *)
module Trad_Identity :
functor (TYPE : sig type id type dom end) ->
sig
type localID = TYPE.id
type localDomain = TYPE.dom
type globalID = TYPE.id
type globalDomain = TYPE.dom
val loc2gloID : localID -> globalID
val loc2gloDomain : localDomain -> globalDomain
val glo2locID : globalID -> localID
val glo2locDomain : globalDomain -> localDomain
end
module type S = sig
(** Type of combined sub-analyzes domains (eg. D1.t * D2.t). *)
type t
(** Type of combined sub-analyzes IDs (Left of D1.analysisID | Right of
D2.analysisID)*)
type analysisID
(** Type of sub-analyzes domains (eg. Left of D1.analysisDomain | Right of
D2.analysisDomain) *)
type analysisDomain
(** Standard domain operations. *)
val bot : t
val isBot : analysisDomain -> bool
(** [join modifies v1 v2] returns the union of [v1] and [v2] and sets
[modifies] to true if the result is different from [v1]. *)
val join : ?modifies:bool ref -> t -> t -> t
(** [join_ad modifies v1 v2] returns the union of [v1] and [v2]
and sets [modifies] to true if the result is different from
[v1]. The option [do_join] allows avoiding to compute the
join of values when it is known that the target value (the
second one) is smaller or equal.*)
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val pprint : Format.formatter -> t -> unit
end
module Empty : S
(** Builds a domain for local variables given the domain of the variables. *)
module Local : functor (Var:S) -> sig
type t
type analysisID = Var.analysisID
type analysisDomain = t
(* No map (Unreachable code) *)
val init : t
(** [init] is an initial value for local variables: it is not bottom but
contains no local variable (it correspond to a reachable point in the
code). *)
val isBot : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val pprint : Format.formatter -> t -> unit
val get_var : int -> analysisDomain -> Var.t
val set_var : int -> Var.t -> analysisDomain -> analysisDomain
(** [set_var x v d] sets the value [v] to the variable [x] in the local
function [d]. If a previous binding was already in place, then it is
simply discarded *)
end
module Stack : functor (Var:S) -> sig
type t =
Bot (*No Stack (unreachable code)*)
| Top (*Unknown stack (has potentially infinite element) *)
| Stack of Var.t list (*Known stack composed of abstract elements*)
type analysisID = Var.analysisID
type analysisDomain = t
val bot : t
val top : t
val isBot : analysisDomain -> bool
val isTop : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val pprint : Format.formatter -> t -> unit
val init : t
(** initial (empty) stack *)
val push : Var.t -> t -> t
val pop_n : int -> t -> t
val pop : t -> t
val first : t -> Var.t
(** raise [Invalid_argument] if the stack is empty. Raise Failure if the
stack is Top. *)
val dup : t -> t
val dupX1 : t -> t
val dupX2 : t -> t
val dup2 : t -> t
val dup2X1 : t -> t
val dup2X2 : t -> t
val swap : t -> t
end
module Combine : functor (Left : S) -> functor (Right : S) -> sig
include S
module Trad_Left : functor (Trad : TRADUCTOR_ANALYSIS
with type globalID = Left.analysisID
and type globalDomain = Left.analysisDomain) ->
(TRADUCTOR_ANALYSIS
with type localID = Trad.localID
and type localDomain = Trad.localDomain
and type globalID = analysisID
and type globalDomain = analysisDomain)
module Trad_Right :
functor (Trad : TRADUCTOR_ANALYSIS
with type globalID = Right.analysisID
and type globalDomain = Right.analysisDomain) ->
(TRADUCTOR_ANALYSIS
with type localID = Trad.localID
and type localDomain = Trad.localDomain
and type globalID = analysisID
and type globalDomain = analysisDomain)
end
end
module Var : sig
module type CONTEXT =
sig
(** The Context can be
- Context sensitivity (duplicate program points)
- Analysis identification (several program points because there are several analyses)
- Information flow (intermediate state, return, parameters, returned exceptions, etc. ) *)
type context
val compare : context -> context -> int
val equal : context -> context -> bool
val hash : context -> int
val to_string : context -> string
val pprint : Format.formatter -> context -> unit
end
module EmptyContext : (CONTEXT with type context = unit)
module type S = sig
module Context : CONTEXT
(** just a shortcut *)
type ioc = JBasics.class_name
type var_global = [ `Global of Context.context ]
type var_ioc = [ `IOC of Context.context * ioc ]
type var_field =
[ `Field of Context.context * ioc * JBasics.field_signature ]
type var_method =
[ `Method of Context.context * ioc * JBasics.method_signature ]
type var_pp =
[ `PP of Context.context * ioc * JBasics.method_signature * int ]
type t =
[ `Field of Context.context * ioc * JBasics.field_signature
| `Global of Context.context
| `IOC of Context.context * ioc
| `Method of Context.context * ioc * JBasics.method_signature
| `PP of Context.context * ioc * JBasics.method_signature * int ]
val compare : t -> t -> int
val equal : t -> t -> bool
val hash : t -> int
val pprint : Format.formatter -> t -> unit
val compare_global : var_global -> var_global -> int
val compare_ioc : var_ioc -> var_ioc -> int
val compare_field : var_field -> var_field -> int
val compare_method : var_method -> var_method -> int
val compare_pp : var_pp -> var_pp -> int
val equal_global : var_global -> var_global -> bool
val equal_ioc : var_ioc -> var_ioc -> bool
val equal_field : var_field -> var_field -> bool
val equal_method : var_method -> var_method -> bool
val equal_pp : var_pp -> var_pp -> bool
val hash_global : var_global -> int
val hash_ioc : var_ioc -> int
val hash_field : var_field -> int
val hash_method : var_method -> int
val hash_pp : var_pp -> int
val pprint_global : Format.formatter -> var_global -> unit
val pprint_ioc : Format.formatter -> var_ioc -> unit
val pprint_field : Format.formatter -> var_field -> unit
val pprint_method : Format.formatter -> var_method -> unit
val pprint_pp : Format.formatter -> var_pp -> unit
end
module Make :
functor (Context : CONTEXT) -> (S with module Context = Context)
end
module State : sig
module type S = sig
(** One domain for each kind of variable. *)
module Var : Var.S
module Global : Domain.S
module IOC : Domain.S
module Field : Domain.S
module Method : Domain.S
module PP : Domain.S
type analysisID =
[ `FieldAnalysis of Field.analysisID
| `GlobalAnalysis of Global.analysisID
| `IOCAnalysis of IOC.analysisID
| `MethodAnalysis of Method.analysisID
| `PPAnalysis of PP.analysisID ]
(** Data (value) for one particular analysis. *)
type analysisDomain =
[ `FieldDomain of Field.analysisDomain
| `GlobalDomain of Global.analysisDomain
| `IOCDomain of IOC.analysisDomain
| `MethodDomain of Method.analysisDomain
| `PPDomain of PP.analysisDomain ]
(** Data for all analyses for one particular variable (slot). *)
type abData =
[ `Field of Field.t
| `Global of Global.t
| `IOC of IOC.t
| `Method of Method.t
| `PP of PP.t ]
type t
exception DebugSt of t
(** [bot (g,c,f,m,p)] generates a bottom element where [g], [c], [f], [m]
and [p] are approximations of the number of global, class, field, method
and program point variables, respectively. Note that any positive value
is correct, but poorly chosen ones may affect performance. *)
val bot : (int*int*int*int*int) -> t
val pprint : Format.formatter -> t -> unit
val get_pinfo :
'a JProgram.program -> t -> JPrintHtml.info -> JPrintHtml.info
val join_ad :
?do_join:bool -> ?modifies:bool ref -> abData -> analysisDomain -> abData
(** [join] must only be used for initialization of State and not during
constraint resolution.*)
val join : ?do_join:bool -> ?modifies:bool ref -> t -> Var.t -> analysisDomain -> unit
(** {2 Accessing data content} *)
val get : t -> Var.t -> abData
val get_global : t -> Var.var_global -> Global.t
val get_IOC : t -> Var.var_ioc -> IOC.t
val get_field : t -> Var.var_field -> Field.t
val get_method : t -> Var.var_method -> Method.t
val get_PP : t -> Var.var_pp -> PP.t
val get_ab_global : abData -> Global.t
val get_ab_field : abData -> Field.t
val get_ab_method : abData -> Method.t
val get_ab_IOC : abData -> IOC.t
val get_ab_pp : abData -> PP.t
(** {2 Modifying final results} *)
(** {b Warning: State MUST not be modified manually during constraint
resolution. The following functions MUST only be used on the final result
of State}! *)
val iter_global : t -> (t -> Var.var_global -> abData -> unit)
-> unit
val iter_IOC : t -> (t -> Var.var_ioc -> abData-> unit) -> unit
val iter_field : t -> (t -> Var.var_field -> abData -> unit)
-> unit
val iter_method : t -> (t -> Var.var_method -> abData -> unit)
-> unit
val iter_PP : t -> (t -> Var.var_pp -> abData -> unit) -> unit
val replace : t -> Var.t -> abData -> unit
val remove : t -> Var.t -> unit
end
module Make :
functor (Var : Var.S) ->
functor (GlobalDomain : Domain.S) ->
functor (IOCDomain : Domain.S) ->
functor (FieldDomain : Domain.S) ->
functor (MethodDomain : Domain.S) ->
functor (PPDomain : Domain.S) ->
(S with module Var = Var
and module Global = GlobalDomain
and module IOC = IOCDomain
and module Field = FieldDomain
and module Method = MethodDomain
and module PP = PPDomain)
end
module Constraints : sig
module type S = sig
module State : State.S
type variable = State.Var.t
type cst = {
dependencies : variable list;
target : variable;
transferFun : State.t -> State.analysisDomain;
}
val get_dependencies : cst -> variable list
val get_target : cst -> variable
val pprint : Format.formatter -> cst -> unit
(** [apply_cst ?modifies abst cst] applies the constraint [cst] on the current
[abst]. The result of the constraint (given by [cst.transferFun]) is
joined to the current value stored in [abst]. [modifies] is set to true
if the application of a constraint modified the state [abst].
If a DebugDom exception is raised by the used domain, this function catch
it and raise a DebugSt exception containing the last state reached before
the fail. This is intended for debug.
*)
val apply_cst : ?do_join:bool -> ?modifies:bool ref -> State.t -> cst -> unit
end
module Make : functor (State : State.S) ->
(S with module State = State)
end
module Solver : sig
module Make : functor (Constraints : Constraints.S) -> sig
(** [debug_level] defines the debugging level (verbosity) of the solver *)
val debug_level : int ref
(** [solve_constraints ~optimize_join prog csts state init] computes the
fixpoint of the constraints [csts], starting from the initial state
[state] by applying the constraints that depends on nothing or on initial
variables [init]. If [optimize_join] is true, then it tries to avoid
joining useless values, at the cost of some additional computations. *)
val solve_constraints :
?optimize_join:bool ->
'a ->
Constraints.cst list ->
Constraints.State.t ->
Constraints.State.Var.t list -> Constraints.State.t
end
end
| null | https://raw.githubusercontent.com/javalib-team/sawja/5b46e4afc024092cdeaf8ba125f0c5ac05cb9137/src/safe.mli | ocaml | * This may be used to combine analyzes.
* Type of combined sub-analyzes domains (eg. D1.t * D2.t).
* Type of combined sub-analyzes IDs (Left of D1.analysisID | Right of
D2.analysisID)
* Type of sub-analyzes domains (eg. Left of D1.analysisDomain | Right of
D2.analysisDomain)
* Standard domain operations.
* [join modifies v1 v2] returns the union of [v1] and [v2] and sets
[modifies] to true if the result is different from [v1].
* Builds a domain for local variables given the domain of the variables.
* [init] is an initial value for local variables: it is not bottom but
contains no local variable (it correspond to a reachable point in the
code).
* [set_var x v d] sets the value [v] to the variable [x] in the local
function [d]. If a previous binding was already in place, then it is
simply discarded
No Stack (unreacheble code)
Unknown stack (has potentially infinite element)
Known stack composed of abstract elements
* initial (empty) stack
* raise [Invalid_argument] if the stack is empty. Raise Failure if the
stack is Top.
* The Context can be
- Context sensitivity (duplicate program points)
- Analysis identification (several program points because there are several analyses)
- Information flow (intermediate state, return, parameters, returned exceptions, etc. )
* just a shortcut
* [bot (g,c,f,m,p)] generates an bottom element where [g], [c], [f], [m]
and [p] are approximations of the number of global, class, field, method
and program point variables, respectively. Note that any positive value
is correct, but poorly chosen ones may affect performance.
* [debug_level] defines the debugging level (verbosity) of the solver |
(*
* This file is part of SAWJA
* Copyright (c)2009, 2010 Laurent Hubert (CNRS)
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* </>.
*)
(** Defines a fixpoint solver managing domains for different levels of the code
representation (global, class, field, method, program point). [Safe] defines a
structure of variables and the constraints between those variables. It also
defines domains for different levels of the program: global, classes, fields,
methods and program points. {!ReachableMethods} is a very simple use case of
this solver, and {!XTA} is a richer example.*)
(** In order to use this solver:
- Instantiate a variable module using the functor {!Safe.Var.Make}
with the necessary {!Safe.Var.CONTEXT}. You could use
{!Var.EmptyContext} if no context is needed.
- Define a domain module (regarding {!Safe.Domain.S} interface)
for each level of the program (global, class, field, method,
program point). You could use {!Domain.Empty} module for
unnecessary levels in your analysis. Some classic domain
representations are supplied in {!Safe.Domain} module.
- Instantiate a state module using the functor {!Safe.State.Make} with the
previously defined modules.
- Instantiate a constraints module using the functor
{!Safe.Constraints.Make} with the state module.
- Instantiate a solver module using the functor
{!Safe.Solver.Make} with the constraints module.
Once the aforementioned modules have been created:
- Create an initial state {!Safe.State.S.t} using {!Safe.State.S.bot}
and modifying it.
- Compute the constraint list {!Safe.Constraints.S.cst} [list].
- Create the variable list {!Safe.Var.S.t} [list] of entry points
variables.
- And then use the {!Safe.Solver.Make.solve_constraints} function to obtain
the fixpoint.
*)
open! Javalib_pack
module Domain : sig
(** This exception can be used for debugging purpose. If your domain raises such
* an exception, it will stop and raise an State.DebugSt exception containing
* the last state reached before the fail.*)
exception DebugDom
module type TRADUCTOR_ANALYSIS =
sig
type localID
type localDomain
type globalID
type globalDomain
val loc2gloID : localID -> globalID
val loc2gloDomain : localDomain -> globalDomain
val glo2locID : globalID -> localID
val glo2locDomain : globalDomain -> localDomain
end
(** Used when there is only one analysis. *)
module Trad_Identity :
functor (TYPE : sig type id type dom end) ->
sig
type localID = TYPE.id
type localDomain = TYPE.dom
type globalID = TYPE.id
type globalDomain = TYPE.dom
val loc2gloID : localID -> globalID
val loc2gloDomain : localDomain -> globalDomain
val glo2locID : globalID -> localID
val glo2locDomain : globalDomain -> localDomain
end
module type S = sig
type t
type analysisID
type analysisDomain
val bot : t
val isBot : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
* [ modifies v1 v2 ] returns the union of [ v1 ] and [ v2 ]
and sets [ modifies ] to true if the result is different from
[ v1 ] . The option [ do_join ] allows avoiding to compute the
join of values when it is known that the target value ( the
second one ) is smaller or equal .
and sets [modifies] to true if the result is different from
[v1]. The option [do_join] allows avoiding to compute the
join of values when it is known that the target value (the
second one) is smaller or equal.*)
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val pprint : Format.formatter -> t -> unit
end
module Empty : S
module Local : functor (Var:S) -> sig
type t
type analysisID = Var.analysisID
type analysisDomain = t
No map ( Unreachable code )
val init : t
val isBot : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val pprint : Format.formatter -> t -> unit
val get_var : int -> analysisDomain -> Var.t
val set_var : int -> Var.t -> analysisDomain -> analysisDomain
end
module Stack : functor (Var:S) -> sig
type t =
    Bot
  | Top
  | Stack of Var.t list
type analysisID = Var.analysisID
type analysisDomain = t
val bot : t
val top : t
val isBot : analysisDomain -> bool
val isTop : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val pprint : Format.formatter -> t -> unit
val init : t
val push : Var.t -> t -> t
val pop_n : int -> t -> t
val pop : t -> t
val first : t -> Var.t
val dup : t -> t
val dupX1 : t -> t
val dupX2 : t -> t
val dup2 : t -> t
val dup2X1 : t -> t
val dup2X2 : t -> t
val swap : t -> t
end
module Combine : functor (Left : S) -> functor (Right : S) -> sig
include S
module Trad_Left : functor (Trad : TRADUCTOR_ANALYSIS
with type globalID = Left.analysisID
and type globalDomain = Left.analysisDomain) ->
(TRADUCTOR_ANALYSIS
with type localID = Trad.localID
and type localDomain = Trad.localDomain
and type globalID = analysisID
and type globalDomain = analysisDomain)
module Trad_Right :
functor (Trad : TRADUCTOR_ANALYSIS
with type globalID = Right.analysisID
and type globalDomain = Right.analysisDomain) ->
(TRADUCTOR_ANALYSIS
with type localID = Trad.localID
and type localDomain = Trad.localDomain
and type globalID = analysisID
and type globalDomain = analysisDomain)
end
end
module Var : sig
module type CONTEXT =
sig
type context
val compare : context -> context -> int
val equal : context -> context -> bool
val hash : context -> int
val to_string : context -> string
val pprint : Format.formatter -> context -> unit
end
module EmptyContext : (CONTEXT with type context = unit)
module type S = sig
module Context : CONTEXT
type ioc = JBasics.class_name
type var_global = [ `Global of Context.context ]
type var_ioc = [ `IOC of Context.context * ioc ]
type var_field =
[ `Field of Context.context * ioc * JBasics.field_signature ]
type var_method =
[ `Method of Context.context * ioc * JBasics.method_signature ]
type var_pp =
[ `PP of Context.context * ioc * JBasics.method_signature * int ]
type t =
[ `Field of Context.context * ioc * JBasics.field_signature
| `Global of Context.context
| `IOC of Context.context * ioc
| `Method of Context.context * ioc * JBasics.method_signature
| `PP of Context.context * ioc * JBasics.method_signature * int ]
val compare : t -> t -> int
val equal : t -> t -> bool
val hash : t -> int
val pprint : Format.formatter -> t -> unit
val compare_global : var_global -> var_global -> int
val compare_ioc : var_ioc -> var_ioc -> int
val compare_field : var_field -> var_field -> int
val compare_method : var_method -> var_method -> int
val compare_pp : var_pp -> var_pp -> int
val equal_global : var_global -> var_global -> bool
val equal_ioc : var_ioc -> var_ioc -> bool
val equal_field : var_field -> var_field -> bool
val equal_method : var_method -> var_method -> bool
val equal_pp : var_pp -> var_pp -> bool
val hash_global : var_global -> int
val hash_ioc : var_ioc -> int
val hash_field : var_field -> int
val hash_method : var_method -> int
val hash_pp : var_pp -> int
val pprint_global : Format.formatter -> var_global -> unit
val pprint_ioc : Format.formatter -> var_ioc -> unit
val pprint_field : Format.formatter -> var_field -> unit
val pprint_method : Format.formatter -> var_method -> unit
val pprint_pp : Format.formatter -> var_pp -> unit
end
module Make :
functor (Context : CONTEXT) -> (S with module Context = Context)
end
module State : sig
module type S = sig
(** One domain for each kind of variable. *)
module Var : Var.S
module Global : Domain.S
module IOC : Domain.S
module Field : Domain.S
module Method : Domain.S
module PP : Domain.S
type analysisID =
[ `FieldAnalysis of Field.analysisID
| `GlobalAnalysis of Global.analysisID
| `IOCAnalysis of IOC.analysisID
| `MethodAnalysis of Method.analysisID
| `PPAnalysis of PP.analysisID ]
(** Data (value) for one particular analysis. *)
type analysisDomain =
[ `FieldDomain of Field.analysisDomain
| `GlobalDomain of Global.analysisDomain
| `IOCDomain of IOC.analysisDomain
| `MethodDomain of Method.analysisDomain
| `PPDomain of PP.analysisDomain ]
(** Data for all analyses for one particular variable (slot). *)
type abData =
[ `Field of Field.t
| `Global of Global.t
| `IOC of IOC.t
| `Method of Method.t
| `PP of PP.t ]
type t
exception DebugSt of t
val bot : (int*int*int*int*int) -> t
val pprint : Format.formatter -> t -> unit
val get_pinfo :
'a JProgram.program -> t -> JPrintHtml.info -> JPrintHtml.info
val join_ad :
?do_join:bool -> ?modifies:bool ref -> abData -> analysisDomain -> abData
(** [join] must only be used for initialization of State and not during
    constraint resolution.*)
val join : ?do_join:bool -> ?modifies:bool ref -> t -> Var.t -> analysisDomain -> unit
(** {2 Accessing data content} *)
val get : t -> Var.t -> abData
val get_global : t -> Var.var_global -> Global.t
val get_IOC : t -> Var.var_ioc -> IOC.t
val get_field : t -> Var.var_field -> Field.t
val get_method : t -> Var.var_method -> Method.t
val get_PP : t -> Var.var_pp -> PP.t
val get_ab_global : abData -> Global.t
val get_ab_field : abData -> Field.t
val get_ab_method : abData -> Method.t
val get_ab_IOC : abData -> IOC.t
val get_ab_pp : abData -> PP.t
(** {2 Modifying final results} *)
(** {b Warning: State MUST not be modified manually during constraint
    resolution. The following functions MUST only be used on the final result
    of State}! *)
val iter_global : t -> (t -> Var.var_global -> abData -> unit)
-> unit
val iter_IOC : t -> (t -> Var.var_ioc -> abData-> unit) -> unit
val iter_field : t -> (t -> Var.var_field -> abData -> unit)
-> unit
val iter_method : t -> (t -> Var.var_method -> abData -> unit)
-> unit
val iter_PP : t -> (t -> Var.var_pp -> abData -> unit) -> unit
val replace : t -> Var.t -> abData -> unit
val remove : t -> Var.t -> unit
end
module Make :
functor (Var : Var.S) ->
functor (GlobalDomain : Domain.S) ->
functor (IOCDomain : Domain.S) ->
functor (FieldDomain : Domain.S) ->
functor (MethodDomain : Domain.S) ->
functor (PPDomain : Domain.S) ->
(S with module Var = Var
and module Global = GlobalDomain
and module IOC = IOCDomain
and module Field = FieldDomain
and module Method = MethodDomain
and module PP = PPDomain)
end
module Constraints : sig
module type S = sig
module State : State.S
type variable = State.Var.t
type cst = {
dependencies : variable list;
target : variable;
transferFun : State.t -> State.analysisDomain;
}
val get_dependencies : cst -> variable list
val get_target : cst -> variable
val pprint : Format.formatter -> cst -> unit
(** [apply_cst ?modifies abst cst] applies the constraint [cst] on the current
[abst]. The result of the constraint (given by [cst.transferFun]) is
joined to the current value stored in [abst]. [modifies] is set to true
if the application of a constraint modified the state [abst].
If a DebugDom exception is raised by the used domain, this function catch
it and raise a DebugSt exception containing the last state reached before
the fail. This is intended for debug.
*)
val apply_cst : ?do_join:bool -> ?modifies:bool ref -> State.t -> cst -> unit
end
module Make : functor (State : State.S) ->
(S with module State = State)
end
module Solver : sig
module Make : functor (Constraints : Constraints.S) -> sig
val debug_level : int ref
(** [solve_constraints ~optimize_join prog csts state init] computes the
fixpoint of the constraints [csts], starting from the initial state
[state] by applying the constraints that depends on nothing or on initial
variables [init]. If [optimize_join] is true, then it tries to avoid
joining useless values, at the cost of some additional computations. *)
val solve_constraints :
?optimize_join:bool ->
'a ->
Constraints.cst list ->
Constraints.State.t ->
Constraints.State.Var.t list -> Constraints.State.t
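(* A rough usage sketch, not part of this interface: the functor argument
   [MyConstraints] and the surrounding values are assumed to exist.
     let module S = Solver.Make(MyConstraints) in
     let final = S.solve_constraints ~optimize_join:true prog csts state init_vars
*)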
end
end
|
578ad8fd71fa2e58e79d70041d7a542dad2189f2d7aa980790ba5efe24b98fb2 | art-w/mcavl | mcavl.ml | module type Ordered = S.Ordered
module Set = Mcset.Make
module Map = Mcmap.Make
| null | https://raw.githubusercontent.com/art-w/mcavl/bf7d3414dbb805a61ebbe809cbb3f2a7ec599c42/src/mcavl.ml | ocaml | module type Ordered = S.Ordered
module Set = Mcset.Make
module Map = Mcmap.Make
|
|
72d46313029485e53dad2b56053e252f2849b1b49d475d7a1cb4d35241c8224f | zoomhub/zoomhub | Instances.hs | {-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Network.HTTP.Client.Instances where
import Data.Aeson (ToJSON, Value (String), object, toJSON, (.=))
import qualified Data.ByteString as BS
import Data.CaseInsensitive (original)
import qualified Data.HashMap.Strict as HM
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Network.HTTP.Client
( HttpException (HttpExceptionRequest, InvalidUrlException),
HttpExceptionContent (ConnectionFailure, StatusCodeException),
host,
method,
path,
port,
queryString,
responseHeaders,
responseStatus,
)
import Network.HTTP.Types (Header, ResponseHeaders)
import Network.HTTP.Types.Status (statusCode)
-- Number of bytes we store for responses with exceptions:
maxBodyBytes :: Int
maxBodyBytes = 256
instance ToJSON HttpException where
toJSON (InvalidUrlException url reason) =
object
[ "type" .= ("InvalidUrlException" :: Text),
"url" .= url,
"reason" .= reason
]
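-- A rough sketch of the JSON this clause produces (aeson does not guarantee
-- key order): {"type":"InvalidUrlException","url":...,"reason":...}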
toJSON (HttpExceptionRequest r (ConnectionFailure e)) =
object
[ "type" .= ("ConnectionFailure" :: Text),
"host" .= lenientDecodeUtf8 (host r),
"method" .= show (method r),
"port" .= port r,
"path" .= lenientDecodeUtf8 (path r),
"query" .= lenientDecodeUtf8 (queryString r),
"exception" .= toJSONString (show e)
]
toJSON (HttpExceptionRequest _ (StatusCodeException res _)) =
object
[ "type" .= ("StatusCodeException" :: Text),
"status" .= statusCode (responseStatus res),
"headers" .= headersToJSON (responseHeaders res)
]
toJSON e = String . T.pack . show $ e
toJSONString :: String -> Value
toJSONString = String . T.pack
-- Duplicated from `RequestLogger`:
toObject :: ToJSON a => [(Text, a)] -> Value
toObject = toJSON . HM.fromList
headersToJSON :: ResponseHeaders -> Value
headersToJSON = toObject . map headerToJSON'
where
headerToJSON' ("Cookie", _) = ("Cookie" :: Text, "<redacted>" :: Text)
headerToJSON' ("X-Response-Body-Start", v) =
( "X-Response-Body-Start" :: Text,
lenientDecodeUtf8 $ BS.take maxBodyBytes v
)
headerToJSON' hd = headerToJSON hd
headerToJSON :: Header -> (Text, Text)
headerToJSON (headerName, header) =
(lenientDecodeUtf8 . original $ headerName, lenientDecodeUtf8 header)
lenientDecodeUtf8 :: BS.ByteString -> Text
lenientDecodeUtf8 = decodeUtf8With lenientDecode
| null | https://raw.githubusercontent.com/zoomhub/zoomhub/eb51b6532be53818573237710b29639be6103eb3/src/Network/HTTP/Client/Instances.hs | haskell | # LANGUAGE OverloadedStrings #
Number of bytes we store for responses with exceptions: | # OPTIONS_GHC -fno - warn - orphans #
module Network.HTTP.Client.Instances where
import Data.Aeson (ToJSON, Value (String), object, toJSON, (.=))
import qualified Data.ByteString as BS
import Data.CaseInsensitive (original)
import qualified Data.HashMap.Strict as HM
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Network.HTTP.Client
( HttpException (HttpExceptionRequest, InvalidUrlException),
HttpExceptionContent (ConnectionFailure, StatusCodeException),
host,
method,
path,
port,
queryString,
responseHeaders,
responseStatus,
)
import Network.HTTP.Types (Header, ResponseHeaders)
import Network.HTTP.Types.Status (statusCode)
maxBodyBytes :: Int
maxBodyBytes = 256
instance ToJSON HttpException where
toJSON (InvalidUrlException url reason) =
object
[ "type" .= ("InvalidUrlException" :: Text),
"url" .= url,
"reason" .= reason
]
toJSON (HttpExceptionRequest r (ConnectionFailure e)) =
object
[ "type" .= ("ConnectionFailure" :: Text),
"host" .= lenientDecodeUtf8 (host r),
"method" .= show (method r),
"port" .= port r,
"path" .= lenientDecodeUtf8 (path r),
"query" .= lenientDecodeUtf8 (queryString r),
"exception" .= toJSONString (show e)
]
toJSON (HttpExceptionRequest _ (StatusCodeException res _)) =
object
[ "type" .= ("StatusCodeException" :: Text),
"status" .= statusCode (responseStatus res),
"headers" .= headersToJSON (responseHeaders res)
]
toJSON e = String . T.pack . show $ e
toJSONString :: String -> Value
toJSONString = String . T.pack
-- Duplicated from `RequestLogger`:
toObject :: ToJSON a => [(Text, a)] -> Value
toObject = toJSON . HM.fromList
headersToJSON :: ResponseHeaders -> Value
headersToJSON = toObject . map headerToJSON'
where
headerToJSON' ("Cookie", _) = ("Cookie" :: Text, "<redacted>" :: Text)
headerToJSON' ("X-Response-Body-Start", v) =
( "X-Response-Body-Start" :: Text,
lenientDecodeUtf8 $ BS.take maxBodyBytes v
)
headerToJSON' hd = headerToJSON hd
headerToJSON :: Header -> (Text, Text)
headerToJSON (headerName, header) =
(lenientDecodeUtf8 . original $ headerName, lenientDecodeUtf8 header)
lenientDecodeUtf8 :: BS.ByteString -> Text
lenientDecodeUtf8 = decodeUtf8With lenientDecode
|
5fce684007fb6ecd9372d54eec32e0a6cf0b968cce7daabde3a1f67cdda3d022 | Vagabond/erlang-syslog | syslog.erl | Copyright 2009 < > . All rights reserved .
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are met:
%%%
%%% 1. Redistributions of source code must retain the above copyright notice,
%%% this list of conditions and the following disclaimer.
%%% 2. Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%%
%%% THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR
%%% IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
%%% MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
%%% EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
%%% INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
%%% (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
%%% LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
%%% ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
%%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
%%% SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
%% @doc erlang wrapper for syslog port
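%%
%% A minimal usage sketch (the identifier "my_app" is made up; it assumes the
%% syslog_drv port driver is available in the application's priv directory):
%%
%%   {ok, _Pid} = syslog:start(),
%%   {ok, Log} = syslog:open("my_app", [cons, pid], local0),
%%   ok = syslog:log(Log, info, "hello from ~p", [self()]),
%%   ok = syslog:close(Log).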
-module(syslog).
-behaviour(gen_server).
-define(DRV_NAME, "syslog_drv").
%% this constant must match the same in syslog_drv.c
-define(SYSLOGDRV_OPEN, 1).
%% API
-export([
start/0,
start_link/0,
stop/0,
open/3,
log/3,
log/4,
close/1,
priority/1,
facility/1,
openlog_opt/1,
openlog_opts/1,
load/0,
unload/0
]).
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-record(state, {}).
-type priority() :: emerg | alert | crit | err |
warning | notice | info | debug | non_neg_integer().
-type facility() :: kern | user | mail | daemon | auth | syslog |
lpr | news | uucp | cron | authpriv | ftp |
netinfo | remoteauth | install | ras |
local0 | local1 | local2 | local3 |
local4 | local5 | local6 | local7 | non_neg_integer().
-type openlog_opt() :: pid | cons | odelay | ndelay | perror | pos_integer().
-export_type([priority/0, facility/0, openlog_opt/0]).
%%% API %%%
-spec start() ->
{ok, pid()} | ignore | {error, any()}.
start() ->
gen_server:start({local, ?MODULE}, ?MODULE, [], []).
-spec start_link() ->
{ok, pid()} | ignore | {error, any()}.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec stop() ->
ok.
stop() ->
gen_server:cast(?MODULE, stop).
-spec open(Ident :: string(),
Logopt :: list(openlog_opt()),
Facility :: facility()) ->
{ok, port()} |
{error, any()}.
open(Ident, Logopt, Facility) ->
Log = erlang:open_port({spawn, ?DRV_NAME}, [binary]),
Args = term_to_binary({Ident, openlog_opts(Logopt), facility(Facility)}),
try erlang:port_control(Log, ?SYSLOGDRV_OPEN, Args) of
<<>> ->
{ok, Log};
BinError ->
binary_to_term(BinError)
catch
_:Reason ->
{error, Reason}
end.
-spec log(Log :: port(),
Priority :: priority(),
Message :: iolist()) ->
ok.
log(_Log, _Priority, []) ->
ok;
log(Log, Priority, Message) ->
NumPri = priority(Priority),
%% encode the priority value as a 4-byte integer in network order, and
%% add a 0 byte to the end of the command data to act as a NUL character
true = erlang:port_command(Log, [<<NumPri:32/big>>, Message, <<0:8>>]),
ok.
-spec log(Log :: port(),
Priority :: priority(),
FormatStr :: string(),
FormatArgs :: list()) ->
ok.
log(Log, Priority, FormatStr, FormatArgs) ->
log(Log, Priority, io_lib:format(FormatStr, FormatArgs)).
-spec close(Log :: port()) ->
ok.
close(Log) ->
true = erlang:port_close(Log),
ok.
-spec priority(N :: priority() | non_neg_integer()) ->
non_neg_integer().
priority(emerg) -> 0;
priority(alert) -> 1;
priority(crit) -> 2;
priority(err) -> 3;
priority(warning) -> 4;
priority(notice) -> 5;
priority(info) -> 6;
priority(debug) -> 7;
priority(N) when is_integer(N), N >= 0 -> N;
priority(_) -> erlang:error(badarg).
-spec facility(N :: facility() | non_neg_integer()) ->
non_neg_integer().
facility(kern) -> 0;
facility(user) -> 8;
facility(mail) -> 16;
facility(daemon) -> 24;
facility(auth) -> 32;
facility(syslog) -> 40;
facility(lpr) -> 48;
facility(news) -> 56;
facility(uucp) -> 64;
facility(cron) -> 72;
facility(authpriv) -> 80;
facility(ftp) -> 88;
facility(netinfo) -> 96;
facility(remoteauth)-> 104;
facility(install) -> 112;
facility(ras) -> 120;
facility(local0) -> 16 * 8;
facility(local1) -> 17 * 8;
facility(local2) -> 18 * 8;
facility(local3) -> 19 * 8;
facility(local4) -> 20 * 8;
facility(local5) -> 21 * 8;
facility(local6) -> 22 * 8;
facility(local7) -> 23 * 8;
facility(N) when is_integer(N), N >= 0 -> N;
facility(_) -> erlang:error(badarg).
-spec openlog_opt(N :: openlog_opt() | pos_integer()) ->
pos_integer().
openlog_opt(pid) -> 1;
openlog_opt(cons) -> 2;
openlog_opt(odelay) -> 4;
openlog_opt(ndelay) -> 8;
openlog_opt(perror) -> 20;
openlog_opt(N) when is_integer(N), N >= 1 -> N;
openlog_opt(_) -> erlang:error(badarg).
-spec openlog_opts(N :: list(openlog_opt() | pos_integer()) |
openlog_opt() | pos_integer()) ->
pos_integer().
openlog_opts([Queue]) -> openlog_opt(Queue);
openlog_opts([Tail|Queue]) ->
openlog_opt(Tail) bor openlog_opts(Queue);
openlog_opts([]) -> 0;
openlog_opts(N) -> openlog_opt(N).
-spec load() ->
ok | {error, string()}.
load() ->
PrivDir = case code:priv_dir(?MODULE) of
{error, bad_name} ->
EbinDir = filename:dirname(code:which(?MODULE)),
AppPath = filename:dirname(EbinDir),
filename:join(AppPath, "priv");
Path ->
Path
end,
case erl_ddll:load_driver(PrivDir, ?DRV_NAME) of
ok -> ok;
{error, already_loaded} -> ok;
{error, LoadError} ->
LoadErrorStr = erl_ddll:format_error(LoadError),
ErrStr = lists:flatten(
io_lib:format("could not load driver ~s: ~p",
[?DRV_NAME, LoadErrorStr])),
{error, ErrStr}
end.
-spec unload() ->
ok | {error, string()}.
unload() ->
case erl_ddll:unload_driver(?DRV_NAME) of
ok -> ok;
{error, UnloadError} ->
UnloadErrorStr = erl_ddll:format_error(UnloadError),
ErrStr = lists:flatten(
io_lib:format("could not unload driver ~s: ~p",
[?DRV_NAME, UnloadErrorStr])),
{error, ErrStr}
end.
%%% gen_server callbacks %%%
init([]) ->
case load() of
ok ->
{ok, #state{}};
{error, Reason} ->
{stop, Reason}
end.
handle_call(_Msg, _From, State) ->
{reply, ok, State}.
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_, State, _) ->
{ok, State}.
%%% internal functions %%%
-ifdef(TEST).
openlog_opts_test() ->
11 = openlog_opts([1,2,8]),
1 = openlog_opts(pid),
try
foo = openlog_opts(foo)
catch
error:badarg ->
ok;
Reason ->
throw(Reason)
end.
closed_test() ->
{ok, _} = syslog:start(),
try
{ok, Log} = open("test", pid, local0),
Self = self(),
{connected,Self} = erlang:port_info(Log, connected),
ok = close(Log),
try
close(Log)
catch
error:badarg ->
ok;
Reason1 ->
throw(Reason1)
end,
try
ok = log(Log, 8, "writing to closed log")
catch
error:badarg ->
ok;
Reason2 ->
throw(Reason2)
end
after
syslog:stop()
end.
-endif.
| null | https://raw.githubusercontent.com/Vagabond/erlang-syslog/2c38c70450f80b44711c7c1702171f32f881d4a6/src/syslog.erl | erlang |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@doc erlang wrapper for syslog port
API
gen_server callbacks
API %%%
gen_server callbacks %%%
internal functions %%% | Copyright 2009 < > . All rights reserved .
THIS SOFTWARE IS PROVIDED ` ` AS IS '' AND ANY EXPRESS OR
INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
-module(syslog).
-behaviour(gen_server).
-define(DRV_NAME, "syslog_drv").
%% this constant must match the same in syslog_drv.c
-define(SYSLOGDRV_OPEN, 1).
-export([
start/0,
start_link/0,
stop/0,
open/3,
log/3,
log/4,
close/1,
priority/1,
facility/1,
openlog_opt/1,
openlog_opts/1,
load/0,
unload/0
]).
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-record(state, {}).
-type priority() :: emerg | alert | crit | err |
warning | notice | info | debug | non_neg_integer().
-type facility() :: kern | user | mail | daemon | auth | syslog |
lpr | news | uucp | cron | authpriv | ftp |
netinfo | remoteauth | install | ras |
local0 | local1 | local2 | local3 |
local4 | local5 | local6 | local7 | non_neg_integer().
-type openlog_opt() :: pid | cons | odelay | ndelay | perror | pos_integer().
-export_type([priority/0, facility/0, openlog_opt/0]).
-spec start() ->
{ok, pid()} | ignore | {error, any()}.
start() ->
gen_server:start({local, ?MODULE}, ?MODULE, [], []).
-spec start_link() ->
{ok, pid()} | ignore | {error, any()}.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec stop() ->
ok.
stop() ->
gen_server:cast(?MODULE, stop).
-spec open(Ident :: string(),
Logopt :: list(openlog_opt()),
Facility :: facility()) ->
{ok, port()} |
{error, any()}.
open(Ident, Logopt, Facility) ->
Log = erlang:open_port({spawn, ?DRV_NAME}, [binary]),
Args = term_to_binary({Ident, openlog_opts(Logopt), facility(Facility)}),
try erlang:port_control(Log, ?SYSLOGDRV_OPEN, Args) of
<<>> ->
{ok, Log};
BinError ->
binary_to_term(BinError)
catch
_:Reason ->
{error, Reason}
end.
-spec log(Log :: port(),
Priority :: priority(),
Message :: iolist()) ->
ok.
log(_Log, _Priority, []) ->
ok;
log(Log, Priority, Message) ->
NumPri = priority(Priority),
%% encode the priority value as a 4-byte integer in network order, and
%% add a 0 byte to the end of the command data to act as a NUL character
true = erlang:port_command(Log, [<<NumPri:32/big>>, Message, <<0:8>>]),
ok.
-spec log(Log :: port(),
Priority :: priority(),
FormatStr :: string(),
FormatArgs :: list()) ->
ok.
log(Log, Priority, FormatStr, FormatArgs) ->
log(Log, Priority, io_lib:format(FormatStr, FormatArgs)).
-spec close(Log :: port()) ->
ok.
close(Log) ->
true = erlang:port_close(Log),
ok.
-spec priority(N :: priority() | non_neg_integer()) ->
non_neg_integer().
priority(emerg) -> 0;
priority(alert) -> 1;
priority(crit) -> 2;
priority(err) -> 3;
priority(warning) -> 4;
priority(notice) -> 5;
priority(info) -> 6;
priority(debug) -> 7;
priority(N) when is_integer(N), N >= 0 -> N;
priority(_) -> erlang:error(badarg).
-spec facility(N :: facility() | non_neg_integer()) ->
non_neg_integer().
facility(kern) -> 0;
facility(user) -> 8;
facility(mail) -> 16;
facility(daemon) -> 24;
facility(auth) -> 32;
facility(syslog) -> 40;
facility(lpr) -> 48;
facility(news) -> 56;
facility(uucp) -> 64;
facility(cron) -> 72;
facility(authpriv) -> 80;
facility(ftp) -> 88;
facility(netinfo) -> 96;
facility(remoteauth)-> 104;
facility(install) -> 112;
facility(ras) -> 120;
facility(local0) -> 16 * 8;
facility(local1) -> 17 * 8;
facility(local2) -> 18 * 8;
facility(local3) -> 19 * 8;
facility(local4) -> 20 * 8;
facility(local5) -> 21 * 8;
facility(local6) -> 22 * 8;
facility(local7) -> 23 * 8;
facility(N) when is_integer(N), N >= 0 -> N;
facility(_) -> erlang:error(badarg).
-spec openlog_opt(N :: openlog_opt() | pos_integer()) ->
pos_integer().
openlog_opt(pid) -> 1;
openlog_opt(cons) -> 2;
openlog_opt(odelay) -> 4;
openlog_opt(ndelay) -> 8;
openlog_opt(perror) -> 20;
openlog_opt(N) when is_integer(N), N >= 1 -> N;
openlog_opt(_) -> erlang:error(badarg).
-spec openlog_opts(N :: list(openlog_opt() | pos_integer()) |
openlog_opt() | pos_integer()) ->
pos_integer().
openlog_opts([Queue]) -> openlog_opt(Queue);
openlog_opts([Tail|Queue]) ->
openlog_opt(Tail) bor openlog_opts(Queue);
openlog_opts([]) -> 0;
openlog_opts(N) -> openlog_opt(N).
-spec load() ->
ok | {error, string()}.
load() ->
PrivDir = case code:priv_dir(?MODULE) of
{error, bad_name} ->
EbinDir = filename:dirname(code:which(?MODULE)),
AppPath = filename:dirname(EbinDir),
filename:join(AppPath, "priv");
Path ->
Path
end,
case erl_ddll:load_driver(PrivDir, ?DRV_NAME) of
ok -> ok;
{error, already_loaded} -> ok;
{error, LoadError} ->
LoadErrorStr = erl_ddll:format_error(LoadError),
ErrStr = lists:flatten(
io_lib:format("could not load driver ~s: ~p",
[?DRV_NAME, LoadErrorStr])),
{error, ErrStr}
end.
-spec unload() ->
ok | {error, string()}.
unload() ->
case erl_ddll:unload_driver(?DRV_NAME) of
ok -> ok;
{error, UnloadError} ->
UnloadErrorStr = erl_ddll:format_error(UnloadError),
ErrStr = lists:flatten(
io_lib:format("could not unload driver ~s: ~p",
[?DRV_NAME, UnloadErrorStr])),
{error, ErrStr}
end.
init([]) ->
case load() of
ok ->
{ok, #state{}};
{error, Reason} ->
{stop, Reason}
end.
handle_call(_Msg, _From, State) ->
{reply, ok, State}.
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_, State, _) ->
{ok, State}.
-ifdef(TEST).
openlog_opts_test() ->
11 = openlog_opts([1,2,8]),
1 = openlog_opts(pid),
try
foo = openlog_opts(foo)
catch
error:badarg ->
ok;
Reason ->
throw(Reason)
end.
closed_test() ->
{ok, _} = syslog:start(),
try
{ok, Log} = open("test", pid, local0),
Self = self(),
{connected,Self} = erlang:port_info(Log, connected),
ok = close(Log),
try
close(Log)
catch
error:badarg ->
ok;
Reason1 ->
throw(Reason1)
end,
try
ok = log(Log, 8, "writing to closed log")
catch
error:badarg ->
ok;
Reason2 ->
throw(Reason2)
end
after
syslog:stop()
end.
-endif.
|
a9ce35628b87571c8488d95a73cb877f72e12f162498c38aac9d0a356b6a361c | takikawa/racket-ppa | info.rkt | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote ("base" "plot-lib" "math-lib" ("gui-lib" #:version "1.18") "snip-lib" "typed-racket-lib" "typed-racket-more"))) (define build-deps (quote ())) (define pkg-desc "Plot GUI interface") (define pkg-authors (quote (ntoronto))) (define version "1.1") (define test-responsibles (quote ((all )))) (define license (quote (Apache-2.0 OR MIT)))))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/26d6ae74a1b19258c9789b7c14c074d867a4b56b/share/pkgs/plot-gui-lib/info.rkt | racket | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote ("base" "plot-lib" "math-lib" ("gui-lib" #:version "1.18") "snip-lib" "typed-racket-lib" "typed-racket-more"))) (define build-deps (quote ())) (define pkg-desc "Plot GUI interface") (define pkg-authors (quote (ntoronto))) (define version "1.1") (define test-responsibles (quote ((all )))) (define license (quote (Apache-2.0 OR MIT)))))
|
|
b08aba2d7020d7a84feb2d8afc536af6bd4561a8f5d1a3839f20157a9e7eaac1 | quil-lang/quilc | python-tests.lisp | ;;;; python-tests.lisp
;;;;
Author :
(in-package #:libquilc-tests)
(deftest test-python-compile-quil ()
"Test compiling Quil from Python."
(uiop:with-current-directory ("lib/")
(let* ((input-source "H 0")
(parsed-program (cl-quil:safely-parse-quil input-source))
(chip-spec (cl-quil::build-nq-linear-chip 8))
(processed-program (cl-quil:compiler-hook parsed-program chip-spec))
(expected-output (quilc::print-program processed-program nil)))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/compile_quil.py")
:env '((:PYTHONPATH . "."))
:input `(,input-source)
:output :string)
(declare (ignore error-output exit-code))
(is (string= output expected-output))))))
(deftest test-python-compile-protoquil ()
"Test compiling ProtoQuil from Python."
(uiop:with-current-directory ("lib/")
(let* ((input-source "DECLARE ro BIT; H 0; MEASURE 0 ro")
(parsed-program (cl-quil:safely-parse-quil input-source))
(chip-spec (cl-quil::build-nq-linear-chip 8))
(processed-program (cl-quil:compiler-hook parsed-program chip-spec :protoquil t))
(expected-output (quilc::print-program processed-program nil)))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/compile_protoquil.py")
:env '((:PYTHONPATH . "."))
:input `(,input-source)
:output :string)
(declare (ignore error-output exit-code))
(is (string= output expected-output))))))
(deftest test-python-compile-protoquil-bad-program ()
"Test compiling an invalid ProtoQuil program from Python. Should throw an error."
(uiop:with-current-directory ("lib/")
(let* ((input-source "DECLARE ro BIT; MEASURE 0 ro; H 0"))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/compile_protoquil.py")
:ignore-error-status t
:env '((:PYTHONPATH . "."))
:input `(,input-source)
:output :string)
(declare (ignore error-output))
(is (eql exit-code 1)
(string= output "unable to compile program"))))))
(deftest test-python-print-chip-spec ()
"Test printing chip specifications from Python."
(uiop:with-current-directory ("lib/")
(let ((chip-spec1 (cl-quil::build-nq-linear-chip 8))
(chip-spec2 (quilc::lookup-isa-descriptor-for-name "8Q")))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/print_chip_spec.py")
:env '((:PYTHONPATH . "."))
:output :string)
(declare (ignore error-output exit-code))
(is (string= output (concatenate 'string
(cl-quil::debug-print-chip-spec chip-spec1 nil)
(cl-quil::debug-print-chip-spec chip-spec2 nil))))))))
| null | https://raw.githubusercontent.com/quil-lang/quilc/5f70950681008fd0dc345d574b8d293c5a638d5d/lib/tests/python-tests.lisp | lisp | python-tests.lisp
| Author :
(in-package #:libquilc-tests)
(deftest test-python-compile-quil ()
"Test compiling Quil from Python."
(uiop:with-current-directory ("lib/")
(let* ((input-source "H 0")
(parsed-program (cl-quil:safely-parse-quil input-source))
(chip-spec (cl-quil::build-nq-linear-chip 8))
(processed-program (cl-quil:compiler-hook parsed-program chip-spec))
(expected-output (quilc::print-program processed-program nil)))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/compile_quil.py")
:env '((:PYTHONPATH . "."))
:input `(,input-source)
:output :string)
(declare (ignore error-output exit-code))
(is (string= output expected-output))))))
(deftest test-python-compile-protoquil ()
"Test compiling ProtoQuil from Python."
(uiop:with-current-directory ("lib/")
(let* ((input-source "DECLARE ro BIT; H 0; MEASURE 0 ro")
(parsed-program (cl-quil:safely-parse-quil input-source))
(chip-spec (cl-quil::build-nq-linear-chip 8))
(processed-program (cl-quil:compiler-hook parsed-program chip-spec :protoquil t))
(expected-output (quilc::print-program processed-program nil)))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/compile_protoquil.py")
:env '((:PYTHONPATH . "."))
:input `(,input-source)
:output :string)
(declare (ignore error-output exit-code))
(is (string= output expected-output))))))
(deftest test-python-compile-protoquil-bad-program ()
"Test compiling an invalid ProtoQuil program from Python. Should throw an error."
(uiop:with-current-directory ("lib/")
(let* ((input-source "DECLARE ro BIT; MEASURE 0 ro; H 0"))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/compile_protoquil.py")
:ignore-error-status t
:env '((:PYTHONPATH . "."))
:input `(,input-source)
:output :string)
(declare (ignore error-output))
(is (eql exit-code 1)
(string= output "unable to compile program"))))))
(deftest test-python-print-chip-spec ()
"Test printing chip specifications from Python."
(uiop:with-current-directory ("lib/")
(let ((chip-spec1 (cl-quil::build-nq-linear-chip 8))
(chip-spec2 (quilc::lookup-isa-descriptor-for-name "8Q")))
(multiple-value-bind (output error-output exit-code)
(uiop:run-program '("python3" "tests/python/print_chip_spec.py")
:env '((:PYTHONPATH . "."))
:output :string)
(declare (ignore error-output exit-code))
(is (string= output (concatenate 'string
(cl-quil::debug-print-chip-spec chip-spec1 nil)
(cl-quil::debug-print-chip-spec chip-spec2 nil))))))))
|
1efbc7d4e7fe2411cda8ca2e3a0edbb8f6e25838b8a567e15952e2f7145be2de | tolysz/prepare-ghcjs | Data.hs | # LANGUAGE Trustworthy #
# LANGUAGE RankNTypes , ScopedTypeVariables , PolyKinds , StandaloneDeriving ,
TypeOperators , GADTs , FlexibleInstances #
TypeOperators, GADTs, FlexibleInstances #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Data
-- Copyright : (c) The University of Glasgow, CWI 2001--2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : experimental
-- Portability : non-portable (local universal quantification)
--
\"Scrap your boilerplate\ " --- Generic programming in Haskell . See
-- <#Scrap_your_boilerplate.21>.
-- This module provides the 'Data' class with its primitives for
-- generic programming, along with instances for many datatypes. It
-- corresponds to a merge between the previous "Data.Generics.Basics"
-- and almost all of "Data.Generics.Instances". The instances that are
-- not present in this module were moved to the
-- @Data.Generics.Instances@ module in the @syb@ package.
--
-- For more information, please visit the new
-- SYB wiki: <>.
--
-----------------------------------------------------------------------------
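-- A quick taste of the generic operations this module provides (an
-- illustrative sketch, not part of the original documentation):
--
-- >>> toConstr True
-- True
-- >>> gmapQ (const ()) (Just 'x')
-- [()]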
module Data.Data (
-- * Module Data.Typeable re-exported for convenience
module Data.Typeable,
-- * The Data class for processing constructor applications
Data(
gfoldl,
gunfold,
toConstr,
dataTypeOf,
dataCast1, -- mediate types and unary type constructors
dataCast2, -- mediate types and binary type constructors
-- Generic maps defined in terms of gfoldl
gmapT,
gmapQ,
gmapQl,
gmapQr,
gmapQi,
gmapM,
gmapMp,
gmapMo
),
-- * Datatype representations
DataType, -- abstract
-- ** Constructors
mkDataType,
mkIntType,
mkFloatType,
mkCharType,
mkNoRepType,
-- ** Observers
dataTypeName,
DataRep(..),
dataTypeRep,
-- ** Convenience functions
repConstr,
isAlgType,
dataTypeConstrs,
indexConstr,
maxConstrIndex,
isNorepType,
-- * Data constructor representations
Constr, -- abstract
ConIndex, -- alias for Int, start at 1
Fixity(..),
-- ** Constructors
mkConstr,
mkIntegralConstr,
mkRealConstr,
mkCharConstr,
-- ** Observers
constrType,
ConstrRep(..),
constrRep,
constrFields,
constrFixity,
-- ** Convenience function: algebraic data types
constrIndex,
-- ** From strings to constructors and vice versa: all data types
showConstr,
readConstr,
-- * Convenience functions: take type constructors apart
tyconUQname,
tyconModule,
-- * Generic operations defined in terms of 'gunfold'
fromConstr,
fromConstrB,
fromConstrM
) where
------------------------------------------------------------------------------
import Data.Either
import Data.Eq
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Typeable
import Data.Version( Version(..) )
import GHC.Base hiding (Any, IntRep, FloatRep)
import GHC.List
import GHC.Num
import GHC.Read
import GHC.Show
import Text.Read( reads )
-- Imports for the instances
import Data.Int -- So we can give Data instance for Int8, ...
import Data.Type.Coercion
import Data.Word -- So we can give Data instance for Word8, ...
import GHC.Real -- So we can give Data instance for Ratio
--import GHC.IOBase -- So we can give Data instance for IO, Handle
import GHC.Ptr -- So we can give Data instance for Ptr
import GHC.ForeignPtr -- So we can give Data instance for ForeignPtr
--import GHC.Stable -- So we can give Data instance for StablePtr
--import GHC.ST -- So we can give Data instance for ST
--import GHC.Conc -- So we can give Data instance for MVar & Co.
import GHC.Arr -- So we can give Data instance for Array
import qualified GHC.Generics as Generics (Fixity(..))
import GHC.Generics hiding (Fixity(..))
-- So we can give Data instance for U1, V1, ...
------------------------------------------------------------------------------
--
-- The Data class
--
------------------------------------------------------------------------------
{- |
The 'Data' class comprehends a fundamental primitive 'gfoldl' for
folding over constructor applications, say terms. This primitive can
be instantiated in several ways to map over the immediate subterms
of a term; see the @gmap@ combinators later in this class. Indeed, a
generic programmer does not necessarily need to use the ingenious gfoldl
primitive but rather the intuitive @gmap@ combinators. The 'gfoldl'
primitive is completed by means to query top-level constructors, to
turn constructor representations into proper terms, and to list all
possible datatype constructors. This completion allows us to serve
generic programming scenarios like read, show, equality, term generation.
The combinators 'gmapT', 'gmapQ', 'gmapM', etc are all provided with
default definitions in terms of 'gfoldl', leaving open the opportunity
to provide datatype-specific definitions.
(The inclusion of the @gmap@ combinators as members of class 'Data'
allows the programmer or the compiler to derive specialised, and maybe
more efficient code per datatype. /Note/: 'gfoldl' is more higher-order
than the @gmap@ combinators. This is subject to ongoing benchmarking
experiments. It might turn out that the @gmap@ combinators will be
moved out of the class 'Data'.)
Conceptually, the definition of the @gmap@ combinators in terms of the
primitive 'gfoldl' requires the identification of the 'gfoldl' function
arguments. Technically, we also need to identify the type constructor
@c@ for the construction of the result type from the folded term type.
In the definition of @gmapQ@/x/ combinators, we use phantom type
constructors for the @c@ in the type of 'gfoldl' because the result type
of a query does not involve the (polymorphic) type of the term argument.
In the definition of 'gmapQl' we simply use the plain constant type
constructor because 'gfoldl' is left-associative anyway and so it is
readily suited to fold a left-associative binary operation over the
immediate subterms. In the definition of gmapQr, extra effort is
needed. We use a higher-order accumulation trick to mediate between
left-associative constructor application vs. right-associative binary
operation (e.g., @(:)@). When the query is meant to compute a value
of type @r@, then the result type within generic folding is @r -> r@.
So the result of folding is a function to which we finally pass the
right unit.
With the @-XDeriveDataTypeable@ option, GHC can generate instances of the
'Data' class automatically. For example, given the declaration
> data T a b = C1 a b | C2 deriving (Typeable, Data)
GHC will generate an instance that is equivalent to
> instance (Data a, Data b) => Data (T a b) where
> gfoldl k z (C1 a b) = z C1 `k` a `k` b
> gfoldl k z C2 = z C2
>
> gunfold k z c = case constrIndex c of
> 1 -> k (k (z C1))
> 2 -> z C2
>
> toConstr (C1 _ _) = con_C1
> toConstr C2 = con_C2
>
> dataTypeOf _ = ty_T
>
> con_C1 = mkConstr ty_T "C1" [] Prefix
> con_C2 = mkConstr ty_T "C2" [] Prefix
> ty_T = mkDataType "Module.T" [con_C1, con_C2]
This is suitable for datatypes that are exported transparently.
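With such an instance in scope, the generic combinators defined below apply
uniformly. A small sketch (reusing the example type @T@ and its constructors
introduced just above):

> gmapQ (const 1) (C1 'a' True)   -- [1,1], one result per immediate subterm
> gmapT id        (C1 'a' True)   -- C1 'a' True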
-}
class Typeable a => Data a where
-- | Left-associative fold operation for constructor applications.
--
-- The type of 'gfoldl' is a headache, but operationally it is a simple
-- generalisation of a list fold.
--
-- The default definition for 'gfoldl' is @'const' 'id'@, which is
-- suitable for abstract datatypes with no substructures.
gfoldl :: (forall d b. Data d => c (d -> b) -> d -> c b)
-- ^ defines how nonempty constructor applications are
-- folded. It takes the folded tail of the constructor
-- application and its head, i.e., an immediate subterm,
-- and combines them in some way.
-> (forall g. g -> c g)
-- ^ defines how the empty constructor application is
-- folded, like the neutral \/ start element for list
-- folding.
-> a
-- ^ structure to be folded.
-> c a
-- ^ result, with a type defined in terms of @a@, but
-- variability is achieved by means of type constructor
-- @c@ for the construction of the actual result type.
-- See the 'Data' instances in this file for an illustration of 'gfoldl'.
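--
-- As a sketch of the intended semantics, for a unary constructor
-- application such as @Just 'x'@ an instance behaves like
--
-- > gfoldl k z (Just 'x') == (z Just `k` 'x')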
gfoldl _ z = z
-- | Unfolding constructor applications
gunfold :: (forall b r. Data b => c (b -> r) -> c r)
-> (forall r. r -> c r)
-> Constr
-> c a
-- | Obtaining the constructor from a given datum.
-- For proper terms, this is meant to be the top-level constructor.
-- Primitive datatypes are here viewed as potentially infinite sets of
-- values (i.e., constructors).
toConstr :: a -> Constr
-- | The outer type constructor of the type
dataTypeOf :: a -> DataType
------------------------------------------------------------------------------
--
-- Mediate types and type constructors
--
------------------------------------------------------------------------------
-- | Mediate types and unary type constructors.
-- In 'Data' instances of the form @T a@, 'dataCast1' should be defined
-- as 'gcast1'.
--
-- The default definition is @'const' 'Nothing'@, which is appropriate
-- for non-unary type constructors.
dataCast1 :: Typeable t
=> (forall d. Data d => c (t d))
-> Maybe (c a)
dataCast1 _ = Nothing
-- | Mediate types and binary type constructors.
-- In 'Data' instances of the form @T a b@, 'dataCast2' should be
-- defined as 'gcast2'.
--
-- The default definition is @'const' 'Nothing'@, which is appropriate
-- for non-binary type constructors.
dataCast2 :: Typeable t
=> (forall d e. (Data d, Data e) => c (t d e))
-> Maybe (c a)
dataCast2 _ = Nothing
------------------------------------------------------------------------------
--
-- Typical generic maps defined in terms of gfoldl
--
------------------------------------------------------------------------------
-- | A generic transformation that maps over the immediate subterms
--
-- The default definition instantiates the type constructor @c@ in the
-- type of 'gfoldl' to an identity datatype constructor, using the
-- isomorphism pair as injection and projection.
gmapT :: (forall b. Data b => b -> b) -> a -> a
-- Use an identity datatype constructor ID (see below)
-- to instantiate the type constructor c in the type of gfoldl,
-- and perform injections ID and projections unID accordingly.
--
gmapT f x0 = unID (gfoldl k ID x0)
where
k :: Data d => ID (d->b) -> d -> ID b
k (ID c) x = ID (c (f x))
-- | A generic query with a left-associative binary operator
gmapQl :: forall r r'. (r -> r' -> r) -> r -> (forall d. Data d => d -> r') -> a -> r
gmapQl o r f = unCONST . gfoldl k z
where
k :: Data d => CONST r (d->b) -> d -> CONST r b
k c x = CONST $ (unCONST c) `o` f x
z :: g -> CONST r g
z _ = CONST r
-- | A generic query with a right-associative binary operator
gmapQr :: forall r r'. (r' -> r -> r) -> r -> (forall d. Data d => d -> r') -> a -> r
gmapQr o r0 f x0 = unQr (gfoldl k (const (Qr id)) x0) r0
where
k :: Data d => Qr r (d->b) -> d -> Qr r b
k (Qr c) x = Qr (\r -> c (f x `o` r))
-- | A generic query that processes the immediate subterms and returns a list
-- of results. The list is given in the same order as originally specified
-- in the declaration of the data constructors.
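--
-- For instance (a sketch):
--
-- > gmapQ (const 1) ('a', True)   -- [1,1]
-- > gmapQ (const 1) (Just 'x')    -- [1]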
gmapQ :: (forall d. Data d => d -> u) -> a -> [u]
gmapQ f = gmapQr (:) [] f
-- | A generic query that processes one child by index (zero-based)
gmapQi :: forall u. Int -> (forall d. Data d => d -> u) -> a -> u
gmapQi i f x = case gfoldl k z x of { Qi _ q -> fromJust q }
where
k :: Data d => Qi u (d -> b) -> d -> Qi u b
k (Qi i' q) a = Qi (i'+1) (if i==i' then Just (f a) else q)
z :: g -> Qi q g
z _ = Qi 0 Nothing
-- | A generic monadic transformation that maps over the immediate subterms
--
-- The default definition instantiates the type constructor @c@ in
-- the type of 'gfoldl' to the monad datatype constructor, defining
-- injection and projection using 'return' and '>>='.
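--
-- A sketch of a use in @IO@:
--
-- > gmapM (\d -> print (toConstr d) >> return d) (Just 'x')
--
-- prints the constructor of the single subterm and returns @Just 'x'@.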
gmapM :: forall m. Monad m => (forall d. Data d => d -> m d) -> a -> m a
-- Use immediately the monad datatype constructor
-- to instantiate the type constructor c in the type of gfoldl,
-- so injection and projection is done by return and >>=.
--
gmapM f = gfoldl k return
where
k :: Data d => m (d -> b) -> d -> m b
k c x = do c' <- c
x' <- f x
return (c' x')
-- | Transformation of at least one immediate subterm does not fail
gmapMp :: forall m. MonadPlus m => (forall d. Data d => d -> m d) -> a -> m a
{- The type constructor that we use here simply keeps track of the fact
if we already succeeded for an immediate subterm; see Mp below. To
this end, we couple the monadic computation with a Boolean.
-}
gmapMp f x = unMp (gfoldl k z x) >>= \(x',b) ->
if b then return x' else mzero
where
z :: g -> Mp m g
z g = Mp (return (g,False))
k :: Data d => Mp m (d -> b) -> d -> Mp m b
k (Mp c) y
= Mp ( c >>= \(h, b) ->
(f y >>= \y' -> return (h y', True))
`mplus` return (h y, b)
)
-- | Transformation of one immediate subterm with success
gmapMo :: forall m. MonadPlus m => (forall d. Data d => d -> m d) -> a -> m a
{- We use the same pairing trick as for gmapMp,
i.e., we use an extra Bool component to keep track of the
fact whether an immediate subterm was processed successfully.
However, we cut off mapping over subterms once a first subterm
was transformed successfully.
-}
gmapMo f x = unMp (gfoldl k z x) >>= \(x',b) ->
if b then return x' else mzero
where
z :: g -> Mp m g
z g = Mp (return (g,False))
k :: Data d => Mp m (d -> b) -> d -> Mp m b
k (Mp c) y
= Mp ( c >>= \(h,b) -> if b
then return (h y, b)
else (f y >>= \y' -> return (h y',True))
`mplus` return (h y, b)
)
-- | The identity type constructor needed for the definition of gmapT
newtype ID x = ID { unID :: x }
-- | The constant type constructor needed for the definition of gmapQl
newtype CONST c a = CONST { unCONST :: c }
-- | Type constructor for adding counters to queries
data Qi q a = Qi Int (Maybe q)
-- | The type constructor used in definition of gmapQr
newtype Qr r a = Qr { unQr :: r -> r }
-- | The type constructor used in definition of gmapMp
newtype Mp m x = Mp { unMp :: m (x, Bool) }
------------------------------------------------------------------------------
--
-- Generic unfolding
--
------------------------------------------------------------------------------
-- | Build a term skeleton
fromConstr :: Data a => Constr -> a
fromConstr = fromConstrB (errorWithoutStackTrace "Data.Data.fromConstr")
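-- For example (a sketch): @fromConstr (toConstr True) :: Bool@ rebuilds
-- 'True'; for constructors with fields the fields are left as error thunks,
-- hence the \"skeleton\".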
-- | Build a term and use a generic function for subterms
fromConstrB :: Data a
=> (forall d. Data d => d)
-> Constr
-> a
fromConstrB f = unID . gunfold k z
where
k :: forall b r. Data b => ID (b -> r) -> ID r
k c = ID (unID c f)
z :: forall r. r -> ID r
z = ID
-- | Monadic variation on 'fromConstrB'
fromConstrM :: forall m a. (Monad m, Data a)
=> (forall d. Data d => m d)
-> Constr
-> m a
fromConstrM f = gunfold k z
where
k :: forall b r. Data b => m (b -> r) -> m r
k c = do { c' <- c; b <- f; return (c' b) }
z :: forall r. r -> m r
z = return
------------------------------------------------------------------------------
--
-- Datatype and constructor representations
--
------------------------------------------------------------------------------
--
-- | Representation of datatypes.
-- A package of constructor representations with names of type and module.
--
data DataType = DataType
{ tycon :: String
, datarep :: DataRep
}
deriving Show
-- | Representation of constructors. Note that equality on constructors
-- with different types may not work -- i.e. the constructors for 'False' and
-- 'Nothing' may compare equal.
data Constr = Constr
{ conrep :: ConstrRep
, constring :: String
, confields :: [String] -- for AlgRep only
, confixity :: Fixity -- for AlgRep only
, datatype :: DataType
}
instance Show Constr where
show = constring
-- | Equality of constructors
instance Eq Constr where
c == c' = constrRep c == constrRep c'
-- | Public representation of datatypes
data DataRep = AlgRep [Constr]
| IntRep
| FloatRep
| CharRep
| NoRep
deriving (Eq,Show)
-- The list of constructors could be an array, a balanced tree, or others.
-- | Public representation of constructors
data ConstrRep = AlgConstr ConIndex
| IntConstr Integer
| FloatConstr Rational
| CharConstr Char
deriving (Eq,Show)
-- | Unique index for datatype constructors,
-- counting from 1 in the order they are given in the program text.
type ConIndex = Int
-- | Fixity of constructors
data Fixity = Prefix
| Infix -- Later: add associativity and precedence
deriving (Eq,Show)
------------------------------------------------------------------------------
--
-- Observers for datatype representations
--
------------------------------------------------------------------------------
-- | Gets the type constructor including the module
dataTypeName :: DataType -> String
dataTypeName = tycon
-- | Gets the public presentation of a datatype
dataTypeRep :: DataType -> DataRep
dataTypeRep = datarep
-- | Gets the datatype of a constructor
constrType :: Constr -> DataType
constrType = datatype
-- | Gets the public presentation of constructors
constrRep :: Constr -> ConstrRep
constrRep = conrep
-- | Look up a constructor by its representation
repConstr :: DataType -> ConstrRep -> Constr
repConstr dt cr =
case (dataTypeRep dt, cr) of
(AlgRep cs, AlgConstr i) -> cs !! (i-1)
(IntRep, IntConstr i) -> mkIntegralConstr dt i
(FloatRep, FloatConstr f) -> mkRealConstr dt f
(CharRep, CharConstr c) -> mkCharConstr dt c
_ -> errorWithoutStackTrace "Data.Data.repConstr: The given ConstrRep does not fit to the given DataType."
------------------------------------------------------------------------------
--
-- Representations of algebraic data types
--
------------------------------------------------------------------------------
-- | Constructs an algebraic datatype
mkDataType :: String -> [Constr] -> DataType
mkDataType str cs = DataType
{ tycon = str
, datarep = AlgRep cs
}
-- | Constructs a constructor
mkConstr :: DataType -> String -> [String] -> Fixity -> Constr
mkConstr dt str fields fix =
Constr
{ conrep = AlgConstr idx
, constring = str
, confields = fields
, confixity = fix
, datatype = dt
}
where
idx = head [ i | (c,i) <- dataTypeConstrs dt `zip` [1..],
showConstr c == str ]
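-- A sketch of manual construction, mirroring what a derived instance does
-- (the names @ty_T@, @con_C1@, @con_C2@ are hypothetical):
--
-- > con_C1 = mkConstr ty_T "C1" [] Prefix
-- > con_C2 = mkConstr ty_T "C2" [] Prefix
-- > ty_T   = mkDataType "Module.T" [con_C1, con_C2]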
-- | Gets the constructors of an algebraic datatype
dataTypeConstrs :: DataType -> [Constr]
dataTypeConstrs dt = case datarep dt of
(AlgRep cons) -> cons
_ -> errorWithoutStackTrace $ "Data.Data.dataTypeConstrs is not supported for "
++ dataTypeName dt ++
", as it is not an algebraic data type."
-- | Gets the field labels of a constructor. The list of labels
-- is returned in the same order as they were given in the original
-- constructor declaration.
constrFields :: Constr -> [String]
constrFields = confields
-- | Gets the fixity of a constructor
constrFixity :: Constr -> Fixity
constrFixity = confixity
------------------------------------------------------------------------------
--
-- From strings to constr's and vice versa: all data types
--
------------------------------------------------------------------------------
-- | Gets the string for a constructor
showConstr :: Constr -> String
showConstr = constring
-- | Lookup a constructor via a string
readConstr :: DataType -> String -> Maybe Constr
readConstr dt str =
case dataTypeRep dt of
AlgRep cons -> idx cons
IntRep -> mkReadCon (\i -> (mkPrimCon dt str (IntConstr i)))
FloatRep -> mkReadCon ffloat
CharRep -> mkReadCon (\c -> (mkPrimCon dt str (CharConstr c)))
NoRep -> Nothing
where
-- Read a value and build a constructor
mkReadCon :: Read t => (t -> Constr) -> Maybe Constr
mkReadCon f = case (reads str) of
[(t,"")] -> Just (f t)
_ -> Nothing
-- Traverse list of algebraic datatype constructors
idx :: [Constr] -> Maybe Constr
idx cons = let fit = filter ((==) str . showConstr) cons
in if fit == []
then Nothing
else Just (head fit)
ffloat :: Double -> Constr
ffloat = mkPrimCon dt str . FloatConstr . toRational
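-- For example (a sketch):
--
-- > readConstr (dataTypeOf True) "True"    == Just (toConstr True)
-- > readConstr (dataTypeOf True) "Perhaps" == Nothing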
------------------------------------------------------------------------------
--
-- Convenience functions: algebraic data types
--
------------------------------------------------------------------------------
-- | Test for an algebraic type
isAlgType :: DataType -> Bool
isAlgType dt = case datarep dt of
(AlgRep _) -> True
_ -> False
-- | Gets the constructor for an index (algebraic datatypes only)
indexConstr :: DataType -> ConIndex -> Constr
indexConstr dt idx = case datarep dt of
(AlgRep cs) -> cs !! (idx-1)
_ -> errorWithoutStackTrace $ "Data.Data.indexConstr is not supported for "
++ dataTypeName dt ++
", as it is not an algebraic data type."
-- | Gets the index of a constructor (algebraic datatypes only)
constrIndex :: Constr -> ConIndex
constrIndex con = case constrRep con of
(AlgConstr idx) -> idx
_ -> errorWithoutStackTrace $ "Data.Data.constrIndex is not supported for "
++ dataTypeName (constrType con) ++
", as it is not an algebraic data type."
-- | Gets the maximum constructor index of an algebraic datatype
maxConstrIndex :: DataType -> ConIndex
maxConstrIndex dt = case dataTypeRep dt of
AlgRep cs -> length cs
_ -> errorWithoutStackTrace $ "Data.Data.maxConstrIndex is not supported for "
++ dataTypeName dt ++
", as it is not an algebraic data type."
------------------------------------------------------------------------------
--
-- Representation of primitive types
--
------------------------------------------------------------------------------
-- | Constructs the 'Int' type
mkIntType :: String -> DataType
mkIntType = mkPrimType IntRep
-- | Constructs the 'Float' type
mkFloatType :: String -> DataType
mkFloatType = mkPrimType FloatRep
-- | Constructs the 'Char' type
mkCharType :: String -> DataType
mkCharType = mkPrimType CharRep
-- | Helper for 'mkIntType', 'mkFloatType'
mkPrimType :: DataRep -> String -> DataType
mkPrimType dr str = DataType
{ tycon = str
, datarep = dr
}
-- Makes a constructor for primitive types
mkPrimCon :: DataType -> String -> ConstrRep -> Constr
mkPrimCon dt str cr = Constr
{ datatype = dt
, conrep = cr
, constring = str
, confields = errorWithoutStackTrace "Data.Data.confields"
, confixity = errorWithoutStackTrace "Data.Data.confixity"
}
mkIntegralConstr :: (Integral a, Show a) => DataType -> a -> Constr
mkIntegralConstr dt i = case datarep dt of
IntRep -> mkPrimCon dt (show i) (IntConstr (toInteger i))
_ -> errorWithoutStackTrace $ "Data.Data.mkIntegralConstr is not supported for "
++ dataTypeName dt ++
", as it is not an Integral data type."
mkRealConstr :: (Real a, Show a) => DataType -> a -> Constr
mkRealConstr dt f = case datarep dt of
FloatRep -> mkPrimCon dt (show f) (FloatConstr (toRational f))
_ -> errorWithoutStackTrace $ "Data.Data.mkRealConstr is not supported for "
++ dataTypeName dt ++
", as it is not an Real data type."
| Makes a constructor for ' ' .
mkCharConstr :: DataType -> Char -> Constr
mkCharConstr dt c = case datarep dt of
CharRep -> mkPrimCon dt (show c) (CharConstr c)
_ -> errorWithoutStackTrace $ "Data.Data.mkCharConstr is not supported for "
++ dataTypeName dt ++
", as it is not an Char data type."
------------------------------------------------------------------------------
--
-- Non-representations for non-representable types
--
------------------------------------------------------------------------------
-- | Constructs a non-representation for a non-representable type
mkNoRepType :: String -> DataType
mkNoRepType str = DataType
{ tycon = str
, datarep = NoRep
}
-- | Test for a non-representable type
isNorepType :: DataType -> Bool
isNorepType dt = case datarep dt of
NoRep -> True
_ -> False
------------------------------------------------------------------------------
--
-- Convenience for qualified type constructors
--
------------------------------------------------------------------------------
-- | Gets the unqualified type constructor:
-- drop *.*.*... before name
--
tyconUQname :: String -> String
tyconUQname x = let x' = dropWhile (not . (==) '.') x
in if x' == [] then x else tyconUQname (tail x')
-- | Gets the module of a type constructor:
-- take *.*.*... before name
tyconModule :: String -> String
tyconModule x = let (a,b) = break ((==) '.') x
in if b == ""
then b
else a ++ tyconModule' (tail b)
where
tyconModule' y = let y' = tyconModule y
in if y' == "" then "" else ('.':y')
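-- For instance (illustrative only):
--
-- >>> tyconUQname "GHC.Real.Ratio"
-- "Ratio"
-- >>> tyconModule "GHC.Real.Ratio"
-- "GHC.Real"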
------------------------------------------------------------------------------
------------------------------------------------------------------------------
--
-- Instances of the Data class for Prelude-like types.
-- We define top-level definitions for representations.
--
------------------------------------------------------------------------------
falseConstr :: Constr
falseConstr = mkConstr boolDataType "False" [] Prefix
trueConstr :: Constr
trueConstr = mkConstr boolDataType "True" [] Prefix
boolDataType :: DataType
boolDataType = mkDataType "Prelude.Bool" [falseConstr,trueConstr]
instance Data Bool where
toConstr False = falseConstr
toConstr True = trueConstr
gunfold _ z c = case constrIndex c of
1 -> z False
2 -> z True
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor "
++ show c
++ " is not of type Bool."
dataTypeOf _ = boolDataType
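-- A round-trip sketch (illustrative only; 'fromConstr' is defined earlier in
-- this module and only needs the constructor itself for nullary data types):
--
-- >>> toConstr True
-- True
-- >>> fromConstr (toConstr True) :: Bool
-- True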
------------------------------------------------------------------------------
charType :: DataType
charType = mkCharType "Prelude.Char"
instance Data Char where
toConstr x = mkCharConstr charType x
gunfold _ z c = case constrRep c of
(CharConstr x) -> z x
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Char."
dataTypeOf _ = charType
------------------------------------------------------------------------------
floatType :: DataType
floatType = mkFloatType "Prelude.Float"
instance Data Float where
toConstr = mkRealConstr floatType
gunfold _ z c = case constrRep c of
(FloatConstr x) -> z (realToFrac x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Float."
dataTypeOf _ = floatType
------------------------------------------------------------------------------
doubleType :: DataType
doubleType = mkFloatType "Prelude.Double"
instance Data Double where
toConstr = mkRealConstr doubleType
gunfold _ z c = case constrRep c of
(FloatConstr x) -> z (realToFrac x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Double."
dataTypeOf _ = doubleType
------------------------------------------------------------------------------
intType :: DataType
intType = mkIntType "Prelude.Int"
instance Data Int where
toConstr x = mkIntegralConstr intType x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int."
dataTypeOf _ = intType
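-- Primitive types are represented through their value, e.g. (illustrative
-- only):
--
-- >>> toConstr (42 :: Int)
-- 42
-- >>> fromConstr (toConstr (42 :: Int)) :: Int
-- 42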
------------------------------------------------------------------------------
integerType :: DataType
integerType = mkIntType "Prelude.Integer"
instance Data Integer where
toConstr = mkIntegralConstr integerType
gunfold _ z c = case constrRep c of
(IntConstr x) -> z x
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Integer."
dataTypeOf _ = integerType
------------------------------------------------------------------------------
int8Type :: DataType
int8Type = mkIntType "Data.Int.Int8"
instance Data Int8 where
toConstr x = mkIntegralConstr int8Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int8."
dataTypeOf _ = int8Type
------------------------------------------------------------------------------
int16Type :: DataType
int16Type = mkIntType "Data.Int.Int16"
instance Data Int16 where
toConstr x = mkIntegralConstr int16Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int16."
dataTypeOf _ = int16Type
------------------------------------------------------------------------------
int32Type :: DataType
int32Type = mkIntType "Data.Int.Int32"
instance Data Int32 where
toConstr x = mkIntegralConstr int32Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int32."
dataTypeOf _ = int32Type
------------------------------------------------------------------------------
int64Type :: DataType
int64Type = mkIntType "Data.Int.Int64"
instance Data Int64 where
toConstr x = mkIntegralConstr int64Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int64."
dataTypeOf _ = int64Type
------------------------------------------------------------------------------
wordType :: DataType
wordType = mkIntType "Data.Word.Word"
instance Data Word where
toConstr x = mkIntegralConstr wordType x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word"
dataTypeOf _ = wordType
------------------------------------------------------------------------------
word8Type :: DataType
word8Type = mkIntType "Data.Word.Word8"
instance Data Word8 where
toConstr x = mkIntegralConstr word8Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word8."
dataTypeOf _ = word8Type
------------------------------------------------------------------------------
word16Type :: DataType
word16Type = mkIntType "Data.Word.Word16"
instance Data Word16 where
toConstr x = mkIntegralConstr word16Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word16."
dataTypeOf _ = word16Type
------------------------------------------------------------------------------
word32Type :: DataType
word32Type = mkIntType "Data.Word.Word32"
instance Data Word32 where
toConstr x = mkIntegralConstr word32Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word32."
dataTypeOf _ = word32Type
------------------------------------------------------------------------------
word64Type :: DataType
word64Type = mkIntType "Data.Word.Word64"
instance Data Word64 where
toConstr x = mkIntegralConstr word64Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word64."
dataTypeOf _ = word64Type
------------------------------------------------------------------------------
ratioConstr :: Constr
ratioConstr = mkConstr ratioDataType ":%" [] Infix
ratioDataType :: DataType
ratioDataType = mkDataType "GHC.Real.Ratio" [ratioConstr]
instance (Data a, Integral a) => Data (Ratio a) where
gfoldl k z (a :% b) = z (%) `k` a `k` b
toConstr _ = ratioConstr
gunfold k z c | constrIndex c == 1 = k (k (z (%)))
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(Ratio)"
dataTypeOf _ = ratioDataType
------------------------------------------------------------------------------
nilConstr :: Constr
nilConstr = mkConstr listDataType "[]" [] Prefix
consConstr :: Constr
consConstr = mkConstr listDataType "(:)" [] Infix
listDataType :: DataType
listDataType = mkDataType "Prelude.[]" [nilConstr,consConstr]
instance Data a => Data [a] where
gfoldl _ z [] = z []
gfoldl f z (x:xs) = z (:) `f` x `f` xs
toConstr [] = nilConstr
toConstr (_:_) = consConstr
gunfold k z c = case constrIndex c of
1 -> z []
2 -> k (k (z (:)))
_ -> errorWithoutStackTrace "Data.Data.gunfold(List)"
dataTypeOf _ = listDataType
dataCast1 f = gcast1 f
--
-- The gmaps are given as an illustration.
-- This shows that the gmaps for lists are different from list maps.
--
gmapT _ [] = []
gmapT f (x:xs) = (f x:f xs)
gmapQ _ [] = []
gmapQ f (x:xs) = [f x,f xs]
gmapM _ [] = return []
gmapM f (x:xs) = f x >>= \x' -> f xs >>= \xs' -> return (x':xs')
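-- For example (illustrative only): the immediate subterms of @1:2:3:[]@ are
-- the head @1@ and the tail @2:3:[]@, so a generic query yields two answers
-- rather than one per element:
--
-- >>> gmapQ (const ()) [1,2,3 :: Int]
-- [(),()]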
------------------------------------------------------------------------------
nothingConstr :: Constr
nothingConstr = mkConstr maybeDataType "Nothing" [] Prefix
justConstr :: Constr
justConstr = mkConstr maybeDataType "Just" [] Prefix
maybeDataType :: DataType
maybeDataType = mkDataType "Prelude.Maybe" [nothingConstr,justConstr]
instance Data a => Data (Maybe a) where
gfoldl _ z Nothing = z Nothing
gfoldl f z (Just x) = z Just `f` x
toConstr Nothing = nothingConstr
toConstr (Just _) = justConstr
gunfold k z c = case constrIndex c of
1 -> z Nothing
2 -> k (z Just)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Maybe)"
dataTypeOf _ = maybeDataType
dataCast1 f = gcast1 f
------------------------------------------------------------------------------
ltConstr :: Constr
ltConstr = mkConstr orderingDataType "LT" [] Prefix
eqConstr :: Constr
eqConstr = mkConstr orderingDataType "EQ" [] Prefix
gtConstr :: Constr
gtConstr = mkConstr orderingDataType "GT" [] Prefix
orderingDataType :: DataType
orderingDataType = mkDataType "Prelude.Ordering" [ltConstr,eqConstr,gtConstr]
instance Data Ordering where
gfoldl _ z LT = z LT
gfoldl _ z EQ = z EQ
gfoldl _ z GT = z GT
toConstr LT = ltConstr
toConstr EQ = eqConstr
toConstr GT = gtConstr
gunfold _ z c = case constrIndex c of
1 -> z LT
2 -> z EQ
3 -> z GT
_ -> errorWithoutStackTrace "Data.Data.gunfold(Ordering)"
dataTypeOf _ = orderingDataType
------------------------------------------------------------------------------
leftConstr :: Constr
leftConstr = mkConstr eitherDataType "Left" [] Prefix
rightConstr :: Constr
rightConstr = mkConstr eitherDataType "Right" [] Prefix
eitherDataType :: DataType
eitherDataType = mkDataType "Prelude.Either" [leftConstr,rightConstr]
instance (Data a, Data b) => Data (Either a b) where
gfoldl f z (Left a) = z Left `f` a
gfoldl f z (Right a) = z Right `f` a
toConstr (Left _) = leftConstr
toConstr (Right _) = rightConstr
gunfold k z c = case constrIndex c of
1 -> k (z Left)
2 -> k (z Right)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Either)"
dataTypeOf _ = eitherDataType
dataCast2 f = gcast2 f
------------------------------------------------------------------------------
tuple0Constr :: Constr
tuple0Constr = mkConstr tuple0DataType "()" [] Prefix
tuple0DataType :: DataType
tuple0DataType = mkDataType "Prelude.()" [tuple0Constr]
instance Data () where
toConstr () = tuple0Constr
gunfold _ z c | constrIndex c == 1 = z ()
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(unit)"
dataTypeOf _ = tuple0DataType
------------------------------------------------------------------------------
tuple2Constr :: Constr
tuple2Constr = mkConstr tuple2DataType "(,)" [] Infix
tuple2DataType :: DataType
tuple2DataType = mkDataType "Prelude.(,)" [tuple2Constr]
instance (Data a, Data b) => Data (a,b) where
gfoldl f z (a,b) = z (,) `f` a `f` b
toConstr (_,_) = tuple2Constr
gunfold k z c | constrIndex c == 1 = k (k (z (,)))
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(tup2)"
dataTypeOf _ = tuple2DataType
dataCast2 f = gcast2 f
------------------------------------------------------------------------------
tuple3Constr :: Constr
tuple3Constr = mkConstr tuple3DataType "(,,)" [] Infix
tuple3DataType :: DataType
tuple3DataType = mkDataType "Prelude.(,,)" [tuple3Constr]
instance (Data a, Data b, Data c) => Data (a,b,c) where
gfoldl f z (a,b,c) = z (,,) `f` a `f` b `f` c
toConstr (_,_,_) = tuple3Constr
gunfold k z c | constrIndex c == 1 = k (k (k (z (,,))))
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(tup3)"
dataTypeOf _ = tuple3DataType
------------------------------------------------------------------------------
tuple4Constr :: Constr
tuple4Constr = mkConstr tuple4DataType "(,,,)" [] Infix
tuple4DataType :: DataType
tuple4DataType = mkDataType "Prelude.(,,,)" [tuple4Constr]
instance (Data a, Data b, Data c, Data d)
=> Data (a,b,c,d) where
gfoldl f z (a,b,c,d) = z (,,,) `f` a `f` b `f` c `f` d
toConstr (_,_,_,_) = tuple4Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (z (,,,)))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup4)"
dataTypeOf _ = tuple4DataType
------------------------------------------------------------------------------
tuple5Constr :: Constr
tuple5Constr = mkConstr tuple5DataType "(,,,,)" [] Infix
tuple5DataType :: DataType
tuple5DataType = mkDataType "Prelude.(,,,,)" [tuple5Constr]
instance (Data a, Data b, Data c, Data d, Data e)
=> Data (a,b,c,d,e) where
gfoldl f z (a,b,c,d,e) = z (,,,,) `f` a `f` b `f` c `f` d `f` e
toConstr (_,_,_,_,_) = tuple5Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (k (z (,,,,))))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup5)"
dataTypeOf _ = tuple5DataType
------------------------------------------------------------------------------
tuple6Constr :: Constr
tuple6Constr = mkConstr tuple6DataType "(,,,,,)" [] Infix
tuple6DataType :: DataType
tuple6DataType = mkDataType "Prelude.(,,,,,)" [tuple6Constr]
instance (Data a, Data b, Data c, Data d, Data e, Data f)
=> Data (a,b,c,d,e,f) where
gfoldl f z (a,b,c,d,e,f') = z (,,,,,) `f` a `f` b `f` c `f` d `f` e `f` f'
toConstr (_,_,_,_,_,_) = tuple6Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (k (k (z (,,,,,)))))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup6)"
dataTypeOf _ = tuple6DataType
------------------------------------------------------------------------------
tuple7Constr :: Constr
tuple7Constr = mkConstr tuple7DataType "(,,,,,,)" [] Infix
tuple7DataType :: DataType
tuple7DataType = mkDataType "Prelude.(,,,,,,)" [tuple7Constr]
instance (Data a, Data b, Data c, Data d, Data e, Data f, Data g)
=> Data (a,b,c,d,e,f,g) where
gfoldl f z (a,b,c,d,e,f',g) =
z (,,,,,,) `f` a `f` b `f` c `f` d `f` e `f` f' `f` g
toConstr (_,_,_,_,_,_,_) = tuple7Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (k (k (k (z (,,,,,,))))))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup7)"
dataTypeOf _ = tuple7DataType
------------------------------------------------------------------------------
instance Data a => Data (Ptr a) where
toConstr _ = errorWithoutStackTrace "Data.Data.toConstr(Ptr)"
gunfold _ _ = errorWithoutStackTrace "Data.Data.gunfold(Ptr)"
dataTypeOf _ = mkNoRepType "GHC.Ptr.Ptr"
dataCast1 x = gcast1 x
------------------------------------------------------------------------------
instance Data a => Data (ForeignPtr a) where
toConstr _ = errorWithoutStackTrace "Data.Data.toConstr(ForeignPtr)"
gunfold _ _ = errorWithoutStackTrace "Data.Data.gunfold(ForeignPtr)"
dataTypeOf _ = mkNoRepType "GHC.ForeignPtr.ForeignPtr"
dataCast1 x = gcast1 x
------------------------------------------------------------------------------
-- The Data instance for Array preserves data abstraction at the cost of
-- inefficiency. We omit reflection services for the sake of data abstraction.
instance (Data a, Data b, Ix a) => Data (Array a b)
where
gfoldl f z a = z (listArray (bounds a)) `f` (elems a)
toConstr _ = errorWithoutStackTrace "Data.Data.toConstr(Array)"
gunfold _ _ = errorWithoutStackTrace "Data.Data.gunfold(Array)"
dataTypeOf _ = mkNoRepType "Data.Array.Array"
dataCast2 x = gcast2 x
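-- As a consequence (illustrative only), a generic query sees a single
-- immediate subterm, namely the element list; the partially applied
-- 'listArray' plays the role of an opaque constructor:
--
-- >>> gmapQ (const ()) (listArray (1,3) "abc" :: Array Int Char)
-- [()]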
----------------------------------------------------------------------------
-- Data instance for Proxy
proxyConstr :: Constr
proxyConstr = mkConstr proxyDataType "Proxy" [] Prefix
proxyDataType :: DataType
proxyDataType = mkDataType "Data.Proxy.Proxy" [proxyConstr]
instance (Data t) => Data (Proxy t) where
gfoldl _ z Proxy = z Proxy
toConstr Proxy = proxyConstr
gunfold _ z c = case constrIndex c of
1 -> z Proxy
_ -> errorWithoutStackTrace "Data.Data.gunfold(Proxy)"
dataTypeOf _ = proxyDataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
-- instance for (:~:)
reflConstr :: Constr
reflConstr = mkConstr equalityDataType "Refl" [] Prefix
equalityDataType :: DataType
equalityDataType = mkDataType "Data.Type.Equality.(:~:)" [reflConstr]
instance (a ~ b, Data a) => Data (a :~: b) where
gfoldl _ z Refl = z Refl
toConstr Refl = reflConstr
gunfold _ z c = case constrIndex c of
1 -> z Refl
_ -> errorWithoutStackTrace "Data.Data.gunfold(:~:)"
dataTypeOf _ = equalityDataType
dataCast2 f = gcast2 f
-----------------------------------------------------------------------
-- instance for Coercion
coercionConstr :: Constr
coercionConstr = mkConstr coercionDataType "Coercion" [] Prefix
coercionDataType :: DataType
coercionDataType = mkDataType "Data.Type.Coercion.Coercion" [coercionConstr]
instance (Coercible a b, Data a, Data b) => Data (Coercion a b) where
gfoldl _ z Coercion = z Coercion
toConstr Coercion = coercionConstr
gunfold _ z c = case constrIndex c of
1 -> z Coercion
_ -> errorWithoutStackTrace "Data.Data.gunfold(Coercion)"
dataTypeOf _ = coercionDataType
dataCast2 f = gcast2 f
-----------------------------------------------------------------------
-- instance for Data.Version
versionConstr :: Constr
versionConstr = mkConstr versionDataType "Version" ["versionBranch","versionTags"] Prefix
versionDataType :: DataType
versionDataType = mkDataType "Data.Version.Version" [versionConstr]
instance Data Version where
gfoldl k z (Version bs ts) = z Version `k` bs `k` ts
toConstr (Version _ _) = versionConstr
gunfold k z c = case constrIndex c of
1 -> k (k (z Version))
_ -> errorWithoutStackTrace "Data.Data.gunfold(Version)"
dataTypeOf _ = versionDataType
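-- The field labels recorded above are observable generically (illustrative
-- only):
--
-- >>> constrFields (toConstr (Version [1,2] []))
-- ["versionBranch","versionTags"]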
-----------------------------------------------------------------------
-- instances for Data.Monoid wrappers
dualConstr :: Constr
dualConstr = mkConstr dualDataType "Dual" ["getDual"] Prefix
dualDataType :: DataType
dualDataType = mkDataType "Data.Monoid.Dual" [dualConstr]
instance Data a => Data (Dual a) where
gfoldl f z (Dual x) = z Dual `f` x
gunfold k z _ = k (z Dual)
toConstr (Dual _) = dualConstr
dataTypeOf _ = dualDataType
dataCast1 f = gcast1 f
allConstr :: Constr
allConstr = mkConstr allDataType "All" ["getAll"] Prefix
allDataType :: DataType
allDataType = mkDataType "All" [allConstr]
instance Data All where
gfoldl f z (All x) = (z All `f` x)
gunfold k z _ = k (z All)
toConstr (All _) = allConstr
dataTypeOf _ = allDataType
anyConstr :: Constr
anyConstr = mkConstr anyDataType "Any" ["getAny"] Prefix
anyDataType :: DataType
anyDataType = mkDataType "Any" [anyConstr]
instance Data Any where
gfoldl f z (Any x) = (z Any `f` x)
gunfold k z _ = k (z Any)
toConstr (Any _) = anyConstr
dataTypeOf _ = anyDataType
sumConstr :: Constr
sumConstr = mkConstr sumDataType "Sum" ["getSum"] Prefix
sumDataType :: DataType
sumDataType = mkDataType "Data.Monoid.Sum" [sumConstr]
instance Data a => Data (Sum a) where
gfoldl f z (Sum x) = z Sum `f` x
gunfold k z _ = k (z Sum)
toConstr (Sum _) = sumConstr
dataTypeOf _ = sumDataType
dataCast1 f = gcast1 f
productConstr :: Constr
productConstr = mkConstr productDataType "Product" ["getProduct"] Prefix
productDataType :: DataType
productDataType = mkDataType "Data.Monoid.Product" [productConstr]
instance Data a => Data (Product a) where
gfoldl f z (Product x) = z Product `f` x
gunfold k z _ = k (z Product)
toConstr (Product _) = productConstr
dataTypeOf _ = productDataType
dataCast1 f = gcast1 f
firstConstr :: Constr
firstConstr = mkConstr firstDataType "First" ["getFirst"] Prefix
firstDataType :: DataType
firstDataType = mkDataType "Data.Monoid.First" [firstConstr]
instance Data a => Data (First a) where
gfoldl f z (First x) = (z First `f` x)
gunfold k z _ = k (z First)
toConstr (First _) = firstConstr
dataTypeOf _ = firstDataType
dataCast1 f = gcast1 f
lastConstr :: Constr
lastConstr = mkConstr lastDataType "Last" ["getLast"] Prefix
lastDataType :: DataType
lastDataType = mkDataType "Data.Monoid.Last" [lastConstr]
instance Data a => Data (Last a) where
gfoldl f z (Last x) = (z Last `f` x)
gunfold k z _ = k (z Last)
toConstr (Last _) = lastConstr
dataTypeOf _ = lastDataType
dataCast1 f = gcast1 f
altConstr :: Constr
altConstr = mkConstr altDataType "Alt" ["getAlt"] Prefix
altDataType :: DataType
altDataType = mkDataType "Alt" [altConstr]
instance (Data (f a), Data a, Typeable f) => Data (Alt f a) where
gfoldl f z (Alt x) = (z Alt `f` x)
gunfold k z _ = k (z Alt)
toConstr (Alt _) = altConstr
dataTypeOf _ = altDataType
-----------------------------------------------------------------------
-- instances for GHC.Generics
u1Constr :: Constr
u1Constr = mkConstr u1DataType "U1" [] Prefix
u1DataType :: DataType
u1DataType = mkDataType "GHC.Generics.U1" [u1Constr]
instance Data p => Data (U1 p) where
gfoldl _ z U1 = z U1
toConstr U1 = u1Constr
gunfold _ z c = case constrIndex c of
1 -> z U1
_ -> errorWithoutStackTrace "Data.Data.gunfold(U1)"
dataTypeOf _ = u1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
par1Constr :: Constr
par1Constr = mkConstr par1DataType "Par1" [] Prefix
par1DataType :: DataType
par1DataType = mkDataType "GHC.Generics.Par1" [par1Constr]
instance Data p => Data (Par1 p) where
gfoldl k z (Par1 p) = z Par1 `k` p
toConstr (Par1 _) = par1Constr
gunfold k z c = case constrIndex c of
1 -> k (z Par1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Par1)"
dataTypeOf _ = par1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
rec1Constr :: Constr
rec1Constr = mkConstr rec1DataType "Rec1" [] Prefix
rec1DataType :: DataType
rec1DataType = mkDataType "GHC.Generics.Rec1" [rec1Constr]
instance (Data (f p), Typeable f, Data p) => Data (Rec1 f p) where
gfoldl k z (Rec1 p) = z Rec1 `k` p
toConstr (Rec1 _) = rec1Constr
gunfold k z c = case constrIndex c of
1 -> k (z Rec1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Rec1)"
dataTypeOf _ = rec1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
k1Constr :: Constr
k1Constr = mkConstr k1DataType "K1" [] Prefix
k1DataType :: DataType
k1DataType = mkDataType "GHC.Generics.K1" [k1Constr]
instance (Typeable i, Data p, Data c) => Data (K1 i c p) where
gfoldl k z (K1 p) = z K1 `k` p
toConstr (K1 _) = k1Constr
gunfold k z c = case constrIndex c of
1 -> k (z K1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(K1)"
dataTypeOf _ = k1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
m1Constr :: Constr
m1Constr = mkConstr m1DataType "M1" [] Prefix
m1DataType :: DataType
m1DataType = mkDataType "GHC.Generics.M1" [m1Constr]
instance (Data p, Data (f p), Typeable c, Typeable i, Typeable f)
=> Data (M1 i c f p) where
gfoldl k z (M1 p) = z M1 `k` p
toConstr (M1 _) = m1Constr
gunfold k z c = case constrIndex c of
1 -> k (z M1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(M1)"
dataTypeOf _ = m1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
sum1DataType :: DataType
sum1DataType = mkDataType "GHC.Generics.:+:" [l1Constr, r1Constr]
l1Constr :: Constr
l1Constr = mkConstr sum1DataType "L1" [] Prefix
r1Constr :: Constr
r1Constr = mkConstr sum1DataType "R1" [] Prefix
instance (Typeable f, Typeable g, Data p, Data (f p), Data (g p))
=> Data ((f :+: g) p) where
gfoldl k z (L1 a) = z L1 `k` a
gfoldl k z (R1 a) = z R1 `k` a
toConstr L1{} = l1Constr
toConstr R1{} = r1Constr
gunfold k z c = case constrIndex c of
1 -> k (z L1)
2 -> k (z R1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(:+:)"
dataTypeOf _ = sum1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
comp1Constr :: Constr
comp1Constr = mkConstr comp1DataType "Comp1" [] Prefix
comp1DataType :: DataType
comp1DataType = mkDataType "GHC.Generics.:.:" [comp1Constr]
instance (Typeable f, Typeable g, Data p, Data (f (g p)))
=> Data ((f :.: g) p) where
gfoldl k z (Comp1 c) = z Comp1 `k` c
  toConstr (Comp1 _) = comp1Constr
gunfold k z c = case constrIndex c of
1 -> k (z Comp1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(:.:)"
dataTypeOf _ = comp1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
v1DataType :: DataType
v1DataType = mkDataType "GHC.Generics.V1" []
instance Data p => Data (V1 p) where
gfoldl _ _ !_ = undefined
toConstr !_ = undefined
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(V1)"
dataTypeOf _ = v1DataType
dataCast1 f = gcast1 f
-----------------------------------------------------------------------
prod1DataType :: DataType
prod1DataType = mkDataType "GHC.Generics.:*:" [prod1Constr]
prod1Constr :: Constr
prod1Constr = mkConstr prod1DataType "Prod1" [] Infix
instance (Typeable f, Typeable g, Data p, Data (f p), Data (g p))
=> Data ((f :*: g) p) where
gfoldl k z (l :*: r) = z (:*:) `k` l `k` r
toConstr _ = prod1Constr
gunfold k z c = case constrIndex c of
1 -> k (k (z (:*:)))
_ -> errorWithoutStackTrace "Data.Data.gunfold(:*:)"
dataCast1 f = gcast1 f
dataTypeOf _ = prod1DataType
-----------------------------------------------------------------------
prefixConstr :: Constr
prefixConstr = mkConstr fixityDataType "Prefix" [] Prefix
infixConstr :: Constr
infixConstr = mkConstr fixityDataType "Infix" [] Prefix
fixityDataType :: DataType
fixityDataType = mkDataType "GHC.Generics.Fixity" [prefixConstr,infixConstr]
instance Data Generics.Fixity where
gfoldl _ z Generics.Prefix = z Generics.Prefix
gfoldl f z (Generics.Infix a i) = z Generics.Infix `f` a `f` i
toConstr Generics.Prefix = prefixConstr
toConstr Generics.Infix{} = infixConstr
gunfold k z c = case constrIndex c of
1 -> z Generics.Prefix
2 -> k (k (z Generics.Infix))
_ -> errorWithoutStackTrace "Data.Data.gunfold(Fixity)"
dataTypeOf _ = fixityDataType
-----------------------------------------------------------------------
leftAssociativeConstr :: Constr
leftAssociativeConstr
= mkConstr associativityDataType "LeftAssociative" [] Prefix
rightAssociativeConstr :: Constr
rightAssociativeConstr
= mkConstr associativityDataType "RightAssociative" [] Prefix
notAssociativeConstr :: Constr
notAssociativeConstr
= mkConstr associativityDataType "NotAssociative" [] Prefix
associativityDataType :: DataType
associativityDataType = mkDataType "GHC.Generics.Associativity"
[leftAssociativeConstr,rightAssociativeConstr,notAssociativeConstr]
instance Data Associativity where
gfoldl _ z LeftAssociative = z LeftAssociative
gfoldl _ z RightAssociative = z RightAssociative
gfoldl _ z NotAssociative = z NotAssociative
toConstr LeftAssociative = leftAssociativeConstr
toConstr RightAssociative = rightAssociativeConstr
toConstr NotAssociative = notAssociativeConstr
gunfold _ z c = case constrIndex c of
1 -> z LeftAssociative
2 -> z RightAssociative
3 -> z NotAssociative
_ -> errorWithoutStackTrace
"Data.Data.gunfold(Associativity)"
dataTypeOf _ = associativityDataType
-----------------------------------------------------------------------
noSourceUnpackednessConstr :: Constr
noSourceUnpackednessConstr
= mkConstr sourceUnpackednessDataType "NoSourceUnpackedness" [] Prefix
sourceNoUnpackConstr :: Constr
sourceNoUnpackConstr
= mkConstr sourceUnpackednessDataType "SourceNoUnpack" [] Prefix
sourceUnpackConstr :: Constr
sourceUnpackConstr
= mkConstr sourceUnpackednessDataType "SourceUnpack" [] Prefix
sourceUnpackednessDataType :: DataType
sourceUnpackednessDataType = mkDataType "GHC.Generics.SourceUnpackedness"
[noSourceUnpackednessConstr,sourceNoUnpackConstr,sourceUnpackConstr]
instance Data SourceUnpackedness where
gfoldl _ z NoSourceUnpackedness = z NoSourceUnpackedness
gfoldl _ z SourceNoUnpack = z SourceNoUnpack
gfoldl _ z SourceUnpack = z SourceUnpack
toConstr NoSourceUnpackedness = noSourceUnpackednessConstr
toConstr SourceNoUnpack = sourceNoUnpackConstr
toConstr SourceUnpack = sourceUnpackConstr
gunfold _ z c = case constrIndex c of
1 -> z NoSourceUnpackedness
2 -> z SourceNoUnpack
3 -> z SourceUnpack
_ -> errorWithoutStackTrace
"Data.Data.gunfold(SourceUnpackedness)"
dataTypeOf _ = sourceUnpackednessDataType
-----------------------------------------------------------------------
noSourceStrictnessConstr :: Constr
noSourceStrictnessConstr
= mkConstr sourceStrictnessDataType "NoSourceStrictness" [] Prefix
sourceLazyConstr :: Constr
sourceLazyConstr
= mkConstr sourceStrictnessDataType "SourceLazy" [] Prefix
sourceStrictConstr :: Constr
sourceStrictConstr
= mkConstr sourceStrictnessDataType "SourceStrict" [] Prefix
sourceStrictnessDataType :: DataType
sourceStrictnessDataType = mkDataType "GHC.Generics.SourceStrictness"
[noSourceStrictnessConstr,sourceLazyConstr,sourceStrictConstr]
instance Data SourceStrictness where
gfoldl _ z NoSourceStrictness = z NoSourceStrictness
gfoldl _ z SourceLazy = z SourceLazy
gfoldl _ z SourceStrict = z SourceStrict
toConstr NoSourceStrictness = noSourceStrictnessConstr
toConstr SourceLazy = sourceLazyConstr
toConstr SourceStrict = sourceStrictConstr
gunfold _ z c = case constrIndex c of
1 -> z NoSourceStrictness
2 -> z SourceLazy
3 -> z SourceStrict
_ -> errorWithoutStackTrace
"Data.Data.gunfold(SourceStrictness)"
dataTypeOf _ = sourceStrictnessDataType
-----------------------------------------------------------------------
decidedLazyConstr :: Constr
decidedLazyConstr
= mkConstr decidedStrictnessDataType "DecidedLazy" [] Prefix
decidedStrictConstr :: Constr
decidedStrictConstr
= mkConstr decidedStrictnessDataType "DecidedStrict" [] Prefix
decidedUnpackConstr :: Constr
decidedUnpackConstr
= mkConstr decidedStrictnessDataType "DecidedUnpack" [] Prefix
decidedStrictnessDataType :: DataType
decidedStrictnessDataType = mkDataType "GHC.Generics.DecidedStrictness"
[decidedLazyConstr,decidedStrictConstr,decidedUnpackConstr]
instance Data DecidedStrictness where
gfoldl _ z DecidedLazy = z DecidedLazy
gfoldl _ z DecidedStrict = z DecidedStrict
gfoldl _ z DecidedUnpack = z DecidedUnpack
toConstr DecidedLazy = decidedLazyConstr
toConstr DecidedStrict = decidedStrictConstr
toConstr DecidedUnpack = decidedUnpackConstr
gunfold _ z c = case constrIndex c of
1 -> z DecidedLazy
2 -> z DecidedStrict
3 -> z DecidedUnpack
_ -> errorWithoutStackTrace
"Data.Data.gunfold(DecidedStrictness)"
dataTypeOf _ = decidedStrictnessDataType
| null | https://raw.githubusercontent.com/tolysz/prepare-ghcjs/8499e14e27854a366e98f89fab0af355056cf055/spec-lts8/base/Data/Data.hs | haskell | # LANGUAGE BangPatterns #
---------------------------------------------------------------------------
|
Module : Data.Data
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable (local universal quantification)
- Generic programming in Haskell . See
<#Scrap_your_boilerplate.21>.
This module provides the 'Data' class with its primitives for
generic programming, along with instances for many datatypes. It
not present in this module were moved to the
For more information, please visit the new
SYB wiki: <>.
---------------------------------------------------------------------------
* The Data class for processing constructor applications
mediate types and unary type constructors
mediate types and binary type constructors
Generic maps defined in terms of gfoldl
abstract
** Constructors
** Observers
** Convenience functions
* Data constructor representations
abstract
** Constructors
** Observers
** Convenience function: algebraic data types
** From strings to constructors and vice versa: all data types
* Convenience functions: take type constructors apart
* Generic operations defined in terms of 'gunfold'
----------------------------------------------------------------------------
Imports for the instances
So we can give Data instance for Int8, ...
So we can give Data instance for Ratio
So we can give Data instance for IO , Handle
So we can give Data instance for Ptr
So we can give Data instance for ForeignPtr
So we can give Data instance for StablePtr
import GHC.ST -- So we can give Data instance for ST
So we can give Data instance for MVar & Co.
So we can give Data instance for Array
----------------------------------------------------------------------------
The Data class
----------------------------------------------------------------------------
| Left-associative fold operation for constructor applications.
The type of 'gfoldl' is a headache, but operationally it is a simple
generalisation of a list fold.
The default definition for 'gfoldl' is @'const' 'id'@, which is
suitable for abstract datatypes with no substructures.
^ defines how nonempty constructor applications are
folded. It takes the folded tail of the constructor
and combines them in some way.
^ defines how the empty constructor application is
folding.
^ structure to be folded.
^ result, with a type defined in terms of @a@, but
variability is achieved by means of type constructor
@c@ for the construction of the actual result type.
See the 'Data' instances in this file for an illustration of 'gfoldl'.
| Unfolding constructor applications
| Obtaining the constructor from a given datum.
For proper terms, this is meant to be the top-level constructor.
Primitive datatypes are here viewed as potentially infinite sets of
values (i.e., constructors).
| The outer type constructor of the type
----------------------------------------------------------------------------
Mediate types and type constructors
----------------------------------------------------------------------------
| Mediate types and unary type constructors.
In 'Data' instances of the form @T a@, 'dataCast1' should be defined
as 'gcast1'.
The default definition is @'const' 'Nothing'@, which is appropriate
for non-unary type constructors.
| Mediate types and binary type constructors.
In 'Data' instances of the form @T a b@, 'dataCast2' should be
defined as 'gcast2'.
The default definition is @'const' 'Nothing'@, which is appropriate
for non-binary type constructors.
----------------------------------------------------------------------------
Typical generic maps defined in terms of gfoldl
----------------------------------------------------------------------------
| A generic transformation that maps over the immediate subterms
The default definition instantiates the type constructor @c@ in the
type of 'gfoldl' to an identity datatype constructor, using the
isomorphism pair as injection and projection.
Use an identity datatype constructor ID (see below)
to instantiate the type constructor c in the type of gfoldl,
and perform injections ID and projections unID accordingly.
| A generic query with a left-associative binary operator
| A generic query with a right-associative binary operator
| A generic query that processes the immediate subterms and returns a list
of results. The list is given in the same order as originally specified
in the declaration of the data constructors.
| A generic monadic transformation that maps over the immediate subterms
the type of 'gfoldl' to the monad datatype constructor, defining
injection and projection using 'return' and '>>='.
Use immediately the monad datatype constructor
to instantiate the type constructor c in the type of gfoldl,
so injection and projection is done by return and >>=.
| The identity type constructor needed for the definition of gmapT
| The constant type constructor needed for the definition of gmapQl
| Type constructor for adding counters to queries
| The type constructor used in definition of gmapQr
----------------------------------------------------------------------------
Generic unfolding
----------------------------------------------------------------------------
| Build a term skeleton
| Build a term and use a generic function for subterms
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Representation of datatypes.
A package of constructor representations with names of type and module.
| Representation of constructors. Note that equality on constructors
with different types may not work -- i.e. the constructors for 'False' and
'Nothing' may compare equal.
| Equality of constructors
| Public representation of datatypes
The list of constructors could be an array, a balanced tree, or others.
| Public representation of constructors
| Unique index for datatype constructors,
| Fixity of constructors
Later: add associativity and precedence
----------------------------------------------------------------------------
Observers for datatype representations
----------------------------------------------------------------------------
| Gets the type constructor including the module
| Gets the public presentation of a datatype
| Gets the datatype of a constructor
| Gets the public presentation of constructors
| Look up a constructor by its representation
----------------------------------------------------------------------------
Representations of algebraic data types
----------------------------------------------------------------------------
| Constructs an algebraic datatype
| Constructs a constructor
| Gets the constructors of an algebraic datatype
| Gets the field labels of a constructor. The list of labels
is returned in the same order as they were given in the original
constructor declaration.
| Gets the fixity of a constructor
----------------------------------------------------------------------------
From strings to constr's and vice versa: all data types
----------------------------------------------------------------------------
| Gets the string for a constructor
| Lookup a constructor via a string
Read a value and build a constructor
----------------------------------------------------------------------------
Convenience funtions: algebraic data types
----------------------------------------------------------------------------
| Test for an algebraic type
| Gets the constructor for an index (algebraic datatypes only)
| Gets the index of a constructor (algebraic datatypes only)
| Gets the maximum constructor index of an algebraic datatype
----------------------------------------------------------------------------
Representation of primitive types
----------------------------------------------------------------------------
| Constructs the 'Int' type
| Constructs the 'Float' type
Makes a constructor for primitive types
----------------------------------------------------------------------------
Non-representations for non-representable types
----------------------------------------------------------------------------
| Constructs a non-representation for a non-representable type
| Test for a non-representable type
----------------------------------------------------------------------------
Convenience for qualified type constructors
----------------------------------------------------------------------------
| Gets the unqualified type constructor:
drop *.*.*... before name
| Gets the module of a type constructor:
take *.*.*... before name
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Instances of the Data class for Prelude-like types.
We define top-level definitions for representations.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
The gmaps are given as an illustration.
This shows that the gmaps for lists are different from list maps.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
The Data instance for Array preserves data abstraction at the cost of
inefficiency. We omit reflection services for the sake of data abstraction.
--------------------------------------------------------------------------
Data instance for Proxy
---------------------------------------------------------------------
instance for (:~:)
---------------------------------------------------------------------
instance for Coercion
---------------------------------------------------------------------
instance for Data.Version
---------------------------------------------------------------------
instances for Data.Monoid wrappers
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
--------------------------------------------------------------------- | # LANGUAGE Trustworthy #
# LANGUAGE RankNTypes , ScopedTypeVariables , PolyKinds , StandaloneDeriving ,
TypeOperators , GADTs , FlexibleInstances #
TypeOperators, GADTs, FlexibleInstances #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE NoImplicitPrelude #
Copyright : ( c ) The University of Glasgow , CWI 2001 - -2004
corresponds to a merge between the previous " Data . Generics . Basics "
and almost all of " Data . Generics . Instances " . The instances that are
@Data . Generics . Instances@ module in the @syb@ package .
module Data.Data (
* Module Data . re - exported for convenience
module Data.Typeable,
Data(
gfoldl,
gunfold,
toConstr,
dataTypeOf,
gmapT,
gmapQ,
gmapQl,
gmapQr,
gmapQi,
gmapM,
gmapMp,
gmapMo
),
* Datatype representations
mkDataType,
mkIntType,
mkFloatType,
mkCharType,
mkNoRepType,
dataTypeName,
DataRep(..),
dataTypeRep,
repConstr,
isAlgType,
dataTypeConstrs,
indexConstr,
maxConstrIndex,
isNorepType,
alias for , start at 1
Fixity(..),
mkConstr,
mkIntegralConstr,
mkRealConstr,
mkCharConstr,
constrType,
ConstrRep(..),
constrRep,
constrFields,
constrFixity,
constrIndex,
showConstr,
readConstr,
tyconUQname,
tyconModule,
fromConstr,
fromConstrB,
fromConstrM
) where
import Data.Either
import Data.Eq
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Typeable
import Data.Version( Version(..) )
import GHC.Base hiding (Any, IntRep, FloatRep)
import GHC.List
import GHC.Num
import GHC.Read
import GHC.Show
import Text.Read( reads )
import Data.Type.Coercion
So we can give Data instance for , ...
import qualified GHC.Generics as Generics (Fixity(..))
import GHC.Generics hiding (Fixity(..))
So we can give Data instance for U1 , V1 , ...
|
The ' Data ' class comprehends a fundamental primitive ' gfoldl ' for
folding over constructor applications , say terms . This primitive can
be instantiated in several ways to map over the immediate subterms
of a term ; see the @gmap@ combinators later in this class . Indeed , a
generic programmer does not necessarily need to use the ingenious gfoldl
primitive but rather the intuitive @gmap@ combinators . The ' gfoldl '
primitive is completed by means to query top - level constructors , to
turn constructor representations into proper terms , and to list all
possible datatype constructors . This completion allows us to serve
generic programming scenarios like read , show , equality , term generation .
The combinators ' gmapT ' , ' gmapQ ' , ' gmapM ' , etc are all provided with
default definitions in terms of ' gfoldl ' , leaving open the opportunity
to provide datatype - specific definitions .
( The inclusion of the @gmap@ combinators as members of class ' Data '
allows the programmer or the compiler to derive specialised , and maybe
more efficient code per datatype . /Note/ : ' gfoldl ' is more higher - order
than the @gmap@ combinators . This is subject to ongoing benchmarking
experiments . It might turn out that the @gmap@ combinators will be
moved out of the class ' Data ' . )
Conceptually , the definition of the @gmap@ combinators in terms of the
primitive ' gfoldl ' requires the identification of the ' gfoldl ' function
arguments . Technically , we also need to identify the type constructor
@c@ for the construction of the result type from the folded term type .
In the definition of @gmapQ@/x/ combinators , we use phantom type
constructors for the @c@ in the type of ' gfoldl ' because the result type
of a query does not involve the ( polymorphic ) type of the term argument .
In the definition of ' gmapQl ' we simply use the plain constant type
constructor because ' gfoldl ' is left - associative anyway and so it is
readily suited to fold a left - associative binary operation over the
immediate subterms . In the definition of gmapQr , extra effort is
needed . We use a higher - order accumulation trick to mediate between
left - associative constructor application vs. right - associative binary
operation ( e.g. , @(:)@ ) . When the query is meant to compute a value
of type @r@ , then the result type withing generic folding is @r - > r@.
So the result of folding is a function to which we finally pass the
right unit .
With the @-XDeriveDataTypeable@ option , GHC can generate instances of the
' Data ' class automatically . For example , given the declaration
> data T a b = C1 a b | C2 deriving ( Typeable , Data )
GHC will generate an instance that is equivalent to
> instance ( Data a , Data b ) = > Data ( T a b ) where
> gfoldl k z ( C1 a b ) = z C1 ` k ` a ` k ` b
> gfoldl k z C2 = z C2
>
> gunfold k z c = case constrIndex c of
> 1 - > k ( k ( z C1 ) )
> 2 - > z C2
>
> toConstr ( C1 _ _ ) = con_C1
> toConstr C2 = con_C2
>
> _ = ty_T
>
> con_C1 = mkConstr ty_T " C1 " [ ] Prefix
> con_C2 = mkConstr ty_T " C2 " [ ] Prefix
> ty_T = mkDataType " Module . T " [ con_C1 , ]
This is suitable for datatypes that are exported transparently .
The 'Data' class comprehends a fundamental primitive 'gfoldl' for
folding over constructor applications, say terms. This primitive can
be instantiated in several ways to map over the immediate subterms
of a term; see the @gmap@ combinators later in this class. Indeed, a
generic programmer does not necessarily need to use the ingenious gfoldl
primitive but rather the intuitive @gmap@ combinators. The 'gfoldl'
primitive is completed by means to query top-level constructors, to
turn constructor representations into proper terms, and to list all
possible datatype constructors. This completion allows us to serve
generic programming scenarios like read, show, equality, term generation.
The combinators 'gmapT', 'gmapQ', 'gmapM', etc are all provided with
default definitions in terms of 'gfoldl', leaving open the opportunity
to provide datatype-specific definitions.
(The inclusion of the @gmap@ combinators as members of class 'Data'
allows the programmer or the compiler to derive specialised, and maybe
more efficient code per datatype. /Note/: 'gfoldl' is more higher-order
than the @gmap@ combinators. This is subject to ongoing benchmarking
experiments. It might turn out that the @gmap@ combinators will be
moved out of the class 'Data'.)
Conceptually, the definition of the @gmap@ combinators in terms of the
primitive 'gfoldl' requires the identification of the 'gfoldl' function
arguments. Technically, we also need to identify the type constructor
@c@ for the construction of the result type from the folded term type.
In the definition of @gmapQ@/x/ combinators, we use phantom type
constructors for the @c@ in the type of 'gfoldl' because the result type
of a query does not involve the (polymorphic) type of the term argument.
In the definition of 'gmapQl' we simply use the plain constant type
constructor because 'gfoldl' is left-associative anyway and so it is
readily suited to fold a left-associative binary operation over the
immediate subterms. In the definition of gmapQr, extra effort is
needed. We use a higher-order accumulation trick to mediate between
left-associative constructor application vs. right-associative binary
operation (e.g., @(:)@). When the query is meant to compute a value
of type @r@, then the result type withing generic folding is @r -> r@.
So the result of folding is a function to which we finally pass the
right unit.
With the @-XDeriveDataTypeable@ option, GHC can generate instances of the
'Data' class automatically. For example, given the declaration
> data T a b = C1 a b | C2 deriving (Typeable, Data)
GHC will generate an instance that is equivalent to
> instance (Data a, Data b) => Data (T a b) where
> gfoldl k z (C1 a b) = z C1 `k` a `k` b
> gfoldl k z C2 = z C2
>
> gunfold k z c = case constrIndex c of
> 1 -> k (k (z C1))
> 2 -> z C2
>
> toConstr (C1 _ _) = con_C1
> toConstr C2 = con_C2
>
> dataTypeOf _ = ty_T
>
> con_C1 = mkConstr ty_T "C1" [] Prefix
> con_C2 = mkConstr ty_T "C2" [] Prefix
> ty_T = mkDataType "Module.T" [con_C1, con_C2]
This is suitable for datatypes that are exported transparently.
-}
class Typeable a => Data a where
gfoldl :: (forall d b. Data d => c (d -> b) -> d -> c b)
application and its head , i.e. , an immediate subterm ,
-> (forall g. g -> c g)
folded , like the neutral \/ start element for list
-> a
-> c a
gfoldl _ z = z
gunfold :: (forall b r. Data b => c (b -> r) -> c r)
-> (forall r. r -> c r)
-> Constr
-> c a
toConstr :: a -> Constr
dataTypeOf :: a -> DataType
dataCast1 :: Typeable t
=> (forall d. Data d => c (t d))
-> Maybe (c a)
dataCast1 _ = Nothing
dataCast2 :: Typeable t
=> (forall d e. (Data d, Data e) => c (t d e))
-> Maybe (c a)
dataCast2 _ = Nothing
gmapT :: (forall b. Data b => b -> b) -> a -> a
gmapT f x0 = unID (gfoldl k ID x0)
where
k :: Data d => ID (d->b) -> d -> ID b
k (ID c) x = ID (c (f x))
gmapQl :: forall r r'. (r -> r' -> r) -> r -> (forall d. Data d => d -> r') -> a -> r
gmapQl o r f = unCONST . gfoldl k z
where
k :: Data d => CONST r (d->b) -> d -> CONST r b
k c x = CONST $ (unCONST c) `o` f x
z :: g -> CONST r g
z _ = CONST r
gmapQr :: forall r r'. (r' -> r -> r) -> r -> (forall d. Data d => d -> r') -> a -> r
gmapQr o r0 f x0 = unQr (gfoldl k (const (Qr id)) x0) r0
where
k :: Data d => Qr r (d->b) -> d -> Qr r b
k (Qr c) x = Qr (\r -> c (f x `o` r))
gmapQ :: (forall d. Data d => d -> u) -> a -> [u]
gmapQ f = gmapQr (:) [] f
| A generic query that processes one child by index ( zero - based )
gmapQi :: forall u. Int -> (forall d. Data d => d -> u) -> a -> u
gmapQi i f x = case gfoldl k z x of { Qi _ q -> fromJust q }
where
k :: Data d => Qi u (d -> b) -> d -> Qi u b
k (Qi i' q) a = Qi (i'+1) (if i==i' then Just (f a) else q)
z :: g -> Qi q g
z _ = Qi 0 Nothing
The default definition instantiates the type constructor @c@ in
gmapM :: forall m. Monad m => (forall d. Data d => d -> m d) -> a -> m a
gmapM f = gfoldl k return
where
k :: Data d => m (d -> b) -> d -> m b
k c x = do c' <- c
x' <- f x
return (c' x')
| Transformation of at least one immediate subterm does not fail
gmapMp :: forall m. MonadPlus m => (forall d. Data d => d -> m d) -> a -> m a
The type constructor that we use here simply keeps track of the fact
if we already succeeded for an immediate subterm ; see Mp below . To
this end , we couple the monadic computation with a Boolean .
The type constructor that we use here simply keeps track of the fact
if we already succeeded for an immediate subterm; see Mp below. To
this end, we couple the monadic computation with a Boolean.
-}
gmapMp f x = unMp (gfoldl k z x) >>= \(x',b) ->
if b then return x' else mzero
where
z :: g -> Mp m g
z g = Mp (return (g,False))
k :: Data d => Mp m (d -> b) -> d -> Mp m b
k (Mp c) y
= Mp ( c >>= \(h, b) ->
(f y >>= \y' -> return (h y', True))
`mplus` return (h y, b)
)
| Transformation of one immediate subterm with success
gmapMo :: forall m. MonadPlus m => (forall d. Data d => d -> m d) -> a -> m a
We use the same pairing trick as for gmapMp ,
i.e. , we use an extra Bool component to keep track of the
fact whether an immediate subterm was processed successfully .
However , we cut of mapping over subterms once a first subterm
was transformed successfully .
We use the same pairing trick as for gmapMp,
i.e., we use an extra Bool component to keep track of the
fact whether an immediate subterm was processed successfully.
However, we cut of mapping over subterms once a first subterm
was transformed successfully.
-}
gmapMo f x = unMp (gfoldl k z x) >>= \(x',b) ->
if b then return x' else mzero
where
z :: g -> Mp m g
z g = Mp (return (g,False))
k :: Data d => Mp m (d -> b) -> d -> Mp m b
k (Mp c) y
= Mp ( c >>= \(h,b) -> if b
then return (h y, b)
else (f y >>= \y' -> return (h y',True))
`mplus` return (h y, b)
)
newtype ID x = ID { unID :: x }
newtype CONST c a = CONST { unCONST :: c }
data Qi q a = Qi Int (Maybe q)
newtype Qr r a = Qr { unQr :: r -> r }
| The type constructor used in definition of gmapMp
newtype Mp m x = Mp { unMp :: m (x, Bool) }
fromConstr :: Data a => Constr -> a
fromConstr = fromConstrB (errorWithoutStackTrace "Data.Data.fromConstr")
fromConstrB :: Data a
=> (forall d. Data d => d)
-> Constr
-> a
fromConstrB f = unID . gunfold k z
where
k :: forall b r. Data b => ID (b -> r) -> ID r
k c = ID (unID c f)
z :: forall r. r -> ID r
z = ID
| Monadic variation on ' fromConstrB '
fromConstrM :: forall m a. (Monad m, Data a)
=> (forall d. Data d => m d)
-> Constr
-> m a
fromConstrM f = gunfold k z
where
k :: forall b r. Data b => m (b -> r) -> m r
k c = do { c' <- c; b <- f; return (c' b) }
z :: forall r. r -> m r
z = return
Datatype and constructor representations
data DataType = DataType
{ tycon :: String
, datarep :: DataRep
}
deriving Show
data Constr = Constr
{ conrep :: ConstrRep
, constring :: String
for AlgRep only
for AlgRep only
, datatype :: DataType
}
instance Show Constr where
show = constring
instance Eq Constr where
c == c' = constrRep c == constrRep c'
data DataRep = AlgRep [Constr]
| IntRep
| FloatRep
| CharRep
| NoRep
deriving (Eq,Show)
data ConstrRep = AlgConstr ConIndex
| IntConstr Integer
| FloatConstr Rational
| CharConstr Char
deriving (Eq,Show)
counting from 1 in the order they are given in the program text .
type ConIndex = Int
data Fixity = Prefix
deriving (Eq,Show)
dataTypeName :: DataType -> String
dataTypeName = tycon
dataTypeRep :: DataType -> DataRep
dataTypeRep = datarep
constrType :: Constr -> DataType
constrType = datatype
constrRep :: Constr -> ConstrRep
constrRep = conrep
repConstr :: DataType -> ConstrRep -> Constr
repConstr dt cr =
case (dataTypeRep dt, cr) of
(AlgRep cs, AlgConstr i) -> cs !! (i-1)
(IntRep, IntConstr i) -> mkIntegralConstr dt i
(FloatRep, FloatConstr f) -> mkRealConstr dt f
(CharRep, CharConstr c) -> mkCharConstr dt c
_ -> errorWithoutStackTrace "Data.Data.repConstr: The given ConstrRep does not fit to the given DataType."
mkDataType :: String -> [Constr] -> DataType
mkDataType str cs = DataType
{ tycon = str
, datarep = AlgRep cs
}
mkConstr :: DataType -> String -> [String] -> Fixity -> Constr
mkConstr dt str fields fix =
Constr
{ conrep = AlgConstr idx
, constring = str
, confields = fields
, confixity = fix
, datatype = dt
}
where
idx = head [ i | (c,i) <- dataTypeConstrs dt `zip` [1..],
showConstr c == str ]
dataTypeConstrs :: DataType -> [Constr]
dataTypeConstrs dt = case datarep dt of
(AlgRep cons) -> cons
_ -> errorWithoutStackTrace $ "Data.Data.dataTypeConstrs is not supported for "
++ dataTypeName dt ++
", as it is not an algebraic data type."
constrFields :: Constr -> [String]
constrFields = confields
constrFixity :: Constr -> Fixity
constrFixity = confixity
showConstr :: Constr -> String
showConstr = constring
readConstr :: DataType -> String -> Maybe Constr
readConstr dt str =
case dataTypeRep dt of
AlgRep cons -> idx cons
IntRep -> mkReadCon (\i -> (mkPrimCon dt str (IntConstr i)))
FloatRep -> mkReadCon ffloat
CharRep -> mkReadCon (\c -> (mkPrimCon dt str (CharConstr c)))
NoRep -> Nothing
where
mkReadCon :: Read t => (t -> Constr) -> Maybe Constr
mkReadCon f = case (reads str) of
[(t,"")] -> Just (f t)
_ -> Nothing
Traverse list of algebraic datatype constructors
idx :: [Constr] -> Maybe Constr
idx cons = let fit = filter ((==) str . showConstr) cons
in if fit == []
then Nothing
else Just (head fit)
ffloat :: Double -> Constr
ffloat = mkPrimCon dt str . FloatConstr . toRational
isAlgType :: DataType -> Bool
isAlgType dt = case datarep dt of
(AlgRep _) -> True
_ -> False
indexConstr :: DataType -> ConIndex -> Constr
indexConstr dt idx = case datarep dt of
(AlgRep cs) -> cs !! (idx-1)
_ -> errorWithoutStackTrace $ "Data.Data.indexConstr is not supported for "
++ dataTypeName dt ++
", as it is not an algebraic data type."
constrIndex :: Constr -> ConIndex
constrIndex con = case constrRep con of
(AlgConstr idx) -> idx
_ -> errorWithoutStackTrace $ "Data.Data.constrIndex is not supported for "
++ dataTypeName (constrType con) ++
", as it is not an algebraic data type."
maxConstrIndex :: DataType -> ConIndex
maxConstrIndex dt = case dataTypeRep dt of
AlgRep cs -> length cs
_ -> errorWithoutStackTrace $ "Data.Data.maxConstrIndex is not supported for "
++ dataTypeName dt ++
", as it is not an algebraic data type."
mkIntType :: String -> DataType
mkIntType = mkPrimType IntRep
mkFloatType :: String -> DataType
mkFloatType = mkPrimType FloatRep
| Constructs the ' ' type
mkCharType :: String -> DataType
mkCharType = mkPrimType CharRep
| Helper for ' mkIntType ' , ' mkFloatType '
mkPrimType :: DataRep -> String -> DataType
mkPrimType dr str = DataType
{ tycon = str
, datarep = dr
}
mkPrimCon :: DataType -> String -> ConstrRep -> Constr
mkPrimCon dt str cr = Constr
{ datatype = dt
, conrep = cr
, constring = str
, confields = errorWithoutStackTrace "Data.Data.confields"
, confixity = errorWithoutStackTrace "Data.Data.confixity"
}
mkIntegralConstr :: (Integral a, Show a) => DataType -> a -> Constr
mkIntegralConstr dt i = case datarep dt of
IntRep -> mkPrimCon dt (show i) (IntConstr (toInteger i))
_ -> errorWithoutStackTrace $ "Data.Data.mkIntegralConstr is not supported for "
++ dataTypeName dt ++
", as it is not an Integral data type."
mkRealConstr :: (Real a, Show a) => DataType -> a -> Constr
mkRealConstr dt f = case datarep dt of
FloatRep -> mkPrimCon dt (show f) (FloatConstr (toRational f))
_ -> errorWithoutStackTrace $ "Data.Data.mkRealConstr is not supported for "
++ dataTypeName dt ++
", as it is not an Real data type."
| Makes a constructor for ' ' .
mkCharConstr :: DataType -> Char -> Constr
mkCharConstr dt c = case datarep dt of
CharRep -> mkPrimCon dt (show c) (CharConstr c)
_ -> errorWithoutStackTrace $ "Data.Data.mkCharConstr is not supported for "
++ dataTypeName dt ++
", as it is not an Char data type."
mkNoRepType :: String -> DataType
mkNoRepType str = DataType
{ tycon = str
, datarep = NoRep
}
isNorepType :: DataType -> Bool
isNorepType dt = case datarep dt of
NoRep -> True
_ -> False
tyconUQname :: String -> String
tyconUQname x = let x' = dropWhile (not . (==) '.') x
in if x' == [] then x else tyconUQname (tail x')
tyconModule :: String -> String
tyconModule x = let (a,b) = break ((==) '.') x
in if b == ""
then b
else a ++ tyconModule' (tail b)
where
tyconModule' y = let y' = tyconModule y
in if y' == "" then "" else ('.':y')
falseConstr :: Constr
falseConstr = mkConstr boolDataType "False" [] Prefix
trueConstr :: Constr
trueConstr = mkConstr boolDataType "True" [] Prefix
boolDataType :: DataType
boolDataType = mkDataType "Prelude.Bool" [falseConstr,trueConstr]
instance Data Bool where
toConstr False = falseConstr
toConstr True = trueConstr
gunfold _ z c = case constrIndex c of
1 -> z False
2 -> z True
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor "
++ show c
++ " is not of type Bool."
dataTypeOf _ = boolDataType
charType :: DataType
charType = mkCharType "Prelude.Char"
instance Data Char where
toConstr x = mkCharConstr charType x
gunfold _ z c = case constrRep c of
(CharConstr x) -> z x
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Char."
dataTypeOf _ = charType
floatType :: DataType
floatType = mkFloatType "Prelude.Float"
instance Data Float where
toConstr = mkRealConstr floatType
gunfold _ z c = case constrRep c of
(FloatConstr x) -> z (realToFrac x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Float."
dataTypeOf _ = floatType
doubleType :: DataType
doubleType = mkFloatType "Prelude.Double"
instance Data Double where
toConstr = mkRealConstr doubleType
gunfold _ z c = case constrRep c of
(FloatConstr x) -> z (realToFrac x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Double."
dataTypeOf _ = doubleType
intType :: DataType
intType = mkIntType "Prelude.Int"
instance Data Int where
toConstr x = mkIntegralConstr intType x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int."
dataTypeOf _ = intType
integerType :: DataType
integerType = mkIntType "Prelude.Integer"
instance Data Integer where
toConstr = mkIntegralConstr integerType
gunfold _ z c = case constrRep c of
(IntConstr x) -> z x
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Integer."
dataTypeOf _ = integerType
int8Type :: DataType
int8Type = mkIntType "Data.Int.Int8"
instance Data Int8 where
toConstr x = mkIntegralConstr int8Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int8."
dataTypeOf _ = int8Type
int16Type :: DataType
int16Type = mkIntType "Data.Int.Int16"
instance Data Int16 where
toConstr x = mkIntegralConstr int16Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int16."
dataTypeOf _ = int16Type
int32Type :: DataType
int32Type = mkIntType "Data.Int.Int32"
instance Data Int32 where
toConstr x = mkIntegralConstr int32Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int32."
dataTypeOf _ = int32Type
int64Type :: DataType
int64Type = mkIntType "Data.Int.Int64"
instance Data Int64 where
toConstr x = mkIntegralConstr int64Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Int64."
dataTypeOf _ = int64Type
wordType :: DataType
wordType = mkIntType "Data.Word.Word"
instance Data Word where
toConstr x = mkIntegralConstr wordType x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word"
dataTypeOf _ = wordType
word8Type :: DataType
word8Type = mkIntType "Data.Word.Word8"
instance Data Word8 where
toConstr x = mkIntegralConstr word8Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word8."
dataTypeOf _ = word8Type
word16Type :: DataType
word16Type = mkIntType "Data.Word.Word16"
instance Data Word16 where
toConstr x = mkIntegralConstr word16Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word16."
dataTypeOf _ = word16Type
word32Type :: DataType
word32Type = mkIntType "Data.Word.Word32"
instance Data Word32 where
toConstr x = mkIntegralConstr word32Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word32."
dataTypeOf _ = word32Type
word64Type :: DataType
word64Type = mkIntType "Data.Word.Word64"
instance Data Word64 where
toConstr x = mkIntegralConstr word64Type x
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Word64."
dataTypeOf _ = word64Type
ratioConstr :: Constr
ratioConstr = mkConstr ratioDataType ":%" [] Infix
ratioDataType :: DataType
ratioDataType = mkDataType "GHC.Real.Ratio" [ratioConstr]
instance (Data a, Integral a) => Data (Ratio a) where
gfoldl k z (a :% b) = z (%) `k` a `k` b
toConstr _ = ratioConstr
gunfold k z c | constrIndex c == 1 = k (k (z (%)))
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(Ratio)"
dataTypeOf _ = ratioDataType
nilConstr :: Constr
nilConstr = mkConstr listDataType "[]" [] Prefix
consConstr :: Constr
consConstr = mkConstr listDataType "(:)" [] Infix
listDataType :: DataType
listDataType = mkDataType "Prelude.[]" [nilConstr,consConstr]
instance Data a => Data [a] where
gfoldl _ z [] = z []
gfoldl f z (x:xs) = z (:) `f` x `f` xs
toConstr [] = nilConstr
toConstr (_:_) = consConstr
gunfold k z c = case constrIndex c of
1 -> z []
2 -> k (k (z (:)))
_ -> errorWithoutStackTrace "Data.Data.gunfold(List)"
dataTypeOf _ = listDataType
dataCast1 f = gcast1 f
gmapT _ [] = []
gmapT f (x:xs) = (f x:f xs)
gmapQ _ [] = []
gmapQ f (x:xs) = [f x,f xs]
gmapM _ [] = return []
gmapM f (x:xs) = f x >>= \x' -> f xs >>= \xs' -> return (x':xs')
nothingConstr :: Constr
nothingConstr = mkConstr maybeDataType "Nothing" [] Prefix
justConstr :: Constr
justConstr = mkConstr maybeDataType "Just" [] Prefix
maybeDataType :: DataType
maybeDataType = mkDataType "Prelude.Maybe" [nothingConstr,justConstr]
instance Data a => Data (Maybe a) where
gfoldl _ z Nothing = z Nothing
gfoldl f z (Just x) = z Just `f` x
toConstr Nothing = nothingConstr
toConstr (Just _) = justConstr
gunfold k z c = case constrIndex c of
1 -> z Nothing
2 -> k (z Just)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Maybe)"
dataTypeOf _ = maybeDataType
dataCast1 f = gcast1 f
ltConstr :: Constr
ltConstr = mkConstr orderingDataType "LT" [] Prefix
eqConstr :: Constr
eqConstr = mkConstr orderingDataType "EQ" [] Prefix
gtConstr :: Constr
gtConstr = mkConstr orderingDataType "GT" [] Prefix
orderingDataType :: DataType
orderingDataType = mkDataType "Prelude.Ordering" [ltConstr,eqConstr,gtConstr]
instance Data Ordering where
gfoldl _ z LT = z LT
gfoldl _ z EQ = z EQ
gfoldl _ z GT = z GT
toConstr LT = ltConstr
toConstr EQ = eqConstr
toConstr GT = gtConstr
gunfold _ z c = case constrIndex c of
1 -> z LT
2 -> z EQ
3 -> z GT
_ -> errorWithoutStackTrace "Data.Data.gunfold(Ordering)"
dataTypeOf _ = orderingDataType
leftConstr :: Constr
leftConstr = mkConstr eitherDataType "Left" [] Prefix
rightConstr :: Constr
rightConstr = mkConstr eitherDataType "Right" [] Prefix
eitherDataType :: DataType
eitherDataType = mkDataType "Prelude.Either" [leftConstr,rightConstr]
instance (Data a, Data b) => Data (Either a b) where
gfoldl f z (Left a) = z Left `f` a
gfoldl f z (Right a) = z Right `f` a
toConstr (Left _) = leftConstr
toConstr (Right _) = rightConstr
gunfold k z c = case constrIndex c of
1 -> k (z Left)
2 -> k (z Right)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Either)"
dataTypeOf _ = eitherDataType
dataCast2 f = gcast2 f
tuple0Constr :: Constr
tuple0Constr = mkConstr tuple0DataType "()" [] Prefix
tuple0DataType :: DataType
tuple0DataType = mkDataType "Prelude.()" [tuple0Constr]
instance Data () where
toConstr () = tuple0Constr
gunfold _ z c | constrIndex c == 1 = z ()
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(unit)"
dataTypeOf _ = tuple0DataType
tuple2Constr :: Constr
tuple2Constr = mkConstr tuple2DataType "(,)" [] Infix
tuple2DataType :: DataType
tuple2DataType = mkDataType "Prelude.(,)" [tuple2Constr]
instance (Data a, Data b) => Data (a,b) where
gfoldl f z (a,b) = z (,) `f` a `f` b
toConstr (_,_) = tuple2Constr
gunfold k z c | constrIndex c == 1 = k (k (z (,)))
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(tup2)"
dataTypeOf _ = tuple2DataType
dataCast2 f = gcast2 f
tuple3Constr :: Constr
tuple3Constr = mkConstr tuple3DataType "(,,)" [] Infix
tuple3DataType :: DataType
tuple3DataType = mkDataType "Prelude.(,,)" [tuple3Constr]
instance (Data a, Data b, Data c) => Data (a,b,c) where
gfoldl f z (a,b,c) = z (,,) `f` a `f` b `f` c
toConstr (_,_,_) = tuple3Constr
gunfold k z c | constrIndex c == 1 = k (k (k (z (,,))))
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(tup3)"
dataTypeOf _ = tuple3DataType
tuple4Constr :: Constr
tuple4Constr = mkConstr tuple4DataType "(,,,)" [] Infix
tuple4DataType :: DataType
tuple4DataType = mkDataType "Prelude.(,,,)" [tuple4Constr]
instance (Data a, Data b, Data c, Data d)
=> Data (a,b,c,d) where
gfoldl f z (a,b,c,d) = z (,,,) `f` a `f` b `f` c `f` d
toConstr (_,_,_,_) = tuple4Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (z (,,,)))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup4)"
dataTypeOf _ = tuple4DataType
tuple5Constr :: Constr
tuple5Constr = mkConstr tuple5DataType "(,,,,)" [] Infix
tuple5DataType :: DataType
tuple5DataType = mkDataType "Prelude.(,,,,)" [tuple5Constr]
instance (Data a, Data b, Data c, Data d, Data e)
=> Data (a,b,c,d,e) where
gfoldl f z (a,b,c,d,e) = z (,,,,) `f` a `f` b `f` c `f` d `f` e
toConstr (_,_,_,_,_) = tuple5Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (k (z (,,,,))))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup5)"
dataTypeOf _ = tuple5DataType
tuple6Constr :: Constr
tuple6Constr = mkConstr tuple6DataType "(,,,,,)" [] Infix
tuple6DataType :: DataType
tuple6DataType = mkDataType "Prelude.(,,,,,)" [tuple6Constr]
instance (Data a, Data b, Data c, Data d, Data e, Data f)
=> Data (a,b,c,d,e,f) where
gfoldl f z (a,b,c,d,e,f') = z (,,,,,) `f` a `f` b `f` c `f` d `f` e `f` f'
toConstr (_,_,_,_,_,_) = tuple6Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (k (k (z (,,,,,)))))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup6)"
dataTypeOf _ = tuple6DataType
tuple7Constr :: Constr
tuple7Constr = mkConstr tuple7DataType "(,,,,,,)" [] Infix
tuple7DataType :: DataType
tuple7DataType = mkDataType "Prelude.(,,,,,,)" [tuple7Constr]
instance (Data a, Data b, Data c, Data d, Data e, Data f, Data g)
=> Data (a,b,c,d,e,f,g) where
gfoldl f z (a,b,c,d,e,f',g) =
z (,,,,,,) `f` a `f` b `f` c `f` d `f` e `f` f' `f` g
toConstr (_,_,_,_,_,_,_) = tuple7Constr
gunfold k z c = case constrIndex c of
1 -> k (k (k (k (k (k (k (z (,,,,,,))))))))
_ -> errorWithoutStackTrace "Data.Data.gunfold(tup7)"
dataTypeOf _ = tuple7DataType
instance Data a => Data (Ptr a) where
toConstr _ = errorWithoutStackTrace "Data.Data.toConstr(Ptr)"
gunfold _ _ = errorWithoutStackTrace "Data.Data.gunfold(Ptr)"
dataTypeOf _ = mkNoRepType "GHC.Ptr.Ptr"
dataCast1 x = gcast1 x
instance Data a => Data (ForeignPtr a) where
toConstr _ = errorWithoutStackTrace "Data.Data.toConstr(ForeignPtr)"
gunfold _ _ = errorWithoutStackTrace "Data.Data.gunfold(ForeignPtr)"
dataTypeOf _ = mkNoRepType "GHC.ForeignPtr.ForeignPtr"
dataCast1 x = gcast1 x
instance (Data a, Data b, Ix a) => Data (Array a b)
where
gfoldl f z a = z (listArray (bounds a)) `f` (elems a)
toConstr _ = errorWithoutStackTrace "Data.Data.toConstr(Array)"
gunfold _ _ = errorWithoutStackTrace "Data.Data.gunfold(Array)"
dataTypeOf _ = mkNoRepType "Data.Array.Array"
dataCast2 x = gcast2 x
proxyConstr :: Constr
proxyConstr = mkConstr proxyDataType "Proxy" [] Prefix
proxyDataType :: DataType
proxyDataType = mkDataType "Data.Proxy.Proxy" [proxyConstr]
instance (Data t) => Data (Proxy t) where
gfoldl _ z Proxy = z Proxy
toConstr Proxy = proxyConstr
gunfold _ z c = case constrIndex c of
1 -> z Proxy
_ -> errorWithoutStackTrace "Data.Data.gunfold(Proxy)"
dataTypeOf _ = proxyDataType
dataCast1 f = gcast1 f
reflConstr :: Constr
reflConstr = mkConstr equalityDataType "Refl" [] Prefix
equalityDataType :: DataType
equalityDataType = mkDataType "Data.Type.Equality.(:~:)" [reflConstr]
instance (a ~ b, Data a) => Data (a :~: b) where
gfoldl _ z Refl = z Refl
toConstr Refl = reflConstr
gunfold _ z c = case constrIndex c of
1 -> z Refl
_ -> errorWithoutStackTrace "Data.Data.gunfold(:~:)"
dataTypeOf _ = equalityDataType
dataCast2 f = gcast2 f
coercionConstr :: Constr
coercionConstr = mkConstr equalityDataType "Coercion" [] Prefix
coercionDataType :: DataType
coercionDataType = mkDataType "Data.Type.Coercion.Coercion" [coercionConstr]
instance (Coercible a b, Data a, Data b) => Data (Coercion a b) where
gfoldl _ z Coercion = z Coercion
toConstr Coercion = coercionConstr
gunfold _ z c = case constrIndex c of
1 -> z Coercion
_ -> errorWithoutStackTrace "Data.Data.gunfold(Coercion)"
dataTypeOf _ = coercionDataType
dataCast2 f = gcast2 f
versionConstr :: Constr
versionConstr = mkConstr versionDataType "Version" ["versionBranch","versionTags"] Prefix
versionDataType :: DataType
versionDataType = mkDataType "Data.Version.Version" [versionConstr]
instance Data Version where
gfoldl k z (Version bs ts) = z Version `k` bs `k` ts
toConstr (Version _ _) = versionConstr
gunfold k z c = case constrIndex c of
1 -> k (k (z Version))
_ -> errorWithoutStackTrace "Data.Data.gunfold(Version)"
dataTypeOf _ = versionDataType
dualConstr :: Constr
dualConstr = mkConstr dualDataType "Dual" ["getDual"] Prefix
dualDataType :: DataType
dualDataType = mkDataType "Data.Monoid.Dual" [dualConstr]
instance Data a => Data (Dual a) where
gfoldl f z (Dual x) = z Dual `f` x
gunfold k z _ = k (z Dual)
toConstr (Dual _) = dualConstr
dataTypeOf _ = dualDataType
dataCast1 f = gcast1 f
allConstr :: Constr
allConstr = mkConstr allDataType "All" ["getAll"] Prefix
allDataType :: DataType
allDataType = mkDataType "All" [allConstr]
instance Data All where
gfoldl f z (All x) = (z All `f` x)
gunfold k z _ = k (z All)
toConstr (All _) = allConstr
dataTypeOf _ = allDataType
anyConstr :: Constr
anyConstr = mkConstr anyDataType "Any" ["getAny"] Prefix
anyDataType :: DataType
anyDataType = mkDataType "Any" [anyConstr]
instance Data Any where
gfoldl f z (Any x) = (z Any `f` x)
gunfold k z _ = k (z Any)
toConstr (Any _) = anyConstr
dataTypeOf _ = anyDataType
sumConstr :: Constr
sumConstr = mkConstr sumDataType "Sum" ["getSum"] Prefix
sumDataType :: DataType
sumDataType = mkDataType "Data.Monoid.Sum" [sumConstr]
instance Data a => Data (Sum a) where
gfoldl f z (Sum x) = z Sum `f` x
gunfold k z _ = k (z Sum)
toConstr (Sum _) = sumConstr
dataTypeOf _ = sumDataType
dataCast1 f = gcast1 f
productConstr :: Constr
productConstr = mkConstr productDataType "Product" ["getProduct"] Prefix
productDataType :: DataType
productDataType = mkDataType "Data.Monoid.Product" [productConstr]
instance Data a => Data (Product a) where
gfoldl f z (Product x) = z Product `f` x
gunfold k z _ = k (z Product)
toConstr (Product _) = productConstr
dataTypeOf _ = productDataType
dataCast1 f = gcast1 f
firstConstr :: Constr
firstConstr = mkConstr firstDataType "First" ["getFirst"] Prefix
firstDataType :: DataType
firstDataType = mkDataType "Data.Monoid.First" [firstConstr]
instance Data a => Data (First a) where
gfoldl f z (First x) = (z First `f` x)
gunfold k z _ = k (z First)
toConstr (First _) = firstConstr
dataTypeOf _ = firstDataType
dataCast1 f = gcast1 f
lastConstr :: Constr
lastConstr = mkConstr lastDataType "Last" ["getLast"] Prefix
lastDataType :: DataType
lastDataType = mkDataType "Data.Monoid.Last" [lastConstr]
instance Data a => Data (Last a) where
gfoldl f z (Last x) = (z Last `f` x)
gunfold k z _ = k (z Last)
toConstr (Last _) = lastConstr
dataTypeOf _ = lastDataType
dataCast1 f = gcast1 f
altConstr :: Constr
altConstr = mkConstr altDataType "Alt" ["getAlt"] Prefix
altDataType :: DataType
altDataType = mkDataType "Alt" [altConstr]
instance (Data (f a), Data a, Typeable f) => Data (Alt f a) where
gfoldl f z (Alt x) = (z Alt `f` x)
gunfold k z _ = k (z Alt)
toConstr (Alt _) = altConstr
dataTypeOf _ = altDataType
instances for
u1Constr :: Constr
u1Constr = mkConstr u1DataType "U1" [] Prefix
u1DataType :: DataType
u1DataType = mkDataType "GHC.Generics.U1" [u1Constr]
instance Data p => Data (U1 p) where
gfoldl _ z U1 = z U1
toConstr U1 = u1Constr
gunfold _ z c = case constrIndex c of
1 -> z U1
_ -> errorWithoutStackTrace "Data.Data.gunfold(U1)"
dataTypeOf _ = u1DataType
dataCast1 f = gcast1 f
par1Constr :: Constr
par1Constr = mkConstr par1DataType "Par1" [] Prefix
par1DataType :: DataType
par1DataType = mkDataType "GHC.Generics.Par1" [par1Constr]
instance Data p => Data (Par1 p) where
gfoldl k z (Par1 p) = z Par1 `k` p
toConstr (Par1 _) = par1Constr
gunfold k z c = case constrIndex c of
1 -> k (z Par1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Par1)"
dataTypeOf _ = par1DataType
dataCast1 f = gcast1 f
rec1Constr :: Constr
rec1Constr = mkConstr rec1DataType "Rec1" [] Prefix
rec1DataType :: DataType
rec1DataType = mkDataType "GHC.Generics.Rec1" [rec1Constr]
instance (Data (f p), Typeable f, Data p) => Data (Rec1 f p) where
gfoldl k z (Rec1 p) = z Rec1 `k` p
toConstr (Rec1 _) = rec1Constr
gunfold k z c = case constrIndex c of
1 -> k (z Rec1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(Rec1)"
dataTypeOf _ = rec1DataType
dataCast1 f = gcast1 f
k1Constr :: Constr
k1Constr = mkConstr k1DataType "K1" [] Prefix
k1DataType :: DataType
k1DataType = mkDataType "GHC.Generics.K1" [k1Constr]
instance (Typeable i, Data p, Data c) => Data (K1 i c p) where
gfoldl k z (K1 p) = z K1 `k` p
toConstr (K1 _) = k1Constr
gunfold k z c = case constrIndex c of
1 -> k (z K1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(K1)"
dataTypeOf _ = k1DataType
dataCast1 f = gcast1 f
m1Constr :: Constr
m1Constr = mkConstr m1DataType "M1" [] Prefix
m1DataType :: DataType
m1DataType = mkDataType "GHC.Generics.M1" [m1Constr]
instance (Data p, Data (f p), Typeable c, Typeable i, Typeable f)
=> Data (M1 i c f p) where
gfoldl k z (M1 p) = z M1 `k` p
toConstr (M1 _) = m1Constr
gunfold k z c = case constrIndex c of
1 -> k (z M1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(M1)"
dataTypeOf _ = m1DataType
dataCast1 f = gcast1 f
sum1DataType :: DataType
sum1DataType = mkDataType "GHC.Generics.:+:" [l1Constr, r1Constr]
l1Constr :: Constr
l1Constr = mkConstr sum1DataType "L1" [] Prefix
r1Constr :: Constr
r1Constr = mkConstr sum1DataType "R1" [] Prefix
instance (Typeable f, Typeable g, Data p, Data (f p), Data (g p))
=> Data ((f :+: g) p) where
gfoldl k z (L1 a) = z L1 `k` a
gfoldl k z (R1 a) = z R1 `k` a
toConstr L1{} = l1Constr
toConstr R1{} = r1Constr
gunfold k z c = case constrIndex c of
1 -> k (z L1)
2 -> k (z R1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(:+:)"
dataTypeOf _ = sum1DataType
dataCast1 f = gcast1 f
comp1Constr :: Constr
comp1Constr = mkConstr comp1DataType "Comp1" [] Prefix
comp1DataType :: DataType
comp1DataType = mkDataType "GHC.Generics.:.:" [comp1Constr]
instance (Typeable f, Typeable g, Data p, Data (f (g p)))
=> Data ((f :.: g) p) where
gfoldl k z (Comp1 c) = z Comp1 `k` c
toConstr (Comp1 _) = m1Constr
gunfold k z c = case constrIndex c of
1 -> k (z Comp1)
_ -> errorWithoutStackTrace "Data.Data.gunfold(:.:)"
dataTypeOf _ = comp1DataType
dataCast1 f = gcast1 f
v1DataType :: DataType
v1DataType = mkDataType "GHC.Generics.V1" []
instance Data p => Data (V1 p) where
gfoldl _ _ !_ = undefined
toConstr !_ = undefined
gunfold _ _ _ = errorWithoutStackTrace "Data.Data.gunfold(V1)"
dataTypeOf _ = v1DataType
dataCast1 f = gcast1 f
prod1DataType :: DataType
prod1DataType = mkDataType "GHC.Generics.:*:" [prod1Constr]
prod1Constr :: Constr
prod1Constr = mkConstr prod1DataType "Prod1" [] Infix
instance (Typeable f, Typeable g, Data p, Data (f p), Data (g p))
=> Data ((f :*: g) p) where
gfoldl k z (l :*: r) = z (:*:) `k` l `k` r
toConstr _ = prod1Constr
gunfold k z c = case constrIndex c of
1 -> k (k (z (:*:)))
_ -> errorWithoutStackTrace "Data.Data.gunfold(:*:)"
dataCast1 f = gcast1 f
dataTypeOf _ = prod1DataType
prefixConstr :: Constr
prefixConstr = mkConstr fixityDataType "Prefix" [] Prefix
infixConstr :: Constr
infixConstr = mkConstr fixityDataType "Infix" [] Prefix
fixityDataType :: DataType
fixityDataType = mkDataType "GHC.Generics.Fixity" [prefixConstr,infixConstr]
instance Data Generics.Fixity where
gfoldl _ z Generics.Prefix = z Generics.Prefix
gfoldl f z (Generics.Infix a i) = z Generics.Infix `f` a `f` i
toConstr Generics.Prefix = prefixConstr
toConstr Generics.Infix{} = infixConstr
gunfold k z c = case constrIndex c of
1 -> z Generics.Prefix
2 -> k (k (z Generics.Infix))
_ -> errorWithoutStackTrace "Data.Data.gunfold(Fixity)"
dataTypeOf _ = fixityDataType
leftAssociativeConstr :: Constr
leftAssociativeConstr
= mkConstr associativityDataType "LeftAssociative" [] Prefix
rightAssociativeConstr :: Constr
rightAssociativeConstr
= mkConstr associativityDataType "RightAssociative" [] Prefix
notAssociativeConstr :: Constr
notAssociativeConstr
= mkConstr associativityDataType "NotAssociative" [] Prefix
associativityDataType :: DataType
associativityDataType = mkDataType "GHC.Generics.Associativity"
[leftAssociativeConstr,rightAssociativeConstr,notAssociativeConstr]
instance Data Associativity where
gfoldl _ z LeftAssociative = z LeftAssociative
gfoldl _ z RightAssociative = z RightAssociative
gfoldl _ z NotAssociative = z NotAssociative
toConstr LeftAssociative = leftAssociativeConstr
toConstr RightAssociative = rightAssociativeConstr
toConstr NotAssociative = notAssociativeConstr
gunfold _ z c = case constrIndex c of
1 -> z LeftAssociative
2 -> z RightAssociative
3 -> z NotAssociative
_ -> errorWithoutStackTrace
"Data.Data.gunfold(Associativity)"
dataTypeOf _ = associativityDataType
noSourceUnpackednessConstr :: Constr
noSourceUnpackednessConstr
= mkConstr sourceUnpackednessDataType "NoSourceUnpackedness" [] Prefix
sourceNoUnpackConstr :: Constr
sourceNoUnpackConstr
= mkConstr sourceUnpackednessDataType "SourceNoUnpack" [] Prefix
sourceUnpackConstr :: Constr
sourceUnpackConstr
= mkConstr sourceUnpackednessDataType "SourceUnpack" [] Prefix
sourceUnpackednessDataType :: DataType
sourceUnpackednessDataType = mkDataType "GHC.Generics.SourceUnpackedness"
[noSourceUnpackednessConstr,sourceNoUnpackConstr,sourceUnpackConstr]
instance Data SourceUnpackedness where
gfoldl _ z NoSourceUnpackedness = z NoSourceUnpackedness
gfoldl _ z SourceNoUnpack = z SourceNoUnpack
gfoldl _ z SourceUnpack = z SourceUnpack
toConstr NoSourceUnpackedness = noSourceUnpackednessConstr
toConstr SourceNoUnpack = sourceNoUnpackConstr
toConstr SourceUnpack = sourceUnpackConstr
gunfold _ z c = case constrIndex c of
1 -> z NoSourceUnpackedness
2 -> z SourceNoUnpack
3 -> z SourceUnpack
_ -> errorWithoutStackTrace
"Data.Data.gunfold(SourceUnpackedness)"
dataTypeOf _ = sourceUnpackednessDataType
noSourceStrictnessConstr :: Constr
noSourceStrictnessConstr
= mkConstr sourceStrictnessDataType "NoSourceStrictness" [] Prefix
sourceLazyConstr :: Constr
sourceLazyConstr
= mkConstr sourceStrictnessDataType "SourceLazy" [] Prefix
sourceStrictConstr :: Constr
sourceStrictConstr
= mkConstr sourceStrictnessDataType "SourceStrict" [] Prefix
sourceStrictnessDataType :: DataType
sourceStrictnessDataType = mkDataType "GHC.Generics.SourceStrictness"
[noSourceStrictnessConstr,sourceLazyConstr,sourceStrictConstr]
instance Data SourceStrictness where
gfoldl _ z NoSourceStrictness = z NoSourceStrictness
gfoldl _ z SourceLazy = z SourceLazy
gfoldl _ z SourceStrict = z SourceStrict
toConstr NoSourceStrictness = noSourceStrictnessConstr
toConstr SourceLazy = sourceLazyConstr
toConstr SourceStrict = sourceStrictConstr
gunfold _ z c = case constrIndex c of
1 -> z NoSourceStrictness
2 -> z SourceLazy
3 -> z SourceStrict
_ -> errorWithoutStackTrace
"Data.Data.gunfold(SourceStrictness)"
dataTypeOf _ = sourceStrictnessDataType
decidedLazyConstr :: Constr
decidedLazyConstr
= mkConstr decidedStrictnessDataType "DecidedLazy" [] Prefix
decidedStrictConstr :: Constr
decidedStrictConstr
= mkConstr decidedStrictnessDataType "DecidedStrict" [] Prefix
decidedUnpackConstr :: Constr
decidedUnpackConstr
= mkConstr decidedStrictnessDataType "DecidedUnpack" [] Prefix
decidedStrictnessDataType :: DataType
decidedStrictnessDataType = mkDataType "GHC.Generics.DecidedStrictness"
[decidedLazyConstr,decidedStrictConstr,decidedUnpackConstr]
instance Data DecidedStrictness where
gfoldl _ z DecidedLazy = z DecidedLazy
gfoldl _ z DecidedStrict = z DecidedStrict
gfoldl _ z DecidedUnpack = z DecidedUnpack
toConstr DecidedLazy = decidedLazyConstr
toConstr DecidedStrict = decidedStrictConstr
toConstr DecidedUnpack = decidedUnpackConstr
gunfold _ z c = case constrIndex c of
1 -> z DecidedLazy
2 -> z DecidedStrict
3 -> z DecidedUnpack
_ -> errorWithoutStackTrace
"Data.Data.gunfold(DecidedStrictness)"
dataTypeOf _ = decidedStrictnessDataType
|
5c98db63d0b8a923196a5618b31eebb75275652e7820821ae10307d187f13247 | erlscripten/erlscripten | erlps_parse_transform.erl | -module(erlps_parse_transform).
-author("gorbak25").
-export([parse_transform/2]).
parse_transform(Forms, Options) ->
io:format(user, "~p\n", [Options]),
code:ensure_loaded(erlps_logger),
code:ensure_loaded(erlps_utils),
code:ensure_loaded(erlps_purescript_pretty),
code:ensure_loaded(erlps_transpiler),
application:ensure_started(erlscripten),
Attributes = erlps_transpiler:filter_module_attributes(Forms),
{FileName, _} = proplists:get_value(file, Attributes),
try
case proplists:get_value(erlscripten_output, Attributes) of
undefined ->
erlps_logger:die(FileName,
"Please add `-erlscripten_output(DIRECTORY).` to indicate where "
"the autogenerated spago project will be placed\n");
Dir ->
ModuleName = proplists:get_value(module, Attributes),
[BasePath | _] =
string:split(
proplists:get_value(outdir, Options), "_build"),
OutDir = filename:join(BasePath, Dir),
erlps_utils:generate_template(OutDir),
SrcDir = filename:join(OutDir, "src"),
Create the Purescript module
PursModuleFile =
filename:join(SrcDir, erlps_transpiler:erlang_module_to_purs_file(ModuleName)),
file:delete(PursModuleFile),
{ok, Handle} = file:open(PursModuleFile, [write]),
PSAst = erlps_transpiler:transpile_erlang_module(Forms),
TxtModule = erlps_purescript_pretty:format_module(PSAst),
file:write(Handle, TxtModule)
end,
Forms
catch
Error:Reason:StackTrace ->
erlps_logger:die(FileName,
io_lib:format("Error: ~s\nReason: ~p\nStacktrace: ~p\n",
[atom_to_list(Error), Reason, StackTrace]))
end.
| null | https://raw.githubusercontent.com/erlscripten/erlscripten/6b70be39dc51c21388dd7bae1eb46a0d306ecdcc/src/erlps_parse_transform.erl | erlang | -module(erlps_parse_transform).
-author("gorbak25").
-export([parse_transform/2]).
parse_transform(Forms, Options) ->
io:format(user, "~p\n", [Options]),
code:ensure_loaded(erlps_logger),
code:ensure_loaded(erlps_utils),
code:ensure_loaded(erlps_purescript_pretty),
code:ensure_loaded(erlps_transpiler),
application:ensure_started(erlscripten),
Attributes = erlps_transpiler:filter_module_attributes(Forms),
{FileName, _} = proplists:get_value(file, Attributes),
try
case proplists:get_value(erlscripten_output, Attributes) of
undefined ->
erlps_logger:die(FileName,
"Please add `-erlscripten_output(DIRECTORY).` to indicate where "
"the autogenerated spago project will be placed\n");
Dir ->
ModuleName = proplists:get_value(module, Attributes),
[BasePath | _] =
string:split(
proplists:get_value(outdir, Options), "_build"),
OutDir = filename:join(BasePath, Dir),
erlps_utils:generate_template(OutDir),
SrcDir = filename:join(OutDir, "src"),
Create the Purescript module
PursModuleFile =
filename:join(SrcDir, erlps_transpiler:erlang_module_to_purs_file(ModuleName)),
file:delete(PursModuleFile),
{ok, Handle} = file:open(PursModuleFile, [write]),
PSAst = erlps_transpiler:transpile_erlang_module(Forms),
TxtModule = erlps_purescript_pretty:format_module(PSAst),
file:write(Handle, TxtModule)
end,
Forms
catch
Error:Reason:StackTrace ->
erlps_logger:die(FileName,
io_lib:format("Error: ~s\nReason: ~p\nStacktrace: ~p\n",
[atom_to_list(Error), Reason, StackTrace]))
end.
|
|
9e853904bb65f7e230b76a2868f71454e137b9e130411333319ad58be9a7fbf5 | jordanthayer/ocaml-search | deps.ml | * Handling of ocamldep dependency files .
@author eaburns
@since 2010 - 08 - 24
@author eaburns
@since 2010-08-24
*)
open Printf
open Scanf
* { 1 Parsing dependency files }
let isspace = function
| ' ' | '\t' | '\r' | '\n' -> true
| _ -> false
(** [eat_space_not_newline inch] eats all the whitespace that is not
a newline. *)
let eat_space_not_newline inch =
let rec do_eat inch = function
| c when (isspace c) && c <> '\n' -> do_eat inch (input_char inch)
| c -> c
in do_eat inch (input_char inch)
* [ ] gets the next token from the input file .
let next_token inch =
let rec handle_char b inch = function
| '\n' -> Buffer.contents b, true
| c when isspace c ->
Buffer.contents b, false
| c ->
Buffer.add_char b c;
handle_char b inch (input_char inch)
in
let b = Buffer.create 10 in
try handle_char b inch (eat_space_not_newline inch)
with End_of_file ->
if (Buffer.length b) = 0
then raise End_of_file
else Buffer.contents b, false
(** [read_depends see_depend src inch] reads the dependency lines
from the file and calls [see_depend] on each dependency. *)
let read_depends see_depend src inch =
let rec handle_token escaped (t, nl) =
let len = String.length t in
if t = "\\"
then handle_token true (next_token inch)
else begin
let escaped' = if len > 0 then false else escaped in
if len > 0 then see_depend src t;
if not nl || escaped' then handle_token escaped' (next_token inch)
end
in try handle_token false (next_token inch) with End_of_file -> ()
(** [read see_node see_depend inch] reads the dependency file and
calls [see_depend] on each dependency. *)
let read see_node see_depend inch =
let rec do_read see_depend inch =
let t, nl = next_token inch in
let len = String.length t in
if t.[len - 1] <> ':' then failwith "Malformed dependency file";
let src = String.sub t 0 (len - 1) in
see_node src;
if not nl then read_depends see_depend src inch;
do_read see_depend inch
in try do_read see_depend inch with End_of_file -> ()
(** [load see_node see_depend file] loads the given dependency
file. *)
let load see_node see_depend file =
let inch = open_in file in
read see_node see_depend inch;
close_in inch
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/ocm2/deps.ml | ocaml | * [eat_space_not_newline inch] eats all the whitespace that is not
a newline.
* [read_depends see_depend src inch] reads the dependency lines
from the file and calls [see_depend] on each dependency.
* [read see_node see_depend inch] reads the dependency file and
calls [see_depend] on each dependency.
* [load see_node see_depend file] loads the given dependency
file. | * Handling of ocamldep dependency files .
@author eaburns
@since 2010 - 08 - 24
@author eaburns
@since 2010-08-24
*)
open Printf
open Scanf
* { 1 Parsing dependency files }
let isspace = function
| ' ' | '\t' | '\r' | '\n' -> true
| _ -> false
let eat_space_not_newline inch =
let rec do_eat inch = function
| c when (isspace c) && c <> '\n' -> do_eat inch (input_char inch)
| c -> c
in do_eat inch (input_char inch)
* [ ] gets the next token from the input file .
let next_token inch =
let rec handle_char b inch = function
| '\n' -> Buffer.contents b, true
| c when isspace c ->
Buffer.contents b, false
| c ->
Buffer.add_char b c;
handle_char b inch (input_char inch)
in
let b = Buffer.create 10 in
try handle_char b inch (eat_space_not_newline inch)
with End_of_file ->
if (Buffer.length b) = 0
then raise End_of_file
else Buffer.contents b, false
let read_depends see_depend src inch =
let rec handle_token escaped (t, nl) =
let len = String.length t in
if t = "\\"
then handle_token true (next_token inch)
else begin
let escaped' = if len > 0 then false else escaped in
if len > 0 then see_depend src t;
if not nl || escaped' then handle_token escaped' (next_token inch)
end
in try handle_token false (next_token inch) with End_of_file -> ()
let read see_node see_depend inch =
let rec do_read see_depend inch =
let t, nl = next_token inch in
let len = String.length t in
if t.[len - 1] <> ':' then failwith "Malformed dependency file";
let src = String.sub t 0 (len - 1) in
see_node src;
if not nl then read_depends see_depend src inch;
do_read see_depend inch
in try do_read see_depend inch with End_of_file -> ()
let load see_node see_depend file =
let inch = open_in file in
read see_node see_depend inch;
close_in inch
|
16e63ab11a6fe4fb38fba2908342b61d883871fe1fc64b9be672a2ce94dbad22 | sim642/odep | dot_graph.ml | open Common
module G =
struct
module VV = V
include G
let graph_attributes _ = [`Compound true]
let vertex_attributes = function
| VV.Executable _ ->
[`Shape `Diamond]
| Library {local = true; _} ->
[]
| Library {local = false; _} ->
[`Style `Filled]
| Module {name; _} ->
[`Shape `Box; `Label name]
| LocalPackageCluster ->
[`Fixedsize true; `Width 0.; `Height 0.; `Style `Invis; `Label ""]
| OpamPackage _ ->
[`Shape `Box]
let default_vertex_attributes _ = []
let default_edge_attributes _ = []
let rec vertex_name = function
| VV.Executable {name; _} -> name
| Library {name; _} -> name
| Module {name; parent} -> vertex_name parent ^ "__" ^ name
| LocalPackageCluster -> "local_package__"
| OpamPackage name -> name
let local_package_subgraph = string_of_int (Hashtbl.hash (show_package Local))
let get_subgraph = function
| VV.Module {parent; _} ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = string_of_int (V.hash parent); sg_attributes = [`Label (vertex_name parent)]; sg_parent = Some local_package_subgraph}
| (Library {local = true; _} | Executable _) as v ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = string_of_int (V.hash v); sg_attributes = [`Label (vertex_name v)]; sg_parent = Some local_package_subgraph}
| Library {local = false; package; _} ->
begin match package with
| Some package ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = string_of_int (Hashtbl.hash package); sg_attributes = [`Label (show_package package)]; sg_parent = None}
| None -> None
end
| LocalPackageCluster ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = local_package_subgraph; sg_attributes = [`Label (show_package Local)]; sg_parent = None}
| OpamPackage _ ->
None
let vertex_name v = Printf.sprintf "\"%s\"" (vertex_name v)
let edge_attributes (u, v) =
let ltail =
match u with
| VV.Library {local = true; _} | Executable _ ->
let su = Option.get (get_subgraph u) in
[`Ltail su.sg_name]
| _ -> []
in
let lhead =
match v with
| VV.Library {local = true; _} | Executable _ ->
let sv = Option.get (get_subgraph v) in
[`Lhead sv.sg_name]
| _ -> []
in
let minlen =
match u, v with
| (VV.Library {local = true; _} | Executable _), (VV.Library {local = true; _} | Executable _) ->
[`Minlen 2]
| _ -> []
in
match get_subgraph u, get_subgraph v with
| Some su, Some sv when su = sv -> minlen
| _, _ -> ltail @ lhead @ minlen
end
| null | https://raw.githubusercontent.com/sim642/odep/48a98ef26e508510cd25f8fcaabb8f840643bd4e/src/depgraph/dot_graph.ml | ocaml | open Common
module G =
struct
module VV = V
include G
let graph_attributes _ = [`Compound true]
let vertex_attributes = function
| VV.Executable _ ->
[`Shape `Diamond]
| Library {local = true; _} ->
[]
| Library {local = false; _} ->
[`Style `Filled]
| Module {name; _} ->
[`Shape `Box; `Label name]
| LocalPackageCluster ->
[`Fixedsize true; `Width 0.; `Height 0.; `Style `Invis; `Label ""]
| OpamPackage _ ->
[`Shape `Box]
let default_vertex_attributes _ = []
let default_edge_attributes _ = []
let rec vertex_name = function
| VV.Executable {name; _} -> name
| Library {name; _} -> name
| Module {name; parent} -> vertex_name parent ^ "__" ^ name
| LocalPackageCluster -> "local_package__"
| OpamPackage name -> name
let local_package_subgraph = string_of_int (Hashtbl.hash (show_package Local))
let get_subgraph = function
| VV.Module {parent; _} ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = string_of_int (V.hash parent); sg_attributes = [`Label (vertex_name parent)]; sg_parent = Some local_package_subgraph}
| (Library {local = true; _} | Executable _) as v ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = string_of_int (V.hash v); sg_attributes = [`Label (vertex_name v)]; sg_parent = Some local_package_subgraph}
| Library {local = false; package; _} ->
begin match package with
| Some package ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = string_of_int (Hashtbl.hash package); sg_attributes = [`Label (show_package package)]; sg_parent = None}
| None -> None
end
| LocalPackageCluster ->
Some {Ocamlgraph_extra.Graphviz.DotAttributes.sg_name = local_package_subgraph; sg_attributes = [`Label (show_package Local)]; sg_parent = None}
| OpamPackage _ ->
None
let vertex_name v = Printf.sprintf "\"%s\"" (vertex_name v)
let edge_attributes (u, v) =
let ltail =
match u with
| VV.Library {local = true; _} | Executable _ ->
let su = Option.get (get_subgraph u) in
[`Ltail su.sg_name]
| _ -> []
in
let lhead =
match v with
| VV.Library {local = true; _} | Executable _ ->
let sv = Option.get (get_subgraph v) in
[`Lhead sv.sg_name]
| _ -> []
in
let minlen =
match u, v with
| (VV.Library {local = true; _} | Executable _), (VV.Library {local = true; _} | Executable _) ->
[`Minlen 2]
| _ -> []
in
match get_subgraph u, get_subgraph v with
| Some su, Some sv when su = sv -> minlen
| _, _ -> ltail @ lhead @ minlen
end
|
|
50c86cd16ff7ae800297c31125cd7bb6471de3a2653f889d499f075c211a49b0 | robert-strandh/Cluster | operand.lisp | (cl:in-package #:cluster.disassembler)
(declaim (inline %operand-size %operand-name))
(defun %operand-size (operand-descriptor)
(second operand-descriptor))
(defun %operand-name (operand-descriptor)
(first operand-descriptor))
(defgeneric read-operand (interpreter encoding operand-descriptor candidates))
(defmethod read-operand (interpreter (encoding (eql 'c:modrm))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(decode-r/m-with-32/64-addressing interpreter
(%operand-size operand-descriptor)))
(defmethod read-operand (interpreter (encoding (eql 'c:reg))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(cluster:make-gpr-operand
(%operand-size operand-descriptor)
(register-number<-rex+modrm (rex-value (state-object interpreter))
(modrm-byte interpreter))))
(defmethod read-operand (buffer (encoding (eql 'c:imm))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(c:make-immediate-operand
(if (eql 'c:imm (%operand-name operand-descriptor))
(read-unsigned-integer buffer (%operand-size operand-descriptor))
(read-signed-integer buffer (%operand-size operand-descriptor)))))
(defmethod read-operand (buffer (encoding (eql 'c:-)) operand-descriptor
candidates)
(declare (ignore encoding operand-descriptor))
;; we only know of GPR-A and also we aren't sure of how to represent
;; the operand position at this point and candidates is only being
;; passed to this GF for this situtation.
(let ((gpr-a (assoc 'c:gpr-a (c:operands (first candidates)))))
(assert (not (null gpr-a)))
(c:make-gpr-operand (cadr gpr-a) 0)))
(defmethod read-operand (buffer (encoding (eql 'c:label))
operand-descriptor candidates)
(declare (ignore encoding candidates))
;; Label does not mean RIP-relative addressing via modrm
;; it means displacement immediatley following the instruction opcodes
with no modrm / sib present .
in the intel manual this is described as ' rel ' ( 8 , 16 or 32 ) in 3.1.1.3
(let ((displacement
(read-signed-integer buffer (%operand-size operand-descriptor))))
(intern-label buffer displacement)))
(defmethod read-operand (interpreter (encoding (eql 'c:+r))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(c:make-gpr-operand
(%operand-size operand-descriptor)
(+ (ldb (byte 3 0) (last-opcode-byte interpreter))
(ash (rex.b (rex-value (state-object interpreter)))
3))))
| null | https://raw.githubusercontent.com/robert-strandh/Cluster/370410b1c685f2afd77f959a46ba49923a31a33c/Disassembler/operand.lisp | lisp | we only know of GPR-A and also we aren't sure of how to represent
the operand position at this point and candidates is only being
passed to this GF for this situtation.
Label does not mean RIP-relative addressing via modrm
it means displacement immediatley following the instruction opcodes | (cl:in-package #:cluster.disassembler)
(declaim (inline %operand-size %operand-name))
(defun %operand-size (operand-descriptor)
(second operand-descriptor))
(defun %operand-name (operand-descriptor)
(first operand-descriptor))
(defgeneric read-operand (interpreter encoding operand-descriptor candidates))
(defmethod read-operand (interpreter (encoding (eql 'c:modrm))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(decode-r/m-with-32/64-addressing interpreter
(%operand-size operand-descriptor)))
(defmethod read-operand (interpreter (encoding (eql 'c:reg))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(cluster:make-gpr-operand
(%operand-size operand-descriptor)
(register-number<-rex+modrm (rex-value (state-object interpreter))
(modrm-byte interpreter))))
(defmethod read-operand (buffer (encoding (eql 'c:imm))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(c:make-immediate-operand
(if (eql 'c:imm (%operand-name operand-descriptor))
(read-unsigned-integer buffer (%operand-size operand-descriptor))
(read-signed-integer buffer (%operand-size operand-descriptor)))))
(defmethod read-operand (buffer (encoding (eql 'c:-)) operand-descriptor
candidates)
(declare (ignore encoding operand-descriptor))
(let ((gpr-a (assoc 'c:gpr-a (c:operands (first candidates)))))
(assert (not (null gpr-a)))
(c:make-gpr-operand (cadr gpr-a) 0)))
(defmethod read-operand (buffer (encoding (eql 'c:label))
operand-descriptor candidates)
(declare (ignore encoding candidates))
with no modrm / sib present .
in the intel manual this is described as ' rel ' ( 8 , 16 or 32 ) in 3.1.1.3
(let ((displacement
(read-signed-integer buffer (%operand-size operand-descriptor))))
(intern-label buffer displacement)))
(defmethod read-operand (interpreter (encoding (eql 'c:+r))
operand-descriptor candidates)
(declare (ignore encoding candidates))
(c:make-gpr-operand
(%operand-size operand-descriptor)
(+ (ldb (byte 3 0) (last-opcode-byte interpreter))
(ash (rex.b (rex-value (state-object interpreter)))
3))))
|
c7917554ab6f83b8a0f0d6eb1b969a06b8660cc31b48a1b7d612a6e45516406c | janestreet/universe | low_level_process.ml | open Core
open Poly
module Sys = Caml.Sys
let rec temp_failure_retry f =
try
f ()
with Unix.Unix_error (EINTR, _, _) -> temp_failure_retry f
let close_non_intr fd =
temp_failure_retry (fun () -> Unix.close fd)
(* Creates a unix pipe with both sides set close on exec *)
let cloexec_pipe () =
let (fd1,fd2) as res = Unix.pipe () in
Unix.set_close_on_exec fd1;
Unix.set_close_on_exec fd2;
res
module Process_info = struct
type t = {
pid : Pid.t;
stdin : Unix.File_descr.t;
stdout : Unix.File_descr.t;
stderr : Unix.File_descr.t;
}
end
We use a slightly more powerful version of create process than the one in
core . This version is not quite as carefuly code reviewed but allows us to
have more control over the forked side of the process ( e.g. : ) .
core. This version is not quite as carefuly code reviewed but allows us to
have more control over the forked side of the process (e.g.: chdir).
*)
let internal_create_process ?working_dir ?setuid ?setgid ~env ~prog ~args () =
let close_on_err = ref [] in
try
let (in_read, in_write) = cloexec_pipe () in
close_on_err := in_read :: in_write :: !close_on_err;
let (out_read, out_write) = cloexec_pipe () in
close_on_err := out_read :: out_write :: !close_on_err;
let (err_read, err_write) = cloexec_pipe () in
close_on_err := err_read :: err_write :: !close_on_err;
let pid = Unix_extended.fork_exec
prog
args
?working_dir
?setuid
?setgid
~env
~stdin:in_read
~stdout:out_write
~stderr:err_write
in
close_non_intr in_read;
close_non_intr out_write;
close_non_intr err_write;
{
Process_info.pid = pid;
stdin = in_write;
stdout = out_read;
stderr = err_read
}
with e ->
List.iter
~f:(fun fd -> try close_non_intr fd with _ -> ())
!close_on_err;
raise e
(**
Remembers the last n-characters appended to it....
*)
module Tail_buffer = struct
(** remembers the output in a circular buffer.
looped is used to tell whether we loop around the
boundary of the buffer.
*)
type t = {
buffer : Bytes.t;
length : int;
mutable looped : bool;
mutable position : int;
}
let contents b =
if not b.looped then
Bytes.To_string.sub b.buffer ~pos:0 ~len:b.position
else
let dst = Bytes.create (b.length + 3) in
Bytes.set dst 0 '.';
Bytes.set dst 1 '.';
Bytes.set dst 2 '.';
Bytes.blit
~src:b.buffer
~dst ~dst_pos:3
~src_pos:b.position
~len:(b.length - b.position);
Bytes.blit ~src:b.buffer
~dst
~dst_pos:(b.length - b.position + 3)
~src_pos:0
~len:(b.position);
Bytes.unsafe_to_string ~no_mutation_while_string_reachable:dst
let create len = {
buffer = Bytes.create len;
length = len;
looped = false;
position = 0
}
let add b src len =
if b.length <= len then begin
Bytes.blit
~src
~dst:b.buffer
~dst_pos:0
~src_pos:(len - b.length)
~len:(b.length);
b.looped <- true;
b.position <- 0
end else
let leftover = b.length - b.position in
if (len < leftover) then begin
Bytes.blit ~src ~dst:b.buffer ~dst_pos:b.position ~src_pos:0 ~len;
b.position <- b.position + len;
end else begin
Bytes.blit ~src ~dst:b.buffer ~dst_pos:b.position ~src_pos:0
~len:leftover;
b.looped <- true;
let len = (len-leftover) in
Bytes.blit ~src ~dst:b.buffer ~dst_pos:0 ~src_pos:leftover ~len;
b.position <- len
end
end
module Status = struct
type t =
[ `Timeout of Time.Span.t
| `Exited of int
| `Signaled of Signal.t
(* WStopped is impossible*)
]
[@@deriving sexp_of]
let to_string = function
| `Exited i -> sprintf "exited with code %d" i
| `Signaled s ->
sprintf !"died after receiving %{Signal} (signal number %d)"
s (Signal.to_system_int s)
| `Timeout s -> sprintf !"Timed out (ran for %{Time.Span})" s
end
module Command_result = struct
type t= {
status: Status.t;
stdout_tail : string;
stderr_tail : string
}
end
let waitpid_nohang pid =
match Unix.wait_nohang (`Pid pid) with
| None -> None
| Some (v, res) -> assert Pid.(v = pid); Some res
(** wait for a given pid to exit;
returns true when the process exits and false if the process is still runing
after waiting for [span]
*)
let wait_for_exit ~is_child span pid =
let end_time = Time.add (Time.now ()) span in
let exited () =
if is_child then begin
match waitpid_nohang pid with
| None -> true
| Some _ -> false
end else
(* This is the equivalent of calling the C kill with 0 (test whether a process
exists) *)
match Signal.send (Signal.of_system_int 0) (`Pid pid) with
| `Ok -> true
| `No_such_process -> false
in
let rec loop () =
if Time.(>) (Time.now ()) end_time then
false
(*We need to explicitely waitpid the child otherwise we are sending
signals to a zombie*)
else if not (exited ()) then true
else begin
Time.pause (sec 0.1);
loop ()
end
in
loop ()
let kill
?(is_child=false)
?(wait_for=sec 2.0)
?(signal = Signal.term)
pid
=
Signal.send_exn signal (`Pid pid);
if not (wait_for_exit ~is_child wait_for pid) then begin
begin
match
Signal.send Signal.kill (`Pid pid)
with
| `No_such_process ->
if is_child then
failwith "Process.kill got `No_such_process even though the process was a \
child we never waited for"
| `Ok -> ()
end;
if not (wait_for_exit ~is_child wait_for pid) then begin
failwithf "Process.kill failed to kill %i%s"
(Pid.to_int pid)
(if is_child then "" else
" (or the process wasn't collected by its parent)")
()
end
end
type t = {
mutable open_fds : Unix.File_descr.t list;
mutable in_fds : Unix.File_descr.t list;
mutable out_fds : Unix.File_descr.t list;
keep_open : bool;
buf : Bytes.t;
in_cnt : String.t;
in_len : int;
out_callbacks : (Unix.File_descr.t*(Bytes.t -> int -> unit)) list;
pid : Pid.t;
mutable in_pos : int;
}
let close_pooled state fd =
if List.mem state.open_fds fd ~equal:Unix.File_descr.equal then
close_non_intr fd;
state.open_fds <- List.filter ~f:((<>) fd) state.open_fds;
state.out_fds <- List.filter ~f:((<>) fd) state.out_fds;
state.in_fds <- List.filter ~f:((<>) fd) state.in_fds
let process_io ~read ~write state =
List.iter write
~f:(fun fd ->
(try
let len =
temp_failure_retry (fun () ->
Unix.single_write_substring fd
~buf:state.in_cnt
~pos:state.in_pos
~len:(state.in_len - state.in_pos))
in
state.in_pos <- state.in_pos + len;
(* Close the process's in_channel iff we are done writing to it*)
if len = 0 then
if state.keep_open then
state.in_fds <- List.filter ~f:((<>) fd) state.in_fds
else
close_pooled state fd
with Unix.Unix_error (EPIPE, _, _) -> close_pooled state fd));
List.iter read
~f:(fun fd ->
let len =
temp_failure_retry
(fun () -> Unix.read fd
~buf:state.buf
~pos:0
~len:(Bytes.length state.buf))
in
if len = 0 then
close_pooled state fd
else
let callback =
List.Assoc.find_exn ~equal:Unix.File_descr.equal state.out_callbacks fd in
callback state.buf len)
let available_fds =
let use_select state ~timeout =
let { Unix.Select_fds. read; write; _; } =
temp_failure_retry (fun () ->
Unix.select
~read:state.out_fds
~write:state.in_fds
~except:[]
~timeout ())
in
read,write
in
let use_epoll epoll_create = fun state ~timeout ->
let module Epoll = Linux_ext.Epoll in
let timeout =
match timeout with
| (`Immediately | `Never) as timeout -> timeout
| `After span -> `After span
in
let epoll_t =
let fds = List.map ~f:Unix.File_descr.to_int (state.in_fds @ state.out_fds) in
let max_ready_events = List.length fds in
let num_file_descrs = 1 + List.fold ~init:max_ready_events ~f:Int.max fds in
epoll_create ~num_file_descrs ~max_ready_events
in
List.iter state.in_fds ~f:(fun fd -> Epoll.set epoll_t fd Epoll.Flags.out);
List.iter state.out_fds ~f:(fun fd -> Epoll.set epoll_t fd Epoll.Flags.in_);
let read, write =
match temp_failure_retry (fun () -> Epoll.wait epoll_t ~timeout) with
| `Timeout -> ([], [])
| `Ok -> Epoll.fold_ready epoll_t ~init:([], []) ~f:(fun (read, write) fd flags ->
let take_matching_flags acc fd flags ~wanted =
if Epoll.Flags.do_intersect wanted flags
then fd :: acc
else acc
in
let read = take_matching_flags read fd flags ~wanted:Epoll.Flags.in_ in
let write = take_matching_flags write fd flags ~wanted:Epoll.Flags.out in
(read, write))
in
Epoll.close epoll_t;
(read, write)
in
match Linux_ext.Epoll.create with
| Error _ -> use_select
| Ok epoll_create -> use_epoll epoll_create
;;
let create
~keep_open
~use_extra_path
~working_dir
~setuid
~setgid
~prog
~args
~stdoutf
~stderrf
~input_string
~env =
let full_prog = Shell_internal.path_expand ?use_extra_path prog in
let process_info =
internal_create_process
?working_dir ?setuid ?setgid
~env ~prog:full_prog ~args ()
in
let out_fd = process_info.Process_info.stdout
and in_fd = process_info.Process_info.stdin
and err_fd = process_info.Process_info.stderr
and pid = process_info.Process_info.pid in
{
keep_open;
open_fds = [in_fd;out_fd;err_fd];
in_fds = [in_fd];
out_fds = [err_fd;out_fd];
buf = Bytes.create 4096;
in_cnt = input_string;
in_pos = 0;
in_len = String.length input_string;
out_callbacks = [out_fd,stdoutf;
err_fd,stderrf];
pid
}
let rec finish_reading state =
match available_fds state ~timeout:`Immediately with
| [] ,_ -> ()
| read,_ ->
process_io state ~read ~write:[];
finish_reading state
let rec run_loop ~start_time ~timeout state =
let read,write = available_fds state ~timeout:(`After (Time_ns.Span.of_sec 0.1)) in
begin
try
process_io state ~read ~write
with e ->
kill ~is_child:true state.pid;
raise e
end;
let elapsed = Time.diff (Time.now ()) start_time in
match timeout with
| Some timeout when Time.Span.(elapsed > timeout) ->
kill ~is_child:true state.pid;
finish_reading state;
`Timeout elapsed
| None | Some _ ->
match waitpid_nohang state.pid with
| None -> run_loop ~start_time ~timeout state
| Some status ->
finish_reading state;
match status with
| Ok () -> `Exited 0
| Error (`Exit_non_zero i) -> `Exited i
| Error (`Signal s) -> `Signaled s
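(* Runs [prog] with [args], feeding [input] to its stdin and streaming stdout
   and stderr to the callbacks while retaining the last [tail_len] bytes of
   each stream; returns a [Command_result.t] with the exit status and tails. *)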
let run
?timeout
?use_extra_path
?working_dir
?setuid
?setgid
?(env=`Extend [])
?input:(input_string="")
?(keep_open=false)
?(stdoutf=(fun _string _len -> ()))
?(stderrf=(fun _string _len -> ()))
?(tail_len = 2048) ~prog ~args
() =
let stdout_tail = Tail_buffer.create tail_len
and stderr_tail = Tail_buffer.create tail_len in
let stdoutf sbuf len =
stdoutf sbuf len;
Tail_buffer.add stdout_tail sbuf len
and stderrf sbuf len =
stderrf sbuf len;
Tail_buffer.add stderr_tail sbuf len
in
let status =
protectx (Sys.signal Sys.sigpipe Sys.Signal_ignore,
create
~keep_open
~use_extra_path
~working_dir
~setuid
~setgid
~stderrf
~stdoutf
~prog
~args
~env
~input_string)
~f:(fun (_old_sigpipe,state) ->
run_loop state
~start_time:(Time.now ())
~timeout;)
~finally:(fun (old_sigpipe,state) ->
List.iter state.open_fds
~f:close_non_intr;
ignore (Sys.signal Sys.sigpipe old_sigpipe : Sys.signal_behavior))
in
{Command_result.
status = status;
stdout_tail = Tail_buffer.contents stdout_tail;
stderr_tail = Tail_buffer.contents stderr_tail }
(* Externally export this *)
let kill ?is_child ?wait_for ?(signal=Signal.term) pid =
kill
?is_child
?wait_for
~signal
pid
let%test_module _ = (module struct
let with_fds n ~f =
let restore_max_fds =
let module RLimit = Core.Unix.RLimit in
let max_fds = RLimit.get RLimit.num_file_descriptors in
match max_fds.RLimit.cur with
| RLimit.Infinity -> None
| RLimit.Limit limit when Int64.(of_int Int.(2 * n) < limit) -> None
| RLimit.Limit _ ->
RLimit.set RLimit.num_file_descriptors
{ max_fds with RLimit.cur = RLimit.Limit (Int64.of_int (2 * n)) };
Some max_fds
in
let fds = List.init n ~f:(fun _ -> Unix.openfile ~mode:[ Unix.O_RDONLY ] "/dev/null") in
let retval = Or_error.try_with f in
List.iter fds ~f:(fun fd -> Unix.close fd);
Option.iter restore_max_fds ~f:(fun max_fds ->
let module RLimit = Core.Unix.RLimit in
RLimit.set RLimit.num_file_descriptors max_fds);
Or_error.ok_exn retval
let run_process () = ignore (run ~prog:"true" ~args:[] ())
let%test_unit _ = with_fds 10 ~f:run_process
let%test_unit _ = with_fds 1055 ~f:(fun () ->
[%test_eq: bool]
(Result.is_ok Linux_ext.Epoll.create)
(Result.is_ok (Result.try_with run_process)))
end)
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/shell/low_level_process/src/low_level_process.ml | ocaml | Creates a unix pipe with both sides set close on exec
*
Remembers the last n-characters appended to it....
* remembers the output in a circular buffer.
looped is used to tell whether we loop around the
boundary of the buffer.
WStopped is impossible
* wait for a given pid to exit;
returns true when the process exits and false if the process is still running
after waiting for [span]
This is the equivalent of calling the C kill with 0 (test whether a process
exists)
We need to explicitly waitpid the child otherwise we are sending
signals to a zombie
Close the process's in_channel iff we are done writing to it
Externally export this | open Core
open Poly
module Sys = Caml.Sys
let rec temp_failure_retry f =
try
f ()
with Unix.Unix_error (EINTR, _, _) -> temp_failure_retry f
let close_non_intr fd =
temp_failure_retry (fun () -> Unix.close fd)
let cloexec_pipe () =
let (fd1,fd2) as res = Unix.pipe () in
Unix.set_close_on_exec fd1;
Unix.set_close_on_exec fd2;
res
module Process_info = struct
type t = {
pid : Pid.t;
stdin : Unix.File_descr.t;
stdout : Unix.File_descr.t;
stderr : Unix.File_descr.t;
}
end
(* We use a slightly more powerful version of create process than the one in
core. This version is not quite as carefully code reviewed but allows us to
have more control over the forked side of the process (e.g.: chdir). *)
let internal_create_process ?working_dir ?setuid ?setgid ~env ~prog ~args () =
let close_on_err = ref [] in
try
let (in_read, in_write) = cloexec_pipe () in
close_on_err := in_read :: in_write :: !close_on_err;
let (out_read, out_write) = cloexec_pipe () in
close_on_err := out_read :: out_write :: !close_on_err;
let (err_read, err_write) = cloexec_pipe () in
close_on_err := err_read :: err_write :: !close_on_err;
let pid = Unix_extended.fork_exec
prog
args
?working_dir
?setuid
?setgid
~env
~stdin:in_read
~stdout:out_write
~stderr:err_write
in
close_non_intr in_read;
close_non_intr out_write;
close_non_intr err_write;
{
Process_info.pid = pid;
stdin = in_write;
stdout = out_read;
stderr = err_read
}
with e ->
List.iter
~f:(fun fd -> try close_non_intr fd with _ -> ())
!close_on_err;
raise e
module Tail_buffer = struct
type t = {
buffer : Bytes.t;
length : int;
mutable looped : bool;
mutable position : int;
}
let contents b =
if not b.looped then
Bytes.To_string.sub b.buffer ~pos:0 ~len:b.position
else
let dst = Bytes.create (b.length + 3) in
Bytes.set dst 0 '.';
Bytes.set dst 1 '.';
Bytes.set dst 2 '.';
Bytes.blit
~src:b.buffer
~dst ~dst_pos:3
~src_pos:b.position
~len:(b.length - b.position);
Bytes.blit ~src:b.buffer
~dst
~dst_pos:(b.length - b.position + 3)
~src_pos:0
~len:(b.position);
Bytes.unsafe_to_string ~no_mutation_while_string_reachable:dst
let create len = {
buffer = Bytes.create len;
length = len;
looped = false;
position = 0
}
let add b src len =
if b.length <= len then begin
Bytes.blit
~src
~dst:b.buffer
~dst_pos:0
~src_pos:(len - b.length)
~len:(b.length);
b.looped <- true;
b.position <- 0
end else
let leftover = b.length - b.position in
if (len < leftover) then begin
Bytes.blit ~src ~dst:b.buffer ~dst_pos:b.position ~src_pos:0 ~len;
b.position <- b.position + len;
end else begin
Bytes.blit ~src ~dst:b.buffer ~dst_pos:b.position ~src_pos:0
~len:leftover;
b.looped <- true;
let len = (len-leftover) in
Bytes.blit ~src ~dst:b.buffer ~dst_pos:0 ~src_pos:leftover ~len;
b.position <- len
end
end
module Status = struct
type t =
[ `Timeout of Time.Span.t
| `Exited of int
| `Signaled of Signal.t
]
[@@deriving sexp_of]
let to_string = function
| `Exited i -> sprintf "exited with code %d" i
| `Signaled s ->
sprintf !"died after receiving %{Signal} (signal number %d)"
s (Signal.to_system_int s)
| `Timeout s -> sprintf !"Timed out (ran for %{Time.Span})" s
end
module Command_result = struct
type t= {
status: Status.t;
stdout_tail : string;
stderr_tail : string
}
end
let waitpid_nohang pid =
match Unix.wait_nohang (`Pid pid) with
| None -> None
| Some (v, res) -> assert Pid.(v = pid); Some res
let wait_for_exit ~is_child span pid =
let end_time = Time.add (Time.now ()) span in
let exited () =
if is_child then begin
match waitpid_nohang pid with
| None -> true
| Some _ -> false
end else
match Signal.send (Signal.of_system_int 0) (`Pid pid) with
| `Ok -> true
| `No_such_process -> false
in
let rec loop () =
if Time.(>) (Time.now ()) end_time then
false
else if not (exited ()) then true
else begin
Time.pause (sec 0.1);
loop ()
end
in
loop ()
let kill
?(is_child=false)
?(wait_for=sec 2.0)
?(signal = Signal.term)
pid
=
Signal.send_exn signal (`Pid pid);
if not (wait_for_exit ~is_child wait_for pid) then begin
begin
match
Signal.send Signal.kill (`Pid pid)
with
| `No_such_process ->
if is_child then
failwith "Process.kill got `No_such_process even though the process was a \
child we never waited for"
| `Ok -> ()
end;
if not (wait_for_exit ~is_child wait_for pid) then begin
failwithf "Process.kill failed to kill %i%s"
(Pid.to_int pid)
(if is_child then "" else
" (or the process wasn't collected by its parent)")
()
end
end
type t = {
mutable open_fds : Unix.File_descr.t list;
mutable in_fds : Unix.File_descr.t list;
mutable out_fds : Unix.File_descr.t list;
keep_open : bool;
buf : Bytes.t;
in_cnt : String.t;
in_len : int;
out_callbacks : (Unix.File_descr.t*(Bytes.t -> int -> unit)) list;
pid : Pid.t;
mutable in_pos : int;
}
let close_pooled state fd =
if List.mem state.open_fds fd ~equal:Unix.File_descr.equal then
close_non_intr fd;
state.open_fds <- List.filter ~f:((<>) fd) state.open_fds;
state.out_fds <- List.filter ~f:((<>) fd) state.out_fds;
state.in_fds <- List.filter ~f:((<>) fd) state.in_fds
let process_io ~read ~write state =
List.iter write
~f:(fun fd ->
(try
let len =
temp_failure_retry (fun () ->
Unix.single_write_substring fd
~buf:state.in_cnt
~pos:state.in_pos
~len:(state.in_len - state.in_pos))
in
state.in_pos <- state.in_pos + len;
if len = 0 then
if state.keep_open then
state.in_fds <- List.filter ~f:((<>) fd) state.in_fds
else
close_pooled state fd
with Unix.Unix_error (EPIPE, _, _) -> close_pooled state fd));
List.iter read
~f:(fun fd ->
let len =
temp_failure_retry
(fun () -> Unix.read fd
~buf:state.buf
~pos:0
~len:(Bytes.length state.buf))
in
if len = 0 then
close_pooled state fd
else
let callback =
List.Assoc.find_exn ~equal:Unix.File_descr.equal state.out_callbacks fd in
callback state.buf len)
let available_fds =
let use_select state ~timeout =
let { Unix.Select_fds. read; write; _; } =
temp_failure_retry (fun () ->
Unix.select
~read:state.out_fds
~write:state.in_fds
~except:[]
~timeout ())
in
read,write
in
let use_epoll epoll_create = fun state ~timeout ->
let module Epoll = Linux_ext.Epoll in
let timeout =
match timeout with
| (`Immediately | `Never) as timeout -> timeout
| `After span -> `After span
in
let epoll_t =
let fds = List.map ~f:Unix.File_descr.to_int (state.in_fds @ state.out_fds) in
let max_ready_events = List.length fds in
let num_file_descrs = 1 + List.fold ~init:max_ready_events ~f:Int.max fds in
epoll_create ~num_file_descrs ~max_ready_events
in
List.iter state.in_fds ~f:(fun fd -> Epoll.set epoll_t fd Epoll.Flags.out);
List.iter state.out_fds ~f:(fun fd -> Epoll.set epoll_t fd Epoll.Flags.in_);
let read, write =
match temp_failure_retry (fun () -> Epoll.wait epoll_t ~timeout) with
| `Timeout -> ([], [])
| `Ok -> Epoll.fold_ready epoll_t ~init:([], []) ~f:(fun (read, write) fd flags ->
let take_matching_flags acc fd flags ~wanted =
if Epoll.Flags.do_intersect wanted flags
then fd :: acc
else acc
in
let read = take_matching_flags read fd flags ~wanted:Epoll.Flags.in_ in
let write = take_matching_flags write fd flags ~wanted:Epoll.Flags.out in
(read, write))
in
Epoll.close epoll_t;
(read, write)
in
match Linux_ext.Epoll.create with
| Error _ -> use_select
| Ok epoll_create -> use_epoll epoll_create
;;
let create
~keep_open
~use_extra_path
~working_dir
~setuid
~setgid
~prog
~args
~stdoutf
~stderrf
~input_string
~env =
let full_prog = Shell_internal.path_expand ?use_extra_path prog in
let process_info =
internal_create_process
?working_dir ?setuid ?setgid
~env ~prog:full_prog ~args ()
in
let out_fd = process_info.Process_info.stdout
and in_fd = process_info.Process_info.stdin
and err_fd = process_info.Process_info.stderr
and pid = process_info.Process_info.pid in
{
keep_open;
open_fds = [in_fd;out_fd;err_fd];
in_fds = [in_fd];
out_fds = [err_fd;out_fd];
buf = Bytes.create 4096;
in_cnt = input_string;
in_pos = 0;
in_len = String.length input_string;
out_callbacks = [out_fd,stdoutf;
err_fd,stderrf];
pid
}
let rec finish_reading state =
match available_fds state ~timeout:`Immediately with
| [] ,_ -> ()
| read,_ ->
process_io state ~read ~write:[];
finish_reading state
let rec run_loop ~start_time ~timeout state =
let read,write = available_fds state ~timeout:(`After (Time_ns.Span.of_sec 0.1)) in
begin
try
process_io state ~read ~write
with e ->
kill ~is_child:true state.pid;
raise e
end;
let elapsed = Time.diff (Time.now ()) start_time in
match timeout with
| Some timeout when Time.Span.(elapsed > timeout) ->
kill ~is_child:true state.pid;
finish_reading state;
`Timeout elapsed
| None | Some _ ->
match waitpid_nohang state.pid with
| None -> run_loop ~start_time ~timeout state
| Some status ->
finish_reading state;
match status with
| Ok () -> `Exited 0
| Error (`Exit_non_zero i) -> `Exited i
| Error (`Signal s) -> `Signaled s
let run
?timeout
?use_extra_path
?working_dir
?setuid
?setgid
?(env=`Extend [])
?input:(input_string="")
?(keep_open=false)
?(stdoutf=(fun _string _len -> ()))
?(stderrf=(fun _string _len -> ()))
?(tail_len = 2048) ~prog ~args
() =
let stdout_tail = Tail_buffer.create tail_len
and stderr_tail = Tail_buffer.create tail_len in
let stdoutf sbuf len =
stdoutf sbuf len;
Tail_buffer.add stdout_tail sbuf len
and stderrf sbuf len =
stderrf sbuf len;
Tail_buffer.add stderr_tail sbuf len
in
let status =
protectx (Sys.signal Sys.sigpipe Sys.Signal_ignore,
create
~keep_open
~use_extra_path
~working_dir
~setuid
~setgid
~stderrf
~stdoutf
~prog
~args
~env
~input_string)
~f:(fun (_old_sigpipe,state) ->
run_loop state
~start_time:(Time.now ())
~timeout;)
~finally:(fun (old_sigpipe,state) ->
List.iter state.open_fds
~f:close_non_intr;
ignore (Sys.signal Sys.sigpipe old_sigpipe : Sys.signal_behavior))
in
{Command_result.
status = status;
stdout_tail = Tail_buffer.contents stdout_tail;
stderr_tail = Tail_buffer.contents stderr_tail }
let kill ?is_child ?wait_for ?(signal=Signal.term) pid =
kill
?is_child
?wait_for
~signal
pid
let%test_module _ = (module struct
let with_fds n ~f =
let restore_max_fds =
let module RLimit = Core.Unix.RLimit in
let max_fds = RLimit.get RLimit.num_file_descriptors in
match max_fds.RLimit.cur with
| RLimit.Infinity -> None
| RLimit.Limit limit when Int64.(of_int Int.(2 * n) < limit) -> None
| RLimit.Limit _ ->
RLimit.set RLimit.num_file_descriptors
{ max_fds with RLimit.cur = RLimit.Limit (Int64.of_int (2 * n)) };
Some max_fds
in
let fds = List.init n ~f:(fun _ -> Unix.openfile ~mode:[ Unix.O_RDONLY ] "/dev/null") in
let retval = Or_error.try_with f in
List.iter fds ~f:(fun fd -> Unix.close fd);
Option.iter restore_max_fds ~f:(fun max_fds ->
let module RLimit = Core.Unix.RLimit in
RLimit.set RLimit.num_file_descriptors max_fds);
Or_error.ok_exn retval
let run_process () = ignore (run ~prog:"true" ~args:[] ())
let%test_unit _ = with_fds 10 ~f:run_process
let%test_unit _ = with_fds 1055 ~f:(fun () ->
[%test_eq: bool]
(Result.is_ok Linux_ext.Epoll.create)
(Result.is_ok (Result.try_with run_process)))
end)
|
c54d08d21a6f4674fce531d69aa8bc609aa44471a4604e0e723996445d6ba840 | rixed/ramen | HeavyHitters.ml | Simple implementation of a polymorphic set that keeps only the most
* important entries using the " heavy hitters " selection technique .
* This technique is an approximation . For an item to be guaranteed to be
* featured in the top N , its total contribution must be > = 1 / N of the total .
* Other than that , it depends on the actual sequence .
* To avoid noise , every item which guaranteed minimal weight is below that of
* the Nth item of the top will be discarded .
*
* It is therefore recommended to track more than N items . By default , this
* tracks 10 times more items than N. So for instance , to obtain the top 10
* contributors this would actually track the top 100 , and build reliably the
* list of all items which contribution is larger than 1/100th of the total ,
* then returning the top 10 .
*
* So to get the top 10 contributors , one would actually select the top 10
* if the contributors which contributions was above 1/100th or 1/1000th
* of the total . But then , we are not so sure that those top contributors
* are actually out of the ordinary . Oftentimes we want the top outliers .
* We could compute the stddev and use it to filter out the top , except the
* top does not return the actual weights , only the contributors ( for
* simplicity , especially since there is no lambda functions yet ) . Thus this
* additional parameter [ sigmas ] , which , if not zero , will make the top
* also computes the stddev and ultimately compares the guaranteed weight
* of each top contributors and filter out those that does not deviate
* more than that many sigmas from the mean .
* important entries using the "heavy hitters" selection technique.
* This technique is an approximation. For an item to be guaranteed to be
* featured in the top N, its total contribution must be >= 1/N of the total.
* Other than that, it depends on the actual sequence.
* To avoid noise, every item which guaranteed minimal weight is below that of
* the Nth item of the top will be discarded.
*
* It is therefore recommended to track more than N items. By default, this
* tracks 10 times more items than N. So for instance, to obtain the top 10
* contributors this would actually track the top 100, and build reliably the
* list of all items which contribution is larger than 1/100th of the total,
* then returning the top 10.
*
* So to get the top 10 contributors, one would actually select the top 10
* if the contributors which contributions was above 1/100th or 1/1000th
* of the total. But then, we are not so sure that those top contributors
* are actually out of the ordinary. Oftentimes we want the top outliers.
* We could compute the stddev and use it to filter out the top, except the
* top does not return the actual weights, only the contributors (for
* simplicity, especially since there is no lambda functions yet). Thus this
* additional parameter [sigmas], which, if not zero, will make the top
* also computes the stddev and ultimately compares the guaranteed weight
* of each top contributors and filter out those that does not deviate
* more than that many sigmas from the mean. *)
open Batteries
open RamenLog
open RamenHelpersNoLog
(*$inject
  open Batteries
  open TestHelpers
*)
let debug = false
(* Weight map: from weight to anything, ordered bigger weights first: *)
module WMap = Map.Make (struct
type t = float ref (* ref so we can downscale *)
let compare w1 w2 = Float.compare !w2 !w1
end)
type 'a t =
{ max_size : int ;
mutable cur_size : int ;
(* Optionally, select only those outliers above that many sigmas: *)
sigmas : float ;
mutable sum_weight1 : Kahan.t ;
mutable sum_weight2 : Kahan.t ;
mutable count : int64 ;
(* Fade off contributors by decaying weights in time (actually, inflating
* new weights as time passes) *)
decay : float ; (* decay factor (0 for no decay) *)
mutable time_origin : float option ;
(* value to weight and overestimation: *)
mutable w_of_x : ('a, float * float) Map.t ;
(* max weight to value to overestimation. Since we need iteration to go
* from bigger to smaller weight we need a custom map: *)
mutable xs_of_w : (('a, float) Map.t) WMap.t }
let make ~max_size ~decay ~sigmas =
{ max_size ; cur_size = 0 ; sigmas = abs_float sigmas ;
sum_weight1 = Kahan.init ; sum_weight2 = Kahan.init ; count = 0L ;
decay ; time_origin = None ;
w_of_x = Map.empty ; xs_of_w = WMap.empty }
(* Downscale all stored weight by [d] and reset time_origin.
* That's OK to modify the map keys because relative ordering is not going
* to change: *)
(* TODO: stats about rescale frequency *)
let downscale s t d =
!logger.debug "HeavyHitters: downscaling %d entries by %g"
s.cur_size d ;
s.w_of_x <-
Map.map (fun (w, o) -> w *. d, o *. d) s.w_of_x ;
WMap.iter (fun w _xs -> w := !w *. d) s.xs_of_w ;
s.sum_weight1 <- Kahan.mul s.sum_weight1 d ;
s.sum_weight2 <- Kahan.mul s.sum_weight2 (d *. d) ;
s.time_origin <- Some t
let add s t w x =
(* Decaying old weights is the same as inflating new weights.
* But then after a while new inflated weights will become too big to
* be accurately tracked, so when this happen we _rescale_ the top,
* that is we actually decay the history and reset the origin of time
* so that new entries will only be inflated by exp(0), and so on. *)
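(* Concretely, an observation of weight [w] made at time [t] is stored as
 * [w *. exp ((t -. t0) *. decay)]; dividing two stored weights therefore
 * reproduces the exponential decay of the older observation. *)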
(* Shall we downscale? *)
let inflation =
match s.time_origin with
| None -> s.time_origin <- Some t ; 1.
| Some t0 ->
let infl = exp ((t -. t0) *. s.decay) in
(* Make this a parameter for trading off CPU vs accuracy? *)
let max_infl = 1e6 in
if infl < max_infl then infl else (
downscale s t infl ;
1.
) in
let w = w *. inflation in
let add_in_xs_of_w x w o m =
if debug then Printf.printf "TOP: add entry %s of weight %f\n" (dump x) w ;
WMap.modify_opt (ref w) (function
| None -> Some (Map.singleton x o)
| Some xs ->
assert (not (Map.mem x xs)) ;
Some (Map.add x o xs)
) m
and rem_from_xs_of_w x w m =
WMap.modify_opt (ref w) (function
| None -> assert false
| Some xs ->
(match Map.extract x xs with
| exception Not_found ->
!logger.error "xs_of_w for w=%f does not have x=%s (only %a)"
w (dump x)
(Map.print print_dump Float.print) xs ;
assert false
| _, xs ->
if Map.is_empty xs then None else Some xs)
) m
in
(* Shortcut for the frequent case when w=0: *)
if w <> 0. then (
let victim_x = ref None in
s.w_of_x <-
Map.modify_opt x (function
| None ->
let victim_w = ref 0. in
if s.cur_size >= s.max_size then (
(* pick the victim and remove it from xs_of_w: *)
let victim_w', xs = WMap.max_binding s.xs_of_w in
let (victim_x', _victim_o), xs' = Map.pop xs in
victim_w := !victim_w' ;
victim_x := Some victim_x' ;
s.xs_of_w <-
if Map.is_empty xs' then
WMap.remove victim_w' s.xs_of_w
else
WMap.update victim_w' victim_w' xs' s.xs_of_w
) else s.cur_size <- s.cur_size + 1 ;
let w = w +. !victim_w in
s.xs_of_w <- add_in_xs_of_w x w !victim_w s.xs_of_w ;
Some (w, !victim_w)
| Some (w', o) ->
let w = w +. w' in
s.xs_of_w <-
rem_from_xs_of_w x w' s.xs_of_w |>
add_in_xs_of_w x w o ;
Some (w, o)
) s.w_of_x ;
Option.may (fun x ->
match Map.extract x s.w_of_x with
| exception Not_found ->
!logger.error "w_of_x does not have x=%s, only %a"
(dump x)
(Enum.print print_dump) (Map.keys s.w_of_x |> Enum.take 99) ;
assert false
| _, w_of_x ->
s.w_of_x <- w_of_x
) !victim_x ;
(* Also compute the mean if sigmas is not null: *)
if s.sigmas > 0. then (
s.sum_weight1 <- Kahan.add s.sum_weight1 w ;
s.sum_weight2 <- Kahan.add s.sum_weight1 (w *. w) ;
s.count <- Int64.add s.count 1L
) ;
assert (s.cur_size <= s.max_size) (*;
assert (Map.cardinal s.w_of_x = s.cur_size) ;
assert (WMap.cardinal s.xs_of_w <= s.cur_size)*)
) (* w <> 0. *)
(* For each monitored item of rank k <= n, we must ask ourselves: could there
 * be an item with rank k > n, or a non-monitored item, with more weight? For
 * this we must compare guaranteed weight of items with the max weight of item
 * of rank n+1; but we don't know which item that is unless we order them. *)
(* FIXME: super slow, maintain the entries in increased max weight order. *)
(* Iter the entries in decreasing weight order.
* Note: BatMap iterates in increasing keys order despite de doc says it's
* unspecified, but since we reverse the comparison operator we fold from
* heaviest to lightest. *)
let fold u f s =
WMap.fold (fun w xs u ->
if debug then Printf.printf "TOP: folding over all entries of weight %f\n" !w ;
Map.foldi (fun x o u ->
if debug then Printf.printf "TOP: ... %s\n" (dump x) ;
f !w x o u
) xs u
) s.xs_of_w u
(* Iter over the top [n'] entries (<= [n] but close) in order of weight,
 * lightest first (so that it's easy to build the reverse list), ignoring
 * those entries below the specified amount of sigmas: *)
let fold_top n u f s =
let res = ref []
and cutoff = ref None in
let cutoff_fun () =
if s.sigmas > 0. then
let sum_weight1 = Kahan.finalize s.sum_weight1
and sum_weight2 = Kahan.finalize s.sum_weight2
and count = Int64.to_float s.count in
let mean = sum_weight1 /. count in
let sigma = sqrt (count *. sum_weight2 -. mean *. mean) /. count in
let cutoff_sigma = mean +. s.sigmas *. sigma in
match !cutoff with
| None ->
fun u (w, _min_w, x) ->
if w >= cutoff_sigma then f u x else u
| Some c ->
fun u (w, min_w, x) ->
if min_w >= c && w >= cutoff_sigma then f u x else u
else
match !cutoff with
| None ->
fun u (_w, _min_w, x) -> f u x
| Some c ->
fun u (_w, min_w, x) ->
if min_w >= c then f u x else u
in
(try
let _ =
fold 1 (fun w x o rank ->
(* We need item at rank n+1 to find top-n *)
if rank <= n then (
if debug then
Printf.printf "TOP rank=%d<=%d is %s, weight %f\n" rank n (dump x) w ;
(* May be filtered once we know the cutoff: *)
res := (w, (w -. o), x) :: !res ; (* res is lightest to heaviest *)
rank + 1
) else (
assert (rank = n + 1) ;
if debug then
Printf.printf "TOP rank=%d>%d is %s, weight %f\n" rank n (dump x) w ;
cutoff := Some w ;
raise Exit
)
) s in
(* We reach here when we had less entries than n, in which case we do not
* need a cut-off since we know all the entries: *)
if debug then
Printf.printf "TOP: Couldn't reach rank %d, cur_size=%d\n" n s.cur_size ;
with Exit -> ()) ;
(* Now filter the entries if we have a cutoff, and build the result: *)
List.fold_left (cutoff_fun ()) u !res
(* Returns the top as a list ordered by weight (heavier first) *)
let top n s =
fold_top n [] (fun lst x -> x :: lst) s
(* Tells the rank of a given value in the top, or None: *)
let rank n x s =
if n < 1 then invalid_arg "rank" ;
let res = ref None in
(try
fold_top n 1 (fun k x' ->
if x = x' then (
res := Some k ;
raise Exit
) else k + 1
) s |> ignore
with Exit -> ()) ;
!res
(* Tells if x is in the top [n]: *)
let is_in_top n x s =
rank n x s <> None
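(* A minimal usage sketch of the API described in the header comment: feed
 * weighted observations with [add], then read the heaviest contributors back
 * with [top]. The sizes, weights and times below are illustrative only. *)
let _usage_sketch () =
  let s = make ~max_size:100 ~decay:0. ~sigmas:0. in
  List.iter (add s 0. 1.) [ "a" ; "a" ; "b" ; "a" ; "c" ] ;
  top 10 s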
(*$R is_in_top
(* Check that what we add into an empty top is in the top: *)
let top_size = 100 in
let s = make ~max_size:(top_size * 10) ~decay:8.3e-5 ~sigmas:0. in
add s 1. 1. 42 ;
assert_bool "42 is in top" (is_in_top top_size 42 s)
*)
(*$R add
let (++) = Enum.append in
let xs = Enum.(
repeat ~times:10 42 ++
repeat ~times:10 43 ++
repeat ~times:10 44 ++
take 70 (Random.enum_int 999999)) |>
Array.of_enum in
Array.shuffle xs ;
let s = make ~max_size:30 ~decay:0. ~sigmas:0. in
let now = Unix.time () in
Array.iteri (fun i x ->
let t = now +. float_of_int i in
add s t 1. x
) xs ;
let s = top 3 s in
(*Printf.printf "Solution: %a\n%!" (List.print Int.print) s ;*)
assert_bool "Result size is limited" (List.length s <= 10) ;
assert_bool "42 is present" (List.mem 42 s) ;
assert_bool "43 is present" (List.mem 43 s) ;
assert_bool "44 is present" (List.mem 44 s)
*)
(*$R add
let k = 5 in
let retry = 10 in
let test_once k =
let max_size = k * 10 and zc = 1. in
let s = make ~max_size ~decay:0. ~sigmas:0. in
zipf_distrib 1000 zc |> Enum.take 10_000 |> Enum.iter (add s 0. 1.) ;
let s = top k s in
assert_bool "Result size is limited" (List.length s <= k) ;
(* All items from 0 to k-1 (included) are present most of the time: *)
let missing = ref 0 in
for i = 0 to k-1 do
if not (List.mem i s) then incr missing
done ;
if !missing > 0 then
Printf.printf "Solution: %a\n%!" (List.print Int.print) s ;
assert_bool "no more than 2 items are missing" (!missing <= 2) ;
!missing = 0 in
let num_find_all = ref 0 in
for i = 0 to retry-1 do
if test_once k then incr num_find_all
done ;
let success_rate = float_of_int !num_find_all /. float_of_int retry in
assert_bool "must be accurate most of the times" (success_rate > 0.6)
*)
| null | https://raw.githubusercontent.com/rixed/ramen/454b0eb63beab1c4d5aab8e7957623071b35f892/src/HeavyHitters.ml | ocaml | ref so we can downscale
Optionally, select only those outliers above that many sigmas:
Fade off contributors by decaying weights in time (actually, inflating
* new weights as time passes)
decay factor (0 for no decay)
value to weight and overestimation:
max weight to value to overestimation. Since we need iteration to go
* from bigger to smaller weight we need a custom map:
Downscale all stored weight by [d] and reset time_origin.
* That's OK to modify the map keys because relative ordering is not going
* to change:
TODO: stats about rescale frequency
Decaying old weights is the same as inflating new weights.
* But then after a while new inflated weights will become too big to
* be accurately tracked, so when this happen we _rescale_ the top,
* that is we actually decay the history and reset the origin of time
* so that new entries will only be inflated by exp(0), and so on.
Shall we downscale?
Make this a parameter for trading off CPU vs accuracy?
Shortcut for the frequent case when w=0:
pick the victim and remove it from xs_of_w:
Also compute the mean if sigmas is not null:
;
assert (Map.cardinal s.w_of_x = s.cur_size) ;
assert (WMap.cardinal s.xs_of_w <= s.cur_size)
w <> 0.
FIXME: super slow, maintain the entries in increased max weight order.
Iter the entries in decreasing weight order.
* Note: BatMap iterates in increasing keys order despite de doc says it's
* unspecified, but since we reverse the comparison operator we fold from
* heaviest to lightest.
We need item at rank n+1 to find top-n
res is lightest to heaviest
We reach here when we had less entries than n, in which case we do not
* need a cut-off since we know all the entries:
Now filter the entries if we have a cutoff, and build the result:
Tells the rank of a given value in the top, or None:
Tells if x is in the top [n]:
$R is_in_top
(* Check that what we add into an empty top is in the top:
Printf.printf "Solution: %a\n%!" (List.print Int.print) s ;
All items from 0 to k-1 (included) are present most of the time: | Simple implementation of a polymorphic set that keeps only the most
* important entries using the " heavy hitters " selection technique .
* This technique is an approximation . For an item to be guaranteed to be
* featured in the top N , its total contribution must be > = 1 / N of the total .
* Other than that , it depends on the actual sequence .
* To avoid noise , every item which guaranteed minimal weight is below that of
* the Nth item of the top will be discarded .
*
* It is therefore recommended to track more than N items . By default , this
* tracks 10 times more items than N. So for instance , to obtain the top 10
* contributors this would actually track the top 100 , and build reliably the
* list of all items which contribution is larger than 1/100th of the total ,
* then returning the top 10 .
*
* So to get the top 10 contributors , one would actually select the top 10
* if the contributors which contributions was above 1/100th or 1/1000th
* of the total . But then , we are not so sure that those top contributors
* are actually out of the ordinary . Oftentimes we want the top outliers .
* We could compute the stddev and use it to filter out the top , except the
* top does not return the actual weights , only the contributors ( for
* simplicity , especially since there is no lambda functions yet ) . Thus this
* additional parameter [ sigmas ] , which , if not zero , will make the top
* also computes the stddev and ultimately compares the guaranteed weight
* of each top contributors and filter out those that does not deviate
* more than that many sigmas from the mean .
* important entries using the "heavy hitters" selection technique.
* This technique is an approximation. For an item to be guaranteed to be
* featured in the top N, its total contribution must be >= 1/N of the total.
* Other than that, it depends on the actual sequence.
* To avoid noise, every item which guaranteed minimal weight is below that of
* the Nth item of the top will be discarded.
*
* It is therefore recommended to track more than N items. By default, this
* tracks 10 times more items than N. So for instance, to obtain the top 10
* contributors this would actually track the top 100, and build reliably the
* list of all items which contribution is larger than 1/100th of the total,
* then returning the top 10.
*
* So to get the top 10 contributors, one would actually select the top 10
* if the contributors which contributions was above 1/100th or 1/1000th
* of the total. But then, we are not so sure that those top contributors
* are actually out of the ordinary. Oftentimes we want the top outliers.
* We could compute the stddev and use it to filter out the top, except the
* top does not return the actual weights, only the contributors (for
* simplicity, especially since there is no lambda functions yet). Thus this
* additional parameter [sigmas], which, if not zero, will make the top
* also computes the stddev and ultimately compares the guaranteed weight
* of each top contributors and filter out those that does not deviate
* more than that many sigmas from the mean. *)
open Batteries
open RamenLog
open RamenHelpersNoLog
$ inject
open Batteries
open TestHelpers
open Batteries
open TestHelpers
*)
let debug = false
Weight map : from weight to anything , ordered bigger weights first :
module WMap = Map.Make (struct
let compare w1 w2 = Float.compare !w2 !w1
end)
type 'a t =
{ max_size : int ;
mutable cur_size : int ;
sigmas : float ;
mutable sum_weight1 : Kahan.t ;
mutable sum_weight2 : Kahan.t ;
mutable count : int64 ;
mutable time_origin : float option ;
mutable w_of_x : ('a, float * float) Map.t ;
mutable xs_of_w : (('a, float) Map.t) WMap.t }
let make ~max_size ~decay ~sigmas =
{ max_size ; cur_size = 0 ; sigmas = abs_float sigmas ;
sum_weight1 = Kahan.init ; sum_weight2 = Kahan.init ; count = 0L ;
decay ; time_origin = None ;
w_of_x = Map.empty ; xs_of_w = WMap.empty }
let downscale s t d =
!logger.debug "HeavyHitters: downscaling %d entries by %g"
s.cur_size d ;
s.w_of_x <-
Map.map (fun (w, o) -> w *. d, o *. d) s.w_of_x ;
WMap.iter (fun w _xs -> w := !w *. d) s.xs_of_w ;
s.sum_weight1 <- Kahan.mul s.sum_weight1 d ;
s.sum_weight2 <- Kahan.mul s.sum_weight2 (d *. d) ;
s.time_origin <- Some t
let add s t w x =
let inflation =
match s.time_origin with
| None -> s.time_origin <- Some t ; 1.
| Some t0 ->
let infl = exp ((t -. t0) *. s.decay) in
let max_infl = 1e6 in
if infl < max_infl then infl else (
downscale s t infl ;
1.
) in
let w = w *. inflation in
let add_in_xs_of_w x w o m =
if debug then Printf.printf "TOP: add entry %s of weight %f\n" (dump x) w ;
WMap.modify_opt (ref w) (function
| None -> Some (Map.singleton x o)
| Some xs ->
assert (not (Map.mem x xs)) ;
Some (Map.add x o xs)
) m
and rem_from_xs_of_w x w m =
WMap.modify_opt (ref w) (function
| None -> assert false
| Some xs ->
(match Map.extract x xs with
| exception Not_found ->
!logger.error "xs_of_w for w=%f does not have x=%s (only %a)"
w (dump x)
(Map.print print_dump Float.print) xs ;
assert false
| _, xs ->
if Map.is_empty xs then None else Some xs)
) m
in
if w <> 0. then (
let victim_x = ref None in
s.w_of_x <-
Map.modify_opt x (function
| None ->
let victim_w = ref 0. in
if s.cur_size >= s.max_size then (
let victim_w', xs = WMap.max_binding s.xs_of_w in
let (victim_x', _victim_o), xs' = Map.pop xs in
victim_w := !victim_w' ;
victim_x := Some victim_x' ;
s.xs_of_w <-
if Map.is_empty xs' then
WMap.remove victim_w' s.xs_of_w
else
WMap.update victim_w' victim_w' xs' s.xs_of_w
) else s.cur_size <- s.cur_size + 1 ;
let w = w +. !victim_w in
s.xs_of_w <- add_in_xs_of_w x w !victim_w s.xs_of_w ;
Some (w, !victim_w)
| Some (w', o) ->
let w = w +. w' in
s.xs_of_w <-
rem_from_xs_of_w x w' s.xs_of_w |>
add_in_xs_of_w x w o ;
Some (w, o)
) s.w_of_x ;
Option.may (fun x ->
match Map.extract x s.w_of_x with
| exception Not_found ->
!logger.error "w_of_x does not have x=%s, only %a"
(dump x)
(Enum.print print_dump) (Map.keys s.w_of_x |> Enum.take 99) ;
assert false
| _, w_of_x ->
s.w_of_x <- w_of_x
) !victim_x ;
if s.sigmas > 0. then (
s.sum_weight1 <- Kahan.add s.sum_weight1 w ;
s.sum_weight2 <- Kahan.add s.sum_weight1 (w *. w) ;
s.count <- Int64.add s.count 1L
) ;
For each monitored item of rank k < = n , we must ask ourselves : could there
* be an item with rank k > n , or a non - monitored items , with more weight ? For
* this we must compare guaranteed weight of items with the weight of item
* of rank n+1 ; but we do n't know which item that is unless we order them .
* be an item with rank k > n, or a non-monitored items, with more weight? For
* this we must compare guaranteed weight of items with the max weight of item
* of rank n+1; but we don't know which item that is unless we order them. *)
let fold u f s =
WMap.fold (fun w xs u ->
if debug then Printf.printf "TOP: folding over all entries of weight %f\n" !w ;
Map.foldi (fun x o u ->
if debug then Printf.printf "TOP: ... %s\n" (dump x) ;
f !w x o u
) xs u
) s.xs_of_w u
Iter over the top [ n ' ] entries ( < = [ n ] but close ) in order of weight ,
* lightest first ( so that it 's easy to build the reverse list ) , ignoring
* those entries below the specified amount of sigmas :
* lightest first (so that it's easy to build the reverse list), ignoring
* those entries below the specified amount of sigmas: *)
let fold_top n u f s =
let res = ref []
and cutoff = ref None in
let cutoff_fun () =
if s.sigmas > 0. then
let sum_weight1 = Kahan.finalize s.sum_weight1
and sum_weight2 = Kahan.finalize s.sum_weight2
and count = Int64.to_float s.count in
let mean = sum_weight1 /. count in
let sigma = sqrt (count *. sum_weight2 -. mean *. mean) /. count in
let cutoff_sigma = mean +. s.sigmas *. sigma in
match !cutoff with
| None ->
fun u (w, _min_w, x) ->
if w >= cutoff_sigma then f u x else u
| Some c ->
fun u (w, min_w, x) ->
if min_w >= c && w >= cutoff_sigma then f u x else u
else
match !cutoff with
| None ->
fun u (_w, _min_w, x) -> f u x
| Some c ->
fun u (_w, min_w, x) ->
if min_w >= c then f u x else u
in
(try
let _ =
fold 1 (fun w x o rank ->
if rank <= n then (
if debug then
Printf.printf "TOP rank=%d<=%d is %s, weight %f\n" rank n (dump x) w ;
May be filtered once we know the cutoff :
rank + 1
) else (
assert (rank = n + 1) ;
if debug then
Printf.printf "TOP rank=%d>%d is %s, weight %f\n" rank n (dump x) w ;
cutoff := Some w ;
raise Exit
)
) s in
if debug then
Printf.printf "TOP: Couldn't reach rank %d, cur_size=%d\n" n s.cur_size ;
with Exit -> ()) ;
List.fold_left (cutoff_fun ()) u !res
Returns the top as a list ordered by weight ( heavier first )
let top n s =
fold_top n [] (fun lst x -> x :: lst) s
let rank n x s =
if n < 1 then invalid_arg "rank" ;
let res = ref None in
(try
fold_top n 1 (fun k x' ->
if x = x' then (
res := Some k ;
raise Exit
) else k + 1
) s |> ignore
with Exit -> ()) ;
!res
let is_in_top n x s =
rank n x s <> None
let top_size = 100 in
let s = make ~max_size:(top_size * 10) ~decay:8.3e-5 ~sigmas:0. in
add s 1. 1. 42 ;
assert_bool "42 is in top" (is_in_top top_size 42 s)
*)
$ R add
let ( + + ) = Enum.append in
let xs = Enum . (
repeat ~times:10 42 + +
repeat ~times:10 43 + +
repeat ~times:10 44 + +
take 70 ( Random.enum_int 999999 ) ) | >
Array.of_enum in
Array.shuffle xs ;
let s = make ~decay:0 . ~sigmas:0 . in
let now = Unix.time ( ) in
Array.iteri ( fun i x - >
let t = now + . float_of_int i in
add s t 1 . x
) xs ;
let s = top 3 s in
( * Printf.printf " Solution : % a\n% ! " ( List.print Int.print ) s ;
let (++) = Enum.append in
let xs = Enum.(
repeat ~times:10 42 ++
repeat ~times:10 43 ++
repeat ~times:10 44 ++
take 70 (Random.enum_int 999999)) |>
Array.of_enum in
Array.shuffle xs ;
let s = make ~max_size:30 ~decay:0. ~sigmas:0. in
let now = Unix.time () in
Array.iteri (fun i x ->
let t = now +. float_of_int i in
add s t 1. x
) xs ;
let s = top 3 s in
assert_bool "Result size is limited" (List.length s <= 10) ;
assert_bool "42 is present" (List.mem 42 s) ;
assert_bool "43 is present" (List.mem 43 s) ;
assert_bool "44 is present" (List.mem 44 s)
*)
$ R add
let k = 5 in
let retry = 10 in
let test_once k =
let max_size = k * 10 and zc = 1 . in
let s = make ~max_size ~decay:0 . ~sigmas:0 . in
zipf_distrib 1000 zc | > Enum.take 10_000 | > Enum.iter ( add s 0 . 1 . ) ;
let s = top k s in
assert_bool " Result size is limited " ( s < = k ) ;
( * All items from 0 to k-1 ( included ) are present most of the time :
let k = 5 in
let retry = 10 in
let test_once k =
let max_size = k * 10 and zc = 1. in
let s = make ~max_size ~decay:0. ~sigmas:0. in
zipf_distrib 1000 zc |> Enum.take 10_000 |> Enum.iter (add s 0. 1.) ;
let s = top k s in
assert_bool "Result size is limited" (List.length s <= k) ;
let missing = ref 0 in
for i = 0 to k-1 do
if not (List.mem i s) then incr missing
done ;
if !missing > 0 then
Printf.printf "Solution: %a\n%!" (List.print Int.print) s ;
assert_bool "no more than 2 items are missing" (!missing <= 2) ;
!missing = 0 in
let num_find_all = ref 0 in
for i = 0 to retry-1 do
if test_once k then incr num_find_all
done ;
let success_rate = float_of_int !num_find_all /. float_of_int retry in
assert_bool "must be accurate most of the times" (success_rate > 0.6)
*)
|
5b550f412398b757b6a3f129336822d9971abc3e0a33b240d7c9d4cad6ab163b | clojure/test.check | test_specs.clj | (ns clojure.test.check.test-specs
)
(if (let [{:keys [major minor]} *clojure-version*]
(and (= 1 major) (< minor 9)))
;; don't bother testing this on older clojures
(def valid-reporter-fn-call? (constantly true))
(do
(require '[clojure.spec.alpha :as s])
(eval
'(do
(s/def ::base
(s/keys :req-un [::type ::seed ::num-tests
::property]))
(defmulti type->spec :type)
(defmethod type->spec :trial
[_]
(s/merge ::base
(s/keys :req-un [::args
::result
::result-data])))
(defmethod type->spec :failure
[_]
(s/merge ::base
(s/keys :req-un [::fail
::failing-size
::result
::result-data])))
(s/def ::shrunk
(s/keys :req-un [::depth ::result ::result-data ::smallest ::total-nodes-visited]))
(s/def ::shrinking
(s/merge ::shrunk (s/keys :req-un [::args])))
(defmethod type->spec :shrink-step
[_]
(s/merge ::base
(s/keys :req-un [::fail
::failing-size
::result
::result-data
::shrinking])))
(defmethod type->spec :shrunk
[_]
(s/merge ::base
(s/keys :req-un [::fail
::failing-size
::result
::result-data
::shrunk])))
(defmethod type->spec :complete
[_]
(s/merge ::base
(s/keys :req-un [::result])))
(s/def ::value (s/multi-spec type->spec :type))
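;; valid-reporter-fn-call? checks a reporter-fn argument map against the
;; multi-spec keyed on :type; on failure s/explain prints the problems and
;; returns nil, so the whole call is falsey for invalid maps.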
(defn valid-reporter-fn-call?
[m]
(or
(s/valid? ::value m)
(s/explain ::value m)))))))
| null | https://raw.githubusercontent.com/clojure/test.check/c05034f911fa140913958b79aa51017d3f2f4426/src/test/clojure/clojure/test/check/test_specs.clj | clojure | don't bother testing this on older clojures | (ns clojure.test.check.test-specs
)
(if (let [{:keys [major minor]} *clojure-version*]
(and (= 1 major) (< minor 9)))
(def valid-reporter-fn-call? (constantly true))
(do
(require '[clojure.spec.alpha :as s])
(eval
'(do
(s/def ::base
(s/keys :req-un [::type ::seed ::num-tests
::property]))
(defmulti type->spec :type)
(defmethod type->spec :trial
[_]
(s/merge ::base
(s/keys :req-un [::args
::result
::result-data])))
(defmethod type->spec :failure
[_]
(s/merge ::base
(s/keys :req-un [::fail
::failing-size
::result
::result-data])))
(s/def ::shrunk
(s/keys :req-un [::depth ::result ::result-data ::smallest ::total-nodes-visited]))
(s/def ::shrinking
(s/merge ::shrunk (s/keys :req-un [::args])))
(defmethod type->spec :shrink-step
[_]
(s/merge ::base
(s/keys :req-un [::fail
::failing-size
::result
::result-data
::shrinking])))
(defmethod type->spec :shrunk
[_]
(s/merge ::base
(s/keys :req-un [::fail
::failing-size
::result
::result-data
::shrunk])))
(defmethod type->spec :complete
[_]
(s/merge ::base
(s/keys :req-un [::result])))
(s/def ::value (s/multi-spec type->spec :type))
(defn valid-reporter-fn-call?
[m]
(or
(s/valid? ::value m)
(s/explain ::value m)))))))
|
3f78ad1eb59448b79e519b9796d2c7845888ebd87d28a4436fbcb9944e39381b | realworldocaml/book | ascii_output.ml | open! Core
open! Import
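(* ASCII output is the ANSI backend with all styling stripped: rules are run
   through [Format.Rule.strip_styles] / [Format.Rules.strip_styles] before
   delegating to [Ansi_output]. *)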
module Rule = struct
let apply s ~rule ~refined:_ =
Ansi_output.Rule.apply s ~rule:(Format.Rule.strip_styles rule) ~refined:false
;;
end
let print ~print_global_header ~file_names ~rules ~print ~location_style hunks =
let rules = Format.Rules.strip_styles rules in
Ansi_output.print ~print_global_header ~file_names ~rules ~print ~location_style hunks
;;
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/patdiff/kernel/src/ascii_output.ml | ocaml | open! Core
open! Import
module Rule = struct
let apply s ~rule ~refined:_ =
Ansi_output.Rule.apply s ~rule:(Format.Rule.strip_styles rule) ~refined:false
;;
end
let print ~print_global_header ~file_names ~rules ~print ~location_style hunks =
let rules = Format.Rules.strip_styles rules in
Ansi_output.print ~print_global_header ~file_names ~rules ~print ~location_style hunks
;;
|
|
afb765d9f333f2d3a8f27232008708452469e24c415d7f88a47caf8165223f43 | stabilized/clojurescript | repl.cljs | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Receive - Eval - Print - Loop
Receive a block of JS (presumably generated by a ClojureScript compiler)
Evaluate it naively
Print the result of evaluation to a string
Send the resulting string back to the server Loop!"
:author "Bobby Calderwood and Alex Redington"}
clojure.browser.repl
(:require [clojure.browser.net :as net]
[clojure.browser.event :as event]
;; repl-connection callback will receive goog.require('cljs.repl')
;; and monkey-patched require expects to be able to derive it
;; via goog.basePath, so this namespace should be compiled together
;; with :
[cljs.repl]))
(def xpc-connection (atom nil))
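;; Typical usage from application code (the REPL server URL is illustrative):
;; (ns my.app (:require [clojure.browser.repl :as repl]))
;; (repl/connect "http://localhost:9000/repl")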
(defn repl-print [data]
(if-let [conn @xpc-connection]
(net/transmit conn :print (pr-str data))))
(defn evaluate-javascript
"Process a single block of JavaScript received from the server"
[conn block]
(let [result (try {:status :success :value (str (js* "eval(~{block})"))}
(catch :default e
{:status :exception :value (pr-str e)
:stacktrace (if (.hasOwnProperty e "stack")
(.-stack e)
"No stacktrace available.")}))]
(pr-str result)))
(defn send-result [connection url data]
(net/transmit connection url "POST" data nil 0))
(defn send-print
"Send data to be printed in the REPL. If there is an error, try again
up to 10 times."
([url data]
(send-print url data 0))
([url data n]
(let [conn (net/xhr-connection)]
(event/listen conn :error
(fn [_]
(if (< n 10)
(send-print url data (inc n))
(.log js/console (str "Could not send " data " after " n " attempts.")))))
(net/transmit conn url "POST" data nil 0))))
(def order (atom 0))
(defn wrap-message [t data]
(pr-str {:type t :content data :order (swap! order inc)}))
(defn start-evaluator
"Start the REPL server connection."
[url]
(if-let [repl-connection (net/xpc-connection)]
(let [connection (net/xhr-connection)]
(event/listen connection
:success
(fn [e]
(net/transmit
repl-connection
:evaluate-javascript
(.getResponseText (.-currentTarget e)
()))))
(net/register-service repl-connection
:send-result
(fn [data]
(send-result connection url (wrap-message :result data))))
(net/register-service repl-connection
:print
(fn [data]
(send-print url (wrap-message :print data))))
(net/connect repl-connection
(constantly nil))
(js/setTimeout #(send-result connection url (wrap-message :ready "ready")) 50))
(js/alert "No 'xpc' param provided to child iframe.")))
(defn connect
"Connects to a REPL server from an HTML document. After the
connection is made, the REPL will evaluate forms in the context of
the document that called this function."
[repl-server-url]
(let [repl-connection
(net/xpc-connection
{:peer_uri repl-server-url})]
(swap! xpc-connection (constantly repl-connection))
(net/register-service repl-connection
:evaluate-javascript
(fn [js]
(net/transmit
repl-connection
:send-result
(evaluate-javascript repl-connection js))))
(net/connect repl-connection
(constantly nil)
(fn [iframe]
(set! (.-display (.-style iframe))
"none")))
;; Monkey-patch goog.require if running under optimizations :none - David
(when-not js/COMPILED
(set! *loaded-libs*
(let [gntp (.. js/goog -dependencies_ -nameToPath)]
(into #{}
(filter
(fn [name]
(aget (.. js/goog -dependencies_ -visited) (aget gntp name)))
(js-keys gntp)))))
(set! (.-isProvided_ js/goog) (fn [_] false))
(set! (.-require js/goog)
(fn [name reload]
(when (or (not (contains? *loaded-libs* name)) reload)
(set! *loaded-libs* (conj (or *loaded-libs* #{}) name))
(.appendChild js/document.body
(let [script (.createElement js/document "script")]
(set! (.-type script) "text/javascript")
(set! (.-src script)
(str "goog/"
(aget (.. js/goog -dependencies_ -nameToPath) name)))
script))))))))
| null | https://raw.githubusercontent.com/stabilized/clojurescript/f38f141525576b2a89cde190f25f9cf2fc4c418a/src/cljs/clojure/browser/repl.cljs | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
and monkey-patched require expects to be able to derive it
via goog.basePath, so this namespace should be compiled together
Monkey-patch goog.require if running under optimizations :none - David | Copyright ( c ) . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns ^{:doc "Receive - Eval - Print - Loop
Receive a block of JS (presumably generated by a ClojureScript compiler)
Evaluate it naively
Print the result of evaluation to a string
Send the resulting string back to the server Loop!"
:author "Bobby Calderwood and Alex Redington"}
clojure.browser.repl
(:require [clojure.browser.net :as net]
[clojure.browser.event :as event]
repl - connection callback will receive goog.require('cljs.repl ' )
with :
[cljs.repl]))
(def xpc-connection (atom nil))
(defn repl-print [data]
(if-let [conn @xpc-connection]
(net/transmit conn :print (pr-str data))))
(defn evaluate-javascript
"Process a single block of JavaScript received from the server"
[conn block]
(let [result (try {:status :success :value (str (js* "eval(~{block})"))}
(catch :default e
{:status :exception :value (pr-str e)
:stacktrace (if (.hasOwnProperty e "stack")
(.-stack e)
"No stacktrace available.")}))]
(pr-str result)))
(defn send-result [connection url data]
(net/transmit connection url "POST" data nil 0))
(defn send-print
"Send data to be printed in the REPL. If there is an error, try again
up to 10 times."
([url data]
(send-print url data 0))
([url data n]
(let [conn (net/xhr-connection)]
(event/listen conn :error
(fn [_]
(if (< n 10)
(send-print url data (inc n))
(.log js/console (str "Could not send " data " after " n " attempts.")))))
(net/transmit conn url "POST" data nil 0))))
(def order (atom 0))
(defn wrap-message [t data]
(pr-str {:type t :content data :order (swap! order inc)}))
(defn start-evaluator
"Start the REPL server connection."
[url]
(if-let [repl-connection (net/xpc-connection)]
(let [connection (net/xhr-connection)]
(event/listen connection
:success
(fn [e]
(net/transmit
repl-connection
:evaluate-javascript
(.getResponseText (.-currentTarget e)
()))))
(net/register-service repl-connection
:send-result
(fn [data]
(send-result connection url (wrap-message :result data))))
(net/register-service repl-connection
:print
(fn [data]
(send-print url (wrap-message :print data))))
(net/connect repl-connection
(constantly nil))
(js/setTimeout #(send-result connection url (wrap-message :ready "ready")) 50))
(js/alert "No 'xpc' param provided to child iframe.")))
(defn connect
"Connects to a REPL server from an HTML document. After the
connection is made, the REPL will evaluate forms in the context of
the document that called this function."
[repl-server-url]
(let [repl-connection
(net/xpc-connection
{:peer_uri repl-server-url})]
(swap! xpc-connection (constantly repl-connection))
(net/register-service repl-connection
:evaluate-javascript
(fn [js]
(net/transmit
repl-connection
:send-result
(evaluate-javascript repl-connection js))))
(net/connect repl-connection
(constantly nil)
(fn [iframe]
(set! (.-display (.-style iframe))
"none")))
(when-not js/COMPILED
(set! *loaded-libs*
(let [gntp (.. js/goog -dependencies_ -nameToPath)]
(into #{}
(filter
(fn [name]
(aget (.. js/goog -dependencies_ -visited) (aget gntp name)))
(js-keys gntp)))))
(set! (.-isProvided_ js/goog) (fn [_] false))
(set! (.-require js/goog)
(fn [name reload]
(when (or (not (contains? *loaded-libs* name)) reload)
(set! *loaded-libs* (conj (or *loaded-libs* #{}) name))
(.appendChild js/document.body
(let [script (.createElement js/document "script")]
(set! (.-type script) "text/javascript")
(set! (.-src script)
(str "goog/"
(aget (.. js/goog -dependencies_ -nameToPath) name)))
script))))))))
|
06b04a331d7f42fe546cc6cdf69dcc603ecb27a3d1ce9e41123b82fb4089f8f8 | cl-rabbit/cl-bunny | connection-base.lisp | (in-package :cl-bunny)
(defvar *connection* nil
"Current AMQP connection")
(defparameter *connection-type* 'threaded-iolib-connection)
(defparameter *notification-lambda* nil
"If not NIL expected to be lambda (lambda) -> bb:promise.")
(defparameter *debug-connection* nil)
(defclass connection (connection-in-pool channel-base)
((spec :initarg :spec :reader connection-spec)
(channel-id-allocator :type channel-id-allocator
:reader connection-channel-id-allocator)
(channels :type hash-table
:initform (make-hash-table :synchronized t)
:reader connection-channels)
(channel-max :accessor connection-channel-max% :initform +channel-max+)
(frame-max :accessor connection-frame-max% :initform +frame-max+)
(heartbeat :accessor connection-heartbeat% :initform +heartbeat-interval+)
;; events
(on-close :initform (make-instance 'bunny-event)
:initarg :on-close
:accessor connection-on-close%)
(on-error :initform (make-instance 'bunny-event)
:initarg :on-error
:accessor connection-on-error%)))
(defmethod channel-id ((channel connection))
0)
(defmethod channel-connection ((channel connection))
channel)
(defmethod channel-id ((channel fixnum))
channel)
(defgeneric connection-channel-max% (connection))
(defgeneric connection-frame-max% (connection))
(defgeneric connection-heartbeat% (connection))
(defgeneric connection-server-properties% (connection))
(defun connection-channel-max (&optional (connection *connection*))
(connection-channel-max% connection))
(defun connection-frame-max (&optional (connection *connection*))
(connection-frame-max% connection))
(defun connection-heartbeat (&optional (connection *connection*))
(connection-heartbeat% connection))
(defun connection-server-properties (&optional (connection *connection*))
(connection-server-properties% connection))
(defgeneric connection-open-p% (connection)
(:method ((connection connection))
(eq (channel-state connection) :open)))
(defun connection-open-p (&optional (connection *connection*))
(connection-open-p% connection))
(defun check-connection-alive (connection)
(when (connection-open-p connection)
connection))
(defun connection-on-close (&optional (connection *connection*))
(connection-on-close% connection))
(defun get-connection-type (spec)
(or *connection-type*
(if (= 0 (connection-spec-heartbeat-interval spec))
'iolib-connection
'threaded-iolib-connection)))
(defgeneric connection.new% (connection-type spec pool-tag))
(defun connection.new (&optional (spec "amqp://") &key (heartbeat +heartbeat-interval+) pool-tag)
(assert (or (positive-integer-p heartbeat)
:default))
(let ((spec (make-connection-spec spec)))
(unless (= heartbeat +heartbeat-interval+)
(setf (connection-spec-heartbeat-interval spec) heartbeat))
(connection.new% (get-connection-type spec) spec (or pool-tag (with-output-to-string (s) (print-amqp-object spec s))))))
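;; Usage sketch (example URI): (connection.open (connection.new "amqp://localhost" :heartbeat 30))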
(defgeneric connection.open% (connection)
(:method ((connection connection))
(connection.init connection)
connection))
(defun connection.open (&optional (connection *connection*))
(connection.open% connection))
(defgeneric connection.close% (connection timeout))
(defun connection.close (&key (connection *connection*) (timeout *force-timeout*))
(connection.close% connection timeout))
(defgeneric register-channel (connection channel))
(defmethod register-channel ((connection connection) channel)
(unless (channel-id channel)
(setf (slot-value channel 'channel-id) (next-channel-id (connection-channel-id-allocator connection))))
(setf (gethash (channel-id channel) (connection-channels connection)) channel))
(defun connection.register-channel (channel)
(register-channel (channel-connection channel) channel))
(defgeneric deregister-channel (connection channel))
(defmethod deregister-channel ((connection connection) channel)
(remhash (channel-id channel) (connection-channels connection))
(release-channel-id (connection-channel-id-allocator connection) (channel-id channel)))
(defun connection.deregister-channel (channel)
(deregister-channel (channel-connection channel) channel))
(defgeneric get-channel (connection channel-id))
(defmethod get-channel ((connection connection) channel-id)
(if (= 0 channel-id)
connection
(gethash channel-id (connection-channels connection))))
(defun connection.get-channel (channel-id &key (connection *connection*))
(get-channel connection channel-id))
(defgeneric connection.send (connection channel method))
(defgeneric connection.receive (connection method))
(defmethod connection.receive ((connection connection) (method amqp-method-connection-close))
(log:debug "Received connection.closed ~a" method)
(connection.close-ok% connection nil))
(defmethod connection.receive ((connection connection) (method amqp-method-connection-blocked))
(log:error "Connection blocked ~a" method))
(defmethod connection.receive ((connection connection) (method amqp-method-connection-unblocked))
(log:error "Connection unblocked ~a" method))
(defun parse-with-connection-params-list (params)
(if (and (keywordp (first params))
(evenp (length params)))
(append (list nil) params)
params))
(defun parse-with-connection-params (params)
(etypecase params
(string (list params :shared nil))
(symbol (list params :shared nil))
(list (parse-with-connection-params-list params))))
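;; Usage sketch (example URI; the body runs with *CONNECTION* bound):
;;   (with-connection ("amqp://localhost" :shared t)
;;     ...)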
(defmacro with-connection (params &body body)
(destructuring-bind (spec &key shared (type '*connection-type*)) (parse-with-connection-params params)
(with-gensyms (connection-spec-val shared-val)
`(let* ((,connection-spec-val ,spec)
(,shared-val ,shared)
(*connection* (let ((*connection-type* (or ,type
(if ,shared-val 'threaded-iolib-connection
'iolib-connection))))
(if ,shared-val
(connections-pool.find-or-run (if (eq t ,shared-val) ,connection-spec-val ,shared-val) ,connection-spec-val)
(connection.open (connection.new ,connection-spec-val))))))
(unwind-protect
(progn
,@body)
(unless ,shared-val
(connection.close)))))))
(defgeneric connection.consume% (connection timeout one-shot))
(defun connection.consume (&key (connection *connection*) (timeout 1) one-shot)
(assert connection)
(assert (connection-open-p connection) () 'connection-closed-error :connection connection)
(connection.consume% connection timeout one-shot))
(defgeneric execute-on-connection-thread (connection channel lambda))
(defmethod execute-on-connection-thread ((connection connection) channel lambda)
"Single-thread sync connection"
(assert (connection-open-p connection) () 'connection-closed-error :connection connection)
(funcall lambda))
| null | https://raw.githubusercontent.com/cl-rabbit/cl-bunny/6da7fe161efc8d6bb0b8b09ac8efad03553d765c/src/base/connection-base.lisp | lisp | events | (in-package :cl-bunny)
(defvar *connection* nil
"Current AMQP connection")
(defparameter *connection-type* 'threaded-iolib-connection)
(defparameter *notification-lambda* nil
"If not NIL expected to be lambda (lambda) -> bb:promise.")
(defparameter *debug-connection* nil)
(defclass connection (connection-in-pool channel-base)
((spec :initarg :spec :reader connection-spec)
(channel-id-allocator :type channel-id-allocator
:reader connection-channel-id-allocator)
(channels :type hash-table
:initform (make-hash-table :synchronized t)
:reader connection-channels)
(channel-max :accessor connection-channel-max% :initform +channel-max+)
(frame-max :accessor connection-frame-max% :initform +frame-max+)
(heartbeat :accessor connection-heartbeat% :initform +heartbeat-interval+)
(on-close :initform (make-instance 'bunny-event)
:initarg :on-close
:accessor connection-on-close%)
(on-error :initform (make-instance 'bunny-event)
:initarg :on-error
:accessor connection-on-error%)))
(defmethod channel-id ((channel connection))
0)
(defmethod channel-connection ((channel connection))
  channel)
(defmethod channel-id ((channel fixnum))
channel)
(defgeneric connection-channel-max% (connection))
(defgeneric connection-frame-max% (connection))
(defgeneric connection-heartbeat% (connection))
(defgeneric connection-server-properties% (connection))
(defun connection-channel-max (&optional (connection *connection*))
(connection-channel-max% connection))
(defun connection-frame-max (&optional (connection *connection*))
(connection-frame-max% connection))
(defun connection-heartbeat (&optional (connection *connection*))
(connection-heartbeat% connection))
(defun connection-server-properties (&optional (connection *connection*))
(connection-server-properties% connection))
(defgeneric connection-open-p% (connection)
(:method ((connection connection))
(eq (channel-state connection) :open)))
(defun connection-open-p (&optional (connection *connection*))
(connection-open-p% connection))
(defun check-connection-alive (connection)
(when (connection-open-p connection)
connection))
(defun connection-on-close (&optional (connection *connection*))
(connection-on-close% connection))
(defun get-connection-type (spec)
(or *connection-type*
(if (= 0 (connection-spec-heartbeat-interval spec))
'iolib-connection
'threaded-iolib-connection)))
(defgeneric connection.new% (connection-type spec pool-tag))
(defun connection.new (&optional (spec "amqp://") &key (heartbeat +heartbeat-interval+) pool-tag)
(assert (or (positive-integer-p heartbeat)
:default))
(let ((spec (make-connection-spec spec)))
(unless (= heartbeat +heartbeat-interval+)
(setf (connection-spec-heartbeat-interval spec) heartbeat))
(connection.new% (get-connection-type spec) spec (or pool-tag (with-output-to-string (s) (print-amqp-object spec s))))))
(defgeneric connection.open% (connection)
(:method ((connection connection))
(connection.init connection)
connection))
(defun connection.open (&optional (connection *connection*))
(connection.open% connection))
(defgeneric connection.close% (connection timeout))
(defun connection.close (&key (connection *connection*) (timeout *force-timeout*))
(connection.close% connection timeout))
(defgeneric register-channel (connection channel))
(defmethod register-channel ((connection connection) channel)
(unless (channel-id channel)
(setf (slot-value channel 'channel-id) (next-channel-id (connection-channel-id-allocator connection))))
(setf (gethash (channel-id channel) (connection-channels connection)) channel))
(defun connection.register-channel (channel)
(register-channel (channel-connection channel) channel))
(defgeneric deregister-channel (connection channel))
(defmethod deregister-channel ((connection connection) channel)
(remhash (channel-id channel) (connection-channels connection))
(release-channel-id (connection-channel-id-allocator connection) (channel-id channel)))
(defun connection.deregister-channel (channel)
(deregister-channel (channel-connection channel) channel))
(defgeneric get-channel (connection channel-id))
(defmethod get-channel ((connection connection) channel-id)
(if (= 0 channel-id)
connection
(gethash channel-id (connection-channels connection))))
(defun connection.get-channel (channel-id &key (connection *connection*))
(get-channel connection channel-id))
(defgeneric connection.send (connection channel method))
(defgeneric connection.receive (connection method))
(defmethod connection.receive ((connection connection) (method amqp-method-connection-close))
(log:debug "Received connection.closed ~a" method)
(connection.close-ok% connection nil))
(defmethod connection.receive ((connection connection) (method amqp-method-connection-blocked))
(log:error "Connection blocked ~a" method))
(defmethod connection.receive ((connection connection) (method amqp-method-connection-unblocked))
(log:error "Connection unblocked ~a" method))
(defun parse-with-connection-params-list (params)
(if (and (keywordp (first params))
(evenp (length params)))
(append (list nil) params)
params))
(defun parse-with-connection-params (params)
(etypecase params
(string (list params :shared nil))
(symbol (list params :shared nil))
(list (parse-with-connection-params-list params))))
(defmacro with-connection (params &body body)
(destructuring-bind (spec &key shared (type '*connection-type*)) (parse-with-connection-params params)
(with-gensyms (connection-spec-val shared-val)
`(let* ((,connection-spec-val ,spec)
(,shared-val ,shared)
(*connection* (let ((*connection-type* (or ,type
(if ,shared-val 'threaded-iolib-connection
'iolib-connection))))
(if ,shared-val
(connections-pool.find-or-run (if (eq t ,shared-val) ,connection-spec-val ,shared-val) ,connection-spec-val)
(connection.open (connection.new ,connection-spec-val))))))
(unwind-protect
(progn
,@body)
(unless ,shared-val
(connection.close)))))))
(defgeneric connection.consume% (connection timeout one-shot))
(defun connection.consume (&key (connection *connection*) (timeout 1) one-shot)
(assert connection)
(assert (connection-open-p connection) () 'connection-closed-error :connection connection)
(connection.consume% connection timeout one-shot))
(defgeneric execute-on-connection-thread (connection channel lambda))
(defmethod execute-on-connection-thread ((connection connection) channel lambda)
"Single-thread sync connection"
(assert (connection-open-p connection) () 'connection-closed-error :connection connection)
(funcall lambda))
|
771f4c81f44c7937f0b5a0a77cdf84f50108f8860f1bd8029f034b1a2fd3dcf3 | mirage/ocaml-rpc | ppx_deriving_rpc.ml | open Ppxlib
open Ast_builder.Default
open Common
let argn = Printf.sprintf "a%d"
let rpc_of str = "rpc_of_" ^ str
let of_rpc str = str ^ "_of_rpc"
let map_lident f = function
| Lident x -> Lident (f x)
| Ldot (path, lid) -> Ldot (path, f lid)
| Lapply _ -> Location.raise_errorf "rpcty - map_lident: Lapply unhandled"
(* [is_option typ] returns true if the type 'typ' is an option type.
This is required because of the slightly odd way we serialise records containing optional fields. *)
let is_option typ =
match typ with
| [%type: [%t? _] option] -> true
| _ -> false
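(* An ['a option] is represented as [Rpc.Enum []] for [None] and
   [Rpc.Enum [ x ]] for [Some x] - e.g. [Some 1 : int option] becomes
   [Rpc.Enum [ Rpc.Int 1L ]] - and the record cases below rely on this to
   decide whether an optional field is present at all. *)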
(* When marshalling (foo * bar) lists we check to see whether it can be better represented by a
dictionary - we do this by checking (possibly at run time) whether the 'foo' can be unmarshalled from
a string - this following function, given the type 'foo', returns the run time check *)
let is_string loc typ =
match typ with
| [%type: string] -> [%expr true]
| [%type: int] -> [%expr false]
| [%type: bool] -> [%expr false]
| { ptyp_desc = Ptyp_constr (lid, []); _ } ->
[%expr
let open Rpc in
try
let _ = [%e type_constr_conv lid ~loc ~f:of_rpc [ [%expr Rpc.String ""] ]] in
true
with
| _ -> false]
| _ -> [%expr false]
let is_dict loc attr =
match Attribute.get Attrs.is_dict attr with
| Some () -> [%expr true]
| None -> [%expr false]
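(* [is_dict] checks for the deriver's dict attribute on the type (written
   [@dict] in user code, assuming the attribute name declared in [Attrs]);
   when present, (string * 'a) assoc lists are marshalled as [Rpc.Dict]. *)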
module Of_rpc = struct
let rec expr_of_typ ~loc typ =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
match typ with
| { ptyp_desc = Ptyp_constr ({ txt = Lident name; _ }, _); _ }
when list_assoc_mem (core_types loc) ~equal:String.equal name ->
type_constr_conv (Located.mk (Ldot (Lident "Rpc", name))) ~f:of_rpc []
| { ptyp_desc = Ptyp_constr ({ txt = Lident "char"; _ }, _); _ } ->
[%expr Rpc.char_of_rpc]
| { ptyp_desc =
Ptyp_constr
({ txt = Lident "list"; _ }, [ { ptyp_desc = Ptyp_tuple [ typ1; typ2 ]; _ } ])
; _
} ->
[%expr
if [%e is_dict loc typ] || [%e is_string loc typ1]
then
function
| Rpc.Dict l ->
Rpcmarshal.tailrec_map
(fun (k, v) ->
[%e expr_of_typ ~loc typ1] (Rpc.String k), [%e expr_of_typ ~loc typ2] v)
l
| y ->
failwith
(Printf.sprintf "Expecting Rpc.Dict, but found '%s'" (Rpc.to_string y))
else
function
| Rpc.Enum l ->
Rpcmarshal.tailrec_map
(function
| Rpc.Enum [ k; v ] ->
[%e expr_of_typ ~loc typ1] k, [%e expr_of_typ ~loc typ2] v
| y ->
failwith
(Printf.sprintf
"Expecting Rpc.Enum (within an Enum), but found '%s'"
(Rpc.to_string y)))
l
| y ->
failwith
(Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
    (* Tuple lists might be representable by a dictionary, if the first type in the tuple is string-like *)
| [%type: [%t? typ] list] ->
[%expr
function
| Rpc.Enum l -> Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] l
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| [%type: [%t? typ] array] ->
[%expr
function
| Rpc.Enum l ->
Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] l |> Array.of_list
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| { ptyp_desc = Ptyp_tuple typs; _ } ->
let pattern = ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs in
let exprs =
ListLabels.mapi
~f:(fun i typ -> [%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
[%expr
function
| Rpc.Enum [%p plist pattern] -> [%e pexp_tuple exprs]
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| [%type: [%t? typ] option] ->
let e = expr_of_typ ~loc typ in
[%expr
function
| Rpc.Enum [] -> None
| Rpc.Enum [ y ] -> Some ([%e e] y)
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| { ptyp_desc = Ptyp_constr ({ txt = lid; _ }, args); _ } ->
let args =
List.rev @@ ListLabels.rev_map ~f:(fun x -> Nolabel, expr_of_typ ~loc x) args
in
let f = pexp_ident (Located.mk (map_lident of_rpc lid)) in
pexp_apply f args
| { ptyp_desc = Ptyp_var name; _ } -> [%expr [%e evar ("poly_" ^ name)]]
| { ptyp_desc = Ptyp_variant (fields, _, _); _ } ->
let inherits, tags =
list_partition_tf
~f:(function
| { prf_desc = Rinherit _; _ } -> true
| _ -> false)
fields
in
let bad = [%expr failwith "Unknown tag/contents"] in
let default_expr =
match Attribute.get Attrs.ct_default typ with
| None -> bad
| Some expr ->
[%expr
match rpc' with
| String _ | Enum (String _ :: _) -> [%e expr]
| _ -> [%e bad]]
in
let tag_cases =
tags
|> ListLabels.rev_map ~f:(fun field ->
let { prf_desc; _ } = field in
match prf_desc with
| Rtag (label, true, []) ->
let label' = String.lowercase_ascii label.txt in
let name =
match Attribute.get Attrs.rt_name field with
| Some s -> s
| None -> label'
in
case
~lhs:[%pat? Rpc.String [%p pstring name]]
~guard:None
~rhs:(pexp_variant label.txt None)
| Rtag (label, false, [ { ptyp_desc = Ptyp_tuple typs; _ } ]) ->
let label' = String.lowercase_ascii label.txt in
let name =
match Attribute.get Attrs.rt_name field with
| Some s -> s
| None -> label'
in
let exprs =
ListLabels.mapi
~f:(fun i typ ->
[%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
case
~lhs:
[%pat?
Rpc.Enum
[ Rpc.String [%p pstring name]
; Rpc.Enum
[%p
plist (ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs)]
]]
~guard:None
~rhs:(pexp_variant label.txt (Some (pexp_tuple exprs)))
| Rtag (label, false, [ typ ]) ->
let label' = String.lowercase_ascii label.txt in
let name =
match Attribute.get Attrs.rt_name field with
| Some s -> s
| None -> label'
in
case
~lhs:[%pat? Rpc.Enum [ Rpc.String [%p pstring name]; y ]]
~guard:None
~rhs:
[%expr
[%e expr_of_typ ~loc typ] y
|> fun x -> [%e pexp_variant label.txt (Some [%expr x])]]
| _ -> failwith "Cannot derive variant case")
|> List.rev
and inherits_case =
(*let toplevel_typ = typ in*)
let expr =
List.rev
@@ ListLabels.rev_map
~f:(function
| { prf_desc = Rinherit typ; _ } -> typ
| _ -> assert false)
inherits
|> ListLabels.fold_left
~f:(fun expr typ ->
[%expr
try [%e expr_of_typ ~loc typ] rpc (* :> [%t toplevel_typ]*) with
| _ -> [%e expr]])
~init:default_expr
in
case ~lhs:[%pat? _] ~guard:None ~rhs:expr
in
[%expr
fun (rpc : Rpc.t) ->
let rpc' =
match rpc with
| Rpc.Enum (Rpc.String x :: xs) ->
Rpc.Enum (Rpc.String (String.lowercase_ascii x) :: xs)
| Rpc.String x -> Rpc.String (String.lowercase_ascii x)
| y -> y
in
[%e pexp_match [%expr rpc'] (tag_cases @ [ inherits_case ])]]
| { ptyp_desc = Ptyp_any; _ } -> failwith "Ptyp_any not handled"
| { ptyp_desc = Ptyp_poly (_, _); _ } -> failwith "Ptyp_poly not handled"
| { ptyp_desc = Ptyp_extension _; _ } -> failwith "Ptyp_extension not handled"
| { ptyp_desc = Ptyp_arrow (_, _, _); _ } -> failwith "Ptyp_arrow not handled"
| { ptyp_desc = Ptyp_object (_, _); _ } -> failwith "Ptyp_object not handled"
| { ptyp_desc = Ptyp_alias (_, _); _ } -> failwith "Ptyp_alias not handled"
| { ptyp_desc = Ptyp_class (_, _); _ } -> failwith "Ptyp_class not handled"
| { ptyp_desc = Ptyp_package _; _ } -> failwith "Ptyp_package not handled"
let str_of_type ~loc type_decl =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
let of_rpc =
match type_decl.ptype_kind, type_decl.ptype_manifest with
| Ptype_abstract, Some manifest -> expr_of_typ ~loc manifest
| Ptype_record labels, _ ->
let record =
ListLabels.fold_left
~f:(fun expr (i, label) ->
let { pld_name = { txt = name; _ }; _ } = label in
let key =
String.lowercase_ascii
(match Attribute.get Attrs.key label with
| Some s -> s
| None -> name)
in
let pat = pvar (argn i) in
let expr' = evar (argn i) in
let str = estring key in
[%expr
let [%p pat] =
match [%e expr'] with
| Some x -> x
| None ->
failwith (Printf.sprintf "Undefined field: Expecting '%s'" [%e str])
in
[%e expr]])
~init:
[%expr
[%e
pexp_record
(labels
|> ListLabels.mapi ~f:(fun i { pld_name = { txt = name; _ }; _ } ->
{ txt = Lident name; loc }, evar (argn i)))
None]]
(labels |> ListLabels.mapi ~f:(fun i label -> i, label))
in
let wrap_opt pld_type x =
if is_option pld_type then [%expr Rpc.Enum [ [%e x] ]] else x
in
let cases =
(labels
|> ListLabels.mapi ~f:(fun i label ->
let { pld_name = { txt = name; _ }; pld_type; _ } = label in
let key =
String.lowercase_ascii
(match Attribute.get Attrs.key label with
| Some s -> s
| None -> name)
in
let thunks =
labels
|> ListLabels.mapi ~f:(fun j _ ->
if i = j
then
[%expr
Some
[%e
pexp_apply
(expr_of_typ ~loc pld_type)
[ Nolabel, wrap_opt pld_type (evar "x") ]]]
else evar (argn j))
in
case
~lhs:[%pat? ([%p pstring key], x) :: xs]
~guard:None
~rhs:[%expr loop xs [%e pexp_tuple thunks]]))
@ [ case ~lhs:[%pat? []] ~guard:None ~rhs:record
; case ~lhs:[%pat? _ :: xs] ~guard:None ~rhs:[%expr loop xs _state]
]
and thunks =
labels
|> ListLabels.rev_map ~f:(fun { pld_name = _; pld_type; _ } ->
if is_option pld_type then [%expr Some None] else [%expr None])
|> List.rev
in
[%expr
fun x ->
match x with
| Rpc.Dict dict ->
let d' =
List.rev
@@ ListLabels.rev_map ~f:(fun (k, v) -> String.lowercase_ascii k, v) dict
in
let rec loop
xs
([%p ppat_tuple (ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) labels)]
as _state)
=
[%e pexp_match [%expr xs] cases]
in
loop d' [%e pexp_tuple thunks]
| y ->
failwith
(Printf.sprintf "Expecting Rpc.Dict, but found '%s'" (Rpc.to_string y))]
| Ptype_abstract, None -> failwith "Unhandled"
| Ptype_open, _ -> failwith "Unhandled"
| Ptype_variant constrs, _ ->
let cases =
constrs
|> ListLabels.rev_map ~f:(fun constr ->
let { pcd_name = { txt = name; _ }; pcd_args; _ } = constr in
let name' =
match Attribute.get Attrs.constr_name constr with
| Some n -> n
| None -> name
in
match pcd_args with
| Pcstr_tuple typs ->
let subpattern =
ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs |> plist
in
let exprs =
ListLabels.mapi
~f:(fun i typ ->
[%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
let contents =
match exprs with
| [] -> None
| [ x ] -> Some x
| xs -> Some (pexp_tuple xs)
in
let rpc_of = pexp_construct { txt = Lident name; loc } contents in
let main =
[%pat? Rpc.String [%p pstring (String.lowercase_ascii name')]]
in
let pattern =
match typs with
| [] -> main
| _ -> [%pat? Rpc.Enum ([%p main] :: [%p subpattern])]
in
case ~lhs:pattern ~guard:None ~rhs:rpc_of
| Pcstr_record _ -> failwith "record variants are not supported")
|> List.rev
in
let default =
case
~lhs:[%pat? y]
~guard:None
~rhs:
[%expr
failwith
(Printf.sprintf
"Unhandled pattern when unmarshalling variant type: found '%s'"
(Rpc.to_string y))]
in
[%expr
fun rpc ->
let rpc' = Rpc.lowerfn rpc in
[%e pexp_function (cases @ [ default ])] rpc']
in
of_rpc
end
module Rpc_of = struct
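  (* Mirror of [Of_rpc]: builds the expression that turns a value into an
     [Rpc.t].  Records become [Rpc.Dict]; variants become [Rpc.String] or
     [Rpc.Enum (Rpc.String tag :: args)]. *)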
let rec expr_of_typ ~loc typ =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
match typ with
| { ptyp_desc = Ptyp_constr ({ txt = Lident name; _ }, _); _ }
when list_assoc_mem (Common.core_types loc) ~equal:String.equal name ->
[%expr Rpc.([%e pexp_ident (Located.mk (lident (rpc_of name)))])]
| { ptyp_desc = Ptyp_constr ({ txt = Lident "char"; _ }, _); _ } ->
[%expr
Rpc.(
function
| c -> Rpc.Int (Int64.of_int (Char.code c)))]
    (* Tuple lists might be representable by a dictionary, if the first type in the tuple is string-like *)
| { ptyp_desc =
Ptyp_constr
({ txt = Lident "list"; _ }, [ { ptyp_desc = Ptyp_tuple [ typ1; typ2 ]; _ } ])
; _
} ->
[%expr
if [%e is_dict loc typ] || [%e is_string loc typ1]
then
fun l ->
Rpc.Dict
(List.rev
@@ ListLabels.rev_map
~f:(fun (k, v) ->
( Rpc.string_of_rpc ([%e expr_of_typ ~loc typ1] k)
, [%e expr_of_typ ~loc typ2] v ))
l)
else
fun l ->
Rpc.Enum
(List.rev
@@ ListLabels.rev_map
~f:(fun (a, b) ->
Rpc.Enum [ [%e expr_of_typ ~loc typ1] a; [%e expr_of_typ ~loc typ2] b ])
l)]
| [%type: [%t? typ] list] ->
[%expr fun l -> Rpc.Enum (Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] l)]
| [%type: [%t? typ] array] ->
[%expr
fun l ->
Rpc.Enum (Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] (Array.to_list l))]
| { ptyp_desc = Ptyp_tuple typs; _ } ->
let args =
ListLabels.mapi
~f:(fun i typ -> pexp_apply (expr_of_typ ~loc typ) [ Nolabel, evar (argn i) ])
typs
in
[%expr
fun [%p ppat_tuple (ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs)] ->
Rpc.Enum [%e elist args]]
| [%type: [%t? typ] option] ->
let e = expr_of_typ ~loc typ in
[%expr
fun x ->
match x with
| None -> Rpc.Enum []
| Some y -> Rpc.Enum [ [%e e] y ]]
| { ptyp_desc = Ptyp_constr ({ txt = lid; _ }, args); _ } ->
let args =
List.rev @@ ListLabels.rev_map ~f:(fun e -> Nolabel, expr_of_typ ~loc e) args
in
let f = pexp_ident (Located.mk (map_lident rpc_of lid)) in
pexp_apply f args
| { ptyp_desc = Ptyp_variant (fields, _, _); _ } ->
let cases =
fields
|> ListLabels.rev_map ~f:(fun field ->
let { prf_desc; _ } = field in
match prf_desc with
| Rtag (label, true, []) ->
let l =
match Attribute.get Attrs.rt_name field with
| Some x -> x
| None -> label.txt
in
case
~lhs:(ppat_variant label.txt None)
~guard:None
~rhs:[%expr Rpc.String [%e estring l]]
| Rtag (label, false, [ { ptyp_desc = Ptyp_tuple typs; _ } ]) ->
let l =
elist
(ListLabels.mapi
~f:(fun i typ ->
pexp_apply (expr_of_typ ~loc typ) [ Nolabel, evar (argn i) ])
typs)
in
let label =
match Attribute.get Attrs.rt_name field with
| Some x -> x
| None -> label.txt
in
case
~lhs:
(ppat_variant
label
(ppat_tuple_opt
(ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs)))
~guard:None
~rhs:
[%expr Rpc.Enum [ Rpc.String [%e estring label]; Rpc.Enum [%e l] ]]
| Rtag (label, false, [ typ ]) ->
let label =
match Attribute.get Attrs.rt_name field with
| Some x -> x
| None -> label.txt
in
case
~lhs:(ppat_variant label (Some [%pat? x]))
~guard:None
~rhs:
[%expr
Rpc.Enum
[ Rpc.String [%e estring label]; [%e expr_of_typ ~loc typ] x ]]
| Rinherit ({ ptyp_desc = Ptyp_constr (tname, _); _ } as typ) ->
case
~lhs:[%pat? [%p ppat_type tname] as x]
~guard:None
~rhs:[%expr [%e expr_of_typ ~loc typ] x]
| _ -> failwith "cannot be derived for")
|> List.rev
in
pexp_function cases
| { ptyp_desc = Ptyp_any; _ } -> failwith "Ptyp_any not handled"
| { ptyp_desc = Ptyp_var name; _ } -> [%expr [%e evar ("poly_" ^ name)]]
| { ptyp_desc = Ptyp_poly (_, _); _ } -> failwith "Ptyp_poly not handled"
| { ptyp_desc = Ptyp_extension _; _ } -> failwith "Ptyp_extension not handled"
| { ptyp_desc = Ptyp_arrow (_, _, _); _ } -> failwith "Ptyp_arrow not handled"
| { ptyp_desc = Ptyp_object (_, _); _ } -> failwith "Ptyp_object not handled"
| { ptyp_desc = Ptyp_alias (_, _); _ } -> failwith "Ptyp_alias not handled"
| { ptyp_desc = Ptyp_class (_, _); _ } -> failwith "Ptyp_class not handled"
| { ptyp_desc = Ptyp_package _; _ } -> failwith "Ptyp_package not handled"
(* | _ -> failwith "Error"*)
let str_of_type ~loc type_decl =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
let to_rpc =
match type_decl.ptype_kind, type_decl.ptype_manifest with
| Ptype_abstract, Some manifest -> expr_of_typ ~loc manifest
| Ptype_record labels, _ ->
let fields =
labels
|> ListLabels.rev_map ~f:(fun label ->
let { pld_name = { txt = name; _ }; pld_type; _ } = label in
let rpc_name =
match Attribute.get Attrs.key label with
| Some s -> s
| None -> name
in
if is_option pld_type
then
[%expr
let rpc =
[%e expr_of_typ ~loc pld_type]
[%e pexp_field (evar "x") { txt = Lident name; loc }]
in
match rpc with
| Rpc.Enum [ x ] -> Some ([%e estring rpc_name], x)
| Rpc.Enum [] -> None
| _ ->
failwith
(Printf.sprintf
"Programmer error when marshalling %s.%s"
[%e estring type_decl.ptype_name.txt]
[%e estring name])]
(* Should never happen *)
else
[%expr
Some
( [%e estring rpc_name]
, [%e expr_of_typ ~loc pld_type]
[%e pexp_field (evar "x") { txt = Lident name; loc }] )])
|> List.rev
in
[%expr
fun x ->
Rpc.Dict
(ListLabels.fold_right
~f:(fun x acc ->
match x with
| Some x -> x :: acc
| None -> acc)
[%e elist fields]
~init:[])]
| Ptype_abstract, None -> failwith "Unhandled"
| Ptype_open, _ -> failwith "Unhandled"
| Ptype_variant constrs, _ ->
let cases =
constrs
|> ListLabels.rev_map ~f:(fun constr ->
let { pcd_name = { txt = name; _ }; pcd_args; _ } = constr in
match pcd_args with
| Pcstr_tuple typs ->
let args =
ListLabels.mapi
~f:(fun i typ ->
[%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
let argsl = elist args in
let pattern = ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs in
let name' =
match Attribute.get Attrs.constr_name constr with
| Some s -> s
| None -> name
in
let rpc_of =
match args with
| [] -> [%expr Rpc.String [%e estring name']]
| _ -> [%expr Rpc.Enum (Rpc.String [%e estring name'] :: [%e argsl])]
in
case
~lhs:
(ppat_construct
{ txt = Lident name; loc }
(ppat_tuple_opt pattern))
~guard:None
~rhs:rpc_of
| Pcstr_record _ -> failwith "record variants are not supported")
|> List.rev
in
pexp_function cases
in
to_rpc
end
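(* For a type [t] this produces the pair of bindings [rpc_of_t] and [t_of_rpc]
   registered below under the "rpc" deriver. *)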
let rpc_strs_of_type ~loc type_decl =
let polymorphize = Common.poly_fun_of_type_decl type_decl in
let name = type_decl.ptype_name.txt in
[ value_binding
~loc
~pat:(pvar ~loc (rpc_of name))
~expr:
(pexp_fun
~loc
Nolabel
None
(pvar ~loc "__x__")
[%expr [%e polymorphize ~loc (Rpc_of.str_of_type ~loc type_decl)] __x__])
; value_binding
~loc
~pat:(pvar ~loc (of_rpc name))
~expr:
(pexp_fun
~loc
Nolabel
None
(pvar ~loc "__x__")
[%expr [%e polymorphize ~loc (Of_rpc.str_of_type ~loc type_decl)] __x__])
]
let my_str_type_decl ~loc ~path:_ (rec_flag, tds) =
pstr_value_list
~loc
rec_flag
(List.concat (List.rev @@ ListLabels.rev_map ~f:(rpc_strs_of_type ~loc) tds))
let str_type_decl = Deriving.Generator.make_noarg my_str_type_decl
let deriver = Deriving.add "rpc" ~str_type_decl
| null | https://raw.githubusercontent.com/mirage/ocaml-rpc/fdbf7f5c3e4f0c75837f0a96d5d4d6458805fd57/ppx/ppx_deriving_rpc.ml | ocaml | [is_option typ] returns true if the type 'typ' is an option type.
This is required because of the slightly odd way we serialise records containing optional fields.
When marshalling (foo * bar) lists we check to see whether it can be better represented by a
dictionary - we do this by checking (possibly at run time) whether the 'foo' can be unmarshalled from
a string - this following function, given the type 'foo', returns the run time check
let toplevel_typ = typ in
:> [%t toplevel_typ]
| _ -> failwith "Error"
Should never happen | open Ppxlib
open Ast_builder.Default
open Common
let argn = Printf.sprintf "a%d"
let rpc_of str = "rpc_of_" ^ str
let of_rpc str = str ^ "_of_rpc"
let map_lident f = function
| Lident x -> Lident (f x)
| Ldot (path, lid) -> Ldot (path, f lid)
| Lapply _ -> Location.raise_errorf "rpcty - map_lident: Lapply unhandled"
let is_option typ =
match typ with
| [%type: [%t? _] option] -> true
| _ -> false
let is_string loc typ =
match typ with
| [%type: string] -> [%expr true]
| [%type: int] -> [%expr false]
| [%type: bool] -> [%expr false]
| { ptyp_desc = Ptyp_constr (lid, []); _ } ->
[%expr
let open Rpc in
try
let _ = [%e type_constr_conv lid ~loc ~f:of_rpc [ [%expr Rpc.String ""] ]] in
true
with
| _ -> false]
| _ -> [%expr false]
let is_dict loc attr =
match Attribute.get Attrs.is_dict attr with
| Some () -> [%expr true]
| None -> [%expr false]
module Of_rpc = struct
let rec expr_of_typ ~loc typ =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
match typ with
| { ptyp_desc = Ptyp_constr ({ txt = Lident name; _ }, _); _ }
when list_assoc_mem (core_types loc) ~equal:String.equal name ->
type_constr_conv (Located.mk (Ldot (Lident "Rpc", name))) ~f:of_rpc []
| { ptyp_desc = Ptyp_constr ({ txt = Lident "char"; _ }, _); _ } ->
[%expr Rpc.char_of_rpc]
| { ptyp_desc =
Ptyp_constr
({ txt = Lident "list"; _ }, [ { ptyp_desc = Ptyp_tuple [ typ1; typ2 ]; _ } ])
; _
} ->
[%expr
if [%e is_dict loc typ] || [%e is_string loc typ1]
then
function
| Rpc.Dict l ->
Rpcmarshal.tailrec_map
(fun (k, v) ->
[%e expr_of_typ ~loc typ1] (Rpc.String k), [%e expr_of_typ ~loc typ2] v)
l
| y ->
failwith
(Printf.sprintf "Expecting Rpc.Dict, but found '%s'" (Rpc.to_string y))
else
function
| Rpc.Enum l ->
Rpcmarshal.tailrec_map
(function
| Rpc.Enum [ k; v ] ->
[%e expr_of_typ ~loc typ1] k, [%e expr_of_typ ~loc typ2] v
| y ->
failwith
(Printf.sprintf
"Expecting Rpc.Enum (within an Enum), but found '%s'"
(Rpc.to_string y)))
l
| y ->
failwith
(Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
    (* Tuple lists might be representable by a dictionary, if the first type in the tuple is string-like *)
| [%type: [%t? typ] list] ->
[%expr
function
| Rpc.Enum l -> Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] l
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| [%type: [%t? typ] array] ->
[%expr
function
| Rpc.Enum l ->
Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] l |> Array.of_list
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| { ptyp_desc = Ptyp_tuple typs; _ } ->
let pattern = ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs in
let exprs =
ListLabels.mapi
~f:(fun i typ -> [%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
[%expr
function
| Rpc.Enum [%p plist pattern] -> [%e pexp_tuple exprs]
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| [%type: [%t? typ] option] ->
let e = expr_of_typ ~loc typ in
[%expr
function
| Rpc.Enum [] -> None
| Rpc.Enum [ y ] -> Some ([%e e] y)
| y ->
failwith (Printf.sprintf "Expecting Rpc.Enum, but found '%s'" (Rpc.to_string y))]
| { ptyp_desc = Ptyp_constr ({ txt = lid; _ }, args); _ } ->
let args =
List.rev @@ ListLabels.rev_map ~f:(fun x -> Nolabel, expr_of_typ ~loc x) args
in
let f = pexp_ident (Located.mk (map_lident of_rpc lid)) in
pexp_apply f args
| { ptyp_desc = Ptyp_var name; _ } -> [%expr [%e evar ("poly_" ^ name)]]
| { ptyp_desc = Ptyp_variant (fields, _, _); _ } ->
let inherits, tags =
list_partition_tf
~f:(function
| { prf_desc = Rinherit _; _ } -> true
| _ -> false)
fields
in
let bad = [%expr failwith "Unknown tag/contents"] in
let default_expr =
match Attribute.get Attrs.ct_default typ with
| None -> bad
| Some expr ->
[%expr
match rpc' with
| String _ | Enum (String _ :: _) -> [%e expr]
| _ -> [%e bad]]
in
let tag_cases =
tags
|> ListLabels.rev_map ~f:(fun field ->
let { prf_desc; _ } = field in
match prf_desc with
| Rtag (label, true, []) ->
let label' = String.lowercase_ascii label.txt in
let name =
match Attribute.get Attrs.rt_name field with
| Some s -> s
| None -> label'
in
case
~lhs:[%pat? Rpc.String [%p pstring name]]
~guard:None
~rhs:(pexp_variant label.txt None)
| Rtag (label, false, [ { ptyp_desc = Ptyp_tuple typs; _ } ]) ->
let label' = String.lowercase_ascii label.txt in
let name =
match Attribute.get Attrs.rt_name field with
| Some s -> s
| None -> label'
in
let exprs =
ListLabels.mapi
~f:(fun i typ ->
[%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
case
~lhs:
[%pat?
Rpc.Enum
[ Rpc.String [%p pstring name]
; Rpc.Enum
[%p
plist (ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs)]
]]
~guard:None
~rhs:(pexp_variant label.txt (Some (pexp_tuple exprs)))
| Rtag (label, false, [ typ ]) ->
let label' = String.lowercase_ascii label.txt in
let name =
match Attribute.get Attrs.rt_name field with
| Some s -> s
| None -> label'
in
case
~lhs:[%pat? Rpc.Enum [ Rpc.String [%p pstring name]; y ]]
~guard:None
~rhs:
[%expr
[%e expr_of_typ ~loc typ] y
|> fun x -> [%e pexp_variant label.txt (Some [%expr x])]]
| _ -> failwith "Cannot derive variant case")
|> List.rev
and inherits_case =
let expr =
List.rev
@@ ListLabels.rev_map
~f:(function
| { prf_desc = Rinherit typ; _ } -> typ
| _ -> assert false)
inherits
|> ListLabels.fold_left
~f:(fun expr typ ->
[%expr
| _ -> [%e expr]])
~init:default_expr
in
case ~lhs:[%pat? _] ~guard:None ~rhs:expr
in
[%expr
fun (rpc : Rpc.t) ->
let rpc' =
match rpc with
| Rpc.Enum (Rpc.String x :: xs) ->
Rpc.Enum (Rpc.String (String.lowercase_ascii x) :: xs)
| Rpc.String x -> Rpc.String (String.lowercase_ascii x)
| y -> y
in
[%e pexp_match [%expr rpc'] (tag_cases @ [ inherits_case ])]]
| { ptyp_desc = Ptyp_any; _ } -> failwith "Ptyp_any not handled"
| { ptyp_desc = Ptyp_poly (_, _); _ } -> failwith "Ptyp_poly not handled"
| { ptyp_desc = Ptyp_extension _; _ } -> failwith "Ptyp_extension not handled"
| { ptyp_desc = Ptyp_arrow (_, _, _); _ } -> failwith "Ptyp_arrow not handled"
| { ptyp_desc = Ptyp_object (_, _); _ } -> failwith "Ptyp_object not handled"
| { ptyp_desc = Ptyp_alias (_, _); _ } -> failwith "Ptyp_alias not handled"
| { ptyp_desc = Ptyp_class (_, _); _ } -> failwith "Ptyp_class not handled"
| { ptyp_desc = Ptyp_package _; _ } -> failwith "Ptyp_package not handled"
let str_of_type ~loc type_decl =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
let of_rpc =
match type_decl.ptype_kind, type_decl.ptype_manifest with
| Ptype_abstract, Some manifest -> expr_of_typ ~loc manifest
| Ptype_record labels, _ ->
let record =
ListLabels.fold_left
~f:(fun expr (i, label) ->
let { pld_name = { txt = name; _ }; _ } = label in
let key =
String.lowercase_ascii
(match Attribute.get Attrs.key label with
| Some s -> s
| None -> name)
in
let pat = pvar (argn i) in
let expr' = evar (argn i) in
let str = estring key in
[%expr
let [%p pat] =
match [%e expr'] with
| Some x -> x
| None ->
failwith (Printf.sprintf "Undefined field: Expecting '%s'" [%e str])
in
[%e expr]])
~init:
[%expr
[%e
pexp_record
(labels
|> ListLabels.mapi ~f:(fun i { pld_name = { txt = name; _ }; _ } ->
{ txt = Lident name; loc }, evar (argn i)))
None]]
(labels |> ListLabels.mapi ~f:(fun i label -> i, label))
in
let wrap_opt pld_type x =
if is_option pld_type then [%expr Rpc.Enum [ [%e x] ]] else x
in
let cases =
(labels
|> ListLabels.mapi ~f:(fun i label ->
let { pld_name = { txt = name; _ }; pld_type; _ } = label in
let key =
String.lowercase_ascii
(match Attribute.get Attrs.key label with
| Some s -> s
| None -> name)
in
let thunks =
labels
|> ListLabels.mapi ~f:(fun j _ ->
if i = j
then
[%expr
Some
[%e
pexp_apply
(expr_of_typ ~loc pld_type)
[ Nolabel, wrap_opt pld_type (evar "x") ]]]
else evar (argn j))
in
case
~lhs:[%pat? ([%p pstring key], x) :: xs]
~guard:None
~rhs:[%expr loop xs [%e pexp_tuple thunks]]))
@ [ case ~lhs:[%pat? []] ~guard:None ~rhs:record
; case ~lhs:[%pat? _ :: xs] ~guard:None ~rhs:[%expr loop xs _state]
]
and thunks =
labels
|> ListLabels.rev_map ~f:(fun { pld_name = _; pld_type; _ } ->
if is_option pld_type then [%expr Some None] else [%expr None])
|> List.rev
in
[%expr
fun x ->
match x with
| Rpc.Dict dict ->
let d' =
List.rev
@@ ListLabels.rev_map ~f:(fun (k, v) -> String.lowercase_ascii k, v) dict
in
let rec loop
xs
([%p ppat_tuple (ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) labels)]
as _state)
=
[%e pexp_match [%expr xs] cases]
in
loop d' [%e pexp_tuple thunks]
| y ->
failwith
(Printf.sprintf "Expecting Rpc.Dict, but found '%s'" (Rpc.to_string y))]
| Ptype_abstract, None -> failwith "Unhandled"
| Ptype_open, _ -> failwith "Unhandled"
| Ptype_variant constrs, _ ->
let cases =
constrs
|> ListLabels.rev_map ~f:(fun constr ->
let { pcd_name = { txt = name; _ }; pcd_args; _ } = constr in
let name' =
match Attribute.get Attrs.constr_name constr with
| Some n -> n
| None -> name
in
match pcd_args with
| Pcstr_tuple typs ->
let subpattern =
ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs |> plist
in
let exprs =
ListLabels.mapi
~f:(fun i typ ->
[%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
let contents =
match exprs with
| [] -> None
| [ x ] -> Some x
| xs -> Some (pexp_tuple xs)
in
let rpc_of = pexp_construct { txt = Lident name; loc } contents in
let main =
[%pat? Rpc.String [%p pstring (String.lowercase_ascii name')]]
in
let pattern =
match typs with
| [] -> main
| _ -> [%pat? Rpc.Enum ([%p main] :: [%p subpattern])]
in
case ~lhs:pattern ~guard:None ~rhs:rpc_of
| Pcstr_record _ -> failwith "record variants are not supported")
|> List.rev
in
let default =
case
~lhs:[%pat? y]
~guard:None
~rhs:
[%expr
failwith
(Printf.sprintf
"Unhandled pattern when unmarshalling variant type: found '%s'"
(Rpc.to_string y))]
in
[%expr
fun rpc ->
let rpc' = Rpc.lowerfn rpc in
[%e pexp_function (cases @ [ default ])] rpc']
in
of_rpc
end
module Rpc_of = struct
let rec expr_of_typ ~loc typ =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
match typ with
| { ptyp_desc = Ptyp_constr ({ txt = Lident name; _ }, _); _ }
when list_assoc_mem (Common.core_types loc) ~equal:String.equal name ->
[%expr Rpc.([%e pexp_ident (Located.mk (lident (rpc_of name)))])]
| { ptyp_desc = Ptyp_constr ({ txt = Lident "char"; _ }, _); _ } ->
[%expr
Rpc.(
function
| c -> Rpc.Int (Int64.of_int (Char.code c)))]
    (* Tuple lists might be representable by a dictionary, if the first type in the tuple is string-like *)
| { ptyp_desc =
Ptyp_constr
({ txt = Lident "list"; _ }, [ { ptyp_desc = Ptyp_tuple [ typ1; typ2 ]; _ } ])
; _
} ->
[%expr
if [%e is_dict loc typ] || [%e is_string loc typ1]
then
fun l ->
Rpc.Dict
(List.rev
@@ ListLabels.rev_map
~f:(fun (k, v) ->
( Rpc.string_of_rpc ([%e expr_of_typ ~loc typ1] k)
, [%e expr_of_typ ~loc typ2] v ))
l)
else
fun l ->
Rpc.Enum
(List.rev
@@ ListLabels.rev_map
~f:(fun (a, b) ->
Rpc.Enum [ [%e expr_of_typ ~loc typ1] a; [%e expr_of_typ ~loc typ2] b ])
l)]
| [%type: [%t? typ] list] ->
[%expr fun l -> Rpc.Enum (Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] l)]
| [%type: [%t? typ] array] ->
[%expr
fun l ->
Rpc.Enum (Rpcmarshal.tailrec_map [%e expr_of_typ ~loc typ] (Array.to_list l))]
| { ptyp_desc = Ptyp_tuple typs; _ } ->
let args =
ListLabels.mapi
~f:(fun i typ -> pexp_apply (expr_of_typ ~loc typ) [ Nolabel, evar (argn i) ])
typs
in
[%expr
fun [%p ppat_tuple (ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs)] ->
Rpc.Enum [%e elist args]]
| [%type: [%t? typ] option] ->
let e = expr_of_typ ~loc typ in
[%expr
fun x ->
match x with
| None -> Rpc.Enum []
| Some y -> Rpc.Enum [ [%e e] y ]]
| { ptyp_desc = Ptyp_constr ({ txt = lid; _ }, args); _ } ->
let args =
List.rev @@ ListLabels.rev_map ~f:(fun e -> Nolabel, expr_of_typ ~loc e) args
in
let f = pexp_ident (Located.mk (map_lident rpc_of lid)) in
pexp_apply f args
| { ptyp_desc = Ptyp_variant (fields, _, _); _ } ->
let cases =
fields
|> ListLabels.rev_map ~f:(fun field ->
let { prf_desc; _ } = field in
match prf_desc with
| Rtag (label, true, []) ->
let l =
match Attribute.get Attrs.rt_name field with
| Some x -> x
| None -> label.txt
in
case
~lhs:(ppat_variant label.txt None)
~guard:None
~rhs:[%expr Rpc.String [%e estring l]]
| Rtag (label, false, [ { ptyp_desc = Ptyp_tuple typs; _ } ]) ->
let l =
elist
(ListLabels.mapi
~f:(fun i typ ->
pexp_apply (expr_of_typ ~loc typ) [ Nolabel, evar (argn i) ])
typs)
in
let label =
match Attribute.get Attrs.rt_name field with
| Some x -> x
| None -> label.txt
in
case
~lhs:
(ppat_variant
label
(ppat_tuple_opt
(ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs)))
~guard:None
~rhs:
[%expr Rpc.Enum [ Rpc.String [%e estring label]; Rpc.Enum [%e l] ]]
| Rtag (label, false, [ typ ]) ->
let label =
match Attribute.get Attrs.rt_name field with
| Some x -> x
| None -> label.txt
in
case
~lhs:(ppat_variant label (Some [%pat? x]))
~guard:None
~rhs:
[%expr
Rpc.Enum
[ Rpc.String [%e estring label]; [%e expr_of_typ ~loc typ] x ]]
| Rinherit ({ ptyp_desc = Ptyp_constr (tname, _); _ } as typ) ->
case
~lhs:[%pat? [%p ppat_type tname] as x]
~guard:None
~rhs:[%expr [%e expr_of_typ ~loc typ] x]
| _ -> failwith "cannot be derived for")
|> List.rev
in
pexp_function cases
| { ptyp_desc = Ptyp_any; _ } -> failwith "Ptyp_any not handled"
| { ptyp_desc = Ptyp_var name; _ } -> [%expr [%e evar ("poly_" ^ name)]]
| { ptyp_desc = Ptyp_poly (_, _); _ } -> failwith "Ptyp_poly not handled"
| { ptyp_desc = Ptyp_extension _; _ } -> failwith "Ptyp_extension not handled"
| { ptyp_desc = Ptyp_arrow (_, _, _); _ } -> failwith "Ptyp_arrow not handled"
| { ptyp_desc = Ptyp_object (_, _); _ } -> failwith "Ptyp_object not handled"
| { ptyp_desc = Ptyp_alias (_, _); _ } -> failwith "Ptyp_alias not handled"
| { ptyp_desc = Ptyp_class (_, _); _ } -> failwith "Ptyp_class not handled"
| { ptyp_desc = Ptyp_package _; _ } -> failwith "Ptyp_package not handled"
let str_of_type ~loc type_decl =
let module Ast_builder = (val Ast_builder.make loc) in
let open Ast_builder in
let to_rpc =
match type_decl.ptype_kind, type_decl.ptype_manifest with
| Ptype_abstract, Some manifest -> expr_of_typ ~loc manifest
| Ptype_record labels, _ ->
let fields =
labels
|> ListLabels.rev_map ~f:(fun label ->
let { pld_name = { txt = name; _ }; pld_type; _ } = label in
let rpc_name =
match Attribute.get Attrs.key label with
| Some s -> s
| None -> name
in
if is_option pld_type
then
[%expr
let rpc =
[%e expr_of_typ ~loc pld_type]
[%e pexp_field (evar "x") { txt = Lident name; loc }]
in
match rpc with
| Rpc.Enum [ x ] -> Some ([%e estring rpc_name], x)
| Rpc.Enum [] -> None
| _ ->
failwith
(Printf.sprintf
"Programmer error when marshalling %s.%s"
[%e estring type_decl.ptype_name.txt]
[%e estring name])]
else
[%expr
Some
( [%e estring rpc_name]
, [%e expr_of_typ ~loc pld_type]
[%e pexp_field (evar "x") { txt = Lident name; loc }] )])
|> List.rev
in
[%expr
fun x ->
Rpc.Dict
(ListLabels.fold_right
~f:(fun x acc ->
match x with
| Some x -> x :: acc
| None -> acc)
[%e elist fields]
~init:[])]
| Ptype_abstract, None -> failwith "Unhandled"
| Ptype_open, _ -> failwith "Unhandled"
| Ptype_variant constrs, _ ->
let cases =
constrs
|> ListLabels.rev_map ~f:(fun constr ->
let { pcd_name = { txt = name; _ }; pcd_args; _ } = constr in
match pcd_args with
| Pcstr_tuple typs ->
let args =
ListLabels.mapi
~f:(fun i typ ->
[%expr [%e expr_of_typ ~loc typ] [%e evar (argn i)]])
typs
in
let argsl = elist args in
let pattern = ListLabels.mapi ~f:(fun i _ -> pvar (argn i)) typs in
let name' =
match Attribute.get Attrs.constr_name constr with
| Some s -> s
| None -> name
in
let rpc_of =
match args with
| [] -> [%expr Rpc.String [%e estring name']]
| _ -> [%expr Rpc.Enum (Rpc.String [%e estring name'] :: [%e argsl])]
in
case
~lhs:
(ppat_construct
{ txt = Lident name; loc }
(ppat_tuple_opt pattern))
~guard:None
~rhs:rpc_of
| Pcstr_record _ -> failwith "record variants are not supported")
|> List.rev
in
pexp_function cases
in
to_rpc
end
let rpc_strs_of_type ~loc type_decl =
let polymorphize = Common.poly_fun_of_type_decl type_decl in
let name = type_decl.ptype_name.txt in
[ value_binding
~loc
~pat:(pvar ~loc (rpc_of name))
~expr:
(pexp_fun
~loc
Nolabel
None
(pvar ~loc "__x__")
[%expr [%e polymorphize ~loc (Rpc_of.str_of_type ~loc type_decl)] __x__])
; value_binding
~loc
~pat:(pvar ~loc (of_rpc name))
~expr:
(pexp_fun
~loc
Nolabel
None
(pvar ~loc "__x__")
[%expr [%e polymorphize ~loc (Of_rpc.str_of_type ~loc type_decl)] __x__])
]
let my_str_type_decl ~loc ~path:_ (rec_flag, tds) =
pstr_value_list
~loc
rec_flag
(List.concat (List.rev @@ ListLabels.rev_map ~f:(rpc_strs_of_type ~loc) tds))
let str_type_decl = Deriving.Generator.make_noarg my_str_type_decl
let deriver = Deriving.add "rpc" ~str_type_decl
|
3920bb930995ce53b13af96248827bb0e8c07ffa1921d4af98955248dcc93736 | cxphoe/SICP-solutions | 2.45.rkt | (define (split part-combine subpart-combine)
  (lambda (painter n)
    (if (= n 0)
        painter
        (let ((smaller ((split part-combine subpart-combine) painter (- n 1))))
          (part-combine painter (subpart-combine smaller smaller))))))
((lambda (painter n)
(if (< n 0)
painter
(let ((smaller ((split part-combine subpart-combine) painter (- n 1))))
(part-combine painter (subpart-combine painter painter))))))) |
|
993775d76794b98f03fe9918644e3c705f1a1fd87e0052f560cdda94a58a34d3 | oxidizing/sihl | web_flash.ml | let log_src = Logs.Src.create "sihl.middleware.flash"
module Logs = (val Logs.src_log log_src : Logs.LOG)
module Flash = struct
open Sexplib.Conv
type t =
{ alert : string option
; notice : string option
; custom : (string * string) list
}
[@@deriving yojson, sexp, eq]
let equals f1 f2 =
Option.equal String.equal f1.alert f2.alert
&& Option.equal String.equal f1.notice f2.notice
&& CCList.equal (CCPair.equal String.equal String.equal) f1.custom f2.custom
;;
let of_json (json : string) : t option =
try Some (of_yojson (Yojson.Safe.from_string json) |> Result.get_ok) with
| _ -> None
;;
let to_json (flash : t) : string = flash |> to_yojson |> Yojson.Safe.to_string
end
module Env = struct
let key : Flash.t Opium.Context.key =
Opium.Context.Key.create ("flash", Flash.sexp_of_t)
;;
end
let find' req = Opium.Context.find Env.key req.Opium.Request.env
let find_alert req = Option.bind (find' req) (fun flash -> flash.alert)
let find_notice req = Option.bind (find' req) (fun flash -> flash.notice)
let find key req =
Option.bind (find' req) (fun flash ->
flash.custom
|> List.find_opt (fun (k, _) -> String.equal key k)
|> Option.map snd)
;;
let set_alert alert resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = Some alert; notice = None; custom = [] }
| Some flash -> Flash.{ flash with alert = Some alert }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
let set_notice notice resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = None; notice = Some notice; custom = [] }
| Some flash -> Flash.{ flash with notice = Some notice }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
let set values resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = None; notice = None; custom = values }
| Some flash -> Flash.{ flash with custom = values }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
type decode_status =
| No_cookie_found
| Parse_error
| Found of Flash.t
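(* [decode_flash] distinguishes "no flash cookie" from "cookie present but
   unparseable" so the middleware below can decide whether to clear it. *)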
let decode_flash cookie_key req =
match Opium.Request.cookie cookie_key req with
| None -> No_cookie_found
| Some cookie_value ->
(match Flash.of_json cookie_value with
| None ->
Logs.err (fun m ->
m
"Failed to parse value found in flash cookie '%s': '%s'"
cookie_key
cookie_value);
Logs.info (fun m ->
m
"Maybe the cookie key '%s' collides with a cookie issued by someone \
else. Try to change the cookie key."
cookie_key);
Parse_error
| Some flash -> Found flash)
;;
let persist_flash ?old_flash ?(delete_if_not_set = false) cookie_key resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
match flash with
(* No flash was set in handler *)
| None ->
if delete_if_not_set
then
(* Remove flash cookie *)
Opium.Response.add_cookie_or_replace
~expires:(`Max_age Int64.zero)
~scope:(Uri.of_string "/")
(cookie_key, "")
resp
else resp
(* Flash was set in handler *)
| Some flash ->
(match old_flash with
| Some old_flash ->
if Flash.equals old_flash flash
then (* Same flash value, don't set cookie *)
resp
else (
(* Flash was changed and is different than old flash, set cookie *)
let cookie_value = Flash.to_json flash in
let cookie = cookie_key, cookie_value in
let resp =
Opium.Response.add_cookie_or_replace
~scope:(Uri.of_string "/")
cookie
resp
in
resp)
| None ->
(* Flash was changed and old flash is empty, set cookie *)
let cookie_value = Flash.to_json flash in
let cookie = cookie_key, cookie_value in
let resp =
Opium.Response.add_cookie_or_replace
~scope:(Uri.of_string "/")
cookie
resp
in
resp)
;;
let middleware ?(cookie_key = "_flash") () =
let filter handler req =
match decode_flash cookie_key req with
| No_cookie_found ->
let%lwt resp = handler req in
Lwt.return @@ persist_flash cookie_key resp
| Parse_error ->
let%lwt resp = handler req in
Lwt.return @@ persist_flash ~delete_if_not_set:true cookie_key resp
| Found flash ->
let env = req.Opium.Request.env in
let env = Opium.Context.add Env.key flash env in
let req = { req with env } in
let%lwt resp = handler req in
Lwt.return
@@ persist_flash ~delete_if_not_set:true ~old_flash:flash cookie_key resp
in
Rock.Middleware.create ~name:"flash" ~filter
;;
| null | https://raw.githubusercontent.com/oxidizing/sihl/1870d7c9fba19a883a9ccc5f07e9f9f76bf22e35/sihl/src/web_flash.ml | ocaml | No flash was set in handler
Remove flash cookie
Flash was set in handler
Same flash value, don't set cookie
Flash was changed and is different than old flash, set cookie
Flash was changed and old flash is empty, set cookie | let log_src = Logs.Src.create "sihl.middleware.flash"
module Logs = (val Logs.src_log log_src : Logs.LOG)
module Flash = struct
open Sexplib.Conv
type t =
{ alert : string option
; notice : string option
; custom : (string * string) list
}
[@@deriving yojson, sexp, eq]
let equals f1 f2 =
Option.equal String.equal f1.alert f2.alert
&& Option.equal String.equal f1.notice f2.notice
&& CCList.equal (CCPair.equal String.equal String.equal) f1.custom f2.custom
;;
let of_json (json : string) : t option =
try Some (of_yojson (Yojson.Safe.from_string json) |> Result.get_ok) with
| _ -> None
;;
let to_json (flash : t) : string = flash |> to_yojson |> Yojson.Safe.to_string
end
module Env = struct
let key : Flash.t Opium.Context.key =
Opium.Context.Key.create ("flash", Flash.sexp_of_t)
;;
end
let find' req = Opium.Context.find Env.key req.Opium.Request.env
let find_alert req = Option.bind (find' req) (fun flash -> flash.alert)
let find_notice req = Option.bind (find' req) (fun flash -> flash.notice)
let find key req =
Option.bind (find' req) (fun flash ->
flash.custom
|> List.find_opt (fun (k, _) -> String.equal key k)
|> Option.map snd)
;;
let set_alert alert resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = Some alert; notice = None; custom = [] }
| Some flash -> Flash.{ flash with alert = Some alert }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
let set_notice notice resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = None; notice = Some notice; custom = [] }
| Some flash -> Flash.{ flash with notice = Some notice }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
let set values resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = None; notice = None; custom = values }
| Some flash -> Flash.{ flash with custom = values }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
type decode_status =
| No_cookie_found
| Parse_error
| Found of Flash.t
let decode_flash cookie_key req =
match Opium.Request.cookie cookie_key req with
| None -> No_cookie_found
| Some cookie_value ->
(match Flash.of_json cookie_value with
| None ->
Logs.err (fun m ->
m
"Failed to parse value found in flash cookie '%s': '%s'"
cookie_key
cookie_value);
Logs.info (fun m ->
m
"Maybe the cookie key '%s' collides with a cookie issued by someone \
else. Try to change the cookie key."
cookie_key);
Parse_error
| Some flash -> Found flash)
;;
let persist_flash ?old_flash ?(delete_if_not_set = false) cookie_key resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
match flash with
| None ->
if delete_if_not_set
then
Opium.Response.add_cookie_or_replace
~expires:(`Max_age Int64.zero)
~scope:(Uri.of_string "/")
(cookie_key, "")
resp
else resp
| Some flash ->
(match old_flash with
| Some old_flash ->
if Flash.equals old_flash flash
resp
else (
let cookie_value = Flash.to_json flash in
let cookie = cookie_key, cookie_value in
let resp =
Opium.Response.add_cookie_or_replace
~scope:(Uri.of_string "/")
cookie
resp
in
resp)
| None ->
let cookie_value = Flash.to_json flash in
let cookie = cookie_key, cookie_value in
let resp =
Opium.Response.add_cookie_or_replace
~scope:(Uri.of_string "/")
cookie
resp
in
resp)
;;
let middleware ?(cookie_key = "_flash") () =
let filter handler req =
match decode_flash cookie_key req with
| No_cookie_found ->
let%lwt resp = handler req in
Lwt.return @@ persist_flash cookie_key resp
| Parse_error ->
let%lwt resp = handler req in
Lwt.return @@ persist_flash ~delete_if_not_set:true cookie_key resp
| Found flash ->
let env = req.Opium.Request.env in
let env = Opium.Context.add Env.key flash env in
let req = { req with env } in
let%lwt resp = handler req in
Lwt.return
@@ persist_flash ~delete_if_not_set:true ~old_flash:flash cookie_key resp
in
Rock.Middleware.create ~name:"flash" ~filter
;;
|
0b7b4ed2364b8457e941a9beb746f6ee083a79982bead9b5609954eeb8b08225 | ekasilicon/jade | sumtype-extra.rkt | #lang racket/base
(require (for-syntax racket/base
racket/match
syntax/parse)
racket/match
"sumtype.rkt")
(define-syntax sumtype-name
(syntax-parser
[(_ typename:id)
(match (syntax-local-value #'typename (λ () #f))
[(sumtype-info variants _)
(with-syntax ([(variant ...)
variants]
[(variant-name ...)
(map symbol->string (map syntax->datum variants))])
#'(sumtype-case-lambda typename
[(variant) variant-name] ...))]
[_
(raise-syntax-error 'sumtype-name "not a declared sumtype" #'typename)])]))
#;
(define-syntax index→enumtype
(syntax-parser
[(_ typename:id)
(match (syntax-local-value #'typename (λ () #f))
[(sumtype-info variants _)
(with-syntax ([(i ...) (for/list ([i (in-naturals)]
[_ (in-list variants)])
i)]
[(variant ...) variants])
#'(match-lambda [i (variant)] ...))]
[_
(raise-syntax-error 'index→enumtype "not a declared sumtype" #'typename)])]))
#;
(define-syntax enumtype-case
(syntax-parser
[(_ type:id expr [(name:id ...) body ...] ...)
(with-syntax ([(sumname ...) (generate-temporaries #'((name ...) ...))])
#'(let ()
(define-sumtype sumname name ...)
...
(sumtype-case type expr
[(sumname _) body ...] ...)))]
[(_ type:id expr [(name:id ...) body ...] ... #:otherwise x:id else-body ...)
(with-syntax ([(sumname ...) (generate-temporaries #'((name ...) ...))])
#'(let ()
(define-sumtype sumname name ...)
...
(sumtype-case type expr
[(sumname _) body ...] ...
#:otherwise x else-body ...)))]))
#;
(define-syntax (enumtype-case-lambda stx)
(syntax-parse stx
[(_ type . rst)
#`(λ (x) #,(syntax/loc stx (enumtype-case type x . rst)))]))
(provide index→enumtype
sumtype-name
enumtype-case
enumtype-case-lambda)
| null | https://raw.githubusercontent.com/ekasilicon/jade/115eb389a20968b3ac90a2deed17016a9d44d174/src/static/sumtype-extra.rkt | racket | #lang racket/base
(require (for-syntax racket/base
racket/match
syntax/parse)
racket/match
"sumtype.rkt")
(define-syntax sumtype-name
(syntax-parser
[(_ typename:id)
(match (syntax-local-value #'typename (λ () #f))
[(sumtype-info variants _)
(with-syntax ([(variant ...)
variants]
[(variant-name ...)
(map symbol->string (map syntax->datum variants))])
#'(sumtype-case-lambda typename
[(variant) variant-name] ...))]
[_
(raise-syntax-error 'sumtype-name "not a declared sumtype" #'typename)])]))
(define-syntax index→enumtype
(syntax-parser
[(_ typename:id)
(match (syntax-local-value #'typename (λ () #f))
[(sumtype-info variants _)
(with-syntax ([(i ...) (for/list ([i (in-naturals)]
[_ (in-list variants)])
i)]
[(variant ...) variants])
#'(match-lambda [i (variant)] ...))]
[_
(raise-syntax-error 'index→enumtype "not a declared sumtype" #'typename)])]))
(define-syntax enumtype-case
(syntax-parser
[(_ type:id expr [(name:id ...) body ...] ...)
(with-syntax ([(sumname ...) (generate-temporaries #'((name ...) ...))])
#'(let ()
(define-sumtype sumname name ...)
...
(sumtype-case type expr
[(sumname _) body ...] ...)))]
[(_ type:id expr [(name:id ...) body ...] ... #:otherwise x:id else-body ...)
(with-syntax ([(sumname ...) (generate-temporaries #'((name ...) ...))])
#'(let ()
(define-sumtype sumname name ...)
...
(sumtype-case type expr
[(sumname _) body ...] ...
#:otherwise x else-body ...)))]))
(define-syntax (enumtype-case-lambda stx)
(syntax-parse stx
[(_ type . rst)
#`(λ (x) #,(syntax/loc stx (enumtype-case type x . rst)))]))
(provide index→enumtype
sumtype-name
enumtype-case
enumtype-case-lambda)
|
|
1ea8e38a6c89fc142f73b5d914938a49ef83cfc5f4690ed210fccc94636382a6 | mbj/stratosphere | DeviceProperty.hs | module Stratosphere.SageMaker.Device.DeviceProperty (
DeviceProperty(..), mkDeviceProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data DeviceProperty
= DeviceProperty {description :: (Prelude.Maybe (Value Prelude.Text)),
deviceName :: (Value Prelude.Text),
iotThingName :: (Prelude.Maybe (Value Prelude.Text))}
mkDeviceProperty :: Value Prelude.Text -> DeviceProperty
mkDeviceProperty deviceName
= DeviceProperty
{deviceName = deviceName, description = Prelude.Nothing,
iotThingName = Prelude.Nothing}
instance ToResourceProperties DeviceProperty where
toResourceProperties DeviceProperty {..}
= ResourceProperties
{awsType = "AWS::SageMaker::Device.Device",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["DeviceName" JSON..= deviceName]
(Prelude.catMaybes
[(JSON..=) "Description" Prelude.<$> description,
(JSON..=) "IotThingName" Prelude.<$> iotThingName]))}
instance JSON.ToJSON DeviceProperty where
toJSON DeviceProperty {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["DeviceName" JSON..= deviceName]
(Prelude.catMaybes
[(JSON..=) "Description" Prelude.<$> description,
(JSON..=) "IotThingName" Prelude.<$> iotThingName])))
instance Property "Description" DeviceProperty where
type PropertyType "Description" DeviceProperty = Value Prelude.Text
set newValue DeviceProperty {..}
= DeviceProperty {description = Prelude.pure newValue, ..}
instance Property "DeviceName" DeviceProperty where
type PropertyType "DeviceName" DeviceProperty = Value Prelude.Text
set newValue DeviceProperty {..}
= DeviceProperty {deviceName = newValue, ..}
instance Property "IotThingName" DeviceProperty where
type PropertyType "IotThingName" DeviceProperty = Value Prelude.Text
set newValue DeviceProperty {..}
= DeviceProperty {iotThingName = Prelude.pure newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/sagemaker/gen/Stratosphere/SageMaker/Device/DeviceProperty.hs | haskell | module Stratosphere.SageMaker.Device.DeviceProperty (
DeviceProperty(..), mkDeviceProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data DeviceProperty
= DeviceProperty {description :: (Prelude.Maybe (Value Prelude.Text)),
deviceName :: (Value Prelude.Text),
iotThingName :: (Prelude.Maybe (Value Prelude.Text))}
mkDeviceProperty :: Value Prelude.Text -> DeviceProperty
mkDeviceProperty deviceName
= DeviceProperty
{deviceName = deviceName, description = Prelude.Nothing,
iotThingName = Prelude.Nothing}
instance ToResourceProperties DeviceProperty where
toResourceProperties DeviceProperty {..}
= ResourceProperties
{awsType = "AWS::SageMaker::Device.Device",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["DeviceName" JSON..= deviceName]
(Prelude.catMaybes
[(JSON..=) "Description" Prelude.<$> description,
(JSON..=) "IotThingName" Prelude.<$> iotThingName]))}
instance JSON.ToJSON DeviceProperty where
toJSON DeviceProperty {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["DeviceName" JSON..= deviceName]
(Prelude.catMaybes
[(JSON..=) "Description" Prelude.<$> description,
(JSON..=) "IotThingName" Prelude.<$> iotThingName])))
instance Property "Description" DeviceProperty where
type PropertyType "Description" DeviceProperty = Value Prelude.Text
set newValue DeviceProperty {..}
= DeviceProperty {description = Prelude.pure newValue, ..}
instance Property "DeviceName" DeviceProperty where
type PropertyType "DeviceName" DeviceProperty = Value Prelude.Text
set newValue DeviceProperty {..}
= DeviceProperty {deviceName = newValue, ..}
instance Property "IotThingName" DeviceProperty where
type PropertyType "IotThingName" DeviceProperty = Value Prelude.Text
set newValue DeviceProperty {..}
= DeviceProperty {iotThingName = Prelude.pure newValue, ..} |
|
b2a67e2a77a782e3f45127fc4b6040b27438c01140a1a065bcbd9c2166d70bd0 | cushon/project-euler | 65.rkt | #lang racket
(require "common.rkt")
; Find the sum of digits in the numerator of the 100th convergent of the
; continued fraction for e.
(define (solve)
(define (get-list n)
(cons 2
(map (lambda (x)
(if (= 2 (remainder x 3)) (* 2 (/ (add1 x) 3)) 1))
(build-list n add1))))
(define (convergent lst)
(define (helper lst)
(if (empty? lst) 0
(/ 1 (+ (car lst) (helper (cdr lst))))))
(+ (car lst) (helper (cdr lst))))
(foldl + 0 (int->list (numerator (convergent (get-list 99))))))
(provide solve) | null | https://raw.githubusercontent.com/cushon/project-euler/d7fcbfff0cd59b2c3691293ff35bb2043b409f68/65.rkt | racket | continued fraction for e. | #lang racket
(require "common.rkt")
Find the sum of digits in the numerator of the 100th convergent of the
(define (solve)
(define (get-list n)
(cons 2
(map (lambda (x)
(if (= 2 (remainder x 3)) (* 2 (/ (add1 x) 3)) 1))
(build-list n add1))))
(define (convergent lst)
(define (helper lst)
(if (empty? lst) 0
(/ 1 (+ (car lst) (helper (cdr lst))))))
(+ (car lst) (helper (cdr lst))))
(foldl + 0 (int->list (numerator (convergent (get-list 99))))))
(provide solve) |
767f3911198263c742e6540adea0c8386973cbf23bbd5745937f448c353ddd16 | zk/clojuredocs | core_test.clj | (ns clojuredocs.core-test
(:require [clojure.test :refer :all]
[clojuredocs.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/zk/clojuredocs/28f5ee500f4349039ee81c70d7ac40acbb19e5d8/test/clojuredocs/core_test.clj | clojure | (ns clojuredocs.core-test
(:require [clojure.test :refer :all]
[clojuredocs.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
|
|
69fb1c22f39f3fa692b7a8eec65273893efbee8905e102eb7787f128b8d4126c | techascent/tech.ml.dataset | reductions_test.clj | (ns tech.v3.dataset.reductions-test
(:require [tech.v3.dataset.reductions :as ds-reduce]
[tech.v3.dataset :as ds]
[tech.v3.dataset.column :as ds-col]
[tech.v3.datatype.functional :as dfn]
[tech.v3.datatype :as dtype]
[tech.v3.datatype.datetime :as dtype-dt]
[tech.v3.datatype.jvm-map :as jvm-map]
[tech.v3.datatype.argops :as argops]
[tech.v3.datatype.statistics :as stats]
[tech.v3.dataset.reductions.apache-data-sketch :as ds-sketch]
[tech.v3.parallel.for :as pfor]
[ham-fisted.api :as hamf]
[ham-fisted.lazy-noncaching :as lznc]
[clojure.test :refer [deftest is]]
[clojure.core.protocols :as cl-proto])
(:import [tech.v3.datatype UnaryPredicate FastStruct$FMapEntry]
[java.time LocalDate YearMonth]
[ham_fisted Consumers$IncConsumer MutHashTable]
[java.util ArrayList Map$Entry Arrays]
[clojure.lang MapEntry]))
(deftest simple-reduction
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
agg-ds (-> (ds-reduce/group-by-column-agg
:symbol
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
[stocks stocks stocks])
(ds/sort-by-column :symbol))
single-price (-> (->> (ds/group-by-column stocks :symbol)
(map (fn [[k ds]]
{:symbol k
:n-elems (ds/row-count ds)
:price-sum (dfn/sum (ds :price))
:price-avg (dfn/mean (ds :price))}))
(ds/->>dataset))
(ds/sort-by-column :symbol))]
(is (= 5 (ds/row-count agg-ds)))
(is (dfn/equals (agg-ds :n-elems)
(dfn/* 3 (single-price :n-elems))))
(is (dfn/equals (agg-ds :price-sum)
(dfn/* 3 (single-price :price-sum))))
(is (dfn/equals (agg-ds :price-avg)
(single-price :price-avg)))))
(deftest simple-reduction-filtered
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
agg-ds (-> (ds-reduce/group-by-column-agg
:symbol
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
{:index-filter (fn [dataset]
(let [rdr (dtype/->reader (dataset :price))]
(hamf/long-predicate
idx (> (.readDouble rdr idx) 100.0))))}
[stocks stocks stocks])
(ds/sort-by-column :symbol))
fstocks (ds/filter-column stocks :price #(> % 100.0))
single-price (->
(->> (ds/group-by-column fstocks :symbol)
(map (fn [[k ds]]
{:symbol k
:n-elems (ds/row-count ds)
:price-sum (dfn/sum (ds :price))
:price-avg (dfn/mean (ds :price))}))
(ds/->>dataset))
(ds/sort-by-column :symbol))]
(is (= 4 (ds/row-count agg-ds)))
(is (dfn/equals (agg-ds :n-elems)
(dfn/* 3 (single-price :n-elems))))
(is (dfn/equals (agg-ds :price-sum)
(dfn/* 3 (single-price :price-sum))))
(is (dfn/equals (agg-ds :price-avg)
(single-price :price-avg)))))
(deftest issue-201-incorrect-result-column-count
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
agg-ds (ds-reduce/group-by-column-agg
:symbol
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-avg2 (ds-reduce/mean :price)
:price-avg3 (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:price-med (ds-reduce/prob-median :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
[stocks stocks stocks])
simple-agg-ds (ds-reduce/aggregate
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-avg2 (ds-reduce/mean :price)
:price-avg3 (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:price-med (ds-reduce/prob-median :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
[stocks stocks stocks])]
(is (= 8 (ds/column-count agg-ds)))
(is (= 8 (ds/column-count simple-agg-ds)))))
(deftest data-sketches-test
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
result (ds-reduce/aggregate
{:n-elems (ds-reduce/row-count)
:n-dates (ds-reduce/count-distinct :date :int32)
:n-dates-hll (ds-sketch/prob-set-cardinality :date {:datatype :string})
:n-symbols-hll (ds-sketch/prob-set-cardinality
:symbol {:datatype :string})
:quantiles (ds-sketch/prob-quantiles :price [0.25 0.5 0.75])
:cdfs (ds-sketch/prob-cdfs :price [50 100 150])
:pmfs (ds-sketch/prob-pmfs :price [50 100 150])}
[stocks stocks stocks])
{:keys [n-dates-hll n-symbols-hll]} (first (ds/mapseq-reader result))]
(is (dfn/equals [123 5]
[n-dates-hll
n-symbols-hll]
0.1))))
(deftest reservoir-sampling-test
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
ds-seq [stocks stocks stocks]
small-ds-seq [(-> (ds/shuffle stocks)
(ds/select-rows (range 50)))]
agg-map {:n-elems (ds-reduce/row-count)
:price-std (ds-reduce/reservoir-desc-stat
:price 100 :standard-deviation)
:sub-ds (ds-reduce/reservoir-dataset 100)}
straight (ds-reduce/aggregate agg-map ds-seq)
straight-small (ds-reduce/aggregate agg-map small-ds-seq)
grouped (ds-reduce/group-by-column-agg :symbol agg-map ds-seq)
grouped-small (ds-reduce/group-by-column-agg :symbol agg-map ds-seq)]
;;Mainly ensuring that nothing throws.
(is (every? #(or (= 3 (ds/column-count %))
(= 4 (ds/column-count %)))
[straight straight-small
grouped grouped-small])))
(let [missing-ds (ds/new-dataset [(ds-col/new-column
:missing (range 1000)
nil
(->> (range 1000)
(map (fn [^long idx]
(when (== 0 (rem idx 3))
idx)))
(remove nil?)))])
agg-ds
(ds-reduce/aggregate {:sub-ds (ds-reduce/reservoir-dataset 50)}
[missing-ds])
sub-ds (first (:sub-ds agg-ds))]
;;Make sure we carry the missing set across
(is (not (.isEmpty ^org.roaringbitmap.RoaringBitmap (ds/missing sub-ds))))
(is (every? #(or (nil? %)
(not= 0 (rem (long %) 3)))
(:missing sub-ds)))))
(defn- create-otfrom-init-dataset
[& [{:keys [n-simulations n-placements n-expansion n-rows]
:or {n-simulations 100
n-placements 50
n-expansion 20
n-rows 1000000}}]]
(->> (for [idx (range n-rows)]
(let [sd (.minusDays (dtype-dt/local-date) (+ 200 (rand-int 365)))
ed (.plusDays sd (rand-int n-expansion))]
{:simulation (rand-int n-simulations)
:placement (rand-int n-placements)
:start sd
:end ed}))
(ds/->>dataset)))
;;Slightly less efficient than implementing an inline IReduceInit impl is to create
;;a record with a custom IReduceInit implementation.
(defrecord YMC [year-month ^long count]
clojure.lang.IReduceInit
(reduce [this rfn init]
(let [init (hamf/reduced-> rfn init
(clojure.lang.MapEntry/create :year-month year-month)
(clojure.lang.MapEntry/create :count count))]
(if (and __extmap (not (reduced? init)))
(reduce rfn init __extmap)
init))))
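;; Illustrative sketch (not part of the original test; the YearMonth and count
;; below are arbitrary): because YMC implements IReduceInit, reducing an
;; instance yields its fields as map entries.
(comment
  (reduce (fn [m e] (assoc m (key e) (val e)))
          {}
          (->YMC (YearMonth/of 2024 1) 3))
  ;; => {:year-month (YearMonth/of 2024 1), :count 3}
  )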
(def inc-cons-fn (hamf/function k (Consumers$IncConsumer.)))
(defn- tally-days-as-year-months
[{:keys [^LocalDate start ^LocalDate end]}]
;;Using a hash provider with equals semantics allows the hamf hashtable to
;;compete on equal terms with the java hashtable. In that we find that compute,
;;computeIfAbsent and reduce perform as fast as anything on the jvm when we are using
;;Object/equals and Object/hashCode for the map functionality.
(let [tally (MutHashTable. hamf/equal-hash-provider)]
(dotimes [idx (.until start end java.time.temporal.ChronoUnit/DAYS)]
(let [ym (YearMonth/from (.plusDays start idx))]
;;Compute if absent is ever so slightly faster than compute as it involves
;;less mutation of the original hashtable. It does, however, require the
;;value in the node itself to be mutable.
(.inc ^Consumers$IncConsumer (.computeIfAbsent tally ym inc-cons-fn))))
(lznc/map-reducible
#(let [^Map$Entry e %]
;;Dataset construction using the mapseq-rf only requires the 'map' type to correctly
;;implement IReduceInit and for that function to produce implementations of Map$Entry.
(hamf/custom-ireduce
rfn acc
;;Elided reduced? checks for a tiny bit of extra oomph.
(-> acc
(rfn (MapEntry/create :year-month (.getKey e)))
(rfn (MapEntry/create :count (deref (.getValue e)))))))
(.entrySet tally))))
(defn- otfrom-pathway
[ds]
(->> (ds/row-mapcat ds tally-days-as-year-months
;;generate a sequence of datasets
{:result-type :as-seq
:parser-fn {:count :int32
:year-month :object}})
;;sequence of datasets
(ds-reduce/group-by-column-agg
[:simulation :placement :year-month]
{:count (ds-reduce/sum :count)})
;;single dataset - do joins and such here
(#(let [ds %
count (ds :count)]
(assoc ds :count2 (dfn/sq count))))
(ds-reduce/group-by-column-agg
[:placement :year-month]
{:min-count (ds-reduce/prob-quantile :count 0.0)
:low-95-count (ds-reduce/prob-quantile :count 0.05)
:q1-count (ds-reduce/prob-quantile :count 0.25)
:median-count (ds-reduce/prob-quantile :count 0.50)
:q3-count (ds-reduce/prob-quantile :count 0.75)
:high-95-count (ds-reduce/prob-quantile :count 0.95)
:max-count (ds-reduce/prob-quantile :count 1.0)
:count (ds-reduce/sum :count)})))
(defn- tally-days-columnwise
[ds]
(let [starts (dtype/->buffer (ds :start))
ends (dtype/->buffer (ds :end))
n-rows (.lsize starts)
indexes (dtype/prealloc-list :int64 n-rows)
year-months (dtype/prealloc-list :object n-rows) ;; ArrayList works fine here also.
counts (dtype/prealloc-list :int32 n-rows)
incrementor (jvm-map/bi-function k v
(if v
(unchecked-inc (long v))
1))
tally (jvm-map/hash-map)]
;;Loop through dataset and append results columnwise.
(dotimes [row-idx n-rows]
;;minimize hashtable resize operations
(.clear tally)
(let [^LocalDate start (starts row-idx)
^LocalDate end (ends row-idx)
nd (.until start end java.time.temporal.ChronoUnit/DAYS)]
(dotimes [day-idx nd]
(let [ym (YearMonth/from (.plusDays start day-idx))]
(jvm-map/compute! tally ym incrementor)))
(hamf/consume! (hamf/consumer
kv (do
(.addLong indexes row-idx)
(.add year-months (key kv))
(.add counts (val kv))))
tally)))
(-> (ds/select-rows ds indexes)
(assoc :year-month year-months
:count counts))))
(defn- otfrom-columnwise-pathway
[ds]
(->> (ds/pmap-ds ds tally-days-columnwise
;;generate a sequence of datasets
{:result-type :as-seq})
;;sequence of datasets
(ds-reduce/group-by-column-agg
[:simulation :placement :year-month]
{:count (ds-reduce/sum :count)})
;;single dataset - do joins and such here
(#(let [ds %
count (ds :count)]
;;return a sequence of datasets for next step
[(assoc ds :count2 (dfn/sq count))]))
(ds-reduce/group-by-column-agg
[:placement :year-month]
{:min-count (ds-reduce/prob-quantile :count 0.0)
:low-95-count (ds-reduce/prob-quantile :count 0.05)
:q1-count (ds-reduce/prob-quantile :count 0.25)
:median-count (ds-reduce/prob-quantile :count 0.50)
:q3-count (ds-reduce/prob-quantile :count 0.75)
:high-95-count (ds-reduce/prob-quantile :count 0.95)
:max-count (ds-reduce/prob-quantile :count 1.0)
:count (ds-reduce/sum :count)})))
(deftest otfrom-pathway-test
(let [ds (create-otfrom-init-dataset)
start (ds :start)
end (ds :end)
total-count (->> (dtype/emap #(dtype-dt/between %1 %2 :days) :int64 start end)
(dfn/sum))
;;warmup
_ (do (otfrom-pathway ds)
(otfrom-columnwise-pathway ds))
_ (println "otfrom pathway timing")
ofds (time (otfrom-pathway ds))
_ (println "otfrom columnwise pathway timing")
of-cwise-ds (time (otfrom-columnwise-pathway ds))
ofsum (dfn/sum (ofds :count))
of-cwise-sum (dfn/sum (of-cwise-ds :count))]
(is (= ofsum total-count))
(is (= of-cwise-sum total-count))))
(deftest issue-314
(let [dstds (->
(ds-reduce/group-by-column-agg
:foo
{:foos (ds-reduce/distinct :value)}
(ds/->dataset (into [] (map (fn [i] {:foo 'foo :value (str i)})) (range 3))))
(ds/column-map :foos-2 (fn [values] values) [:foos]))]
(is (= ["0" "1" "2"]
(vec (first (dstds :foos-2)))))))
(deftest issue-312
(let [ds (ds-reduce/aggregate
{:n-elems (ds-reduce/count-distinct :genre)}
[(ds/->dataset "test/data/example-genres.nippy")])]
(is (pos? (first (ds :n-elems))))))
| null | https://raw.githubusercontent.com/techascent/tech.ml.dataset/293c87060eaeab8d6271dd3896cabf8cd91c3b07/test/tech/v3/dataset/reductions_test.clj | clojure | Mainly ensuring that nothing throws.
Make sure we carry the missing set across
Slightly less efficient than implementing an inline IReduceInit impl is to create
a record with a custom IReduceInit implementation.
Using a hash provider with equals semantics allows the hamf hashtable to
compete on equal terms with the java hashtable. In that we find that compute,
Object/equals and Object/hashCode for the map functionality.
Compute if absent is ever so slightly faster than compute as it involves
less mutation of the original hashtable. It does, however, require the
value in the node itself to be mutable.
implement IReduceInit and for that function to produce implementations of Map$Entry.
generate a sequence of datasets
sequence of datasets
single dataset - do joins and such here
Loop through dataset and append results columnwise.
minimize hashtable resize operations
generate a sequence of datasets
sequence of datasets
single dataset - do joins and such here
return a sequence of datasets for next step
warmup | (ns tech.v3.dataset.reductions-test
(:require [tech.v3.dataset.reductions :as ds-reduce]
[tech.v3.dataset :as ds]
[tech.v3.dataset.column :as ds-col]
[tech.v3.datatype.functional :as dfn]
[tech.v3.datatype :as dtype]
[tech.v3.datatype.datetime :as dtype-dt]
[tech.v3.datatype.jvm-map :as jvm-map]
[tech.v3.datatype.argops :as argops]
[tech.v3.datatype.statistics :as stats]
[tech.v3.dataset.reductions.apache-data-sketch :as ds-sketch]
[tech.v3.parallel.for :as pfor]
[ham-fisted.api :as hamf]
[ham-fisted.lazy-noncaching :as lznc]
[clojure.test :refer [deftest is]]
[clojure.core.protocols :as cl-proto])
(:import [tech.v3.datatype UnaryPredicate FastStruct$FMapEntry]
[java.time LocalDate YearMonth]
[ham_fisted Consumers$IncConsumer MutHashTable]
[java.util ArrayList Map$Entry Arrays]
[clojure.lang MapEntry]))
(deftest simple-reduction
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
agg-ds (-> (ds-reduce/group-by-column-agg
:symbol
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
[stocks stocks stocks])
(ds/sort-by-column :symbol))
single-price (-> (->> (ds/group-by-column stocks :symbol)
(map (fn [[k ds]]
{:symbol k
:n-elems (ds/row-count ds)
:price-sum (dfn/sum (ds :price))
:price-avg (dfn/mean (ds :price))}))
(ds/->>dataset))
(ds/sort-by-column :symbol))]
(is (= 5 (ds/row-count agg-ds)))
(is (dfn/equals (agg-ds :n-elems)
(dfn/* 3 (single-price :n-elems))))
(is (dfn/equals (agg-ds :price-sum)
(dfn/* 3 (single-price :price-sum))))
(is (dfn/equals (agg-ds :price-avg)
(single-price :price-avg)))))
(deftest simple-reduction-filtered
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
agg-ds (-> (ds-reduce/group-by-column-agg
:symbol
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
{:index-filter (fn [dataset]
(let [rdr (dtype/->reader (dataset :price))]
(hamf/long-predicate
idx (> (.readDouble rdr idx) 100.0))))}
[stocks stocks stocks])
(ds/sort-by-column :symbol))
fstocks (ds/filter-column stocks :price #(> % 100.0))
single-price (->
(->> (ds/group-by-column fstocks :symbol)
(map (fn [[k ds]]
{:symbol k
:n-elems (ds/row-count ds)
:price-sum (dfn/sum (ds :price))
:price-avg (dfn/mean (ds :price))}))
(ds/->>dataset))
(ds/sort-by-column :symbol))]
(is (= 4 (ds/row-count agg-ds)))
(is (dfn/equals (agg-ds :n-elems)
(dfn/* 3 (single-price :n-elems))))
(is (dfn/equals (agg-ds :price-sum)
(dfn/* 3 (single-price :price-sum))))
(is (dfn/equals (agg-ds :price-avg)
(single-price :price-avg)))))
(deftest issue-201-incorrect-result-column-count
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
agg-ds (ds-reduce/group-by-column-agg
:symbol
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-avg2 (ds-reduce/mean :price)
:price-avg3 (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:price-med (ds-reduce/prob-median :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
[stocks stocks stocks])
simple-agg-ds (ds-reduce/aggregate
{:n-elems (ds-reduce/row-count)
:price-avg (ds-reduce/mean :price)
:price-avg2 (ds-reduce/mean :price)
:price-avg3 (ds-reduce/mean :price)
:price-sum (ds-reduce/sum :price)
:price-med (ds-reduce/prob-median :price)
:symbol (ds-reduce/first-value :symbol)
:n-dates (ds-reduce/count-distinct :date :int32)}
[stocks stocks stocks])]
(is (= 8 (ds/column-count agg-ds)))
(is (= 8 (ds/column-count simple-agg-ds)))))
(deftest data-sketches-test
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
result (ds-reduce/aggregate
{:n-elems (ds-reduce/row-count)
:n-dates (ds-reduce/count-distinct :date :int32)
:n-dates-hll (ds-sketch/prob-set-cardinality :date {:datatype :string})
:n-symbols-hll (ds-sketch/prob-set-cardinality
:symbol {:datatype :string})
:quantiles (ds-sketch/prob-quantiles :price [0.25 0.5 0.75])
:cdfs (ds-sketch/prob-cdfs :price [50 100 150])
:pmfs (ds-sketch/prob-pmfs :price [50 100 150])}
[stocks stocks stocks])
{:keys [n-dates-hll n-symbols-hll]} (first (ds/mapseq-reader result))]
(is (dfn/equals [123 5]
[n-dates-hll
n-symbols-hll]
0.1))))
(deftest reservoir-sampling-test
(let [stocks (ds/->dataset "test/data/stocks.csv" {:key-fn keyword})
ds-seq [stocks stocks stocks]
small-ds-seq [(-> (ds/shuffle stocks)
(ds/select-rows (range 50)))]
agg-map {:n-elems (ds-reduce/row-count)
:price-std (ds-reduce/reservoir-desc-stat
:price 100 :standard-deviation)
:sub-ds (ds-reduce/reservoir-dataset 100)}
straight (ds-reduce/aggregate agg-map ds-seq)
straight-small (ds-reduce/aggregate agg-map small-ds-seq)
grouped (ds-reduce/group-by-column-agg :symbol agg-map ds-seq)
grouped-small (ds-reduce/group-by-column-agg :symbol agg-map ds-seq)]
(is (every? #(or (= 3 (ds/column-count %))
(= 4 (ds/column-count %)))
[straight straight-small
grouped grouped-small])))
(let [missing-ds (ds/new-dataset [(ds-col/new-column
:missing (range 1000)
nil
(->> (range 1000)
(map (fn [^long idx]
(when (== 0 (rem idx 3))
idx)))
(remove nil?)))])
agg-ds
(ds-reduce/aggregate {:sub-ds (ds-reduce/reservoir-dataset 50)}
[missing-ds])
sub-ds (first (:sub-ds agg-ds))]
(is (not (.isEmpty ^org.roaringbitmap.RoaringBitmap (ds/missing sub-ds))))
(is (every? #(or (nil? %)
(not= 0 (rem (long %) 3)))
(:missing sub-ds)))))
(defn- create-otfrom-init-dataset
[& [{:keys [n-simulations n-placements n-expansion n-rows]
:or {n-simulations 100
n-placements 50
n-expansion 20
n-rows 1000000}}]]
(->> (for [idx (range n-rows)]
(let [sd (.minusDays (dtype-dt/local-date) (+ 200 (rand-int 365)))
ed (.plusDays sd (rand-int n-expansion))]
{:simulation (rand-int n-simulations)
:placement (rand-int n-placements)
:start sd
:end ed}))
(ds/->>dataset)))
(defrecord YMC [year-month ^long count]
clojure.lang.IReduceInit
(reduce [this rfn init]
(let [init (hamf/reduced-> rfn init
(clojure.lang.MapEntry/create :year-month year-month)
(clojure.lang.MapEntry/create :count count))]
(if (and __extmap (not (reduced? init)))
(reduce rfn init __extmap)
init))))
(def inc-cons-fn (hamf/function k (Consumers$IncConsumer.)))
(defn- tally-days-as-year-months
[{:keys [^LocalDate start ^LocalDate end]}]
;;computeIfAbsent and reduce perform as fast as anything on the jvm when we are using
(let [tally (MutHashTable. hamf/equal-hash-provider)]
(dotimes [idx (.until start end java.time.temporal.ChronoUnit/DAYS)]
(let [ym (YearMonth/from (.plusDays start idx))]
(.inc ^Consumers$IncConsumer (.computeIfAbsent tally ym inc-cons-fn))))
(lznc/map-reducible
#(let [^Map$Entry e %]
;;Dataset construction using the mapseq-rf only requires the 'map' type to correctly
(hamf/custom-ireduce
rfn acc
;;Elided reduced? checks for a tiny bit of extra oomph.
(-> acc
(rfn (MapEntry/create :year-month (.getKey e)))
(rfn (MapEntry/create :count (deref (.getValue e)))))))
(.entrySet tally))))
(defn- otfrom-pathway
[ds]
(->> (ds/row-mapcat ds tally-days-as-year-months
{:result-type :as-seq
:parser-fn {:count :int32
:year-month :object}})
(ds-reduce/group-by-column-agg
[:simulation :placement :year-month]
{:count (ds-reduce/sum :count)})
(#(let [ds %
count (ds :count)]
(assoc ds :count2 (dfn/sq count))))
(ds-reduce/group-by-column-agg
[:placement :year-month]
{:min-count (ds-reduce/prob-quantile :count 0.0)
:low-95-count (ds-reduce/prob-quantile :count 0.05)
:q1-count (ds-reduce/prob-quantile :count 0.25)
:median-count (ds-reduce/prob-quantile :count 0.50)
:q3-count (ds-reduce/prob-quantile :count 0.75)
:high-95-count (ds-reduce/prob-quantile :count 0.95)
:max-count (ds-reduce/prob-quantile :count 1.0)
:count (ds-reduce/sum :count)})))
(defn- tally-days-columnwise
[ds]
(let [starts (dtype/->buffer (ds :start))
ends (dtype/->buffer (ds :end))
n-rows (.lsize starts)
indexes (dtype/prealloc-list :int64 n-rows)
year-months (dtype/prealloc-list :object n-rows)
counts (dtype/prealloc-list :int32 n-rows)
incrementor (jvm-map/bi-function k v
(if v
(unchecked-inc (long v))
1))
tally (jvm-map/hash-map)]
(dotimes [row-idx n-rows]
(.clear tally)
(let [^LocalDate start (starts row-idx)
^LocalDate end (ends row-idx)
nd (.until start end java.time.temporal.ChronoUnit/DAYS)]
(dotimes [day-idx nd]
(let [ym (YearMonth/from (.plusDays start day-idx))]
(jvm-map/compute! tally ym incrementor)))
(hamf/consume! (hamf/consumer
kv (do
(.addLong indexes row-idx)
(.add year-months (key kv))
(.add counts (val kv))))
tally)))
(-> (ds/select-rows ds indexes)
(assoc :year-month year-months
:count counts))))
(defn- otfrom-columnwise-pathway
[ds]
(->> (ds/pmap-ds ds tally-days-columnwise
{:result-type :as-seq})
(ds-reduce/group-by-column-agg
[:simulation :placement :year-month]
{:count (ds-reduce/sum :count)})
(#(let [ds %
count (ds :count)]
[(assoc ds :count2 (dfn/sq count))]))
(ds-reduce/group-by-column-agg
[:placement :year-month]
{:min-count (ds-reduce/prob-quantile :count 0.0)
:low-95-count (ds-reduce/prob-quantile :count 0.05)
:q1-count (ds-reduce/prob-quantile :count 0.25)
:median-count (ds-reduce/prob-quantile :count 0.50)
:q3-count (ds-reduce/prob-quantile :count 0.75)
:high-95-count (ds-reduce/prob-quantile :count 0.95)
:max-count (ds-reduce/prob-quantile :count 1.0)
:count (ds-reduce/sum :count)})))
(deftest otfrom-pathway-test
(let [ds (create-otfrom-init-dataset)
start (ds :start)
end (ds :end)
total-count (->> (dtype/emap #(dtype-dt/between %1 %2 :days) :int64 start end)
(dfn/sum))
_ (do (otfrom-pathway ds)
(otfrom-columnwise-pathway ds))
_ (println "otfrom pathway timing")
ofds (time (otfrom-pathway ds))
_ (println "otfrom columnwise pathway timing")
of-cwise-ds (time (otfrom-columnwise-pathway ds))
ofsum (dfn/sum (ofds :count))
of-cwise-sum (dfn/sum (of-cwise-ds :count))]
(is (= ofsum total-count))
(is (= of-cwise-sum total-count))))
(deftest issue-314
(let [dstds (->
(ds-reduce/group-by-column-agg
:foo
{:foos (ds-reduce/distinct :value)}
(ds/->dataset (into [] (map (fn [i] {:foo 'foo :value (str i)})) (range 3))))
(ds/column-map :foos-2 (fn [values] values) [:foos]))]
(is (= ["0" "1" "2"]
(vec (first (dstds :foos-2)))))))
(deftest issue-312
(let [ds (ds-reduce/aggregate
{:n-elems (ds-reduce/count-distinct :genre)}
[(ds/->dataset "test/data/example-genres.nippy")])]
(is (pos? (first (ds :n-elems))))))
|
4965edc7e9997f319692dd7b4dc05cb15a96c534e01cd118fe03639d68a75551 | ds-wizard/engine-backend | Tag.hs | module Wizard.Service.KnowledgeModel.Squash.Event.Tag where
import Shared.Model.Event.Tag.TagEvent
import Wizard.Service.KnowledgeModel.Squash.Event.Common
instance SimpleEventSquash EditTagEvent where
isSimpleEventSquashApplicable _ = True
isReorderEventSquashApplicable _ _ = False
isTypeChanged _ _ = False
simpleSquashEvent mPreviousEvent oldEvent newEvent =
EditTagEvent
{ uuid = newEvent.uuid
, parentUuid = newEvent.parentUuid
, entityUuid = newEvent.entityUuid
, name = applyValue oldEvent newEvent (.name)
, description = applyValue oldEvent newEvent (.description)
, color = applyValue oldEvent newEvent (.color)
, annotations = applyValue oldEvent newEvent (.annotations)
, createdAt = newEvent.createdAt
}
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/c60bcc649d3d1aefe73d54ba990bcb024c8948eb/engine-wizard/src/Wizard/Service/KnowledgeModel/Squash/Event/Tag.hs | haskell | module Wizard.Service.KnowledgeModel.Squash.Event.Tag where
import Shared.Model.Event.Tag.TagEvent
import Wizard.Service.KnowledgeModel.Squash.Event.Common
instance SimpleEventSquash EditTagEvent where
isSimpleEventSquashApplicable _ = True
isReorderEventSquashApplicable _ _ = False
isTypeChanged _ _ = False
simpleSquashEvent mPreviousEvent oldEvent newEvent =
EditTagEvent
{ uuid = newEvent.uuid
, parentUuid = newEvent.parentUuid
, entityUuid = newEvent.entityUuid
, name = applyValue oldEvent newEvent (.name)
, description = applyValue oldEvent newEvent (.description)
, color = applyValue oldEvent newEvent (.color)
, annotations = applyValue oldEvent newEvent (.annotations)
, createdAt = newEvent.createdAt
}
|
|
2048044d1313851fb14672936b3f3a6126b7b1b3720658f39df8c2dd6dcab8bd | aarvid/SmackJack | demo-push.lisp | (in-package :smackjack-demo)
;;; create an ajax-pusher object.
(defparameter *ajax-pusher*
(make-instance 'smackjack:ajax-pusher :server-uri "/ajax-push"))
;;; start hunchentoot server
(defparameter *push-server*
(start (make-instance 'easy-acceptor
:name 'push-server
:address "localhost"
:port 8080
:access-log-destination nil)))
(reset-session-secret)
;; add to hunchentoot
(push (create-ajax-dispatcher *ajax-pusher*) *dispatch-table*)
;; define push function
(defun-push push-show-text (text) (*ajax-pusher*)
(let* ((div (chain document (get-element-by-id "pushed-text"))))
(when div
(let* ((p (chain document (create-element "p")))
(tnode (chain document (create-text-node text))))
(chain p (append-child tnode))
(chain div (append-child p))))))
;;; define a simple web page
(define-easy-handler (home :uri "/"
:acceptor-names (list 'push-server)) ()
(with-html-output-to-string (s)
(:html
(:head
(:title "smackjack push demo")
(str (generate-prologue *ajax-pusher*)))
(:body :onload (ps-inline (chain smackpusher (start-poll)))
(:p (:b "Pushed Text"))
(:div :id "pushed-text")))))
;;; run this code after at least one browser opens the page.
;;; this will push the text to all open pages for the *push-server*.
(let ((hunchentoot:*acceptor* *push-server*))
(smackjack-demo::push-show-text "Four score and seven years ago"))
| null | https://raw.githubusercontent.com/aarvid/SmackJack/019051e6ba3c5fd1e28bae0d85d53510beb63f2f/demo/demo-push.lisp | lisp | create an ajax-pusher object.
start hunchentoot server
define push function
define a simple web page
this will push the text to all open pages for the *push-server*. | (in-package :smackjack-demo)
(defparameter *ajax-pusher*
(make-instance 'smackjack:ajax-pusher :server-uri "/ajax-push"))
(defparameter *push-server*
(start (make-instance 'easy-acceptor
:name 'push-server
:address "localhost"
:port 8080
:access-log-destination nil)))
(reset-session-secret)
;; add to hunchentoot
(push (create-ajax-dispatcher *ajax-pusher*) *dispatch-table*)
(defun-push push-show-text (text) (*ajax-pusher*)
(let* ((div (chain document (get-element-by-id "pushed-text"))))
(when div
(let* ((p (chain document (create-element "p")))
(tnode (chain document (create-text-node text))))
(chain p (append-child tnode))
(chain div (append-child p))))))
(define-easy-handler (home :uri "/"
:acceptor-names (list 'push-server)) ()
(with-html-output-to-string (s)
(:html
(:head
(:title "smackjack push demo")
(str (generate-prologue *ajax-pusher*)))
(:body :onload (ps-inline (chain smackpusher (start-poll)))
(:p (:b "Pushed Text"))
(:div :id "pushed-text")))))
;;; run this code after at least one browser opens the page.
(let ((hunchentoot:*acceptor* *push-server*))
(smackjack-demo::push-show-text "Four score and seven years ago"))
|
7ef143b3f156deb25aca831f6849d980c869e27b7e4b116290066fd97b8cad98 | bvaugon/ocapic | div.ml | (*************************************************************************)
(* *)
(* OCaPIC *)
(* *)
(* *)
(* This file is distributed under the terms of the CeCILL license. *)
(* See file ../../LICENSE-en. *)
(* *)
(*************************************************************************)
let ml_of_int x = ((x lsl 1) lor 1) land 0xFFFF;;
let int_of_ml x = ((x lsl 15) asr 16);;
let div x y = ml_of_int (int_of_ml x / int_of_ml y);;
let op rx ry =
let x = ref (rx lsr 1) in
let y = ref (ry lsr 1) in
let p = ref 2 in
let r = ref 1 in
if !x land 0x4000 <> 0 then x := -(!x) land 0x7FFF;
if !y land 0x4000 <> 0 then y := -(!y) land 0x7FFF;
begin
let rec f () =
y := !y lsl 1;
if !x >= !y then (
p := (!p lsl 1) land 0xFFFF;
f ();
);
in
f ();
end;
begin
let rec g () =
y := !y lsr 1;
if !x >= !y then (
x := !x - !y;
r := !r lor !p;
);
p := !p lsr 1;
if !p land 1 <> 1 then g ();
in
g ();
end;
if (rx land 0x8000 = 0 && ry land 0x8000 = 0) ||
(rx land 0x8000 <> 0 && ry land 0x8000 <> 0) then
!r
else
((lnot !r) + 3) land 0xFFFF
;;
for x = -0x4000 to 0x3FFF do
Printf.printf "\r%d %!" x;
for y = -0x4000 to 0x3FFF do
if y <> 0x000 then
let rx = ml_of_int x in
let ry = ml_of_int y in
let d1 = div rx ry in
let d2 = op rx ry in
if d1 <> d2 then (
Printf.printf "\n%4d / %-4d = %-4d %-4d\n%!"
x y (int_of_ml d1) (int_of_ml d2);
Printf.printf "%04x %04x = %04x %04x\n%!" rx ry d1 d2;
failwith "op <> mul";
)
done
done;;
Printf.printf "\nOk\n%!";;
| null | https://raw.githubusercontent.com/bvaugon/ocapic/a14cd9ec3f5022aeb5fe2264d595d7e8f1ddf58a/tests/arith/div.ml | ocaml | ***********************************************************************
OCaPIC
See file ../../LICENSE-en.
*********************************************************************** |
This file is distributed under the terms of the CeCILL license .
let ml_of_int x = ((x lsl 1) lor 1) land 0xFFFF;;
let int_of_ml x = ((x lsl 15) asr 16);;
let div x y = ml_of_int (int_of_ml x / int_of_ml y);;
let op rx ry =
let x = ref (rx lsr 1) in
let y = ref (ry lsr 1) in
let p = ref 2 in
let r = ref 1 in
if !x land 0x4000 <> 0 then x := -(!x) land 0x7FFF;
if !y land 0x4000 <> 0 then y := -(!y) land 0x7FFF;
begin
let rec f () =
y := !y lsl 1;
if !x >= !y then (
p := (!p lsl 1) land 0xFFFF;
f ();
);
in
f ();
end;
begin
let rec g () =
y := !y lsr 1;
if !x >= !y then (
x := !x - !y;
r := !r lor !p;
);
p := !p lsr 1;
if !p land 1 <> 1 then g ();
in
g ();
end;
if (rx land 0x8000 = 0 && ry land 0x8000 = 0) ||
(rx land 0x8000 <> 0 && ry land 0x8000 <> 0) then
!r
else
((lnot !r) + 3) land 0xFFFF
;;
for x = -0x4000 to 0x3FFF do
Printf.printf "\r%d %!" x;
for y = -0x4000 to 0x3FFF do
if y <> 0x000 then
let rx = ml_of_int x in
let ry = ml_of_int y in
let d1 = div rx ry in
let d2 = op rx ry in
if d1 <> d2 then (
Printf.printf "\n%4d / %-4d = %-4d %-4d\n%!"
x y (int_of_ml d1) (int_of_ml d2);
Printf.printf "%04x %04x = %04x %04x\n%!" rx ry d1 d2;
failwith "op <> mul";
)
done
done;;
Printf.printf "\nOk\n%!";;
|
57d8f10949acf951d858172e8923f6af0a1871b35845c1b96512ecc7a2451f86 | xvw/preface | functor.ml | module Core (Req : Preface_specs.Functor.WITH_MAP) = Req
module Operation (Core : Preface_specs.Functor.CORE) = struct
type 'a t = 'a Core.t
include (
Indexed_functor.Operation (struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end) :
Preface_specs.Indexed_functor.OPERATION with type ('a, _) t := 'a Core.t )
end
module Infix
(Core : Preface_specs.Functor.CORE)
(Operation : Preface_specs.Functor.OPERATION with type 'a t = 'a Core.t) =
struct
type 'a t = 'a Core.t
include (
Indexed_functor.Infix
(struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end)
(struct
type ('a, 'index) t = 'a Operation.t
include (
Operation :
Preface_specs.Functor.OPERATION with type 'a t := 'a Core.t )
end) :
Preface_specs.Indexed_functor.INFIX with type ('a, _) t := 'a Core.t )
end
module Syntax (Core : Preface_specs.Functor.CORE) = struct
type 'a t = 'a Core.t
include (
Indexed_functor.Syntax (struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end) :
Preface_specs.Indexed_functor.SYNTAX with type ('a, _) t := 'a Core.t )
end
module Via
(Core : Preface_specs.Functor.CORE)
(Operation : Preface_specs.Functor.OPERATION with type 'a t = 'a Core.t)
(Infix : Preface_specs.Functor.INFIX with type 'a t = 'a Core.t)
(Syntax : Preface_specs.Functor.SYNTAX with type 'a t = 'a Core.t) =
struct
type 'a t = 'a Core.t
include (
Indexed_functor.Via
(struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end)
(struct
type ('a, 'index) t = 'a Core.t
include (
Operation :
Preface_specs.Functor.OPERATION with type 'a t := 'a Core.t )
end)
(struct
type ('a, 'index) t = 'a Core.t
include (Infix : Preface_specs.Functor.INFIX with type 'a t := 'a Core.t)
end)
(struct
type ('a, 'index) t = 'a Core.t
include (
Syntax : Preface_specs.Functor.SYNTAX with type 'a t := 'a Core.t )
end) :
Preface_specs.Indexed_functor.API with type ('a, _) t := 'a Core.t )
end
module Via_map (Req : Preface_specs.Functor.WITH_MAP) = struct
type 'a t = 'a Req.t
include (
Indexed_functor.Via_map (struct
type ('a, 'index) t = 'a Req.t
include (Req : Preface_specs.Functor.WITH_MAP with type 'a t := 'a Req.t)
end) :
Preface_specs.Indexed_functor.API with type ('a, _) t := 'a Req.t )
end
module Composition (F : Preface_specs.FUNCTOR) (G : Preface_specs.FUNCTOR) =
Via_map (struct
type 'a t = 'a G.t F.t
let map f x = F.map (G.map f) x
end)
module From_arrow (A : Preface_specs.ARROW) = Via_map (struct
type 'a t = (unit, 'a) A.t
let map f x = A.(x >>> arrow f)
end)
module From_applicative (Applicative : Preface_specs.APPLICATIVE) = Applicative
module From_alt (Alt : Preface_specs.ALT) = Alt
module From_monad (Monad : Preface_specs.MONAD) = Monad
module From_alternative (Alternative : Preface_specs.ALTERNATIVE) = Alternative
module From_monad_plus (Monad_plus : Preface_specs.MONAD_PLUS) = Monad_plus
module From_comonad (Comonad : Preface_specs.COMONAD) = Comonad
module From_bifunctor (Bifunctor : Preface_specs.Bifunctor.CORE) =
Via_map (struct
type 'a t = ('a, 'a) Bifunctor.t
let map f b = Bifunctor.bimap f f b
end)
module Sum (F : Preface_specs.FUNCTOR) (G : Preface_specs.FUNCTOR) = struct
type 'a sum =
| L of 'a F.t
| R of 'a G.t
include Via_map (struct
type 'a t = 'a sum
let map f = function L x -> L (F.map f x) | R x -> R (G.map f x)
end)
end
module Product (F : Preface_specs.FUNCTOR) (G : Preface_specs.FUNCTOR) =
Via_map (struct
type 'a t = 'a F.t * 'a G.t
let map f (x, y) = (F.map f x, G.map f y)
end)
module Index (F : Preface_specs.FUNCTOR) = struct
type ('a, 'index) t = 'a F.t
include (
Indexed_functor.Via
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (F : Preface_specs.Functor.CORE with type 'a t := 'a F.t)
end)
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (F : Preface_specs.Functor.OPERATION with type 'a t := 'a F.t)
end)
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (F.Infix : Preface_specs.Functor.INFIX with type 'a t := 'a F.t)
end)
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (
F.Syntax : Preface_specs.Functor.SYNTAX with type 'a t := 'a F.t )
end) :
Preface_specs.INDEXED_FUNCTOR with type ('a, 'index) t := ('a, 'index) t )
end
| null | https://raw.githubusercontent.com/xvw/preface/51892a7ce2ddfef69de963265da3617968cdb7ad/lib/preface_make/functor.ml | ocaml | module Core (Req : Preface_specs.Functor.WITH_MAP) = Req
module Operation (Core : Preface_specs.Functor.CORE) = struct
type 'a t = 'a Core.t
include (
Indexed_functor.Operation (struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end) :
Preface_specs.Indexed_functor.OPERATION with type ('a, _) t := 'a Core.t )
end
module Infix
(Core : Preface_specs.Functor.CORE)
(Operation : Preface_specs.Functor.OPERATION with type 'a t = 'a Core.t) =
struct
type 'a t = 'a Core.t
include (
Indexed_functor.Infix
(struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end)
(struct
type ('a, 'index) t = 'a Operation.t
include (
Operation :
Preface_specs.Functor.OPERATION with type 'a t := 'a Core.t )
end) :
Preface_specs.Indexed_functor.INFIX with type ('a, _) t := 'a Core.t )
end
module Syntax (Core : Preface_specs.Functor.CORE) = struct
type 'a t = 'a Core.t
include (
Indexed_functor.Syntax (struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end) :
Preface_specs.Indexed_functor.SYNTAX with type ('a, _) t := 'a Core.t )
end
module Via
(Core : Preface_specs.Functor.CORE)
(Operation : Preface_specs.Functor.OPERATION with type 'a t = 'a Core.t)
(Infix : Preface_specs.Functor.INFIX with type 'a t = 'a Core.t)
(Syntax : Preface_specs.Functor.SYNTAX with type 'a t = 'a Core.t) =
struct
type 'a t = 'a Core.t
include (
Indexed_functor.Via
(struct
type ('a, 'index) t = 'a Core.t
include (Core : Preface_specs.Functor.CORE with type 'a t := 'a Core.t)
end)
(struct
type ('a, 'index) t = 'a Core.t
include (
Operation :
Preface_specs.Functor.OPERATION with type 'a t := 'a Core.t )
end)
(struct
type ('a, 'index) t = 'a Core.t
include (Infix : Preface_specs.Functor.INFIX with type 'a t := 'a Core.t)
end)
(struct
type ('a, 'index) t = 'a Core.t
include (
Syntax : Preface_specs.Functor.SYNTAX with type 'a t := 'a Core.t )
end) :
Preface_specs.Indexed_functor.API with type ('a, _) t := 'a Core.t )
end
module Via_map (Req : Preface_specs.Functor.WITH_MAP) = struct
type 'a t = 'a Req.t
include (
Indexed_functor.Via_map (struct
type ('a, 'index) t = 'a Req.t
include (Req : Preface_specs.Functor.WITH_MAP with type 'a t := 'a Req.t)
end) :
Preface_specs.Indexed_functor.API with type ('a, _) t := 'a Req.t )
end
module Composition (F : Preface_specs.FUNCTOR) (G : Preface_specs.FUNCTOR) =
Via_map (struct
type 'a t = 'a G.t F.t
let map f x = F.map (G.map f) x
end)
module From_arrow (A : Preface_specs.ARROW) = Via_map (struct
type 'a t = (unit, 'a) A.t
let map f x = A.(x >>> arrow f)
end)
module From_applicative (Applicative : Preface_specs.APPLICATIVE) = Applicative
module From_alt (Alt : Preface_specs.ALT) = Alt
module From_monad (Monad : Preface_specs.MONAD) = Monad
module From_alternative (Alternative : Preface_specs.ALTERNATIVE) = Alternative
module From_monad_plus (Monad_plus : Preface_specs.MONAD_PLUS) = Monad_plus
module From_comonad (Comonad : Preface_specs.COMONAD) = Comonad
module From_bifunctor (Bifunctor : Preface_specs.Bifunctor.CORE) =
Via_map (struct
type 'a t = ('a, 'a) Bifunctor.t
let map f b = Bifunctor.bimap f f b
end)
module Sum (F : Preface_specs.FUNCTOR) (G : Preface_specs.FUNCTOR) = struct
type 'a sum =
| L of 'a F.t
| R of 'a G.t
include Via_map (struct
type 'a t = 'a sum
let map f = function L x -> L (F.map f x) | R x -> R (G.map f x)
end)
end
module Product (F : Preface_specs.FUNCTOR) (G : Preface_specs.FUNCTOR) =
Via_map (struct
type 'a t = 'a F.t * 'a G.t
let map f (x, y) = (F.map f x, G.map f y)
end)
module Index (F : Preface_specs.FUNCTOR) = struct
type ('a, 'index) t = 'a F.t
include (
Indexed_functor.Via
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (F : Preface_specs.Functor.CORE with type 'a t := 'a F.t)
end)
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (F : Preface_specs.Functor.OPERATION with type 'a t := 'a F.t)
end)
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (F.Infix : Preface_specs.Functor.INFIX with type 'a t := 'a F.t)
end)
(struct
type nonrec ('a, 'index) t = ('a, 'index) t
include (
F.Syntax : Preface_specs.Functor.SYNTAX with type 'a t := 'a F.t )
end) :
Preface_specs.INDEXED_FUNCTOR with type ('a, 'index) t := ('a, 'index) t )
end
|
|
961a6d1d165e97bc834c0e703e926e42a4d42f55931722bb5be3cc625dc403cf | naoto-ogawa/h-xproto-mysql | CRUD.hs | |
module : Database.MySQLX.CRUD
description : crud interface
copyright : (c) naoto ogawa, 2017
license : MIT
maintainer :
stability : experimental
portability :
CRUD interface
@
find update insert delete
collection * * * *
data_model * * * * option
projection *1 - *2 - list *1 : Fields *2 : Column
criteria * * - * option
row - - * - list TypedRow
args * * * * list
limit * * - * option
order * * - * list
grouping * - - - list
grouping_criteria * - - - option
operation - * - - list
@
-}
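{- Illustrative usage sketch (not part of the original module); the schema,
   collection and criteria strings below are made-up examples:

   > let f = setCriteria'
   >           (setCollection' (getDocumentModel :: PF.Find) "my_schema" "my_collection")
   >           "age > 20"

   The resulting 'PF.Find' value is then run against a 'NodeSession' via 'find'.
-}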
{-# LANGUAGE ConstrainedClassMethods #-}
module DataBase.MySQLX.CRUD
(
-- * Setting a field to a CRUD record
setCollection -- collection
,setCollection'
,setDataModel -- data_model
,setDocumentModel -- DOCUMENT
,getDocumentModel -- DOCUMENT
,setTableModel -- TABLE
,getTableModel -- TABLE
,setFields -- projection
,setFields' -- projection
,setColumns -- projection
,setCriteria -- criteria
,setCriteria' -- criteria
,setCriteriaBind
,setTypedRow -- row
,setTypedRow' -- row
,setArgs -- args
,setLimit -- limit (Limit)
,setLimit' -- limit (Int)
,setLimit'' -- limit (Int, Int)
,setOrder -- order
,setOrder' -- order
,setGrouping -- grouping
,setGroupingCriteria -- grouping_criteria
,setOperation -- operation (Only Update)
-- * Create a CRUD Object
,createInsert
,createFind
,createUpdate
,createDelete
-- * CRUD Execution
,find
,delete
,insert
,update
) where
-- general, standard library
import Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)
import Control.Monad
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as Map
import qualified Data.Maybe as Maybe
import qualified Data.Word as W
import qualified Data.Sequence as Seq
import Data.Typeable (TypeRep, Typeable, typeRep, typeOf)
-- generated library
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Any as PA
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData.FieldType as PCMDFT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData as PCMD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Collection as PCll
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Column as PCol
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.DataModel as PDM
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Delete as PD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Expr as PEx
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Find as PF
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Insert.TypedRow as PITR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Insert as PI
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Limit as PL
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Order as PO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Projection as PP
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Row as PR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Scalar as PS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Update as PU
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.UpdateOperation as PUO
-- protocol buffer library
import qualified Text.ProtocolBuffers as PB
import qualified Text.ProtocolBuffers.Basic as PBB
import qualified Text.ProtocolBuffers.Header as PBH
import qualified Text.ProtocolBuffers.TextMessage as PBT
import qualified Text.ProtocolBuffers.WireMessage as PBW
import qualified Text.ProtocolBuffers.Reflections as PBR
-- my library
import DataBase.MySQLX.Exception
import DataBase.MySQLX.ExprParser
import DataBase.MySQLX.Model as XM
import DataBase.MySQLX.NodeSession
import DataBase.MySQLX.Util
-- -----------------------------------------------------------------------------
--
-- -----------------------------------------------------------------------------
-- | CRUD operations which need a Collection
class HasCollection a where
-- | Set collection record
setCollection :: a -> PCll.Collection -> a
-- | Set a schema and a collection
setCollection' ::
a -- ^ CRUD Object
-> String -- ^ Schema name
-> String -- ^ Collection name
-> a -- ^ CRUD Object
setCollection' a schema coll = a `setCollection` (mkCollection schema coll)
instance HasCollection PF.Find where setCollection a coll = a {PF.collection = coll }
instance HasCollection PU.Update where setCollection a coll = a {PU.collection = coll }
instance HasCollection PI.Insert where setCollection a coll = a {PI.collection = coll }
instance HasCollection PD.Delete where setCollection a coll = a {PD.collection = coll }
-- | CRUD operations which need a DataModel.
class HasDataModel a where
-- | Set DataModel record
setDataModel :: a -> PDM.DataModel -> a
-- | Set Document Model
setDocumentModel :: a -> a
setDocumentModel a = a `setDataModel` PDM.DOCUMENT
-- | Get Document Model
getDocumentModel :: PBH.Default a => a
getDocumentModel = PBH.defaultValue `setDataModel` PDM.DOCUMENT
-- | Set Table Model
setTableModel :: a -> a
setTableModel a = a `setDataModel` PDM.TABLE
-- | Get Table Model
getTableModel :: PBH.Default a => a
getTableModel = PBH.defaultValue `setDataModel` PDM.TABLE
instance HasDataModel PF.Find where setDataModel a dataModel = a {PF.data_model = Just dataModel }
instance HasDataModel PU.Update where setDataModel a dataModel = a {PU.data_model = Just dataModel }
instance HasDataModel PI.Insert where setDataModel a dataModel = a {PI.data_model = Just dataModel }
instance HasDataModel PD.Delete where setDataModel a dataModel = a {PD.data_model = Just dataModel }
-- | CRUD operations which need a Criteria.
class HasCriteria a where
-- | Set Criteria record
setCriteria :: a -> PEx.Expr -> a
setCriteria' :: a -> String -> a
setCriteria' a str = setCriteria a $ parseCriteria' $ s2bs str
instance HasCriteria PF.Find where setCriteria a criteria = a {PF.criteria = Just criteria }
instance HasCriteria PU.Update where setCriteria a criteria = a {PU.criteria = Just criteria }
instance HasCriteria PD.Delete where setCriteria a criteria = a {PD.criteria = Just criteria }
-- | CRUD operations which need Args.
class HasArgs a where
-- | Set Args record
-- TODO need to re-order args by a placeholder-order.
setArgs :: a -> [PS.Scalar] -> a
instance HasArgs PF.Find where setArgs a arg = a {PF.args = Seq.fromList arg }
instance HasArgs PU.Update where setArgs a arg = a {PU.args = Seq.fromList arg }
instance HasArgs PI.Insert where setArgs a arg = a {PI.args = Seq.fromList arg }
instance HasArgs PD.Delete where setArgs a arg = a {PD.args = Seq.fromList arg }
-- | CRUD operations which need both a Criteria and a map of bindings.
class HasCriteriaBind a where
setCriteriaBind :: (HasCriteria a, HasArgs a) => a -> (String, BindMap) -> a
setCriteriaBind a (str, bind) = a `setCriteria` exp `setArgs` map
where (exp, map) =
case parseCriteria $ s2bs str of
Left y -> error $ "parseCriteria error " ++ y
Right (e, state) -> (e, bindMap2Seq' bind $ bindList state)
instance HasCriteriaBind PF.Find
instance HasCriteriaBind PU.Update
instance HasCriteriaBind PI.Insert
instance HasCriteriaBind PD.Delete
class HasLimit a where
-- | CRUD operations which need a Limit
setLimit :: a -> PL.Limit -> a
setLimit' :: a -> Int -> a
setLimit' a num = setLimit a (mkLimit' num)
setLimit'' :: a -> Int -> Int -> a
setLimit'' a num offset = setLimit a (mkLimit num offset)
instance HasLimit PF.Find where setLimit a lmt = a {PF.limit = Just lmt }
instance HasLimit PU.Update where setLimit a lmt = a {PU.limit = Just lmt }
instance HasLimit PD.Delete where setLimit a lmt = a {PD.limit = Just lmt }
class HasOrder a where
-- | CRUD operations which need a Order.
setOrder :: a -> [PO.Order] -> a
setOrder' :: a -> String -> a
setOrder' a str = setOrder a $ parseOrderBy' $ s2bs str
instance HasOrder PF.Find where setOrder a ord = a {PF.order = Seq.fromList ord }
instance HasOrder PU.Update where setOrder a ord = a {PU.order = Seq.fromList ord }
instance HasOrder PD.Delete where setOrder a ord = a {PD.order = Seq.fromList ord }
-- | Insert
createInsert :: PCll.Collection -- ^ Collection
-> PDM.DataModel -- ^ DataModel
-> [PCol.Column] -- ^ Column
-> [PITR.TypedRow] -- ^ TypedRow
-> [PS.Scalar] -- ^ Scalar
-> PI.Insert -- ^ Insert Object
createInsert col model projs rows args = PB.defaultValue
`setCollection` col
`setDataModel` model
`setColumns` projs
`setTypedRow` rows
`setArgs` args
-- | Set columns to a Insert record.
setColumns :: PI.Insert -> [PCol.Column] -> PI.Insert
setColumns inst clms = inst {PI.projection = Seq.fromList clms}
-- | Set typed rows to a Insert record.
setTypedRow :: PI.Insert -> [PITR.TypedRow] -> PI.Insert
setTypedRow inst rows = inst {PI.row = Seq.fromList rows}
-- | Set typed rows to a Insert record from Exprs.
setTypedRow' :: PI.Insert -> [PEx.Expr] -> PI.Insert
setTypedRow' inst exprs = inst {PI.row = Seq.fromList [mkExpr2TypedRow exprs]}
-- | Delete
createDelete :: PCll.Collection -- ^ Collection
-> PDM.DataModel -- ^ DataModel
-> PEx.Expr -- ^ where
-> [PS.Scalar] -- ^ bindings
-> PL.Limit -- ^ Limit
-> [PO.Order] -- ^ Order
-> PD.Delete -- ^ Delete Object
createDelete col model criteria args lmt orders = PB.defaultValue
`setCollection` col
`setDataModel` model
`setCriteria` criteria -- Expr
`setArgs` args -- [Scalar]
`setLimit` lmt -- Limit
`setOrder` orders -- Order
-- | Update
createUpdate :: PCll.Collection -- ^ Collection
-> PDM.DataModel -- ^ DataModel
-> PEx.Expr -- ^ where
-> [PS.Scalar] -- ^ bindings
-> PL.Limit -- ^ Limit
-> [PO.Order] -- ^ Order
-> [PUO.UpdateOperation] -- ^ UpdateOperation
-> PU.Update -- ^ Update Object
createUpdate col model criteria args lmt orders upOpes = PB.defaultValue
`setCollection` col
`setDataModel` model
`setCriteria` criteria
`setArgs` args
`setLimit` lmt
`setOrder` orders
`setOperation` upOpes -- UpdateOperation
-- | Set update operations to a Update record
setOperation:: PU.Update -> [PUO.UpdateOperation] -> PU.Update
setOperation up upOpe = up {PU.operation = Seq.fromList upOpe}
-- | Find
createFind :: PCll.Collection -- ^ Collection
-> PDM.DataModel -- ^ DataModel
-> [PP.Projection] -- ^ Projection
-> PEx.Expr -- ^ where
-> [PS.Scalar] -- ^ bindings
-> PL.Limit -- ^ Limit
-> [PO.Order] -- ^ Order
-> [PEx.Expr] -- ^ group by
-> PEx.Expr -- ^ having
-> PF.Find -- ^ Find Object
createFind col model projs criteria args lmt orders grouping gCriteria = PB.defaultValue
`setCollection` col
`setDataModel` model
`setFields` projs -- Seq Projection
`setCriteria` criteria -- Maybe Expr
`setArgs` args -- Seq Scalar
`setLimit` lmt -- Maybe Limit
`setOrder` orders -- Seq Order
`setGrouping` grouping -- Seq Expr
`setGroupingCriteria` gCriteria -- Maybe Expr
-- | put fields to a Find record. (This is like a select clause of SQL)
setFields :: PF.Find -> [PP.Projection] -> PF.Find
setFields find proj = find {PF.projection = Seq.fromList proj }
-- | put fields by String to a Find record. (This is like a select clause of SQL)
setFields' :: PF.Find -> String -> PF.Find
setFields' find proj = find {PF.projection = Seq.fromList $ parseProjection' $ s2bs proj }
-- | put grouping field to a Find record. (This is like a group by clause of SQL)
setGrouping :: PF.Find -> [PEx.Expr] -> PF.Find
setGrouping find group = find {PF.grouping = Seq.fromList group }
-- | put grouping_criteria to a Find record. (This is like a having clause of SQL)
setGroupingCriteria :: PF.Find -> PEx.Expr -> PF.Find
setGroupingCriteria find criteria = find {PF.grouping_criteria = Just criteria }
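{- Usage sketch (illustrative only). A Find can be assembled with the setters
above; the schema, collection and column names and the criteria / order-by
strings are hypothetical, and mkCollection comes from DataBase.MySQLX.Model.

  let fd = (getTableModel :: PF.Find)
             `setCollection` mkCollection "my_schema" "my_table"
             `setFields'`    "name, age"
             `setCriteria'`  "age > 20"
             `setLimit'`     10
             `setOrder'`     "age desc"
-}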
--
-- CRUD functions
--
-- | Common Operation : Insert / Update / Delete
modify :: (PBT.TextMsg msg
,PBR.ReflectDescriptor msg
,PBW.Wire msg
,Show msg
,Typeable msg
,MonadIO m
,MonadThrow m) => msg -> NodeSession -> m W.Word64
modify obj nodeSess = do
runReaderT (writeMessageR obj) nodeSess
ret@(x:xs) <- runReaderT readMessagesR nodeSess -- [(Int, B.ByteString)]
if fst x == s_error then do
msg <- getError $ snd x
throwM $ XProtocolError msg
else do
frm <- (getFrame . snd ) $ head $ filter (\(t, b) -> t == s_notice) ret -- Frame
ssc <- getPayloadSessionStateChanged frm
getRowsAffected ssc
-- | Delete
delete :: (MonadIO m, MonadThrow m) => PD.Delete -> NodeSession -> m W.Word64
delete = modify
-- | Update
update :: (MonadIO m, MonadThrow m) => PU.Update -> NodeSession -> m W.Word64
update = modify
-- | Insert
insert :: (MonadIO m, MonadThrow m) => PI.Insert -> NodeSession -> m W.Word64
insert = modify
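{- Usage sketch (illustrative only). All of these wrappers run against an open
NodeSession (see DataBase.MySQLX.NodeSession); 'nodeSess' and the Insert value
'ins' (built with createInsert / setTypedRow') are assumed to exist already.

  do affected <- insert ins nodeSess   -- returns the number of affected rows
     liftIO $ print affected
-}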
-- | Find (Select)
-- TODO: unify this with select; add error handling
find fd nodeSess = do
-- debug fd
runReaderT (writeMessageR fd) nodeSess
ret <- runReaderT readMessagesR nodeSess
debug ret
return $ tupleRfmap ((map PR.field) . join) -- m (_, [m Row]) -> m (_, [Row]) -> (_, [Seq ByteString])
-- m ([m ColumnMetaData], _) -> m ([ColumnMetaData], _) -> m (Seq ColumnMetaData, _)
-- collect ColumnMetaData and Row, throw away others
where f = \(t, b) (meta, rows) ->
if t == s_resultset_column_meta_data then
(getColumnMetaData b : meta , rows )
else if t == s_resultset_row then
(meta , getRow b : rows)
else
(meta , rows )
tupleLfmap f (a,b) = (f a, b)
tupleRfmap = fmap
--
-- functions for binding
--
-- Map String Scalar -> [String] -> Seq.Seq
type BindMap = Map.Map String PS.Scalar
type BindList = [String]
emptyBindMap :: BindMap
emptyBindMap = Map.empty
bind :: String -> PS.Scalar -> BindMap -> BindMap
bind key val map = Map.insert key val map
-- ex: [("a", XM.scalar "aaa"), ("b", XM.scalar 1), ("c", XM.scalar 1.2)]
bindParams :: [(String, PS.Scalar)] -> BindMap
bindParams entries = foldr (\(key, val) accMap -> bind key val accMap) Map.empty entries
{-
bindParams' :: (XM.Scalarable a) => [(String, a)] -> BindMap
bindParams' entries = foldr (\(key, val) accMap -> bind key (XM.scalar val) accMap) Map.empty entries
>> bindParams' [("a", 1), ("b", True)]
<interactive>:302:20: error:
• No instance for (Num Bool) arising from the literal ‘1’
• In the expression: 1
In the expression: ("a", 1)
In the first argument of ‘bindParams'’, namely
‘[("a", 1), ("b", True)]’
>>
-}
bindMap2Seq :: BindMap -> BindList -> Seq.Seq PS.Scalar
bindMap2Seq map list = foldl (\acc item -> (Maybe.fromJust $ Map.lookup item map) Seq.<| acc) Seq.empty list
-- let map = bind "c" (XM.scalar (3.0::Double)) $ bind "b" (XM.scalar "b") $ bind "a" (XM.scalar 1) emptyBindMap
-- let list = ["c", "a"]
-- pPrint $ bind "c" (XM.scalar (3.0::Double)) $ bind "b" (XM.scalar "b") $ bind "a" (XM.scalar 1) emptyBindMap
bindMap2Seq' :: BindMap -> BindList -> [PS.Scalar]
bindMap2Seq' map list = foldl (\acc item -> (Maybe.fromJust $ Map.lookup item map) : acc) [] list
| null | https://raw.githubusercontent.com/naoto-ogawa/h-xproto-mysql/1eacd6486c99b849016bf088788cb8d8b166f964/src/DataBase/MySQLX/CRUD.hs | haskell | * Setting a field to a CRUD record
collection
data_model
DOCUMENT
DOCUMENT
TABLE
TABLE
projection
projection
projection
criteria
criteria
row
row
args
limit (Limit)
limit (Int)
order
order
grouping
grouping_criteria
operation (Only Update)
* Create a CRUD Object
* CRUD Execution
general, standard library
generated library
protocol buffer library
my library
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| CRUD operations which need a Collection
| Set collection record
| Set a schema and a collection
^ CRUD Object
^ Schema name
^ Collection name
^ CRUD Object
| Set DataModel record
| Set Document Model
| Get Document Model
| Set Table Model
| Get Table Model
| Set Criteria record
| CRUD operations which need a Limit
| CRUD operations which need a Order.
| Insert
^ Collection
^ Column
^ Scalar
^ Insert Object
| Set columns to a Insert record.
| Set typed rows to a Insert record.
| Delete
^ Collection
^ where
^ bindings
^ Limit
^ Order
^ Delete Object
Expr
[Scalar]
Limit
Order
| Update
^ Collection
^ where
^ bindings
^ Limit
^ Order
^ UpdateOperation
^ Update Object
UpdateOperation
| Set update operations to a Update record
| Find
^ Collection
^ Projection
^ where
^ bindings
^ Limit
^ Order
^ group by
^ having
^ Find Object
Maybe Limit
Seq Order
Seq Expr
| put fields to a Find record. (This is like a select clause of SQL)
| put fields by String to a Find record. (This is like a select clause of SQL)
| put grouping field to a Find record. (This is like a group by clause of SQL)
| put grouping_criteria to a Find record. (This is like a having clause of SQL)
CRUD functions
| Common Operation : Insert / Update / Delete
[(Int, B.ByteString)]
Frame
| Delete
| Update
| Insert
| Find (Select)
debug fd
m (_, [m Row]) -> m (_, [Row]) -> (_, [Seq ByteString])
functions for binding
let map = bind "c" (XM.scalar (3.0::Double)) $ bind "b" (XM.scalar "b") $ bind "a" (XM.scalar 1) emptyBindMap
let list = ["c", "a"]
pPrint $ bind "c" (XM.scalar (3.0::Double)) $ bind "b" (XM.scalar "b") $ bind "a" (XM.scalar 1) emptyBindMap | |
module : Database . MySQLX.CRUD
description : crud interface
copyright : ( c ) , 2017
license : MIT
maintainer :
stability : experimental
portability :
CRUD interface
@
find update insert delete
collection * * * *
data_model * * * * option
projection * 1 - * 2 - list * 1 : Fields * 2 : Column
criteria * * - * option
row - - * - list TypedRow
args * * * * list
limit * * - * option
order * * - * list
grouping * - - - list
- - - option
operation - * - - list
@
module : Database.MySQLX.CRUD
description : crud interface
copyright : (c) naoto ogawa, 2017
license : MIT
maintainer :
stability : experimental
portability :
CRUD interface
@
find update insert delete
collection * * * *
data_model * * * * option
projection *1 - *2 - list *1 : Fields *2 : Column
criteria * * - * option
row - - * - list TypedRow
args * * * * list
limit * * - * option
order * * - * list
grouping * - - - list
grouping_criteria * - - - option
operation - * - - list
@
-}
{-# LANGUAGE ConstrainedClassMethods #-}
module DataBase.MySQLX.CRUD
(
,setCollection'
,setCriteriaBind
limit ( Int , Int )
,createInsert
,createFind
,createUpdate
,createDelete
,find
,delete
,insert
,update
) where
import Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)
import Control.Monad
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as Map
import qualified Data.Maybe as Maybe
import qualified Data.Word as W
import qualified Data.Sequence as Seq
import Data.Typeable (TypeRep, Typeable, typeRep, typeOf)
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Any as PA
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData.FieldType as PCMDFT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData as PCMD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Collection as PCll
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Column as PCol
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.DataModel as PDM
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Delete as PD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Expr as PEx
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Find as PF
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Insert.TypedRow as PITR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Insert as PI
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Limit as PL
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Order as PO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Projection as PP
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Row as PR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Scalar as PS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Update as PU
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.UpdateOperation as PUO
import qualified Text.ProtocolBuffers as PB
import qualified Text.ProtocolBuffers.Basic as PBB
import qualified Text.ProtocolBuffers.Header as PBH
import qualified Text.ProtocolBuffers.TextMessage as PBT
import qualified Text.ProtocolBuffers.WireMessage as PBW
import qualified Text.ProtocolBuffers.Reflections as PBR
import DataBase.MySQLX.Exception
import DataBase.MySQLX.ExprParser
import DataBase.MySQLX.Model as XM
import DataBase.MySQLX.NodeSession
import DataBase.MySQLX.Util
class HasCollection a where
setCollection :: a -> PCll.Collection -> a
setCollection' ::
setCollection' a schema coll = a `setCollection` (mkCollection schema coll)
instance HasCollection PF.Find where setCollection a coll = a {PF.collection = coll }
instance HasCollection PU.Update where setCollection a coll = a {PU.collection = coll }
instance HasCollection PI.Insert where setCollection a coll = a {PI.collection = coll }
instance HasCollection PD.Delete where setCollection a coll = a {PD.collection = coll }
| CRUD operations which need a DataModel .
class HasDataModel a where
setDataModel :: a -> PDM.DataModel -> a
setDocumentModel :: a -> a
setDocumentModel a = a `setDataModel` PDM.DOCUMENT
getDocumentModel :: PBH.Default a => a
getDocumentModel = PBH.defaultValue `setDataModel` PDM.DOCUMENT
setTableModel :: a -> a
setTableModel a = a `setDataModel` PDM.TABLE
getTableModel :: PBH.Default a => a
getTableModel = PBH.defaultValue `setDataModel` PDM.TABLE
instance HasDataModel PF.Find where setDataModel a dataModel = a {PF.data_model = Just dataModel }
instance HasDataModel PU.Update where setDataModel a dataModel = a {PU.data_model = Just dataModel }
instance HasDataModel PI.Insert where setDataModel a dataModel = a {PI.data_model = Just dataModel }
instance HasDataModel PD.Delete where setDataModel a dataModel = a {PD.data_model = Just dataModel }
| CRUD operations which need a Criteria .
class HasCriteria a where
setCriteria :: a -> PEx.Expr -> a
setCriteria' :: a -> String -> a
setCriteria' a str = setCriteria a $ parseCriteria' $ s2bs str
instance HasCriteria PF.Find where setCriteria a criteria = a {PF.criteria = Just criteria }
instance HasCriteria PU.Update where setCriteria a criteria = a {PU.criteria = Just criteria }
instance HasCriteria PD.Delete where setCriteria a criteria = a {PD.criteria = Just criteria }
-- | CRUD operations which need Args.
class HasArgs a where
-- | Set Args record
setArgs :: a -> [PS.Scalar] -> a -- TODO need to re-order args by a placeholder-order.
instance HasArgs PF.Find where setArgs a arg = a {PF.args = Seq.fromList arg }
instance HasArgs PU.Update where setArgs a arg = a {PU.args = Seq.fromList arg }
instance HasArgs PI.Insert where setArgs a arg = a {PI.args = Seq.fromList arg }
instance HasArgs PD.Delete where setArgs a arg = a {PD.args = Seq.fromList arg }
| CRUD operations which need both a Criteria and a map of
class HasCriteriaBind a where
setCriteriaBind :: (HasCriteria a, HasArgs a) => a -> (String, BindMap) -> a
setCriteriaBind a (str, bind) = a `setCriteria` exp `setArgs` map
where (exp, map) =
case parseCriteria $ s2bs str of
Left y -> error $ "parseCriteria error " ++ y
Right (e, state) -> (e, bindMap2Seq' bind $ bindList state)
instance HasCriteriaBind PF.Find
instance HasCriteriaBind PU.Update
instance HasCriteriaBind PI.Insert
instance HasCriteriaBind PD.Delete
class HasLimit a where
setLimit :: a -> PL.Limit -> a
setLimit' :: a -> Int -> a
setLimit' a num = setLimit a (mkLimit' num)
setLimit'' :: a -> Int -> Int -> a
setLimit'' a num offset = setLimit a (mkLimit num offset)
instance HasLimit PF.Find where setLimit a lmt = a {PF.limit = Just lmt }
instance HasLimit PU.Update where setLimit a lmt = a {PU.limit = Just lmt }
instance HasLimit PD.Delete where setLimit a lmt = a {PD.limit = Just lmt }
class HasOrder a where
setOrder :: a -> [PO.Order] -> a
setOrder' :: a -> String -> a
setOrder' a str = setOrder a $ parseOrderBy' $ s2bs str
instance HasOrder PF.Find where setOrder a ord = a {PF.order = Seq.fromList ord }
instance HasOrder PU.Update where setOrder a ord = a {PU.order = Seq.fromList ord }
instance HasOrder PD.Delete where setOrder a ord = a {PD.order = Seq.fromList ord }
^ DataModel
^
createInsert col model projs rows args = PB.defaultValue
`setCollection` col
`setDataModel` model
`setColumns` projs
`setTypedRow` rows
`setArgs` args
setColumns :: PI.Insert -> [PCol.Column] -> PI.Insert
setColumns inst clms = inst {PI.projection = Seq.fromList clms}
setTypedRow :: PI.Insert -> [PITR.TypedRow] -> PI.Insert
setTypedRow inst rows = inst {PI.row = Seq.fromList rows}
| Set typed rows to a Insert record from Exprs .
setTypedRow' :: PI.Insert -> [PEx.Expr] -> PI.Insert
setTypedRow' inst exprs = inst {PI.row = Seq.fromList [mkExpr2TypedRow exprs]}
^ DataModel
createDelete col model criteria args lmt orders = PB.defaultValue
`setCollection` col
`setDataModel` model
^ DataModel
createUpdate col model criteria args lmt orders upOpes = PB.defaultValue
`setCollection` col
`setDataModel` model
`setCriteria` criteria
`setArgs` args
`setLimit` lmt
`setOrder` orders
setOperation:: PU.Update -> [PUO.UpdateOperation] -> PU.Update
setOperation up upOpe = up {PU.operation = Seq.fromList upOpe}
^ DataModel
createFind col model projs criteria args lmt orders grouping gCriteria = PB.defaultValue
`setCollection` col
`setDataModel` model
Seq Projection
Maybe
Seq Scalar
Maybe
setFields :: PF.Find -> [PP.Projection] -> PF.Find
setFields find proj = find {PF.projection = Seq.fromList proj }
setFields' :: PF.Find -> String -> PF.Find
setFields' find proj = find {PF.projection = Seq.fromList $ parseProjection' $ s2bs proj }
setGrouping :: PF.Find -> [PEx.Expr] -> PF.Find
setGrouping find group = find {PF.grouping = Seq.fromList group }
setGroupingCriteria :: PF.Find -> PEx.Expr -> PF.Find
setGroupingCriteria find criteria = find {PF.grouping_criteria = Just criteria }
modify :: (PBT.TextMsg msg
,PBR.ReflectDescriptor msg
,PBW.Wire msg
,Show msg
,Typeable msg
,MonadIO m
,MonadThrow m) => msg -> NodeSession -> m W.Word64
modify obj nodeSess = do
runReaderT (writeMessageR obj) nodeSess
if fst x == s_error then do
msg <- getError $ snd x
throwM $ XProtocolError msg
else do
ssc <- getPayloadSessionStateChanged frm
getRowsAffected ssc
delete :: (MonadIO m, MonadThrow m) => PD.Delete -> NodeSession -> m W.Word64
delete = modify
update :: (MonadIO m, MonadThrow m) => PU.Update -> NodeSession -> m W.Word64
update = modify
insert :: (MonadIO m, MonadThrow m) => PI.Insert -> NodeSession -> m W.Word64
insert = modify
-- TODO: unify this with select; add error handling
find fd nodeSess = do
runReaderT (writeMessageR fd) nodeSess
ret <- runReaderT readMessagesR nodeSess
debug ret
m ( [ m , _ ) - > m ( [ ColumnMetaData ] , _ ) - > m ( Seq ColumnMetaData , _ )
collect ColumnMetaData and Row , throw away others
where f = \(t, b) (meta, rows) ->
if t == s_resultset_column_meta_data then
(getColumnMetaData b : meta , rows )
else if t == s_resultset_row then
(meta , getRow b : rows)
else
(meta , rows )
tupleLfmap f (a,b) = (f a, b)
tupleRfmap = fmap
Map String Scalar - > [ String ] - > Seq . Seq
type BindMap = Map.Map String PS.Scalar
type BindList = [String]
emptyBindMap :: BindMap
emptyBindMap = Map.empty
bind :: String -> PS.Scalar -> BindMap -> BindMap
bind key val map = Map.insert key val map
ex : [ ( " a " , XM.scalar " aaa " ) , ( " b " , XM.scalar 1 ) , ( " c " , XM.scalar 1.2 ) ]
bindParams :: [(String, PS.Scalar)] -> BindMap
bindParams entries = foldr (\(key, val) accMap -> bind key val accMap) Map.empty entries
{-
bindParams' :: (XM.Scalarable a) => [(String, a)] -> BindMap
bindParams' entries = foldr (\(key, val) accMap -> bind key (XM.scalar val) accMap) Map.empty entries
>> bindParams' [("a", 1), ("b", True)]
<interactive>:302:20: error:
• No instance for (Num Bool) arising from the literal ‘1’
• In the expression: 1
In the expression: ("a", 1)
In the first argument of ‘bindParams'’, namely
‘[("a", 1), ("b", True)]’
>>
-}
bindMap2Seq :: BindMap -> BindList -> Seq.Seq PS.Scalar
bindMap2Seq map list = foldl (\acc item -> (Maybe.fromJust $ Map.lookup item map) Seq.<| acc) Seq.empty list
bindMap2Seq' :: BindMap -> BindList -> [PS.Scalar]
bindMap2Seq' map list = foldl (\acc item -> (Maybe.fromJust $ Map.lookup item map) : acc) [] list
|
f729928473f9919ca25dc5867c6cd34d9c2651e91b10626b204327f92101e608 | janegca/htdp2e | Exercise-275-find-name.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname Exercise-275-find-name) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
Exercise 275 .
;
; Define find-name. The function consumes a name and a list of names. It
retrieves the first name on the latter that is equal to , or an extension of ,
; the former.
;
; Define a function that ensures that no name on some list of names exceeds
some given width . Compare with exercise 236 .
(require 2htdp/abstraction)
; String [List-of String] -> Boolean
(check-expect (find-name "John" '()) false)
(check-expect (find-name "John" (list "JohnLennon")) "JohnLennon")
(check-expect (find-name "Paul" (list "John" "Paul" "Ringo" "George")) "Paul")
(define (find-name name los)
(for/or ([str los]) (if (string-contains? name str) str #false)))
; Number [List-of String] -> Boolean
; returns true if any string exceeds the given width (w)
(check-expect (any-exceed-width? 2 '()) false)
(check-expect (any-exceed-width? 3 (list "cat" "hat" "mine" "dog")) true)
(define (any-exceed-width? w los)
(for/or ([name los]) (> (string-length name) w))) | null | https://raw.githubusercontent.com/janegca/htdp2e/2d50378135edc2b8b1816204021f8763f8b2707b/03-Intermezzo-Scope/Exercise-275-find-name.rkt | racket | about the language level of this file in a form that our tools can easily process.
Define find-name. The function consumes a name and a list of names. It
the former.
Define a function that ensures that no name on some list of names exceeds
String [List-of String] -> Boolean
Number [List-of String] -> Boolean
returns true if any string exceeds the given width (w) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname Exercise-275-find-name) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
Exercise 275 .
retrieves the first name on the latter that is equal to , or an extension of ,
some given width . Compare with exercise 236 .
(require 2htdp/abstraction)
(check-expect (find-name "John" '()) false)
(check-expect (find-name "John" (list "JohnLennon")) "JohnLennon")
(check-expect (find-name "Paul" (list "John" "Paul" "Ringo" "George")) "Paul")
(define (find-name name los)
(for/or ([str los]) (if (string-contains? name str) str #false)))
(check-expect (any-exceed-width? 2 '()) false)
(check-expect (any-exceed-width? 3 (list "cat" "hat" "mine" "dog")) true)
(define (any-exceed-width? w los)
(for/or ([name los]) (> (string-length name) w))) |
3e0545444190665eb4d42160d861425f5398c855fe59ec0309ceba17ce234495 | igorhvr/bedlam | object.scm | " object.scm " Macroless Object System
;;; Author: Wade Humeniuk <>
;;;
;;; This code is in the public domain.
Date : February 15 , 1994
;; Object Construction:
0 1 2 3 4
;; #(object-tag get-method make-method! unmake-method! get-all-methods)
(define object:tag "object")
;;; This might be better done using COMLIST:DELETE-IF.
(define (object:removeq obj alist)
(if (null? alist)
alist
(if (eq? (caar alist) obj)
(cdr alist)
(cons (car alist) (object:removeq obj (cdr alist))))))
(define (get-all-methods obj)
(if (object? obj)
((vector-ref obj 4))
(slib:error "Cannot get methods on non-object: " obj)))
;@
(define (object? obj)
(and (vector? obj)
(eq? object:tag (vector-ref obj 0))))
;@
(define (make-method! obj generic-method method)
(if (object? obj)
(if (procedure? method)
(begin
((vector-ref obj 2) generic-method method)
method)
(slib:error "Method must be a procedure: " method))
(slib:error "Cannot make method on non-object: " obj)))
;@
(define (get-method obj generic-method)
(if (object? obj)
((vector-ref obj 1) generic-method)
(slib:error "Cannot get method on non-object: " obj)))
;@
(define (unmake-method! obj generic-method)
(if (object? obj)
((vector-ref obj 3) generic-method)
(slib:error "Cannot unmake method on non-object: " obj)))
;@
(define (make-predicate! obj generic-predicate)
(if (object? obj)
((vector-ref obj 2) generic-predicate (lambda (self) #t))
(slib:error "Cannot make predicate on non-object: " obj)))
;@
(define (make-generic-method . exception-procedure)
(define generic-method
(lambda (obj . operands)
(if (object? obj)
(let ((object-method ((vector-ref obj 1) generic-method)))
(if object-method
(apply object-method (cons obj operands))
(slib:error "Method not supported: " obj)))
(apply exception-procedure (cons obj operands)))))
(if (not (null? exception-procedure))
(if (procedure? (car exception-procedure))
(set! exception-procedure (car exception-procedure))
(slib:error "Exception Handler Not Procedure:"))
(set! exception-procedure
(lambda (obj . params)
(slib:error "Operation not supported: " obj))))
generic-method)
;@
(define (make-generic-predicate)
(define generic-predicate
(lambda (obj)
(if (object? obj)
(if ((vector-ref obj 1) generic-predicate)
#t
#f)
#f)))
generic-predicate)
;@
(define (make-object . ancestors)
(define method-list
(apply append (map (lambda (obj) (get-all-methods obj)) ancestors)))
(define (make-method! generic-method method)
(set! method-list (cons (cons generic-method method) method-list))
method)
(define (unmake-method! generic-method)
(set! method-list (object:removeq generic-method method-list))
#t)
(define (all-methods) method-list)
(define (get-method generic-method)
(let ((method-def (assq generic-method method-list)))
(if method-def (cdr method-def) #f)))
(vector object:tag get-method make-method! unmake-method! all-methods))
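;; Usage sketch (illustrative only): a generic method dispatched through an
;; object built by make-object. The names `area' and `circle' are hypothetical.
;;
;;   (define area (make-generic-method (lambda (obj) (slib:error "no area:" obj))))
;;   (define circle (make-object))
;;   (make-method! circle area (lambda (self) 314))
;;   (area circle)                ;=> 314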
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/iasylum/slib/3b2/object.scm | scheme | Author: Wade Humeniuk <>
This code is in the public domain.
Object Construction:
#(object-tag get-method make-method! unmake-method! get-all-methods)
This might be better done using COMLIST:DELETE-IF.
@
@
@
@
@
@
@
@ | " object.scm " Macroless Object System
Date : February 15 , 1994
0 1 2 3 4
(define object:tag "object")
(define (object:removeq obj alist)
(if (null? alist)
alist
(if (eq? (caar alist) obj)
(cdr alist)
(cons (car alist) (object:removeq obj (cdr alist))))))
(define (get-all-methods obj)
(if (object? obj)
((vector-ref obj 4))
(slib:error "Cannot get methods on non-object: " obj)))
(define (object? obj)
(and (vector? obj)
(eq? object:tag (vector-ref obj 0))))
(define (make-method! obj generic-method method)
(if (object? obj)
(if (procedure? method)
(begin
((vector-ref obj 2) generic-method method)
method)
(slib:error "Method must be a procedure: " method))
(slib:error "Cannot make method on non-object: " obj)))
(define (get-method obj generic-method)
(if (object? obj)
((vector-ref obj 1) generic-method)
(slib:error "Cannot get method on non-object: " obj)))
(define (unmake-method! obj generic-method)
(if (object? obj)
((vector-ref obj 3) generic-method)
(slib:error "Cannot unmake method on non-object: " obj)))
(define (make-predicate! obj generic-predicate)
(if (object? obj)
((vector-ref obj 2) generic-predicate (lambda (self) #t))
(slib:error "Cannot make predicate on non-object: " obj)))
(define (make-generic-method . exception-procedure)
(define generic-method
(lambda (obj . operands)
(if (object? obj)
(let ((object-method ((vector-ref obj 1) generic-method)))
(if object-method
(apply object-method (cons obj operands))
(slib:error "Method not supported: " obj)))
(apply exception-procedure (cons obj operands)))))
(if (not (null? exception-procedure))
(if (procedure? (car exception-procedure))
(set! exception-procedure (car exception-procedure))
(slib:error "Exception Handler Not Procedure:"))
(set! exception-procedure
(lambda (obj . params)
(slib:error "Operation not supported: " obj))))
generic-method)
(define (make-generic-predicate)
(define generic-predicate
(lambda (obj)
(if (object? obj)
(if ((vector-ref obj 1) generic-predicate)
#t
#f)
#f)))
generic-predicate)
(define (make-object . ancestors)
(define method-list
(apply append (map (lambda (obj) (get-all-methods obj)) ancestors)))
(define (make-method! generic-method method)
(set! method-list (cons (cons generic-method method) method-list))
method)
(define (unmake-method! generic-method)
(set! method-list (object:removeq generic-method method-list))
#t)
(define (all-methods) method-list)
(define (get-method generic-method)
(let ((method-def (assq generic-method method-list)))
(if method-def (cdr method-def) #f)))
(vector object:tag get-method make-method! unmake-method! all-methods))
|
e7d0bce08897ca0a003932d1a1ee1b99bea86f2b41e0934da549b045d623f528 | uwplse/coq-plugin-lib | modutils.mli | open Names
open Declarations
(* --- Modules --- *)
(*
* Pull any functor parameters off the module signature, returning the list of
* functor parameters and the list of module elements (i.e., fields).
*)
val decompose_module_signature : module_signature -> (Names.MBId.t * module_type_body) list * structure_body
(*
* Declare an interactive (i.e., elementwise) module structure, with the
* functional argument called to populate the module elements by declaration.
*
* The optional argument specifies functor parameters.
*)
val declare_module_structure : ?params:(Constrexpr.module_ast Declaremods.module_params) -> Id.t -> (ModPath.t -> unit) -> ModPath.t
(*
* Fold over the constant/inductive definitions within a module structure,
* skipping any module (type) components and any unsupported (e.g., mutual)
* inductive definitions.
*
* Elimination schemes (e.g., `Ind_rect`) are filtered out from the definitions.
*)
val fold_module_structure_by_decl : 'a -> ('a -> Constant.t -> constant_body -> 'a) -> ('a -> inductive -> Inductive.mind_specif -> 'a) -> module_body -> 'a
(*
* Same as `fold_module_structure_by_decl` except a single step function
* accepting a global reference.
*)
val fold_module_structure_by_glob : 'a -> ('a -> global_reference -> 'a) -> module_body -> 'a
(*
* Same as `fold_module_structure_by_glob` except an implicit unit accumulator.
*)
val iter_module_structure_by_glob : (global_reference -> unit) -> module_body -> unit
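(*
 * Usage sketch (illustrative only, not part of this interface): a client can
 * fold over a module body to collect the global references it declares; the
 * module_body value [mb] is assumed to be obtained from Coq's global
 * environment by the caller.
 *
 *   let globs (mb : module_body) : global_reference list =
 *     List.rev (fold_module_structure_by_glob [] (fun acc gref -> gref :: acc) mb)
 *)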
| null | https://raw.githubusercontent.com/uwplse/coq-plugin-lib/cbfde621598d26f365d195aa47207d2407ccc263/src/coq/logicutils/contexts/modutils.mli | ocaml | --- Modules ---
* Pull any functor parameters off the module signature, returning the list of
* functor parameters and the list of module elements (i.e., fields).
* Declare an interactive (i.e., elementwise) module structure, with the
* functional argument called to populate the module elements by declaration.
*
* The optional argument specifies functor parameters.
* Fold over the constant/inductive definitions within a module structure,
* skipping any module (type) components and any unsupported (e.g., mutual)
* inductive definitions.
*
* Elimination schemes (e.g., `Ind_rect`) are filtered out from the definitions.
* Same as `fold_module_structure_by_decl` except a single step function
* accepting a global reference.
* Same as `fold_module_structure_by_glob` except an implicit unit accumulator.
| open Names
open Declarations
val decompose_module_signature : module_signature -> (Names.MBId.t * module_type_body) list * structure_body
val declare_module_structure : ?params:(Constrexpr.module_ast Declaremods.module_params) -> Id.t -> (ModPath.t -> unit) -> ModPath.t
val fold_module_structure_by_decl : 'a -> ('a -> Constant.t -> constant_body -> 'a) -> ('a -> inductive -> Inductive.mind_specif -> 'a) -> module_body -> 'a
val fold_module_structure_by_glob : 'a -> ('a -> global_reference -> 'a) -> module_body -> 'a
val iter_module_structure_by_glob : (global_reference -> unit) -> module_body -> unit
|
99250a401e7a08d4f53ab73e877d03a445e2aafb1132234e3348fa13aaf1029b | hipsleek/hipsleek | ptranal.ml |
*
* Copyright (c) 2001-2002,
* John Kodumal <>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*)
exception Bad_return
exception Bad_function
open Cil
module H = Hashtbl
module A = Olf
exception UnknownLocation = A.UnknownLocation
type access = A.lvalue * bool
type access_map = (lval, access) H.t
(** a mapping from varinfo's back to fundecs *)
module VarInfoKey =
struct
type t = varinfo
let compare v1 v2 = v1.vid - v2.vid
end
module F = Map.Make (VarInfoKey)
(***********************************************************************)
(* *)
(* Global Variables *)
(* *)
(***********************************************************************)
let model_strings = ref false
let print_constraints = A.print_constraints
let debug_constraints = A.debug_constraints
let debug_aliases = A.debug_aliases
let smart_aliases = A.smart_aliases
let debug = A.debug
let analyze_mono = A.analyze_mono
let no_flow = A.no_flow
let no_sub = A.no_sub
let fun_ptrs_as_funs = ref false
let show_progress = ref false
let debug_may_aliases = ref false
let found_undefined = ref false
let conservative_undefineds = ref false
let current_fundec : fundec option ref = ref None
let fun_access_map : (fundec, access_map) H.t = H.create 64
(* A mapping from varinfos to fundecs *)
let fun_varinfo_map = ref F.empty
let current_ret : A.tau option ref = ref None
let lvalue_hash : (varinfo,A.lvalue) H.t = H.create 64
let expressions : (exp,A.tau) H.t = H.create 64
let lvalues : (lval,A.lvalue) H.t = H.create 64
let fresh_index : (unit -> int) =
let count = ref 0 in
fun () ->
incr count;
!count
let alloc_names = [
"malloc";
"calloc";
"realloc";
"xmalloc";
"__builtin_alloca";
"alloca";
"kmalloc"
]
(* This function should be set by the client if it
* knows of functions returning a result that have
* no side effects. If the result is not used, then
* the call will be eliminated. *)
let callHasNoSideEffects : (exp -> bool) ref =
ref (fun _ -> false)
let all_globals : varinfo list ref = ref []
let all_functions : fundec list ref = ref []
(***********************************************************************)
(* *)
(* Utility Functions *)
(* *)
(***********************************************************************)
let is_undefined_fun = function
Lval (lh, o) ->
if isFunctionType (typeOfLval (lh, o)) then
match lh with
Var v -> v.vstorage = Extern
| _ -> false
else false
| _ -> false
let is_alloc_fun = function
Lval (lh, o) ->
if isFunctionType (typeOfLval (lh, o)) then
match lh with
Var v -> List.mem v.vname alloc_names
| _ -> false
else false
| _ -> false
let next_alloc = function
Lval (Var v, o) ->
let name = Printf.sprintf "%s@%d" v.vname (fresh_index ())
in
A.address (A.make_lvalue false name (Some v)) (* check *)
| _ -> raise Bad_return
let is_effect_free_fun = function
Lval (lh, o) when isFunctionType (typeOfLval (lh, o)) ->
begin
match lh with
Var v ->
begin
try ("CHECK_" = String.sub v.vname 0 6 ||
!callHasNoSideEffects (Lval(lh,o)))
with Invalid_argument _ -> false
end
| _ -> false
end
| _ -> false
(***********************************************************************)
(* *)
(* AST Traversal Functions *)
(* *)
(***********************************************************************)
(* should do nothing, might need to worry about Index case *)
let analyzeOffset ( o : offset ) : A.tau = A.bottom ( )
let analyze_var_decl (v : varinfo ) : A.lvalue =
try H.find lvalue_hash v
with Not_found ->
let lv = A.make_lvalue false v.vname (Some v)
in
H.add lvalue_hash v lv;
lv
let isFunPtrType (t : typ) : bool =
match t with
TPtr (t, _) -> isFunctionType t
| _ -> false
let rec analyze_lval (lv : lval ) : A.lvalue =
let find_access (l : A.lvalue) (is_var : bool) : A.lvalue =
match !current_fundec with
None -> l
| Some f ->
let accesses = H.find fun_access_map f in
if H.mem accesses lv then l
else
begin
H.add accesses lv (l, is_var);
l
end in
let result =
match lv with
| Var v, _, _ -> (* instantiate every syntactic occurrence of a function *)
let alv =
if isFunctionType (typeOfLval lv) then
A.instantiate (analyze_var_decl v) (fresh_index ())
else analyze_var_decl v
in
find_access alv true
| Mem e, _, _ ->
(* assert (not (isFunctionType(typeOf(e))) ); *)
let alv =
if !fun_ptrs_as_funs && isFunPtrType (typeOf e) then
analyze_expr_as_lval e
else A.deref (analyze_expr e)
in
find_access alv false
in
H.replace lvalues lv result;
result
and analyze_expr_as_lval (e : exp) : A.lvalue =
match e with
Lval l -> analyze_lval l
| _ -> assert false (* todo -- other kinds of expressions? *)
and analyze_expr (e : exp ) : A.tau =
let result =
match e with
Const (CStr s) ->
if !model_strings then
A.address (A.make_lvalue
false
s
(Some (makeVarinfo false s charConstPtrType)))
else A.bottom ()
| Const c -> A.bottom ()
| Lval l -> A.rvalue (analyze_lval l)
| SizeOf _ -> A.bottom ()
| SizeOfStr _ -> A.bottom ()
| AlignOf _ -> A.bottom ()
| UnOp (op, e, t) -> analyze_expr e
| BinOp (op, e, e', t) -> A.join (analyze_expr e) (analyze_expr e')
| Question (_, e, e', _) -> A.join (analyze_expr e) (analyze_expr e')
| CastE (t, e) -> analyze_expr e
| AddrOf l ->
if !fun_ptrs_as_funs && isFunctionType (typeOfLval l) then
A.rvalue (analyze_lval l)
else A.address (analyze_lval l)
| StartOf l -> A.address (analyze_lval l)
| AlignOfE _ -> A.bottom ()
| SizeOfE _ -> A.bottom ()
in
H.add expressions e result;
result
(* check *)
let rec analyze_init (i : init ) : A.tau =
match i with
SingleInit e -> analyze_expr e
| CompoundInit (t, oi) ->
A.join_inits (Util.list_map (function (_, i) -> analyze_init i) oi)
let analyze_instr (i : instr ) : unit =
match i with
Set (lval, rhs, l) ->
A.assign (analyze_lval lval) (analyze_expr rhs)
| Call (res, fexpr, actuals, l) ->
if not (isFunctionType (typeOf fexpr)) then
() (* todo : is this a varargs? *)
else if is_alloc_fun fexpr then
begin
if !debug then print_string "Found allocation function...\n";
match res with
Some r -> A.assign (analyze_lval r) (next_alloc fexpr)
| None -> ()
end
else if is_effect_free_fun fexpr then
List.iter (fun e -> ignore (analyze_expr e)) actuals
else (* todo : check to see if the thing is an undefined function *)
let fnres, site =
if is_undefined_fun fexpr & !conservative_undefineds then
A.apply_undefined (Util.list_map analyze_expr actuals)
else
A.apply (analyze_expr fexpr) (Util.list_map analyze_expr actuals)
in
begin
match res with
Some r ->
begin
A.assign_ret site (analyze_lval r) fnres;
found_undefined := true;
end
| None -> ()
end
| Asm _ -> ()
let rec analyze_stmt (s : stmt ) : unit =
match s.skind with
Instr il -> List.iter analyze_instr il
| Return (eo, l) ->
begin
match eo with
Some e ->
begin
match !current_ret with
Some ret -> A.return ret (analyze_expr e)
| None -> raise Bad_return
end
| None -> ()
end
| Goto (s', l) -> () (* analyze_stmt(!s') *)
| If (e, b, b', l) ->
(* ignore the expression e; expressions can't be side-effecting *)
analyze_block b;
analyze_block b'
| Switch (e, b, sl, l) ->
analyze_block b;
List.iter analyze_stmt sl
| Loop (b, _, l, _, _) -> analyze_block b
| Block b -> analyze_block b
| TryFinally (b, h, _) ->
analyze_block b;
analyze_block h
| TryExcept (b, (il, _), h, _) ->
analyze_block b;
List.iter analyze_instr il;
analyze_block h
| Break l -> ()
| Continue l -> ()
| HipStmt _ -> ()
and analyze_block (b : block ) : unit =
List.iter analyze_stmt b.bstmts
let analyze_function (f : fundec ) : unit =
let oldlv = analyze_var_decl f.svar in
let ret = A.make_fresh (f.svar.vname ^ "_ret") in
let formals = Util.list_map analyze_var_decl f.sformals in
let newf = A.make_function f.svar.vname formals ret in
if !show_progress then
Printf.printf "Analyzing function %s\n" f.svar.vname;
fun_varinfo_map := F.add f.svar f (!fun_varinfo_map);
current_fundec := Some f;
H.add fun_access_map f (H.create 8);
A.assign oldlv newf;
current_ret := Some ret;
analyze_block f.sbody
let analyze_global (g : global ) : unit =
match g with
GVarDecl (v, l) -> () (* ignore (analyze_var_decl(v)) -- no need *)
| GVar (v, init, l) ->
all_globals := v :: !all_globals;
begin
match init.init with
Some i -> A.assign (analyze_var_decl v) (analyze_init i)
| None -> ignore (analyze_var_decl v)
end
| GFun (f, l) ->
all_functions := f :: !all_functions;
analyze_function f
| _ -> ()
let analyze_file (f : file) : unit =
iterGlobals f analyze_global
(***********************************************************************)
(* *)
(* High-level Query Interface *)
(* *)
(***********************************************************************)
(* Same as analyze_expr, but no constraints. *)
let rec traverse_expr (e : exp) : A.tau =
H.find expressions e
and traverse_expr_as_lval (e : exp) : A.lvalue =
match e with
| Lval l -> traverse_lval l
| _ -> assert false (* todo -- other kinds of expressions? *)
and traverse_lval (lv : lval ) : A.lvalue =
H.find lvalues lv
let may_alias (e1 : exp) (e2 : exp) : bool =
let tau1,tau2 = traverse_expr e1, traverse_expr e2 in
let result = A.may_alias tau1 tau2 in
if !debug_may_aliases then
begin
let doc1 = d_exp () e1 in
let doc2 = d_exp () e2 in
let s1 = Pretty.sprint ~width:30 doc1 in
let s2 = Pretty.sprint ~width:30 doc2 in
Printf.printf
"%s and %s may alias? %s\n"
s1
s2
(if result then "yes" else "no")
end;
result
let resolve_lval (lv : lval) : varinfo list =
A.points_to (traverse_lval lv)
let resolve_exp (e : exp) : varinfo list =
A.epoints_to (traverse_expr e)
let resolve_funptr (e : exp) : fundec list =
let varinfos = A.epoints_to (traverse_expr e) in
List.fold_left
(fun fdecs -> fun vinf ->
try F.find vinf !fun_varinfo_map :: fdecs
with Not_found -> fdecs)
[]
varinfos
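(* Usage sketch (illustrative only). A client runs the analysis over a whole
   Cil file before querying it; [file] is a Cil.file and [lv] an lval taken
   from it, both assumed to exist in the caller.

     Ptranal.analyze_file file;
     Ptranal.compute_results false;
     let pointed_to : varinfo list = Ptranal.resolve_lval lv in
     ...
*)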
let count_hash_elts h =
let result = ref 0 in
H.iter (fun _ -> fun _ -> incr result) lvalue_hash;
!result
let compute_may_aliases (b : bool) : unit =
let rec compute_may_aliases_aux (exps : exp list) =
match exps with
[] -> ()
| h :: t ->
ignore (Util.list_map (may_alias h) t);
compute_may_aliases_aux t
and exprs : exp list ref = ref [] in
H.iter (fun e -> fun _ -> exprs := e :: !exprs) expressions;
compute_may_aliases_aux !exprs
let compute_results (show_sets : bool) : unit =
let total_pointed_to = ref 0
and total_lvalues = H.length lvalue_hash
and counted_lvalues = ref 0
and lval_elts : (string * (string list)) list ref = ref [] in
let print_result (name, set) =
let rec print_set s =
match s with
[] -> ()
| h :: [] -> print_string h
| h :: t ->
print_string (h ^ ", ");
print_set t
and ptsize = List.length set in
total_pointed_to := !total_pointed_to + ptsize;
if ptsize > 0 then
begin
print_string (name ^ "(" ^ (string_of_int ptsize) ^ ") -> ");
print_set set;
print_newline ()
end
in
(* Make the most pessimistic assumptions about globals if an
undefined function is present. Such a function can write to every
global variable *)
let hose_globals () : unit =
List.iter
(fun vd -> A.assign_undefined (analyze_var_decl vd))
!all_globals
in
let show_progress_fn (counted : int ref) (total : int) : unit =
incr counted;
if !show_progress then
Printf.printf "Computed flow for %d of %d sets\n" !counted total
in
if !conservative_undefineds && !found_undefined then hose_globals ();
A.finished_constraints ();
if show_sets then
begin
print_endline "Computing points-to sets...";
Hashtbl.iter
(fun vinf -> fun lv ->
show_progress_fn counted_lvalues total_lvalues;
try lval_elts := (vinf.vname, A.points_to_names lv) :: !lval_elts
with A.UnknownLocation -> ())
lvalue_hash;
List.iter print_result !lval_elts;
Printf.printf
"Total number of things pointed to: %d\n"
!total_pointed_to
end;
if !debug_may_aliases then
begin
Printf.printf "Printing may alias relationships\n";
compute_may_aliases true
end
let print_types () : unit =
print_string "Printing inferred types of lvalues...\n";
Hashtbl.iter
(fun vi -> fun lv ->
Printf.printf "%s : %s\n" vi.vname (A.string_of_lvalue lv))
lvalue_hash
(** Alias queries. For each function, gather sets of locals, formals, and
globals. Do n^2 work for each of these functions, reporting whether or not
each pair of values is aliased. Aliasing is determined by taking points-to
set intersections.
*)
let compute_aliases = compute_may_aliases
(***********************************************************************)
(* *)
(* Abstract Location Interface *)
(* *)
(***********************************************************************)
type absloc = A.absloc
let rec lvalue_of_varinfo (vi : varinfo) : A.lvalue =
H.find lvalue_hash vi
let lvalue_of_lval = traverse_lval
let tau_of_expr = traverse_expr
(** return an abstract location for a varinfo, resp. lval *)
let absloc_of_varinfo vi =
A.absloc_of_lvalue (lvalue_of_varinfo vi)
let absloc_of_lval lv =
A.absloc_of_lvalue (lvalue_of_lval lv)
let absloc_e_points_to e =
A.absloc_epoints_to (tau_of_expr e)
let absloc_lval_aliases lv =
A.absloc_points_to (lvalue_of_lval lv)
(** all abslocs that e transitively points to *)
let absloc_e_transitive_points_to (e : Cil.exp) : absloc list =
let rec lv_trans_ptsto (worklist : varinfo list) (acc : varinfo list) : absloc list =
match worklist with
[] -> Util.list_map absloc_of_varinfo acc
| vi :: wklst'' ->
if List.mem vi acc then lv_trans_ptsto wklst'' acc
else
lv_trans_ptsto
(List.rev_append
(A.points_to (lvalue_of_varinfo vi))
wklst'')
(vi :: acc)
in
lv_trans_ptsto (A.epoints_to (tau_of_expr e)) []
let absloc_eq a b = A.absloc_eq (a, b)
let d_absloc: unit -> absloc -> Pretty.doc = A.d_absloc
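(* Usage sketch (illustrative only): abstract locations support alias checks
   without going back to varinfos; [e1] and [e2] are assumed to be expressions
   from the analyzed file.

     let aliased =
       List.exists
         (fun a -> List.exists (absloc_eq a) (absloc_e_points_to e2))
         (absloc_e_points_to e1)
*)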
let ptrAnalysis = ref false
let ptrResults = ref false
let ptrTypes = ref false
(** Turn this into a CIL feature *)
let feature : featureDescr = {
fd_name = "ptranal";
fd_enabled = ptrAnalysis;
fd_description = "alias analysis";
fd_extraopt = [
("--ptr_may_aliases",
Arg.Unit (fun _ -> debug_may_aliases := true),
" Print out results of may alias queries");
("--ptr_unify", Arg.Unit (fun _ -> no_sub := true),
" Make the alias analysis unification-based");
("--ptr_model_strings", Arg.Unit (fun _ -> model_strings := true),
" Make the alias analysis model string constants");
("--ptr_conservative",
Arg.Unit (fun _ -> conservative_undefineds := true),
" Treat undefineds conservatively in alias analysis");
("--ptr_results", Arg.Unit (fun _ -> ptrResults := true),
" print the results of the alias analysis");
("--ptr_mono", Arg.Unit (fun _ -> analyze_mono := true),
" run alias analysis monomorphically");
("--ptr_types",Arg.Unit (fun _ -> ptrTypes := true),
" print inferred points-to analysis types")
];
fd_doit = (function (f: file) ->
analyze_file f;
compute_results !ptrResults;
if !ptrTypes then print_types ());
fd_post_check = false (* No changes *)
}
| null | https://raw.githubusercontent.com/hipsleek/hipsleek/596f7fa7f67444c8309da2ca86ba4c47d376618c/cil/src/ext/pta/ptranal.ml | ocaml | *********************************************************************
Global Variables
*********************************************************************
This function should be set by the client if it
* knows of functions returning a result that have
* no side effects. If the result is not used, then
* the call will be eliminated.
*********************************************************************
Utility Functions
*********************************************************************
check
*********************************************************************
*********************************************************************
should do nothing, might need to worry about Index case
instantiate every syntactic occurrence of a function
assert (not (isFunctionType(typeOf(e))) );
todo -- other kinds of expressions?
check
todo : is this a varargs?
todo : check to see if the thing is an undefined function
analyze_stmt(!s')
ignore the expression e; expressions can't be side-effecting
ignore (analyze_var_decl(v)) -- no need
*********************************************************************
High-level Query Interface
*********************************************************************
Same as analyze_expr, but no constraints.
todo -- other kinds of expressions?
Make the most pessimistic assumptions about globals if an
undefined function is present. Such a function can write to every
global variable
* Alias queries. For each function, gather sets of locals, formals, and
globals. Do n^2 work for each of these functions, reporting whether or not
each pair of values is aliased. Aliasing is determined by taking points-to
set intersections.
*********************************************************************
Abstract Location Interface
*********************************************************************
* return an abstract location for a varinfo, resp. lval
* Turn this into a CIL feature
No changes |
*
* Copyright (c) 2001-2002,
* John Kodumal <>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*)
exception Bad_return
exception Bad_function
open Cil
module H = Hashtbl
module A = Olf
exception UnknownLocation = A.UnknownLocation
type access = A.lvalue * bool
type access_map = (lval, access) H.t
* a mapping from varinfo 's back to fundecs
module VarInfoKey =
struct
type t = varinfo
let compare v1 v2 = v1.vid - v2.vid
end
module F = Map.Make (VarInfoKey)
let model_strings = ref false
let print_constraints = A.print_constraints
let debug_constraints = A.debug_constraints
let debug_aliases = A.debug_aliases
let smart_aliases = A.smart_aliases
let debug = A.debug
let analyze_mono = A.analyze_mono
let no_flow = A.no_flow
let no_sub = A.no_sub
let fun_ptrs_as_funs = ref false
let show_progress = ref false
let debug_may_aliases = ref false
let found_undefined = ref false
let conservative_undefineds = ref false
let current_fundec : fundec option ref = ref None
let fun_access_map : (fundec, access_map) H.t = H.create 64
A mapping from varinfos to fundecs
let fun_varinfo_map = ref F.empty
let current_ret : A.tau option ref = ref None
let lvalue_hash : (varinfo,A.lvalue) H.t = H.create 64
let expressions : (exp,A.tau) H.t = H.create 64
let lvalues : (lval,A.lvalue) H.t = H.create 64
let fresh_index : (unit -> int) =
let count = ref 0 in
fun () ->
incr count;
!count
let alloc_names = [
"malloc";
"calloc";
"realloc";
"xmalloc";
"__builtin_alloca";
"alloca";
"kmalloc"
]
let callHasNoSideEffects : (exp -> bool) ref =
ref (fun _ -> false)
let all_globals : varinfo list ref = ref []
let all_functions : fundec list ref = ref []
let is_undefined_fun = function
Lval (lh, o) ->
if isFunctionType (typeOfLval (lh, o)) then
match lh with
Var v -> v.vstorage = Extern
| _ -> false
else false
| _ -> false
let is_alloc_fun = function
Lval (lh, o) ->
if isFunctionType (typeOfLval (lh, o)) then
match lh with
Var v -> List.mem v.vname alloc_names
| _ -> false
else false
| _ -> false
let next_alloc = function
Lval (Var v, o) ->
let name = Printf.sprintf "%s@%d" v.vname (fresh_index ())
in
| _ -> raise Bad_return
let is_effect_free_fun = function
Lval (lh, o) when isFunctionType (typeOfLval (lh, o)) ->
begin
match lh with
Var v ->
begin
try ("CHECK_" = String.sub v.vname 0 6 ||
!callHasNoSideEffects (Lval(lh,o)))
with Invalid_argument _ -> false
end
| _ -> false
end
| _ -> false
(* AST Traversal Functions *)
let analyzeOffset (o : offset) : A.tau = A.bottom ()
let analyze_var_decl (v : varinfo ) : A.lvalue =
try H.find lvalue_hash v
with Not_found ->
let lv = A.make_lvalue false v.vname (Some v)
in
H.add lvalue_hash v lv;
lv
let isFunPtrType (t : typ) : bool =
match t with
TPtr (t, _) -> isFunctionType t
| _ -> false
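(* analyze_lval: translate a CIL lval into an abstract lvalue, caching the
   result and recording the access in the current function's access map so
   later queries can recover the constraint variables built here. *)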
let rec analyze_lval (lv : lval ) : A.lvalue =
let find_access (l : A.lvalue) (is_var : bool) : A.lvalue =
match !current_fundec with
None -> l
| Some f ->
let accesses = H.find fun_access_map f in
if H.mem accesses lv then l
else
begin
H.add accesses lv (l, is_var);
l
end in
let result =
    match lv with
        Var v, _ ->
          let alv =
            if isFunctionType (typeOfLval lv) then
              A.instantiate (analyze_var_decl v) (fresh_index ())
            else analyze_var_decl v
          in
            find_access alv true
      | Mem e, _ ->
let alv =
if !fun_ptrs_as_funs && isFunPtrType (typeOf e) then
analyze_expr_as_lval e
else A.deref (analyze_expr e)
in
find_access alv false
in
H.replace lvalues lv result;
result
and analyze_expr_as_lval (e : exp) : A.lvalue =
  match e with
      Lval l -> analyze_lval l
    | _ -> assert false (* only lvalue expressions are expected here *)
and analyze_expr (e : exp ) : A.tau =
let result =
match e with
Const (CStr s) ->
if !model_strings then
A.address (A.make_lvalue
false
s
(Some (makeVarinfo false s charConstPtrType)))
else A.bottom ()
| Const c -> A.bottom ()
| Lval l -> A.rvalue (analyze_lval l)
| SizeOf _ -> A.bottom ()
| SizeOfStr _ -> A.bottom ()
| AlignOf _ -> A.bottom ()
| UnOp (op, e, t) -> analyze_expr e
| BinOp (op, e, e', t) -> A.join (analyze_expr e) (analyze_expr e')
| Question (_, e, e', _) -> A.join (analyze_expr e) (analyze_expr e')
| CastE (t, e) -> analyze_expr e
| AddrOf l ->
if !fun_ptrs_as_funs && isFunctionType (typeOfLval l) then
A.rvalue (analyze_lval l)
else A.address (analyze_lval l)
| StartOf l -> A.address (analyze_lval l)
| AlignOfE _ -> A.bottom ()
| SizeOfE _ -> A.bottom ()
in
H.add expressions e result;
result
let rec analyze_init (i : init ) : A.tau =
match i with
SingleInit e -> analyze_expr e
| CompoundInit (t, oi) ->
A.join_inits (Util.list_map (function (_, i) -> analyze_init i) oi)
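(* analyze_instr: emit points-to constraints for one instruction. Assignments
   constrain the lhs by the rhs; calls to allocators bind the result to a
   fresh location; all other calls flow actuals into formals via A.apply
   (or A.apply_undefined when treating undefined functions conservatively). *)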
let analyze_instr (i : instr ) : unit =
match i with
Set (lval, rhs, l) ->
A.assign (analyze_lval lval) (analyze_expr rhs)
| Call (res, fexpr, actuals, l) ->
      if not (isFunctionType (typeOf fexpr)) then
        () (* not calling through a function type; nothing to analyze *)
      else if is_alloc_fun fexpr then
begin
if !debug then print_string "Found allocation function...\n";
match res with
Some r -> A.assign (analyze_lval r) (next_alloc fexpr)
| None -> ()
end
else if is_effect_free_fun fexpr then
        List.iter (fun e -> ignore (analyze_expr e)) actuals
      else
        let fnres, site =
if is_undefined_fun fexpr & !conservative_undefineds then
A.apply_undefined (Util.list_map analyze_expr actuals)
else
A.apply (analyze_expr fexpr) (Util.list_map analyze_expr actuals)
in
begin
match res with
Some r ->
begin
A.assign_ret site (analyze_lval r) fnres;
found_undefined := true;
end
| None -> ()
end
| Asm _ -> ()
let rec analyze_stmt (s : stmt ) : unit =
match s.skind with
Instr il -> List.iter analyze_instr il
| Return (eo, l) ->
begin
match eo with
Some e ->
begin
match !current_ret with
Some ret -> A.return ret (analyze_expr e)
| None -> raise Bad_return
end
| None -> ()
end
| If (e, b, b', l) ->
analyze_block b;
analyze_block b'
| Switch (e, b, sl, l) ->
analyze_block b;
List.iter analyze_stmt sl
| Loop (b, _, l, _, _) -> analyze_block b
| Block b -> analyze_block b
| TryFinally (b, h, _) ->
analyze_block b;
analyze_block h
| TryExcept (b, (il, _), h, _) ->
analyze_block b;
List.iter analyze_instr il;
analyze_block h
| Break l -> ()
| Continue l -> ()
| HipStmt _ -> ()
and analyze_block (b : block ) : unit =
List.iter analyze_stmt b.bstmts
let analyze_function (f : fundec ) : unit =
let oldlv = analyze_var_decl f.svar in
let ret = A.make_fresh (f.svar.vname ^ "_ret") in
let formals = Util.list_map analyze_var_decl f.sformals in
let newf = A.make_function f.svar.vname formals ret in
if !show_progress then
Printf.printf "Analyzing function %s\n" f.svar.vname;
fun_varinfo_map := F.add f.svar f (!fun_varinfo_map);
current_fundec := Some f;
H.add fun_access_map f (H.create 8);
A.assign oldlv newf;
current_ret := Some ret;
analyze_block f.sbody
let analyze_global (g : global ) : unit =
match g with
| GVar (v, init, l) ->
all_globals := v :: !all_globals;
begin
match init.init with
Some i -> A.assign (analyze_var_decl v) (analyze_init i)
| None -> ignore (analyze_var_decl v)
end
| GFun (f, l) ->
all_functions := f :: !all_functions;
analyze_function f
| _ -> ()
let analyze_file (f : file) : unit =
iterGlobals f analyze_global
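(* The traverse_*/resolve_* queries below only consult the hash tables filled
   in by analyze_file, so the file must be analyzed before they are used. *)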
let rec traverse_expr (e : exp) : A.tau =
H.find expressions e
and traverse_expr_as_lval (e : exp) : A.lvalue =
match e with
| Lval l -> traverse_lval l
and traverse_lval (lv : lval ) : A.lvalue =
H.find lvalues lv
let may_alias (e1 : exp) (e2 : exp) : bool =
let tau1,tau2 = traverse_expr e1, traverse_expr e2 in
let result = A.may_alias tau1 tau2 in
if !debug_may_aliases then
begin
let doc1 = d_exp () e1 in
let doc2 = d_exp () e2 in
let s1 = Pretty.sprint ~width:30 doc1 in
let s2 = Pretty.sprint ~width:30 doc2 in
Printf.printf
"%s and %s may alias? %s\n"
s1
s2
(if result then "yes" else "no")
end;
result
let resolve_lval (lv : lval) : varinfo list =
A.points_to (traverse_lval lv)
let resolve_exp (e : exp) : varinfo list =
A.epoints_to (traverse_expr e)
let resolve_funptr (e : exp) : fundec list =
let varinfos = A.epoints_to (traverse_expr e) in
List.fold_left
(fun fdecs -> fun vinf ->
try F.find vinf !fun_varinfo_map :: fdecs
with Not_found -> fdecs)
[]
varinfos
let count_hash_elts h =
let result = ref 0 in
H.iter (fun _ -> fun _ -> incr result) lvalue_hash;
!result
let compute_may_aliases (b : bool) : unit =
let rec compute_may_aliases_aux (exps : exp list) =
match exps with
[] -> ()
| h :: t ->
ignore (Util.list_map (may_alias h) t);
compute_may_aliases_aux t
and exprs : exp list ref = ref [] in
H.iter (fun e -> fun _ -> exprs := e :: !exprs) expressions;
compute_may_aliases_aux !exprs
let compute_results (show_sets : bool) : unit =
let total_pointed_to = ref 0
and total_lvalues = H.length lvalue_hash
and counted_lvalues = ref 0
and lval_elts : (string * (string list)) list ref = ref [] in
let print_result (name, set) =
let rec print_set s =
match s with
[] -> ()
| h :: [] -> print_string h
| h :: t ->
print_string (h ^ ", ");
print_set t
and ptsize = List.length set in
total_pointed_to := !total_pointed_to + ptsize;
if ptsize > 0 then
begin
print_string (name ^ "(" ^ (string_of_int ptsize) ^ ") -> ");
print_set set;
print_newline ()
end
in
let hose_globals () : unit =
List.iter
(fun vd -> A.assign_undefined (analyze_var_decl vd))
!all_globals
in
let show_progress_fn (counted : int ref) (total : int) : unit =
incr counted;
if !show_progress then
Printf.printf "Computed flow for %d of %d sets\n" !counted total
in
if !conservative_undefineds && !found_undefined then hose_globals ();
A.finished_constraints ();
if show_sets then
begin
print_endline "Computing points-to sets...";
Hashtbl.iter
(fun vinf -> fun lv ->
show_progress_fn counted_lvalues total_lvalues;
try lval_elts := (vinf.vname, A.points_to_names lv) :: !lval_elts
with A.UnknownLocation -> ())
lvalue_hash;
List.iter print_result !lval_elts;
Printf.printf
"Total number of things pointed to: %d\n"
!total_pointed_to
end;
if !debug_may_aliases then
begin
Printf.printf "Printing may alias relationships\n";
compute_may_aliases true
end
let print_types () : unit =
print_string "Printing inferred types of lvalues...\n";
Hashtbl.iter
(fun vi -> fun lv ->
Printf.printf "%s : %s\n" vi.vname (A.string_of_lvalue lv))
lvalue_hash
let compute_aliases = compute_may_aliases
type absloc = A.absloc
let rec lvalue_of_varinfo (vi : varinfo) : A.lvalue =
H.find lvalue_hash vi
let lvalue_of_lval = traverse_lval
let tau_of_expr = traverse_expr
let absloc_of_varinfo vi =
A.absloc_of_lvalue (lvalue_of_varinfo vi)
let absloc_of_lval lv =
A.absloc_of_lvalue (lvalue_of_lval lv)
let absloc_e_points_to e =
A.absloc_epoints_to (tau_of_expr e)
let absloc_lval_aliases lv =
A.absloc_points_to (lvalue_of_lval lv)
(* all abslocs that e transitively points to *)
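(* Worklist fixpoint: keep adding whatever each reached varinfo points to
   until no new varinfos appear, then convert the accumulated set to abslocs. *)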
let absloc_e_transitive_points_to (e : Cil.exp) : absloc list =
let rec lv_trans_ptsto (worklist : varinfo list) (acc : varinfo list) : absloc list =
match worklist with
[] -> Util.list_map absloc_of_varinfo acc
| vi :: wklst'' ->
if List.mem vi acc then lv_trans_ptsto wklst'' acc
else
lv_trans_ptsto
(List.rev_append
(A.points_to (lvalue_of_varinfo vi))
wklst'')
(vi :: acc)
in
lv_trans_ptsto (A.epoints_to (tau_of_expr e)) []
let absloc_eq a b = A.absloc_eq (a, b)
let d_absloc: unit -> absloc -> Pretty.doc = A.d_absloc
let ptrAnalysis = ref false
let ptrResults = ref false
let ptrTypes = ref false
let feature : featureDescr = {
fd_name = "ptranal";
fd_enabled = ptrAnalysis;
fd_description = "alias analysis";
fd_extraopt = [
("--ptr_may_aliases",
Arg.Unit (fun _ -> debug_may_aliases := true),
" Print out results of may alias queries");
("--ptr_unify", Arg.Unit (fun _ -> no_sub := true),
" Make the alias analysis unification-based");
("--ptr_model_strings", Arg.Unit (fun _ -> model_strings := true),
" Make the alias analysis model string constants");
("--ptr_conservative",
Arg.Unit (fun _ -> conservative_undefineds := true),
" Treat undefineds conservatively in alias analysis");
("--ptr_results", Arg.Unit (fun _ -> ptrResults := true),
" print the results of the alias analysis");
("--ptr_mono", Arg.Unit (fun _ -> analyze_mono := true),
" run alias analysis monomorphically");
("--ptr_types",Arg.Unit (fun _ -> ptrTypes := true),
" print inferred points-to analysis types")
];
fd_doit = (function (f: file) ->
analyze_file f;
compute_results !ptrResults;
if !ptrTypes then print_types ());
}
|
1f530a13cb40ce8e49a0d3d22b37d3955fd323f081f819d276cdaf0a124a4039 | 97jaz/gregor | symbols.rkt | #lang racket/base
(require racket/match
cldr/core
cldr/dates-modern
cldr/numbers-modern
"../ast.rkt"
"../parse-state.rkt"
"trie.rkt")
(provide (all-defined-out))
(define (l10n-cal loc . path)
(cldr-ref (ca-gregorian loc) path))
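;; str-parse consumes the longest prefix of the remaining input that matches a
;; localized string in the trie, then hands the matched key (minus any
;; "-alt-variant" suffix) to the update continuation; no match is a parse error.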
(define (str-parse ast trie state update)
(define input (parse-state-input state))
(match (trie-longest-match trie (string->list input))
[(cons key val)
(update
(substring input (string-length val))
(parse-state-fields state)
(regexp-replace #rx"-alt-variant"
(symbol->string key)
""))]
[_
(parse-error ast state)]))
(define (sym-parse ast trie state update)
(str-parse ast trie state
(λ (in fs val)
(update in fs (string->symbol val)))))
(define (symnum-parse ast trie state update)
(str-parse ast trie state
(λ (in fs val)
(update in fs (string->number val)))))
| null | https://raw.githubusercontent.com/97jaz/gregor/91d71c6082fec4197aaf9ade57aceb148116c11c/gregor-lib/gregor/private/pattern/l10n/symbols.rkt | racket | #lang racket/base
(require racket/match
cldr/core
cldr/dates-modern
cldr/numbers-modern
"../ast.rkt"
"../parse-state.rkt"
"trie.rkt")
(provide (all-defined-out))
(define (l10n-cal loc . path)
(cldr-ref (ca-gregorian loc) path))
(define (str-parse ast trie state update)
(define input (parse-state-input state))
(match (trie-longest-match trie (string->list input))
[(cons key val)
(update
(substring input (string-length val))
(parse-state-fields state)
(regexp-replace #rx"-alt-variant"
(symbol->string key)
""))]
[_
(parse-error ast state)]))
(define (sym-parse ast trie state update)
(str-parse ast trie state
(λ (in fs val)
(update in fs (string->symbol val)))))
(define (symnum-parse ast trie state update)
(str-parse ast trie state
(λ (in fs val)
(update in fs (string->number val)))))
|
|
bd3d616fbc108973ea5b9d56c81c91981e5611279a2d45f06139fb3316831031 | gebi/jungerl | xmerl_validate.erl | The contents of this file are subject to the Erlang Public License ,
%%% Version 1.0, (the "License"); you may not use this file except in
%%% compliance with the License. You may obtain a copy of the License at
%%%
%%%
%%% Software distributed under the License is distributed on an "AS IS"
%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%%% the License for the specific language governing rights and limitations
%%% under the License.
%%%
%%% The Original Code is xmerl-0.19
%%%
%%% The Initial Developer of the Original Code is Ericsson Telecom
%%% AB. Portions created by Ericsson are Copyright (C), 1998, Ericsson
%%% Telecom AB. All Rights Reserved.
%%%
%%%
%%%----------------------------------------------------------------------
%%% #0. BASIC INFORMATION
%%%----------------------------------------------------------------------
%%% @private
%%% File:        xmerl_validate.erl
%%% Author       : <>
%%% Description  : XML validation hooks for xmerl
%%%
%%% Modules used : lists, ets, xmerl_lib
%%%
%%%----------------------------------------------------------------------
-module(xmerl_validate).
-vsn('0.1').
-date('27-11-02').
-author('').
-export([validate/2]).
%%%%%%%%%%%%% TODO
%%%% Attribute data types id, idrefs, FIXED, ...
-include("xmerl.hrl"). % record def, macros
%% +type validate(xmerl_scanner(),xmlElement())->
%%	xmlElement() | {error,tuple()}.
validate(#xmerl_scanner{doctype_name=DTName},
#xmlElement{name=Name})
when DTName=/=Name ->
{error, {mismatched_root_element,Name,DTName}};
validate(#xmerl_scanner{rules=Rules}=S,
XML=#xmlElement{name=Name})->
% io:format("XML:~n~p~n",[XML]),
catch do_validation(read_rules(Rules,Name),XML,Rules,S);
validate(_, XML) ->
{error, {no_xml_element, XML}}.
%% +type validate(rules(),xmlElement())->
%%	{ok, xmlElement()} | {error, tuple()}.
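%% do_validation/4 checks one element against its DTD rule: first the
%% attributes, then (recursively) the content model of its children.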
do_validation(undefined,#xmlElement{name=Name}, _Rules,_S) ->
{error,{unknown_element,Name}};
do_validation(_E, #xmlText{}=XML, _Rules,_S) -> % Nothing to validate
XML;
do_validation(El_Rule,XML,Rules,S)->
case catch valid_attributes(El_Rule#xmlElement.attributes,
XML#xmlElement.attributes,S) of
{'EXIT',Reason} ->
{error,Reason};
{error,Reason} ->
{error,Reason};
Attr_2->
XML_=XML#xmlElement{attributes=Attr_2},
El_Rule_Cont = El_Rule#xmlElement.content,
WSActionMode = ws_action_mode(El_Rule#xmlElement.elementdef,
El_Rule_Cont,S),
XML_Cont = XML_#xmlElement.content,
check_direct_ws_SDD(XML_Cont,WSActionMode),
case valid_contents(El_Rule_Cont,
XML_Cont,Rules,S,WSActionMode) of
{error,Reason}->
{error,Reason};
{error,Reason,N}->
{error,Reason,N};
XMLS ->
XML_#xmlElement{content=XMLS}
end
end.
check_direct_ws_SDD(XML,always_preserve) ->
case XML of
[#xmlText{}|_Rest] ->
exit({error,{illegal_whitespace_standalone_doc,XML}});
_ -> ok
end,
case lists:reverse(XML) of
[#xmlText{}|_Rest2] ->
exit({error,{illegal_whitespace_standalone_doc,XML}});
_ -> ok
end;
check_direct_ws_SDD(_,_) -> ok.
ws_action_mode({external,_},Content,#xmerl_scanner{standalone=yes}) ->
case element_content(Content) of
children ->
always_preserve;
_ ->
preserve
end;
ws_action_mode(_,_,_) ->
preserve.
element_content(A) when atom(A),A /= any, A /= empty ->
children;
element_content({choice,L}) when list(L) ->
element_content(L);
element_content({seq,L}) when list(L) ->
element_content(L);
element_content(['#PCDATA'|_T]) ->
mixed;
element_content('#PCDATA') ->
mixed;
element_content({'*',Rest}) ->
element_content(Rest);
element_content(_) -> children.
%% +type read_rules(DTD::atom(),Element_Name::atom())->
%%	undefined | xmlElement().
read_rules(_, pcdata) ->
pcdata;
read_rules(T, Name) ->
case ets:lookup(T, {elem_def, Name}) of
[] ->
undefined;
[{_K, V}] ->
V
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%% Attributes Validation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% +deftype attribute_rule() = {Attr_Name::atom(), attribute_type(),
%% attribute_priority()}.
%% +type valid_attributes([attribute_rule()],[xmlAttribute()])->
%%	[xmlAttribute()] | {error, attribute_unknow}.
valid_attributes(All_Attr,[#xmlAttribute{}|_T]=Attr,S)->
single_ID_definition(All_Attr),
vc_Name_Token_IDREFS(All_Attr,Attr),
lists:foreach(fun(#xmlAttribute{name=Name})->
case is_attribute_exist(Name,All_Attr) of
true ->
ok;
false ->
exit({error,{attribute_unknown,Name}})
end
end,
Attr),
lists:flatten(lists:foldl(fun({Name,DataType,IF,DefDecl,Env},Attr_2)->
Attr_2++
[valid_attribute(Name,DataType,IF,
DefDecl,Attr,Env,S)]
end,[],All_Attr));
valid_attributes([],[],_) ->
[];
valid_attributes(All_Attr,[],S) ->
single_ID_definition(All_Attr),
lists:flatten(lists:foldl(fun({Name,DataType,IF,DefDecl,Env},Attr_2)->
Attr_2++[valid_attribute(Name,
DataType,IF,
DefDecl,
[],
Env,S)]
end,[],All_Attr)).
%%%% [60] DefaultDecl ::=
%%%%    '#REQUIRED' | '#IMPLIED'
%%%%    | (('#FIXED' S)? AttValue)
%% +deftype attribute_priority = '#REQUIRED'|'#FIXED'|'#IMPLIED'.
%% +type valid_attribute(Name::atom(), DataType::attribute_value(), ...) ->
%%	[xmlAttribute()] | exit().
valid_attribute(Name,DataType,IF,DefaultDecl,List_of_Attributes,Env,S)->
SA = S#xmerl_scanner.standalone,
Attr=search_attr(Name,List_of_Attributes),
check_SDD_validity(SA,Env,Attr,IF),
case {DefaultDecl,IF,Attr} of
{'#REQUIRED',_,no_attribute}->
exit({error,{Name,is_required}});
{'#IMPLIED',_,no_attribute}->
[]; %% and no default value
{'#FIXED',DefVal,#xmlAttribute{value=DefVal}=Attr} ->
Attr;
{'#FIXED',A,no_attribute} ->
#xmlAttribute{name=Name,value=A}; % FIXED declare value becomes default.
{'#FIXED',A,B} ->
exit({error,{fixed_default_value_missmatch,A,B}});
{_,Value,no_attribute} when list(Value)->
#xmlAttribute{name=Name,value=Value};
{_,_,#xmlAttribute{}=Attr}->
%% do test data value, and default_value
test_attribute_value(DataType,Attr,IF,S);
{DefDecl,Else,XML} ->
exit({error,{unknow_attribute_type,DefDecl,Else,XML}})
end.
vc_Name_Token_IDREFS([{Name,Type,_,_,_}|Rest],Attrs)
when Type=='NMTOKEN';Type=='NMTOKENS'->
case lists:keysearch(Name,#xmlAttribute.name,Attrs) of
{value,A} ->
valid_nmtoken_value(A#xmlAttribute.value,Type);
_ -> ok
end,
vc_Name_Token_IDREFS(Rest,Attrs);
vc_Name_Token_IDREFS([{Name,Type,_,_,_}|Rest],Attrs)
when Type=='IDREFS'->
case lists:keysearch(Name,#xmlAttribute.name,Attrs) of
{value,A} ->
valid_IDREFS(A#xmlAttribute.value,Type);
_ -> ok
end,
vc_Name_Token_IDREFS(Rest,Attrs);
vc_Name_Token_IDREFS([_H|Rest],Attrs) ->
vc_Name_Token_IDREFS(Rest,Attrs);
vc_Name_Token_IDREFS([],_) -> ok.
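%% valid_nmtoken_value/2 enforces the validity constraint that every character
%% of an NMTOKEN (or of each token in an NMTOKENS value) is a name character;
%% whitespace is only accepted as a separator between NMTOKENS tokens.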
valid_nmtoken_value([],'NMTOKENS') ->
exit({error,{at_least_one_Nmtoken_required}});
% valid_nmtoken_value([H|_T]=L,'NMTOKENS') when list(H) ->
%     ValidChar =
%         fun(X) ->
%             case xmerl_lib:is_namechar(X) of
%                 false ->
%                     exit({error,{invalid_character_in_Nmtoken,X}});
%                 _ -> ok
%             end
%         end,
%     ValidCharList =
%         fun([Nmtok|T],F) ->
%                 lists:foreach(ValidChar,Nmtok),
%                 F(T,F);
%            ([],_) -> ok
%         end,
%     ValidCharList(L,ValidChar);
valid_nmtoken_value(Nmtok,_) ->
ValidChar =
fun(X) when ?whitespace(X),Nmtok=='NMTOKENS' ->
ok;
(X) ->
case xmerl_lib:is_namechar(X) of
false ->
exit({error,{invalid_character_in_Nmtoken,X}});
_ -> ok
end
end,
lists:foreach(ValidChar,Nmtok).
valid_IDREFS([],'IDREFS') ->
exit({error,{at_least_one_IDREF_Name_required}});
valid_IDREFS(_Str,'IDREFS') ->
ok.
single_ID_definition([{_,'ID',_,_,_}=Att1|Rest]) ->
case lists:keysearch('ID',2,Rest) of
{value,Att2} ->
exit({error,{just_one_ID_definition_allowed,Att1,Att2}});
_ -> ok
end;
single_ID_definition([_H|T]) ->
single_ID_definition(T);
single_ID_definition([]) ->
ok.
check_SDD_validity(yes,{external,_},#xmlAttribute{name=Name,normalized=true},_) ->
exit({error,{externally_defed_attribute_normalized_in_standalone_doc,Name}});
check_SDD_validity(yes,{external,_},no_attribute,V) when V /= no_value->
exit({error,{externally_defed_attribute_with_default_value_missing_in_standalone_doc}});
check_SDD_validity(_,_,_,_) ->
ok.
search_attr(Name,[#xmlAttribute{name=Name}=H|_T])->
H;
search_attr(Name,[#xmlAttribute{}|T])->
search_attr(Name,T);
search_attr(_Name,_T) ->
no_attribute.
is_attribute_exist(Name,[{Name,_,_,_,_}|_T])->
true;
is_attribute_exist(Name,[{_Attr,_,_,_,_}|T]) ->
is_attribute_exist(Name,T);
is_attribute_exist(_Name,[]) ->
false.
%%%% [54] AttType ::= StringType | TokenizedType | EnumeratedType
%%%% [55] StringType ::= 'CDATA'
%%%% [56] TokenizedType ::= 'ID' | 'IDREF' | 'IDREFS' | 'ENTITY' | 'ENTITIES'
%%%%                      | 'NMTOKEN' | 'NMTOKENS'
%%%% [57] EnumeratedType ::= NotationType | Enumeration
%%%% [58] NotationType ::= 'NOTATION' S '(' S? Name (S? '|' S? Name)* S? ')'
%%%% [59] Enumeration ::= '(' S? Nmtoken (S? '|' S? Nmtoken)* S? ')'
%% +deftype attribute_type() -> 'CDATA' | 'ID' | 'IDREF' | 'IDREFS' | 'ENTITY' |
%%                              'ENTITIES' | 'NMTOKEN' | 'NMTOKENS' |
%% {enumeration,[List_of_value::atom()]}.
%% +type test_attribute_value(attribute_type(),xmlAttribute())->
%% xmlAttribute()| exit.
%%%% test the constraint validity of Attribute value.
test_attribute_value('CDATA',#xmlAttribute{}=Attr,_,_) ->
Attr;
test_attribute_value('NMTOKEN',#xmlAttribute{name=Name,value=V}=Attr,
Default,_S) ->
Fun =
fun (X)->
case xmerl_lib:is_namechar(X) of
true->
ok;
false->
io : format("Warning * * * nmtoken , value_incorrect : ~p ~ n",[V ] ) ,
exit({error,{invalid_value_nmtoken,Name,V}})
end
end,
lists:foreach(Fun,V),
if
list(Default) ->
lists:foreach(Fun,Default);
true -> ok
end,
Attr;
test_attribute_value('NMTOKENS',#xmlAttribute{name=Name,value=V}=Attr,
Default,_S) ->
Fun =
fun (X)->
case xmerl_lib:is_namechar(X) of
true->
ok;
false when ?whitespace(X)->
ok;
false ->
exit({error,{invalid_value_nmtokens,Name,V}})
end
end,
lists:foreach(Fun,V),
if
list(Default) ->
lists:foreach(Fun,Default);
true -> ok
end,
Attr;
test_attribute_value(Ent,#xmlAttribute{name=_Name,value=V}=Attr,_Default,
S=#xmerl_scanner{rules_read_fun=Read})
when Ent == 'ENTITY'; Ent == 'ENTITIES'->
%% The default value is already checked
NameListFun =
fun([],Acc,_) ->
lists:reverse(Acc);
(Str,Acc,Fun) ->
{N,Str2} = scan_name(Str,[]),
Fun(Str2,[N|Acc],Fun)
end,
NameList = NameListFun(V,[],NameListFun),
VC_Entity_Name =
fun(X) ->
case Read(entity,X,S) of
{_,external,{_,{ndata,_}}} ->
ok;
_ -> exit({error,{vc_Entity_Name,X,V}})
end
end,
lists:foreach(VC_Entity_Name,NameList),
Attr;
test_attribute_value({Type,L},#xmlAttribute{value=Value}=Attr,Default,_S)
when Type == enumeration; Type == notation ->
ValidDefault =
if
atom(Default) -> true;
true -> lists:member(list_to_atom(Default),L)
end,
NoDuplicatesFun =
fun(_,_,notation) -> true;
([],_,_) -> true;
([H|T],F,Enum) ->
case lists:member(H,T) of
true -> false;
_ -> F(T,F,Enum)
end
end,
NoDuplicates = NoDuplicatesFun(L,NoDuplicatesFun,Type),
case {lists:member(list_to_atom(Value),L),ValidDefault,NoDuplicates} of
{true,true,true}->
Attr;
{false,_,_} ->
exit({error,{attribute_value_unknow,Value,{list,L}}});
{_,false,_} ->
exit({error,{attribute_default_value_unknow,Default,{list,L}}});
{_,_,false} ->
exit({error,{duplicate_tokens_not_allowed,{list,L}}})
end;
test_attribute_value(_Rule,Attr,_,_) ->
% io:format("Attr Value*****~nRule~p~nValue~p~n",[Rule,Attr]),
Attr.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%% Contents Validation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%% Element-content Models
%%%% [47] children ::= (choice | seq) ('?' | '*' | '+')?
%%%% [48] cp ::= (Name | choice | seq) ('?' | '*' | '+')?
%%%% [49] choice ::= '(' S? cp (S? '|' S? cp)+ S? ')'
%%%% [50] seq ::= '(' S? cp (S? ',' S? cp)* S? ')'
%%%% [51] Mixed ::= '(' S? '#PCDATA' (S? '|' S? Name)* S? ')*'
%%%% | '(' S? '#PCDATA' S? ')'
%% +type valid_contents([rule()],[xmlElement()])->
%%	[xmlElement()] | {error,???}.
valid_contents(Rule,XMLS,Rules,S,WSActionMode)->
case parse(Rule,XMLS,Rules,WSActionMode,S) of
{XML_N,[]}->
lists:flatten(XML_N);
{_,[#xmlElement{name=Name}|_T]} ->
exit({error,{element,Name,isnt_comprise_in_the_rule}});
{_,[#xmlText{}=Txt|_T]} ->
exit({error,{element,text,isnt_comprise_in_the_rule,Txt}});
{error,Reason} ->
{error,Reason};
{error,Reason,N} ->
{error,Reason,N}
end.
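%% parse/5 interprets a DTD content-model term against the element's children:
%% '*', '+' and '?' dispatch to star/plus/question, {choice,...} and {seq,...}
%% to choice/seq, a plain element name validates a single child, 'any' accepts
%% anything well-formed, and '#PCDATA' collects leading #xmlText{} records.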
parse({'*',SubRule},XMLS,Rules,WSaction,S)->
star(SubRule,XMLS,Rules,WSaction,[],S);
parse({'+',SubRule},XMLS,Rules,WSaction,S) ->
plus(SubRule,XMLS,Rules,WSaction,S);
parse({choice,CHOICE},XMLS,Rules,WSaction,S)->
choice(CHOICE,XMLS,Rules,WSaction,S);
parse(empty,[],_Rules,_WSaction,_S) ->
{[],[]};
parse({'?',SubRule},XMLS,Rules,_WSaction,S)->
question(SubRule,XMLS,Rules,S);
parse({seq,List},XMLS,Rules,WSaction,S) ->
seq(List,XMLS,Rules,WSaction,S);
parse(El_Name,[#xmlElement{name=El_Name}=XML|T],Rules,_WSaction,S)
when atom(El_Name)->
case do_validation(read_rules(Rules,el_name(XML)),XML,Rules,S) of
{error,R} ->
% {error,R};
exit(R);
{error,R,_N}->
% {error,R,N};
exit(R);
XML_->
{[XML_],T}
end;
parse(any,Cont,Rules,_WSaction,S) ->
case catch parse_any(Cont,Rules,S) of
Err = {error,_} -> Err;
ValidContents -> {ValidContents,[]}
end;
parse(El_Name,[#xmlElement{name=Name}|_T]=S,_Rules,_WSa,_S) when atom(El_Name)->
{error,
{element_seq_not_conform,{wait,El_Name},{is,Name}},
{{next,S},{act,[]}} };
parse(_El_Name,[#xmlPI{}=H|T],_Rules,_WSa,_S) ->
{[H],T};
parse('#PCDATA',XML,_Rules,_WSa,_S)->
    %% PCDATA: it is 0, 1 or more #xmlText{}.
parse_pcdata(XML);
parse(El_Name,[#xmlText{}|_T]=S,_Rules,_WSa,_S)->
{error,
{text_in_place_of,El_Name},
{{next,S},{act,[]}}};
parse([],_,_,_,_) ->
{error,no_rule};
parse(Rule,[],_,_,_) ->
{error,{no_xml_element,Rule}}.
parse_any([],_Rules,_S) ->
[];
parse_any([H|T],Rules,S) ->
case parse_any(H,Rules,S) of
[Cont] ->
[Cont|parse_any(T,Rules,S)];
Err -> throw(Err)
end;
parse_any(#xmlElement{}=XML,Rules,S) ->
case do_validation(read_rules(Rules,el_name(XML)),XML,Rules,S) of
{error,R} ->
{error,R};
{error,R,N}->
{error,R,N};
XML_->
[XML_]
end;
parse_any(El,_Rules,_S) ->
[El].
%% XXX remove first function clause
% choice(_Choice,[#xmlText{}=T|R],_Rules) ->
% {[T],R};
choice([CH|CHS],[_XML|_T]=XMLS,Rules,WSaction,S)->
{WS,XMLS1} = whitespace_action(XMLS,ws_action(WSaction,remove)),
case parse(CH,XMLS1,Rules,ws_action(WSaction,remove),S) of
{error,_R} ->
choice(CHS,XMLS,Rules,WSaction,S);
{error,_R,_N} ->
choice(CHS,XMLS,Rules,WSaction,S);
{Tree,XMLS2}->
{WS++[Tree],XMLS2}
end;
choice([],XMLS,_,WSaction,_S)->
case whitespace_action(XMLS,ws_action(WSaction,remove)) of
Res={_,[]} -> Res;
_ ->
{error,element_unauthorize_in_choice,{{next,XMLS},{act,[]}}}
end.
plus(Rule,XMLS,Rules,WSaction,S) ->
    %% 1 or more
{WS,XMLS1}=whitespace_action(XMLS,WSaction),
case parse(Rule,XMLS1,Rules,WSaction,S) of
{error, Reason,_XML} ->
{error, Reason};
{error, X} ->
{error, X};
{Tree, XMLS2} ->
case star(Rule, XMLS2,Rules,WSaction,[],S) of
{[], _} ->
{WS++[Tree], XMLS2};
{Tree_1, XMLS3} ->
{WS++[Tree]++Tree_1, XMLS3}
end
end.
star(_Rule,XML,_Rules,_WSa,Tree,_S) when length(XML)==0->
{[Tree],[]};
star(Rule,XMLS,Rules,WSaction,Tree,S) ->
{WS,XMLS1} = whitespace_action(XMLS,WSaction),
case parse(Rule,XMLS1,Rules,WSaction,S) of
{error, _E, {{next,N},{act,A}}}->
%%io:format("Error~p~n",[_E]),
{WS++Tree++A,N};
{error, _E}->
%%io:format("Error~p~n",[_E]),
{WS++[Tree],[]};
{Tree1,XMLS2}->
star(Rule,XMLS2,Rules,WSaction,Tree++WS++[Tree1],S)
end.
question(Rule, Toks,Rules,S) ->
    %% 0 or 1
case parse(Rule, Toks,Rules,preserve,S) of
{error, _E, _Next}->
{[],Toks};
{error, _E} ->
{[], Toks};
{T,Toks1} ->
{T, Toks1}
end.
seq(H,Toks,Rules,WSaction,S)->
case seq2(H,Toks,Rules,[],WSaction,S) of
{error,E}->
{error,E};
{error,R,N}->
{error,R,N};
{Tree,Toks2}->
{Tree,Toks2}
end.
seq2([],[],_,Tree,_WSa,_S)->
{Tree,[]};
seq2([],[#xmlElement{name=Name}|_T]=XMLS,_,Tree,_WSa,_S)->
{error,{sequence_finish,Name,isnt_in_the_right_place},
{{next,XMLS},{act,Tree}}};
seq2([],[#xmlText{}]=XML,_,Tree,_WSa,_S)->
{error,sequence_finish,{{next,XML},{act,Tree}}};
seq2([],Rest,_,Tree,_WSa,_S) ->
{WS,Rest2}=whitespace_action(Rest,remove),
{WS++Tree,Rest2};
seq2([H|T],Toks,Rules,Tree,WSaction,S) ->
{WS,Toks1} = whitespace_action(Toks,ws_action(WSaction,remove)),
    case parse(H,Toks1,Rules,WSaction,S) of %% H maybe only match parts of Toks
{error,Reason,_XML}->
{error,Reason};
{error,E}->
{error,E};
{[],Toks2}->
seq2(T,Toks2,Rules,Tree,WSaction,S);
{Tree1,Toks2} when list(Tree1)->
seq2(T,Toks2,Rules,Tree++WS++Tree1,WSaction,S);
{Tree1,Toks2}->
seq2(T,Toks2,Rules,Tree++WS++[Tree1],WSaction,S)
end.
el_name(#xmlElement{name=Name})->
Name;
el_name(#xmlText{}) ->
pcdata.
parse_pcdata([#xmlText{}=H|T])->
parse_pcdata(T,[H]);
parse_pcdata(H) ->
{[],H}.
parse_pcdata([#xmlText{}=H|T],Acc)->
parse_pcdata(T,Acc++[H]);
parse_pcdata(H,Acc) ->
{Acc,H}.
whitespace([]) ->
true;
whitespace([H|T]) when ?whitespace(H) ->
whitespace(T);
whitespace(_) ->
false.
whitespace_action(XML,remove) ->
whitespace_remove(XML,[]);
whitespace_action(XML,_) ->
{[],XML}.
whitespace_remove([#xmlText{value=V,type=text}=T|R]=L,Acc) ->
case whitespace(V) of
true ->
whitespace_remove(R,[T|Acc]);
_ ->
{lists:reverse(Acc),L}
end;
whitespace_remove(L,Acc) ->
{lists:reverse(Acc),L}.
ws_action(always_preserve=A,_) ->
A;
ws_action(_,B) ->
B.
scan_name(N,_) when atom(N) ->
N;
scan_name([$\s|T],Acc) ->
{list_to_atom(lists:reverse(Acc)),T};
scan_name([H|T],Acc) ->
scan_name(T,[H|Acc]);
scan_name("",Acc) ->
{list_to_atom(lists:reverse(Acc)),[]}.
| null | https://raw.githubusercontent.com/gebi/jungerl/8f5c102295dbe903f47d79fd64714b7de17026ec/lib/xmerl/src/xmerl_validate.erl | erlang | compliance with the License. You may obtain a copy of the License at
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
----------------------------------------------------------------------
----------------------------------------------------------------------
File: xmerl_validate.erl
Description : XML validation hooks for xmerl
Modules used : lists, ets, xmerl_lib
----------------------------------------------------------------------
TODOO
Atributes data type id , idrefs, FIXED, ...
record def, macros
+type validate(xmerl_scanner(),xmlElement())->
xmlElment() | {error,tuple()}.
io:format("XML:~n~p~n",[XML]),
+type validate(rules(),xmlElement())->
Nothing to validate
+type read_rules(DTD::atom(),Element_Name::atom())->
Attributes Validation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
attribute_priority()}.
+type valid_attributes([attribute_rule()],[xmlAttribute()])->
'#REQUIRED' | '#IMPLIED'
+deftype attribute_priority = '#REQUIRED'|'#FIXED'|'#IMPLIED'.
+type valid_attribute(Name::atom(),DataType::attribute_value(),
and no default value
FIXED declare value becomes default.
do test data value, and default_value
ValidChar =
case xmerl_lib:is_namechar(X) of
false ->
_ -> ok
end
end,
ValidCharList =
fun([Nmtok|T],F) ->
([],_) -> ok
end,
{enumeration,[List_of_value::atom()]}.
+type test_attribute_value(attribute_type(),xmlAttribute())->
xmlAttribute()| exit.
test the constraint validity of Attribute value.
The default value is already checked
io:format("Attr Value*****~nRule~p~nValue~p~n",[Rule,Attr]),
Contents Validation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
| '(' S? '#PCDATA' S? ')'
+type valid_contents([rule()],[xmlElement()])->
{error,R};
{error,R,N};
choice(_Choice,[#xmlText{}=T|R],_Rules) ->
{[T],R};
io:format("Error~p~n",[_E]),
io:format("Error~p~n",[_E]), | The contents of this file are subject to the Erlang Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is xmerl-0.19
The Initial Developer of the Original Code is Ericsson Telecom
AB . Portions created by Ericsson are Copyright ( C ) , 1998 , Ericsson
Telecom AB . All Rights Reserved .
# 0 . BASIC INFORMATION
@private
Author : < >
-module(xmerl_validate).
-vsn('0.1').
-date('27-11-02').
-author('').
-export([validate/2]).
validate(#xmerl_scanner{doctype_name=DTName},
#xmlElement{name=Name})
when DTName=/=Name ->
{error, {mismatched_root_element,Name,DTName}};
validate(#xmerl_scanner{rules=Rules}=S,
XML=#xmlElement{name=Name})->
catch do_validation(read_rules(Rules,Name),XML,Rules,S);
validate(_, XML) ->
{error, {no_xml_element, XML}}.
{ ok , ( ) } | { error , tuple ( ) } .
do_validation(undefined,#xmlElement{name=Name}, _Rules,_S) ->
{error,{unknown_element,Name}};
XML;
do_validation(El_Rule,XML,Rules,S)->
case catch valid_attributes(El_Rule#xmlElement.attributes,
XML#xmlElement.attributes,S) of
{'EXIT',Reason} ->
{error,Reason};
{error,Reason} ->
{error,Reason};
Attr_2->
XML_=XML#xmlElement{attributes=Attr_2},
El_Rule_Cont = El_Rule#xmlElement.content,
WSActionMode = ws_action_mode(El_Rule#xmlElement.elementdef,
El_Rule_Cont,S),
XML_Cont = XML_#xmlElement.content,
check_direct_ws_SDD(XML_Cont,WSActionMode),
case valid_contents(El_Rule_Cont,
XML_Cont,Rules,S,WSActionMode) of
{error,Reason}->
{error,Reason};
{error,Reason,N}->
{error,Reason,N};
XMLS ->
XML_#xmlElement{content=XMLS}
end
end.
check_direct_ws_SDD(XML,always_preserve) ->
case XML of
[#xmlText{}|_Rest] ->
exit({error,{illegal_whitespace_standalone_doc,XML}});
_ -> ok
end,
case lists:reverse(XML) of
[#xmlText{}|_Rest2] ->
exit({error,{illegal_whitespace_standalone_doc,XML}});
_ -> ok
end;
check_direct_ws_SDD(_,_) -> ok.
ws_action_mode({external,_},Content,#xmerl_scanner{standalone=yes}) ->
case element_content(Content) of
children ->
always_preserve;
_ ->
preserve
end;
ws_action_mode(_,_,_) ->
preserve.
element_content(A) when atom(A),A /= any, A /= empty ->
children;
element_content({choice,L}) when list(L) ->
element_content(L);
element_content({seq,L}) when list(L) ->
element_content(L);
element_content(['#PCDATA'|_T]) ->
mixed;
element_content('#PCDATA') ->
mixed;
element_content({'*',Rest}) ->
element_content(Rest);
element_content(_) -> children.
undefined | xmlElement ( ) .
read_rules(_, pcdata) ->
pcdata;
read_rules(T, Name) ->
case ets:lookup(T, {elem_def, Name}) of
[] ->
undefined;
[{_K, V}] ->
V
end.
+ deftype attribute_rule ( ) = { Attr_Name::atom(),attribute_type ( ) ,
[ xmlAttribute ( ) ] | { error , attribute_unknow } .
valid_attributes(All_Attr,[#xmlAttribute{}|_T]=Attr,S)->
single_ID_definition(All_Attr),
vc_Name_Token_IDREFS(All_Attr,Attr),
lists:foreach(fun(#xmlAttribute{name=Name})->
case is_attribute_exist(Name,All_Attr) of
true ->
ok;
false ->
exit({error,{attribute_unknown,Name}})
end
end,
Attr),
lists:flatten(lists:foldl(fun({Name,DataType,IF,DefDecl,Env},Attr_2)->
Attr_2++
[valid_attribute(Name,DataType,IF,
DefDecl,Attr,Env,S)]
end,[],All_Attr));
valid_attributes([],[],_) ->
[];
valid_attributes(All_Attr,[],S) ->
single_ID_definition(All_Attr),
lists:flatten(lists:foldl(fun({Name,DataType,IF,DefDecl,Env},Attr_2)->
Attr_2++[valid_attribute(Name,
DataType,IF,
DefDecl,
[],
Env,S)]
end,[],All_Attr)).
[ 60 ] DefaultDecl::=
| ( ( ' # FIXED ' S ) ? AttValue )
>
[ xmlAttribute ( ) ] | exit ( ) .
valid_attribute(Name,DataType,IF,DefaultDecl,List_of_Attributes,Env,S)->
SA = S#xmerl_scanner.standalone,
Attr=search_attr(Name,List_of_Attributes),
check_SDD_validity(SA,Env,Attr,IF),
case {DefaultDecl,IF,Attr} of
{'#REQUIRED',_,no_attribute}->
exit({error,{Name,is_required}});
{'#IMPLIED',_,no_attribute}->
{'#FIXED',DefVal,#xmlAttribute{value=DefVal}=Attr} ->
Attr;
{'#FIXED',A,no_attribute} ->
{'#FIXED',A,B} ->
exit({error,{fixed_default_value_missmatch,A,B}});
{_,Value,no_attribute} when list(Value)->
#xmlAttribute{name=Name,value=Value};
{_,_,#xmlAttribute{}=Attr}->
test_attribute_value(DataType,Attr,IF,S);
{DefDecl,Else,XML} ->
exit({error,{unknow_attribute_type,DefDecl,Else,XML}})
end.
vc_Name_Token_IDREFS([{Name,Type,_,_,_}|Rest],Attrs)
when Type=='NMTOKEN';Type=='NMTOKENS'->
case lists:keysearch(Name,#xmlAttribute.name,Attrs) of
{value,A} ->
valid_nmtoken_value(A#xmlAttribute.value,Type);
_ -> ok
end,
vc_Name_Token_IDREFS(Rest,Attrs);
vc_Name_Token_IDREFS([{Name,Type,_,_,_}|Rest],Attrs)
when Type=='IDREFS'->
case lists:keysearch(Name,#xmlAttribute.name,Attrs) of
{value,A} ->
valid_IDREFS(A#xmlAttribute.value,Type);
_ -> ok
end,
vc_Name_Token_IDREFS(Rest,Attrs);
vc_Name_Token_IDREFS([_H|Rest],Attrs) ->
vc_Name_Token_IDREFS(Rest,Attrs);
vc_Name_Token_IDREFS([],_) -> ok.
valid_nmtoken_value([],'NMTOKENS') ->
exit({error,{at_least_one_Nmtoken_required}});
% valid_nmtoken_value([H|_T]=L,'NMTOKENS') when list(H) ->
%     fun(X) ->
%         exit({error,{invalid_character_in_Nmtoken,X}});
%     lists:foreach(ValidChar,Nmtok),
%     F(T,F);
%     ValidCharList(L,ValidChar);
valid_nmtoken_value(Nmtok,_) ->
ValidChar =
fun(X) when ?whitespace(X),Nmtok=='NMTOKENS' ->
ok;
(X) ->
case xmerl_lib:is_namechar(X) of
false ->
exit({error,{invalid_character_in_Nmtoken,X}});
_ -> ok
end
end,
lists:foreach(ValidChar,Nmtok).
valid_IDREFS([],'IDREFS') ->
exit({error,{at_least_one_IDREF_Name_required}});
valid_IDREFS(_Str,'IDREFS') ->
ok.
single_ID_definition([{_,'ID',_,_,_}=Att1|Rest]) ->
case lists:keysearch('ID',2,Rest) of
{value,Att2} ->
exit({error,{just_one_ID_definition_allowed,Att1,Att2}});
_ -> ok
end;
single_ID_definition([_H|T]) ->
single_ID_definition(T);
single_ID_definition([]) ->
ok.
check_SDD_validity(yes,{external,_},#xmlAttribute{name=Name,normalized=true},_) ->
exit({error,{externally_defed_attribute_normalized_in_standalone_doc,Name}});
check_SDD_validity(yes,{external,_},no_attribute,V) when V /= no_value->
exit({error,{externally_defed_attribute_with_default_value_missing_in_standalone_doc}});
check_SDD_validity(_,_,_,_) ->
ok.
search_attr(Name,[#xmlAttribute{name=Name}=H|_T])->
H;
search_attr(Name,[#xmlAttribute{}|T])->
search_attr(Name,T);
search_attr(_Name,_T) ->
no_attribute.
is_attribute_exist(Name,[{Name,_,_,_,_}|_T])->
true;
is_attribute_exist(Name,[{_Attr,_,_,_,_}|T]) ->
is_attribute_exist(Name,T);
is_attribute_exist(_Name,[]) ->
false.
[ 54 ] AttType::= StringType | TokenizedType | EnumeratedType
[ 55 ] StringType::= ' CDATA '
[ 56 ] TokenizedType::= ' ID'|'IDREF'| ' IDREFS'|'ENTITY'| ' ENTITIES '
| ' NMTOKEN'| ' NMTOKENS '
[ 57 ] EnumeratedType::= NotationType | Enumeration
[ 58 ] NotationType::= ' NOTATION ' S ' ( ' S ? Name ( S ? ' | ' S ? Name ) * S ? ' ) '
[ 59 ] Enumeration : : = ' ( ' S ? ( S ? ' | ' S ? Nmtoken ) * S ? ' ) '
+ deftype attribute_type()- > ' CDATA ' | ' ID'|'IDREF'| ' IDREFS'|'ENTITY'|
' ENTITIES'| ' NMTOKEN'| ' NMTOKENS '
test_attribute_value('CDATA',#xmlAttribute{}=Attr,_,_) ->
Attr;
test_attribute_value('NMTOKEN',#xmlAttribute{name=Name,value=V}=Attr,
Default,_S) ->
Fun =
fun (X)->
case xmerl_lib:is_namechar(X) of
true->
ok;
false->
io : format("Warning * * * nmtoken , value_incorrect : ~p ~ n",[V ] ) ,
exit({error,{invalid_value_nmtoken,Name,V}})
end
end,
lists:foreach(Fun,V),
if
list(Default) ->
lists:foreach(Fun,Default);
true -> ok
end,
Attr;
test_attribute_value('NMTOKENS',#xmlAttribute{name=Name,value=V}=Attr,
Default,_S) ->
Fun =
fun (X)->
case xmerl_lib:is_namechar(X) of
true->
ok;
false when ?whitespace(X)->
ok;
false ->
exit({error,{invalid_value_nmtokens,Name,V}})
end
end,
lists:foreach(Fun,V),
if
list(Default) ->
lists:foreach(Fun,Default);
true -> ok
end,
Attr;
test_attribute_value(Ent,#xmlAttribute{name=_Name,value=V}=Attr,_Default,
S=#xmerl_scanner{rules_read_fun=Read})
when Ent == 'ENTITY'; Ent == 'ENTITIES'->
NameListFun =
fun([],Acc,_) ->
lists:reverse(Acc);
(Str,Acc,Fun) ->
{N,Str2} = scan_name(Str,[]),
Fun(Str2,[N|Acc],Fun)
end,
NameList = NameListFun(V,[],NameListFun),
VC_Entity_Name =
fun(X) ->
case Read(entity,X,S) of
{_,external,{_,{ndata,_}}} ->
ok;
_ -> exit({error,{vc_Entity_Name,X,V}})
end
end,
lists:foreach(VC_Entity_Name,NameList),
Attr;
test_attribute_value({Type,L},#xmlAttribute{value=Value}=Attr,Default,_S)
when Type == enumeration; Type == notation ->
ValidDefault =
if
atom(Default) -> true;
true -> lists:member(list_to_atom(Default),L)
end,
NoDuplicatesFun =
fun(_,_,notation) -> true;
([],_,_) -> true;
([H|T],F,Enum) ->
case lists:member(H,T) of
true -> false;
_ -> F(T,F,Enum)
end
end,
NoDuplicates = NoDuplicatesFun(L,NoDuplicatesFun,Type),
case {lists:member(list_to_atom(Value),L),ValidDefault,NoDuplicates} of
{true,true,true}->
Attr;
{false,_,_} ->
exit({error,{attribute_value_unknow,Value,{list,L}}});
{_,false,_} ->
exit({error,{attribute_default_value_unknow,Default,{list,L}}});
{_,_,false} ->
exit({error,{duplicate_tokens_not_allowed,{list,L}}})
end;
test_attribute_value(_Rule,Attr,_,_) ->
Attr.
Element - content Models
[ 47 ] children::= ( choice | seq ) ( ' ? ' | ' * ' | ' + ' ) ?
[ 48 ] cp::= ( Name | choice | seq ) ( ' ? ' | ' * ' | ' + ' ) ?
[ 49 ] choice::= ' ( ' S ? cp ( S ? ' | ' S ? cp ) + S ? ' ) '
[ 50 ] seq::= ' ( ' S ? cp ( S ? ' , ' S ? cp ) * S ? ' ) '
[ 51 ] Mixed::= ' ( ' S ? ' # PCDATA ' ( S ? ' | ' S ? Name ) * S ? ' ) * '
[ xmlElement ( ) | { error , ? ? ? } .
valid_contents(Rule,XMLS,Rules,S,WSActionMode)->
case parse(Rule,XMLS,Rules,WSActionMode,S) of
{XML_N,[]}->
lists:flatten(XML_N);
{_,[#xmlElement{name=Name}|_T]} ->
exit({error,{element,Name,isnt_comprise_in_the_rule}});
{_,[#xmlText{}=Txt|_T]} ->
exit({error,{element,text,isnt_comprise_in_the_rule,Txt}});
{error,Reason} ->
{error,Reason};
{error,Reason,N} ->
{error,Reason,N}
end.
parse({'*',SubRule},XMLS,Rules,WSaction,S)->
star(SubRule,XMLS,Rules,WSaction,[],S);
parse({'+',SubRule},XMLS,Rules,WSaction,S) ->
plus(SubRule,XMLS,Rules,WSaction,S);
parse({choice,CHOICE},XMLS,Rules,WSaction,S)->
choice(CHOICE,XMLS,Rules,WSaction,S);
parse(empty,[],_Rules,_WSaction,_S) ->
{[],[]};
parse({'?',SubRule},XMLS,Rules,_WSaction,S)->
question(SubRule,XMLS,Rules,S);
parse({seq,List},XMLS,Rules,WSaction,S) ->
seq(List,XMLS,Rules,WSaction,S);
parse(El_Name,[#xmlElement{name=El_Name}=XML|T],Rules,_WSaction,S)
when atom(El_Name)->
case do_validation(read_rules(Rules,el_name(XML)),XML,Rules,S) of
{error,R} ->
exit(R);
{error,R,_N}->
exit(R);
XML_->
{[XML_],T}
end;
parse(any,Cont,Rules,_WSaction,S) ->
case catch parse_any(Cont,Rules,S) of
Err = {error,_} -> Err;
ValidContents -> {ValidContents,[]}
end;
parse(El_Name,[#xmlElement{name=Name}|_T]=S,_Rules,_WSa,_S) when atom(El_Name)->
{error,
{element_seq_not_conform,{wait,El_Name},{is,Name}},
{{next,S},{act,[]}} };
parse(_El_Name,[#xmlPI{}=H|T],_Rules,_WSa,_S) ->
{[H],T};
parse('#PCDATA',XML,_Rules,_WSa,_S)->
    %% PCDATA: it is 0, 1 or more #xmlText{}.
parse_pcdata(XML);
parse(El_Name,[#xmlText{}|_T]=S,_Rules,_WSa,_S)->
{error,
{text_in_place_of,El_Name},
{{next,S},{act,[]}}};
parse([],_,_,_,_) ->
{error,no_rule};
parse(Rule,[],_,_,_) ->
{error,{no_xml_element,Rule}}.
parse_any([],_Rules,_S) ->
[];
parse_any([H|T],Rules,S) ->
case parse_any(H,Rules,S) of
[Cont] ->
[Cont|parse_any(T,Rules,S)];
Err -> throw(Err)
end;
parse_any(#xmlElement{}=XML,Rules,S) ->
case do_validation(read_rules(Rules,el_name(XML)),XML,Rules,S) of
{error,R} ->
{error,R};
{error,R,N}->
{error,R,N};
XML_->
[XML_]
end;
parse_any(El,_Rules,_S) ->
[El].
%% XXX remove first function clause
choice([CH|CHS],[_XML|_T]=XMLS,Rules,WSaction,S)->
{WS,XMLS1} = whitespace_action(XMLS,ws_action(WSaction,remove)),
case parse(CH,XMLS1,Rules,ws_action(WSaction,remove),S) of
{error,_R} ->
choice(CHS,XMLS,Rules,WSaction,S);
{error,_R,_N} ->
choice(CHS,XMLS,Rules,WSaction,S);
{Tree,XMLS2}->
{WS++[Tree],XMLS2}
end;
choice([],XMLS,_,WSaction,_S)->
case whitespace_action(XMLS,ws_action(WSaction,remove)) of
Res={_,[]} -> Res;
_ ->
{error,element_unauthorize_in_choice,{{next,XMLS},{act,[]}}}
end.
plus(Rule,XMLS,Rules,WSaction,S) ->
    %% 1 or more
{WS,XMLS1}=whitespace_action(XMLS,WSaction),
case parse(Rule,XMLS1,Rules,WSaction,S) of
{error, Reason,_XML} ->
{error, Reason};
{error, X} ->
{error, X};
{Tree, XMLS2} ->
case star(Rule, XMLS2,Rules,WSaction,[],S) of
{[], _} ->
{WS++[Tree], XMLS2};
{Tree_1, XMLS3} ->
{WS++[Tree]++Tree_1, XMLS3}
end
end.
star(_Rule,XML,_Rules,_WSa,Tree,_S) when length(XML)==0->
{[Tree],[]};
star(Rule,XMLS,Rules,WSaction,Tree,S) ->
{WS,XMLS1} = whitespace_action(XMLS,WSaction),
case parse(Rule,XMLS1,Rules,WSaction,S) of
{error, _E, {{next,N},{act,A}}}->
{WS++Tree++A,N};
{error, _E}->
{WS++[Tree],[]};
{Tree1,XMLS2}->
star(Rule,XMLS2,Rules,WSaction,Tree++WS++[Tree1],S)
end.
question(Rule, Toks,Rules,S) ->
    %% 0 or 1
case parse(Rule, Toks,Rules,preserve,S) of
{error, _E, _Next}->
{[],Toks};
{error, _E} ->
{[], Toks};
{T,Toks1} ->
{T, Toks1}
end.
seq(H,Toks,Rules,WSaction,S)->
case seq2(H,Toks,Rules,[],WSaction,S) of
{error,E}->
{error,E};
{error,R,N}->
{error,R,N};
{Tree,Toks2}->
{Tree,Toks2}
end.
seq2([],[],_,Tree,_WSa,_S)->
{Tree,[]};
seq2([],[#xmlElement{name=Name}|_T]=XMLS,_,Tree,_WSa,_S)->
{error,{sequence_finish,Name,isnt_in_the_right_place},
{{next,XMLS},{act,Tree}}};
seq2([],[#xmlText{}]=XML,_,Tree,_WSa,_S)->
{error,sequence_finish,{{next,XML},{act,Tree}}};
seq2([],Rest,_,Tree,_WSa,_S) ->
{WS,Rest2}=whitespace_action(Rest,remove),
{WS++Tree,Rest2};
seq2([H|T],Toks,Rules,Tree,WSaction,S) ->
{WS,Toks1} = whitespace_action(Toks,ws_action(WSaction,remove)),
    case parse(H,Toks1,Rules,WSaction,S) of %% H maybe only match parts of Toks
{error,Reason,_XML}->
{error,Reason};
{error,E}->
{error,E};
{[],Toks2}->
seq2(T,Toks2,Rules,Tree,WSaction,S);
{Tree1,Toks2} when list(Tree1)->
seq2(T,Toks2,Rules,Tree++WS++Tree1,WSaction,S);
{Tree1,Toks2}->
seq2(T,Toks2,Rules,Tree++WS++[Tree1],WSaction,S)
end.
el_name(#xmlElement{name=Name})->
Name;
el_name(#xmlText{}) ->
pcdata.
parse_pcdata([#xmlText{}=H|T])->
parse_pcdata(T,[H]);
parse_pcdata(H) ->
{[],H}.
parse_pcdata([#xmlText{}=H|T],Acc)->
parse_pcdata(T,Acc++[H]);
parse_pcdata(H,Acc) ->
{Acc,H}.
whitespace([]) ->
true;
whitespace([H|T]) when ?whitespace(H) ->
whitespace(T);
whitespace(_) ->
false.
whitespace_action(XML,remove) ->
whitespace_remove(XML,[]);
whitespace_action(XML,_) ->
{[],XML}.
whitespace_remove([#xmlText{value=V,type=text}=T|R]=L,Acc) ->
case whitespace(V) of
true ->
whitespace_remove(R,[T|Acc]);
_ ->
{lists:reverse(Acc),L}
end;
whitespace_remove(L,Acc) ->
{lists:reverse(Acc),L}.
ws_action(always_preserve=A,_) ->
A;
ws_action(_,B) ->
B.
scan_name(N,_) when atom(N) ->
N;
scan_name([$\s|T],Acc) ->
{list_to_atom(lists:reverse(Acc)),T};
scan_name([H|T],Acc) ->
scan_name(T,[H|Acc]);
scan_name("",Acc) ->
{list_to_atom(lists:reverse(Acc)),[]}.
|
2d054b0f532218222a47217eec9fb67032e636a0039445f65f8af229e5a7d875 | Perry961002/SICP | exe2.74-company.scm | ; a)
;; Use an id as the unique identifier for an employee across the whole organization.
;; Each division implements its own get-record procedure and puts it into the
;; headquarters' operation table.
; (define (get-record branch id)
;   ((get 'get-record branch) id))
; b)
;; Add a salary field to each record and implement a get-salary procedure.
; c)
;(define (find-employee-record branch-list id)
;  (if (null? branch-list)
;      (error "Not found" id)
;      (let ((record (get-record (car branch-list) id)))
;        (if (null? record)
;            (find-employee-record (cdr branch-list) id)
;            record))))
; d)
;只需要新增的机构实现相应的get-recorde、get-salary方法即可 | null | https://raw.githubusercontent.com/Perry961002/SICP/89d539e600a73bec42d350592f0ac626e041bf16/Chap2/exercise/exe2.74-company.scm | scheme | a)
设立id作为一个员工在整个体系的唯一标识
(define (get-record branch id)
((get 'get-record branch) id))
b)
c)
(define (find-employee-record branch-list id)
(if (null? branch-list)
(error "Not find" id)
(let ((record (get-record (car branch-list) id)))
(if (null? record)
record
(find-employee-record (cdr branch-list) id)))))
d)
只需要新增的机构实现相应的get-recorde、get-salary方法即可 |
每个分公司实现get - record过程,并且put进总公司
设立数据项salary,实现过程get - salary
|
3bb7c428b01e44bb9a2025efcbd8e8cf65ec14d1e95ac0bd2d83d0624342a6cc | styx/Raincat | GameState.hs | module Game.GameState
(GameState(GameRunningState,MainMenuState,HowtoMenuState,PostVictoryState)) where
data GameState = GameRunningState | MainMenuState | HowtoMenuState | PostVictoryState
| null | https://raw.githubusercontent.com/styx/Raincat/49b688c73335c9a4090708bc75f6af9575a65670/src/Game/GameState.hs | haskell | module Game.GameState
(GameState(GameRunningState,MainMenuState,HowtoMenuState,PostVictoryState)) where
data GameState = GameRunningState | MainMenuState | HowtoMenuState | PostVictoryState
|
|
709f20bc4e8a91e335ca4a115de365b775ea1e7fb28bf6f07c733cd77f078501 | clojure/core.typed | warn_on_unannotated_var.clj | (ns clojure.core.typed.test.warn-on-unannotated-var
(:require [clojure.core.typed :as t :refer [ann check-ns ann-form print-env cf]])
(:import (clojure.lang IPersistentMap IPersistentVector)))
(t/warn-on-unannotated-vars)
(defn foo [a]
(fn [s] (+ s 1)))
(def bar (+ 1 1.2))
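;; With warn-on-unannotated-vars enabled for this namespace, the checker only
;; warns about the unannotated vars foo and bar (treating them as Any) instead
;; of rejecting the namespace.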
| null | https://raw.githubusercontent.com/clojure/core.typed/f5b7d00bbb29d09000d7fef7cca5b40416c9fa91/typed/checker.jvm/test/clojure/core/typed/test/warn_on_unannotated_var.clj | clojure | (ns clojure.core.typed.test.warn-on-unannotated-var
(:require [clojure.core.typed :as t :refer [ann check-ns ann-form print-env cf]])
(:import (clojure.lang IPersistentMap IPersistentVector)))
(t/warn-on-unannotated-vars)
(defn foo [a]
(fn [s] (+ s 1)))
(def bar (+ 1 1.2))
|
|
eb45e91e6e53bd38339a7a04701962c3ec1bf7210e9237d8f0974e6204e53d03 | ChrisPenner/comonads-by-example | Zipper.hs | module Comonads.Traced.Zipper where
import Comonads.Zipper
import Comonads.Traced
import Control.Comonad
import Data.Monoid
data Dir = L | R
deriving (Show, Eq)
zipper :: Zipper Int
zipper = fromList [1,2,3,4,5]
-- Navigate
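-- tZipper views a zipper through Traced: running the traced function with a
-- Dual [Dir] path follows those moves from the current focus and returns the
-- element it lands on.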
tZipper :: Zipper a -> Traced (Dual [Dir]) a
tZipper z = traced (extract . follow z . getDual)
where
follow :: Zipper a -> [Dir] -> Zipper a
follow z' [] = z'
follow z' (L : rest) = follow (moveLeft' z') rest
follow z' (R : rest) = follow (moveRight' z') rest
| null | https://raw.githubusercontent.com/ChrisPenner/comonads-by-example/1d7626f759e59ac8019322612ed6d7ff00da75c9/src/Comonads/Traced/Zipper.hs | haskell | module Comonads.Traced.Zipper where
import Comonads.Zipper
import Comonads.Traced
import Control.Comonad
import Data.Monoid
data Dir = L | R
deriving (Show, Eq)
zipper :: Zipper Int
zipper = fromList [1,2,3,4,5]
-- Navigate
tZipper :: Zipper a -> Traced (Dual [Dir]) a
tZipper z = traced (extract . follow z . getDual)
where
follow :: Zipper a -> [Dir] -> Zipper a
follow z' [] = z'
follow z' (L : rest) = follow (moveLeft' z') rest
follow z' (R : rest) = follow (moveRight' z') rest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.