filename | data | license
---|---|---
test.ml | (*
Entrypoint to run the unit tests from the command line.
*)
let test_suites = List.flatten [
Test_tree_sitter_gen.Test.test_suites;
Test_tree_sitter_run.Test.test_suites;
]
let main () = Alcotest.run "ocaml-tree-sitter" test_suites
let () = main ()
| (*
Entrypoint to run the unit tests from the command line.
*) |
split-components.ml | open Cudf
open ExtLib
open Common
module PGraph = Defaultgraphs.PackageGraph
module Options =
struct
open OptParse
let debug = StdOpt.store_true ()
let quiet = StdOpt.store_false ()
let outdir = StdOpt.str_option ()
let description = "Create one dot file per connected component of the dependency graph"
let options = OptParser.make ~description:description ()
open OptParser
add options ~long_name:"debug" ~help:"Print debug information" debug;
add options ~short_name:'q' ~long_name:"quiet" ~help:"Do not print additional info" quiet;
add options ~long_name:"outdir" ~help:"specify the output directory" outdir;
end;;
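(* For each connected component of the (undirected) dependency graph, write a
   Graphviz file <package>.dot, named after the component's vertex with the
   highest in-degree ("max inbound"), optionally under --outdir. *)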
let main () =
at_exit (fun () -> Util.dump Format.err_formatter);
let posargs = OptParse.OptParser.parse_argv Options.options in
if OptParse.Opt.get Options.debug then Boilerplate.enable_debug () ;
let (universe,from_cudf,to_cudf) = Boilerplate.load_universe posargs in
let dg = (PGraph.dependency_graph universe) in
let output_ch p =
match OptParse.Opt.opt Options.outdir with
|None -> open_out (Printf.sprintf "%s.dot" p)
|Some s -> open_out (Printf.sprintf "%s/%s.dot" s p)
in
let l =
let cmp g1 g2 = (PGraph.UG.nb_vertex g2) - (PGraph.UG.nb_vertex g1) in
List.sort ~cmp:cmp (PGraph.connected_components (PGraph.undirect dg))
in
List.iter (fun cc ->
let l = ref [] in
PGraph.UG.iter_vertex (fun v -> l := v :: !l) cc;
let u = Cudf.load_universe !l in
let g = PGraph.dependency_graph u in
let maxv = ref (0, ref (List.hd(!l))) in
PGraph.G.iter_vertex (fun v ->
let d = PGraph.G.in_degree g v in
if d > fst(!maxv) then
maxv := (d, ref v)
) g;
if OptParse.Opt.get Options.quiet then
Printf.printf "package:%s\nnumber of nodes:%d\nmax inbound:%d\n\n"
(!(snd(!maxv))).package
(PGraph.G.nb_vertex g)
(fst(!maxv));
let outch = output_ch (!(snd(!maxv))).package in
PGraph.D.output_graph outch g;
close_out outch
) l
;;
main ();;
| (**************************************************************************************)
(* Copyright (C) 2009 Pietro Abate <[email protected]> *)
(* Copyright (C) 2009 Mancoosi Project *)
(* *)
(* This library is free software: you can redistribute it and/or modify *)
(* it under the terms of the GNU Lesser General Public License as *)
(* published by the Free Software Foundation, either version 3 of the *)
(* License, or (at your option) any later version. A special linking *)
(* exception to the GNU Lesser General Public License applies to this *)
(* library, see the COPYING file for more information. *)
(**************************************************************************************)
|
FileUtilTEST.ml | open FileUtilTypes
open FilePath
open FileUtilMisc
open FileUtilSize
open FileUtilSTAT
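(* [compile_filter ?match_compile flt] compiles the filter [flt] into a
   predicate on filenames. The filter is first constant-folded (the [cc]
   pass), then analysed to decide whether file metadata is needed at all,
   so that the returned closure only calls [stat] when required and can
   reuse the optional [?st_opt]/[?stL_opt] results passed by the caller. *)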
let compile_filter ?(match_compile=(fun s fn -> s = fn)) flt =
let cflt =
let rec cc =
function
| True -> `Val true
| False -> `Val false
| Is_dev_block -> `Stat (`Kind Dev_block)
| Is_dev_char -> `Stat (`Kind Dev_char)
| Is_dir -> `Stat (`Kind Dir)
| Is_file -> `Stat (`Kind File)
| Is_socket -> `Stat (`Kind Socket)
| Is_pipe -> `Stat (`Kind Fifo)
| Is_link -> `Is_link
| Is_set_group_ID -> `Stat `Is_set_group_ID
| Has_sticky_bit -> `Stat `Has_sticky_bit
| Has_set_user_ID -> `Stat `Has_set_user_ID
| Is_readable -> `Stat `Is_readable
| Is_writeable -> `Stat `Is_writeable
| Is_exec -> `Stat `Is_exec
| Size_not_null -> `Stat (`Size (`Bigger, B 0L))
| Size_bigger_than sz -> `Stat (`Size (`Bigger, sz))
| Size_smaller_than sz -> `Stat (`Size (`Smaller, sz))
| Size_equal_to sz -> `Stat (`Size (`Equal, sz))
| Size_fuzzy_equal_to sz -> `Stat (`Size (`FuzzyEqual, sz))
| Is_owned_by_user_ID ->
`Stat (`Is_owned_by_user_ID (Unix.geteuid ()))
| Is_owned_by_group_ID ->
`Stat (`Is_owned_by_group_ID (Unix.getegid ()))
| Exists -> `Stat `Exists
| Is_newer_than fn1 -> `Stat (`Newer (stat fn1).modification_time)
| Is_older_than fn1 -> `Stat (`Older (stat fn1).modification_time)
| Is_newer_than_date(dt) -> `Stat (`Newer dt)
| Is_older_than_date(dt) -> `Stat (`Older dt)
| Has_extension ext -> `Has_extension ext
| Has_no_extension -> `Has_no_extension
| Is_current_dir -> `Is_current_dir
| Is_parent_dir -> `Is_parent_dir
| Basename_is s -> `Basename_is s
| Dirname_is s -> `Dirname_is s
| Custom f -> `Custom f
| Match str -> `Custom (match_compile str)
| And(flt1, flt2) ->
begin
match cc flt1, cc flt2 with
| `Val true, cflt | cflt, `Val true -> cflt
| `Val false, _ | _, `Val false -> `Val false
| cflt1, cflt2 -> `And (cflt1, cflt2)
end
| Or(flt1, flt2) ->
begin
match cc flt1, cc flt2 with
| `Val true, _ | _, `Val true -> `Val true
| `Val false, cflt | cflt, `Val false -> cflt
| cflt1, cflt2 -> `Or (cflt1, cflt2)
end
| Not flt ->
begin
match cc flt with
| `Val b -> `Val (not b)
| cflt -> `Not cflt
end
in
cc flt
in
let need_statL, need_stat =
let rec dfs =
function
| `Val _ | `Has_extension _ | `Has_no_extension | `Is_current_dir
| `Is_parent_dir | `Basename_is _ | `Dirname_is _
| `Custom _ ->
false, false
| `Stat _ ->
true, false
| `Is_link ->
false, true
| `And (cflt1, cflt2) | `Or (cflt1, cflt2) ->
let need_stat1, need_statL1 = dfs cflt1 in
let need_stat2, need_statL2 = dfs cflt2 in
need_stat1 || need_stat2, need_statL1 || need_statL2
| `Not cflt ->
dfs cflt
in
dfs cflt
in
(* Compiled function to return. *)
fun ?st_opt ?stL_opt fn ->
let st_opt =
if need_stat && st_opt = None then begin
try
match stL_opt with
| Some st when not st.is_link -> stL_opt
| _ -> Some (stat fn)
with FileDoesntExist _ ->
None
end else
st_opt
in
let stL_opt =
if need_statL && stL_opt = None then begin
try
match st_opt with
| Some st when not st.is_link -> st_opt
| _ -> Some (stat ~dereference:true fn)
with FileDoesntExist _ ->
None
end else
stL_opt
in
let rec eval =
function
| `Val b -> b
| `Has_extension ext ->
begin
try
check_extension fn ext
with FilePath.NoExtension _ ->
false
end
| `Has_no_extension ->
begin
try
let _str: filename = chop_extension fn in
false
with FilePath.NoExtension _ ->
true
end
| `Is_current_dir -> is_current (basename fn)
| `Is_parent_dir -> is_parent (basename fn)
| `Basename_is bn -> (FilePath.compare (basename fn) bn) = 0
| `Dirname_is dn -> (FilePath.compare (dirname fn) dn) = 0
| `Custom f -> f fn
| `Stat e ->
begin
match stL_opt, e with
| Some _, `Exists -> true
| Some stL, `Kind knd -> stL.kind = knd
| Some stL, `Is_set_group_ID -> stL.permission.group.sticky
| Some stL, `Has_sticky_bit -> stL.permission.other.sticky
| Some stL, `Has_set_user_ID -> stL.permission.user.sticky
| Some stL, `Size (cmp, sz) ->
begin
let diff = size_compare stL.size sz in
match cmp with
| `Bigger -> diff > 0
| `Smaller -> diff < 0
| `Equal -> diff = 0
| `FuzzyEqual ->
(size_compare ~fuzzy:true stL.size sz) = 0
end
| Some stL, `Is_owned_by_user_ID uid -> uid = stL.owner
| Some stL, `Is_owned_by_group_ID gid -> gid = stL.group_owner
| Some stL, `Is_readable ->
let perm = stL.permission in
perm.user.read || perm.group.read || perm.other.read
| Some stL, `Is_writeable ->
let perm = stL.permission in
perm.user.write || perm.group.write || perm.other.write
| Some stL, `Is_exec ->
let perm = stL.permission in
perm.user.exec || perm.group.exec || perm.other.exec
| Some stL, `Newer dt -> stL.modification_time > dt
| Some stL, `Older dt -> stL.modification_time < dt
| None, _ -> false
end
| `Is_link ->
begin
match st_opt with
| Some st -> st.is_link
| None -> false
end
| `And (cflt1, cflt2) -> (eval cflt1) && (eval cflt2)
| `Or (cflt1, cflt2) -> (eval cflt1) || (eval cflt2)
| `Not cflt -> not (eval cflt)
in
eval cflt
let test ?match_compile tst =
let ctst = compile_filter ?match_compile tst in
fun fln -> ctst (solve_dirname fln)
let filter flt lst = List.filter (test flt) lst
let test_exists = test (Or(Exists, Is_link))
| (******************************************************************************)
(* ocaml-fileutils: files and filenames common operations *)
(* *)
(* Copyright (C) 2003-2014, Sylvain Le Gall *)
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
(* the Free Software Foundation; either version 2.1 of the License, or (at *)
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file *)
(* COPYING for more details. *)
(* *)
(* You should have received a copy of the GNU Lesser General Public License *)
(* along with this library; if not, write to the Free Software Foundation, *)
(* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *)
(******************************************************************************)
|
parsing_hacks_go.ml | open Common
open Parser_go
module Flag = Flag_parsing
module T = Parser_go
module TH = Token_helpers_go
module F = Ast_fuzzy
(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)
(* The goal of this module is to retag tokens (e.g., an LBRACE into an LBODY)
 * or to insert tokens (e.g., implicit semicolons) so that the grammar
 * remains simple and unambiguous.
*
* See lang_cpp/parsing/parsing_hacks.ml for more information about
* this technique.
*)
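(* Illustration (not from the original sources): for the Go input
 *   x := 1
 *   return x
 * the lexer emits no semicolons; [fix_tokens_asi] below inserts a fake
 * LSEMICOLON after '1' and after 'x', mimicking Go's automatic semicolon
 * insertion rule, so the grammar can stay close to the language spec.
 *)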
(*****************************************************************************)
(* Types *)
(*****************************************************************************)
type env_lbody = InIfHeader | Normal
(*****************************************************************************)
(* Helpers *)
(*****************************************************************************)
(* alt: could we instead have a better Ast_fuzzy type, rather than putting
 * everything in the Tok category?
*)
let is_identifier horigin (info : Parse_info.t) =
match Hashtbl.find_opt horigin info with
| Some (T.LNAME _) -> true
| _ -> false
(*****************************************************************************)
(* ASI *)
(*****************************************************************************)
let fix_tokens_asi xs =
let env = () in
let rec aux env xs =
match xs with
| [] -> []
(* ASI: automatic semicolon insertion, similar to JavaScript *)
| (( LNAME _ | LINT _ | LFLOAT _ | LIMAG _ | LRUNE _ | LSTR _ | LBREAK _
| LCONTINUE _ | LFALL _ | LRETURN _ | LINC _ | LDEC _ | RPAREN _
| RBRACE _ | RBRACKET _
(* sgrep-ext: *)
| LDDD _ ) as x)
:: ((TCommentNewline ii | EOF ii) as y)
:: xs -> (
match (x, y, !Flag_parsing.sgrep_mode) with
(* do NOT ASI *)
(* sgrep-ext: only in sgrep-mode *)
| LDDD _, _, false
(* sgrep-ext: we don't want $X==$X to be transformed
 * into $X==$X; in sgrep mode
*)
| _, EOF _, true ->
x :: y :: aux env xs
(* otherwise do ASI *)
| _ ->
let iifake = Parse_info.rewrap_str "FAKE ';'" ii in
(* implicit semicolon insertion *)
x :: LSEMICOLON iifake :: y :: aux env xs)
| x :: xs -> x :: aux env xs
in
aux env xs
(*****************************************************************************)
(* LBODY *)
(*****************************************************************************)
(* retagging:
* - '{' when part of a composite literal
* - '{' when composite literal in semgrep at toplevel
* - ':' when part of a keyval in semgrep at toplevel
*
* This is similar to what we do in parsing_hacks_js.ml to overcome
* some shift/reduce limitations by cheating and inventing new tokens.
*)
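(* Illustration (not from the original sources): in
 *   for i := range []int{1, 2} { ... }
 * the first '{' belongs to the composite literal []int{...} while the
 * second one opens the loop body; retagging the second one as LBODY (see
 * the Braces-followed-by-Braces cases below) removes the ambiguity.
 *)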
let fix_tokens_lbody toks =
try
let trees =
Lib_ast_fuzzy.mk_trees
{ Lib_ast_fuzzy.tokf = TH.info_of_tok; kind = TH.token_kind_of_tok }
toks
in
let horigin =
toks |> List.map (fun t -> (TH.info_of_tok t, t)) |> Common.hash_of_list
in
let retag_lbrace = Hashtbl.create 101 in
let retag_lbrace_semgrep = Hashtbl.create 1 in
let retag_lcolon_semgrep = Hashtbl.create 1 in
let retag_lparen_semgrep = Hashtbl.create 1 in
(match trees with
(* TODO: check that actually a composite literal in it? *)
| F.Braces (t1, _body, _) :: _ when !Flag_parsing.sgrep_mode ->
Hashtbl.add retag_lbrace_semgrep t1 true
(* no way it's a label *)
| F.Tok (_s, info) :: F.Tok (":", t2) :: _
when !Flag_parsing.sgrep_mode && is_identifier horigin info ->
Hashtbl.add retag_lcolon_semgrep t2 true
(* TODO: could check that xs looks like a parameter list
* TODO what comes after Parens could be a symbol part of a type
* instead of just a single type like 'int'?
*)
| F.Tok (_s, info) :: F.Parens (l, _xs, _r) :: F.Tok (_s2, info2) :: _
when !Flag_parsing.sgrep_mode && is_identifier horigin info
&& is_identifier horigin info2 ->
Hashtbl.add retag_lparen_semgrep l true
| _ -> ());
let rec aux env trees =
match trees with
| [] -> ()
(* if func(...) bool { return ... }(...) { ... } *)
| F.Braces (_lb1, xs1, _rb1)
:: F.Parens (_lb2, xs2, _rb2)
:: F.Braces (lb3, xs3, _rb3)
:: ys
when env =*= InIfHeader ->
Hashtbl.add retag_lbrace lb3 true;
aux Normal xs1;
xs2
|> List.iter (function
| Left xs -> aux Normal xs
| Right _ -> ());
aux Normal xs3;
aux Normal ys (* for a := struct {...} { ... } { ... } *)
| F.Tok (("struct" | "interface"), _)
:: F.Braces (_lb1, xs1, _rb1)
:: F.Braces (_lb2, xs2, _rb2)
:: F.Braces (lb3, xs3, _rb3)
:: ys
when env =*= InIfHeader ->
Hashtbl.add retag_lbrace lb3 true;
aux Normal xs1;
aux Normal xs2;
aux Normal xs3;
aux Normal ys
(* must be after previous case *)
(* skipping: if ok := interface{} ... *)
| F.Tok (("struct" | "interface"), _) :: F.Braces (_lb1, xs1, _rb1) :: ys
when env =*= InIfHeader ->
aux Normal xs1;
aux env ys
(* for a := range []int{...} { ... } *)
| F.Braces (_lb1, xs1, _rb1) :: F.Braces (lb2, xs2, _rb2) :: ys
when env =*= InIfHeader ->
Hashtbl.add retag_lbrace lb2 true;
aux Normal xs1;
aux Normal xs2;
aux Normal ys (* False Positive (FP): for ... {}[...] *)
| F.Braces (_lb, xs, _rb) :: F.Bracket (_, ys, _) :: zs
when env =*= InIfHeader ->
aux Normal xs;
aux Normal ys;
aux env zs
(* False Positive (FP): if ... {}; ... { *)
| F.Braces (_lb, xs, _rb) :: F.Tok (";", _) :: zs when env =*= InIfHeader
->
aux Normal xs;
aux env zs
| F.Braces (lb, xs, _rb) :: ys ->
(* for ... { ... } *)
if env =*= InIfHeader then Hashtbl.add retag_lbrace lb true;
aux Normal xs;
aux Normal ys
| F.Tok (("if" | "for" | "switch" | "select"), _) :: xs ->
aux InIfHeader xs
| x :: xs ->
(match x with
| F.Parens (_, xs, _) ->
xs
|> List.iter (function
| Left trees -> aux Normal trees
| Right _comma -> ())
| _ -> ());
aux env xs
in
aux Normal trees;
(* use the tagged information and transform tokens *)
toks
|> List.map (function
| T.LBRACE info when Hashtbl.mem retag_lbrace info -> T.LBODY info
| T.LBRACE info when Hashtbl.mem retag_lbrace_semgrep info ->
T.LBRACE_SEMGREP info
| T.LCOLON info when Hashtbl.mem retag_lcolon_semgrep info ->
T.LCOLON_SEMGREP info
| T.LPAREN info when Hashtbl.mem retag_lparen_semgrep info ->
T.LPAREN_SEMGREP info
| x -> x)
with
| Lib_ast_fuzzy.Unclosed (msg, info) ->
if !Flag.error_recovery then toks
else raise (Parse_info.Lexical_error (msg, info))
(*****************************************************************************)
(* Entry point *)
(*****************************************************************************)
let fix_tokens xs =
let xs = fix_tokens_asi xs in
fix_tokens_lbody xs
| (* Yoann Padioleau
*
* Copyright (C) 2019 r2c
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
* license.txt for more details.
*
*) |
attr8.c |
void foo(char * x) __attribute__((__volatile__));
void foo(char * x) {
while(1) { ; }
}
int main(int argc, char **argv) {
foo(0);
return 0;
}
| |
timedesc_tzdb.ml |
module M = Map.Make (String)
type entry = {
is_dst : bool;
offset : int;
}
type table =
(int64, Bigarray.int64_elt, Bigarray.c_layout) Bigarray.Array1.t * entry array
let db = None
let compressed = Some Tzdb_compressed.s
| |
frx_mem.mli | val init : unit -> unit
(* [init ()] creates the gauge and its updater, but keeps it iconified *)
val f : unit -> unit
(* [f ()] makes the gauge visible if it has not been destroyed *)
| (***********************************************************************)
(* *)
(* MLTk, Tcl/Tk interface of OCaml *)
(* *)
(* Francois Rouaix, Francois Pessaux, Jun Furuse and Pierre Weis *)
(* projet Cristal, INRIA Rocquencourt *)
(* Jacques Garrigue, Kyoto University RIMS *)
(* *)
(* Copyright 2002 Institut National de Recherche en Informatique et *)
(* en Automatique and Kyoto University. All rights reserved. *)
(* This file is distributed under the terms of the GNU Library *)
(* General Public License, with the special exception on linking *)
(* described in file LICENSE found in the OCaml source tree. *)
(* *)
(***********************************************************************)
(* A Garbage Collector Gauge for OCaml *)
|
test_float.ml |
open! Core
open Poly
open! Import
let%expect_test "[Pervasives.float_of_string] supports underscores" =
print_endline (Caml.string_of_float (Caml.float_of_string "1_234.567_8"));
[%expect {|
1234.5678 |}]
;;
let%expect_test "[Sexp.of_float_style] is respected by the various names for [float]" =
let f = 1234.5678 in
let print () =
print_s [%sexp (f : float)];
print_s [%sexp (f : Float.t)];
print_s [%sexp (f : Core.Core_stable.float)]
in
print ();
[%expect {|
1234.5678
1234.5678
1234.5678 |}];
Ref.set_temporarily Sexp.of_float_style `Underscores ~f:print;
[%expect {|
1_234.5678
1_234.5678
1_234.5678 |}]
;;
let%expect_test "[Sexp.of_float_style = `Underscores]" =
let check f =
let sexp style =
Ref.set_temporarily Sexp.of_float_style style ~f:(fun () -> [%sexp (f : float)])
in
print_s [%sexp (sexp `No_underscores : Sexp.t), (sexp `Underscores : Sexp.t)];
if not (Float.is_nan f)
then require [%here] (Float.equal f (sexp `Underscores |> [%of_sexp: Float.t]))
in
List.iter
[ 0.
; Float.min_positive_subnormal_value
; Float.min_positive_normal_value
; 1E-7
; 1.
; 12.
; 123.
; 1234.
; 12345.
; 1234E100
; Float.max_value
; Float.nan
]
~f:(fun f ->
check f;
check (-.f));
[%expect
{|
(0 0)
(-0 -0)
(4.94065645841247E-324 4.94065645841247E-324)
(-4.94065645841247E-324 -4.94065645841247E-324)
(2.2250738585072014E-308 2.2250738585072014E-308)
(-2.2250738585072014E-308 -2.2250738585072014E-308)
(1E-07 1E-07)
(-1E-07 -1E-07)
(1 1)
(-1 -1)
(12 12)
(-12 -12)
(123 123)
(-123 -123)
(1234 1_234)
(-1234 -1_234)
(12345 12_345)
(-12345 -12_345)
(1.234E+103 1.234E+103)
(-1.234E+103 -1.234E+103)
(INF INF)
(-INF -INF)
(NAN NAN)
({-,}NAN {-,}NAN) (glob) |}]
;;
let%test_unit "round_nearest_half_to_even quickcheck" =
Quickcheck.test ~trials:200 (Int.gen_incl (-100_000_000) 100_000_000) ~f:(fun i ->
let x = float i /. 10. in
let y = Float.round_nearest_half_to_even x in
let f = Float.round_nearest x in
let is_tie = Int.( % ) i 10 = 5 in
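(* A tie is a value whose fractional part is exactly .5 (i mod 10 = 5 here):
   the half-to-even result must then be even and exactly 0.5 away from [x];
   in all other cases it must agree with [Float.round_nearest]. *)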
assert (
(is_tie && Float.mod_float y 2. = 0. && Float.abs (y -. x) = 0.5)
|| ((not is_tie) && y = f));
let x' = Float.one_ulp `Up x in
let x'' = Float.one_ulp `Down x in
assert (Float.round_nearest_half_to_even x' = Float.round_nearest x');
assert (Float.round_nearest_half_to_even x'' = Float.round_nearest x''))
;;
let%expect_test "robust_sign" =
let test n = print_s [%sexp (Float.robust_sign n : Sign.t)] in
test 1e-6;
[%expect "Pos"];
test 1e-8;
[%expect "Zero"];
test (-1e-6);
[%expect "Neg"];
test (-1e-8);
[%expect "Zero"];
test (-0.);
[%expect "Zero"];
test 0.;
[%expect "Zero"];
test Float.neg_infinity;
[%expect "Neg"];
(* preserve this old behavior of [sign] *)
test Float.nan;
[%expect "Zero"]
;;
(* Make sure float comparison didn't accidentally get redefined using [compare]. *)
let%test _ = not (Float.( < ) Float.nan 0.)
(* When we put a similar test in base/test, it doesn't behave the same, and undesirable
versions of [Float.is_positive] that allocate when we put the test here don't allocate
when we put the test there. So, we put the test here. *)
let%expect_test (_ [@tags "64-bits-only", "x-library-inlining-sensitive"]) =
let a = [| 1. |] in
(* a.(0) is unboxed *)
let one = 1. in
(* [one] is boxed *)
ignore (require_no_allocation [%here] (fun () -> Float.( > ) a.(0) 0.) : bool);
[%expect {| |}];
ignore (require_no_allocation [%here] (fun () -> Float.compare a.(0) 0. > 0) : bool);
[%expect {| |}];
ignore (require_no_allocation [%here] (fun () -> Float.is_positive a.(0)) : bool);
[%expect {| |}];
ignore (require_no_allocation [%here] (fun () -> Float.is_positive one) : bool);
[%expect {| |}]
;;
let%test_module "round_significant" =
(module struct
let round_significant = Float.round_significant
let%test_unit "key values" =
[%test_result: float]
(round_significant ~significant_digits:3 0.0045678)
~expect:0.00457;
[%test_result: float]
(round_significant ~significant_digits:3 123456.)
~expect:123000.;
[%test_result: float] (round_significant ~significant_digits:3 0.) ~expect:0.;
[%test_result: float]
(round_significant ~significant_digits:3 Float.nan)
~expect:Float.nan;
[%test_result: float]
(round_significant ~significant_digits:3 Float.infinity)
~expect:Float.infinity;
[%test_result: float]
(round_significant ~significant_digits:3 Float.neg_infinity)
~expect:Float.neg_infinity;
[%test_result: float]
(round_significant ~significant_digits:1 (-5.85884163457842E+100))
~expect:(-6E+100);
[%test_result: float]
(round_significant ~significant_digits:16 (-129361178280336660.))
~expect:(-129361178280336700.);
(* An example where it appears like we don't round to even (since the argument is
under-represented as a float). *)
[%test_result: float]
(round_significant ~significant_digits:11 4.36083208835)
~expect:4.3608320883
;;
let%test_unit ("round_significant vs sprintf quickcheck 1" [@tags "64-bits-only"]) =
for significant_digits = 1 to 16 do
let open Quickcheck in
test
Float.gen_without_nan
~trials:10_000
~sexp_of:(fun float -> [%message "" (float : float) (significant_digits : int)])
~f:(fun x ->
let s = sprintf "%.*g" significant_digits x |> Float.of_string in
assert (
s = round_significant ~significant_digits x
|| s = round_significant ~significant_digits (Float.one_ulp `Up x)
|| s = round_significant ~significant_digits (Float.one_ulp `Down x)))
done
;;
let%test_unit ("round_significant vs sprintf quickcheck 2" [@tags "64-bits-only"]) =
(* this test is much more likely to exercise cases when we're off by an ulp *)
let num_digits_gen = Int.gen_incl 1 18 in
let digits_gen num_digits =
let x = Int63.(pow (of_int 10) (of_int num_digits) - of_int 1) in
Int63.gen_incl Int63.(~-x) x
in
let scale_gen = Int.gen_incl (-20) 20 in
let sf_gen = Int.gen_incl 1 18 in
Quickcheck.test
~trials:1000
(Quickcheck.Generator.tuple3
(Quickcheck.Generator.bind num_digits_gen ~f:digits_gen)
scale_gen
sf_gen)
~f:(fun (digits, scale, sf) ->
let x =
if scale > 0
then Int63.to_float digits *. (10. ** float scale)
else Int63.to_float digits /. (10. ** float (-scale))
in
let r = round_significant ~significant_digits:sf x in
let r1 = round_significant ~significant_digits:sf (Float.one_ulp `Up x) in
let r2 = round_significant ~significant_digits:sf (Float.one_ulp `Down x) in
let s = sprintf "%.*g" sf x |> Float.of_string in
assert (s = r || s = r1 || s = r2))
;;
let%test "0 significant digits" =
Exn.does_raise (fun () ->
ignore (round_significant ~significant_digits:0 1.3 : float))
;;
end)
;;
let%test_module "round_decimal" =
(module struct
let round_decimal = Float.round_decimal
let%test_unit "key values" =
[%test_result: float] (round_decimal ~decimal_digits:3 0.0045678) ~expect:0.005;
[%test_result: float] (round_decimal ~decimal_digits:0 0.0045678) ~expect:0.;
[%test_result: float] (round_decimal ~decimal_digits:0 1.0045678) ~expect:1.;
[%test_result: float] (round_decimal ~decimal_digits:3 123456.) ~expect:123456.;
[%test_result: float] (round_decimal ~decimal_digits:(-3) 123456.) ~expect:123000.;
[%test_result: float] (round_decimal ~decimal_digits:3 0.) ~expect:0.;
[%test_result: float] (round_decimal ~decimal_digits:3 Float.nan) ~expect:Float.nan;
[%test_result: float]
(round_decimal ~decimal_digits:3 Float.infinity)
~expect:Float.infinity;
[%test_result: float]
(round_decimal ~decimal_digits:3 Float.neg_infinity)
~expect:Float.neg_infinity;
[%test_result: float]
(round_decimal ~decimal_digits:(-100) (-5.85884163457842E+100))
~expect:(-6E+100);
[%test_result: float]
(round_decimal ~decimal_digits:0 (-129361178280336660.))
~expect:(-129361178280336660.);
[%test_result: float]
(round_decimal ~decimal_digits:(-2) (-129361178280336660.))
~expect:(-129361178280336700.);
[%test_result: float]
(round_decimal ~decimal_digits:10 4.36083208835)
~expect:4.3608320883
;;
let%test_unit ("round_decimal vs sprintf quickcheck 1" [@tags "64-bits-only"]) =
for decimal_digits = 1 to 16 do
let open Quickcheck in
test
Float.gen_without_nan
~trials:10_000
~sexp_of:(fun float -> [%message "" (float : float) (decimal_digits : int)])
~f:(fun x ->
let s = sprintf "%.*f" decimal_digits x |> Float.of_string in
assert (
s = round_decimal ~decimal_digits x
|| s = round_decimal ~decimal_digits (Float.one_ulp `Up x)
|| s = round_decimal ~decimal_digits (Float.one_ulp `Down x)))
done
;;
end)
;;
open! Float
let test_class quickcheck_generator expect =
Quickcheck.test quickcheck_generator ~f:(fun float ->
let actual = classify float in
if not (Int.equal (Class.compare actual expect) 0)
then
raise_s
[%message
"generator produced float in wrong class"
(float : t)
(expect : Class.t)
(actual : Class.t)])
;;
let%test_unit _ = test_class gen_zero Zero
let%test_unit _ = test_class gen_subnormal Subnormal
let%test_unit _ = test_class gen_normal Normal
let%test_unit _ = test_class gen_infinite Infinite
let%test_unit _ = test_class gen_nan Nan
(* Additional tests of Base.Float requiring the Gc module *)
let%test (_ [@tags "64-bits-only"]) =
let before = Gc.minor_words () in
assert (Int63.equal (int63_round_nearest_exn 0.8) (Int63.of_int_exn 1));
let after = Gc.minor_words () in
Int.equal before after
;;
let%test_unit "Float.validate_positive doesn't allocate on success" =
let initial_words = Gc.minor_words () in
let (_ : Validate.t) = validate_positive 1. in
let allocated = Int.( - ) (Gc.minor_words ()) initial_words in
[%test_result: int] allocated ~expect:0
;;
let%test_module _ =
(module struct
let check v expect =
match Validate.result v, expect with
| Ok (), `Ok | Error _, `Error -> ()
| r, expect ->
raise_s [%message "mismatch" (r : unit Or_error.t) (expect : [ `Ok | `Error ])]
;;
let%test_unit _ = check (validate_lbound ~min:(Incl 0.) nan) `Error
let%test_unit _ = check (validate_lbound ~min:(Incl 0.) infinity) `Error
let%test_unit _ = check (validate_lbound ~min:(Incl 0.) neg_infinity) `Error
let%test_unit _ = check (validate_lbound ~min:(Incl 0.) (-1.)) `Error
let%test_unit _ = check (validate_lbound ~min:(Incl 0.) 0.) `Ok
let%test_unit _ = check (validate_lbound ~min:(Incl 0.) 1.) `Ok
let%test_unit _ = check (validate_ubound ~max:(Incl 0.) nan) `Error
let%test_unit _ = check (validate_ubound ~max:(Incl 0.) infinity) `Error
let%test_unit _ = check (validate_ubound ~max:(Incl 0.) neg_infinity) `Error
let%test_unit _ = check (validate_ubound ~max:(Incl 0.) (-1.)) `Ok
let%test_unit _ = check (validate_ubound ~max:(Incl 0.) 0.) `Ok
let%test_unit _ = check (validate_ubound ~max:(Incl 0.) 1.) `Error
end)
;;
| |
dune |
(mdx)
| |
skip_list_benchmarks.ml | (** This module provides benchmarks for skip list operations for basis = 4. *)
open Protocol
module Skip_list = Skip_list_repr.Make (struct
(** The benchmarks must be run again if [basis] is changed. *)
let basis = 4
end)
let ns = Namespace.make Registration_helpers.ns "skip_list"
let fv s = Free_variable.of_namespace (ns s)
(** Benchmark for the [Skip_list_repr.next] function. It is used for estimating
the parameters for [Skip_list_cost_model.model_next]. *)
module Next : Benchmark.S = struct
include Skip_list
let name = ns "next"
let info = "Benchmark for Skip_list_repr.next"
let tags = ["skip_list"]
type config = {max_items : int}
let default_config = {max_items = 10000}
let config_encoding =
let open Data_encoding in
conv (fun {max_items} -> max_items) (fun max_items -> {max_items}) int31
type workload = int
let workload_encoding = Data_encoding.int31
let workload_to_vector len =
Sparse_vec.String.of_list [("len", float_of_int @@ len)]
let next_model =
let conv x = (x, ()) in
Model.make ~conv ~model:(Model.logn ~coeff:(fv "len_coeff"))
let models = [("skip_list_next", next_model)]
let create_skip_list_of_len len =
let rec go n cell =
if n = 0 then cell
else go (pred n) @@ next ~prev_cell:cell ~prev_cell_ptr:() ()
in
go len (genesis ())
let create_benchmarks ~rng_state ~bench_num ({max_items} : config) =
List.repeat bench_num @@ fun () ->
let workload =
Base_samplers.sample_in_interval
rng_state
~range:{min = 0; max = max_items}
in
let prev_cell = create_skip_list_of_len workload in
let prev_cell_ptr = () in
let closure () = ignore (next ~prev_cell ~prev_cell_ptr ()) in
Generator.Plain {workload; closure}
let () =
Registration.register_for_codegen
(Namespace.basename name)
(Model.For_codegen next_model)
end
(** Benchmark for the [Sc_rollup_inbox_repr.hash_skip_list_cell]
function. It is used for estimating the parameters for
[Skip_list_cost_model.model_hash_cell]. The model estimates the cost of
hashing a skip_list cell's content and all its back pointers. *)
module Hash_cell = struct
let name = ns "hash_cell"
let info = "Estimating the costs of hashing a skip list cell"
let tags = ["skip_list"]
include Skip_list
module Hash = Sc_rollup_inbox_repr.Hash
let hash merkelized =
let payload_hash = Skip_list.content merkelized in
let back_pointers_hashes = Skip_list.back_pointers merkelized in
Hash.to_bytes payload_hash :: List.map Hash.to_bytes back_pointers_hashes
|> Hash.hash_bytes
type config = {max_index : int}
let config_encoding =
let open Data_encoding in
conv
(fun {max_index} -> max_index)
(fun max_index -> {max_index})
(obj1 (req "max_index" int31))
let default_config = {max_index = 65536}
type workload = {nb_backpointers : int}
let workload_encoding =
let open Data_encoding in
conv
(fun {nb_backpointers} -> nb_backpointers)
(fun nb_backpointers -> {nb_backpointers})
(obj1 (req "max_nb_backpointers" int31))
let workload_to_vector {nb_backpointers} =
Sparse_vec.String.of_list
[("nb_backpointers", float_of_int nb_backpointers)]
let hash_skip_list_cell_model =
Model.make
~conv:(fun {nb_backpointers} -> (nb_backpointers, ()))
~model:
(Model.affine
~intercept:(Free_variable.of_string "cost_hash_skip_list_cell")
~coeff:(Free_variable.of_string "cost_hash_skip_list_cell_coef"))
let models = [("skip_list_hash", hash_skip_list_cell_model)]
let benchmark rng_state conf () =
let skip_list_len =
Base_samplers.sample_in_interval
~range:{min = 1; max = conf.max_index}
rng_state
in
let random_hash () =
Hash.hash_string
[Base_samplers.string ~size:{min = 32; max = 32} rng_state]
in
let cell =
let rec repeat n cell =
if n = 0 then cell
else
let prev_cell = cell and prev_cell_ptr = hash cell in
repeat
(n - 1)
(Skip_list.next ~prev_cell ~prev_cell_ptr (random_hash ()))
in
repeat skip_list_len (Skip_list.genesis (random_hash ()))
in
let nb_backpointers = List.length (Skip_list.back_pointers cell) in
let workload = {nb_backpointers} in
let closure () = ignore (hash cell) in
Generator.Plain {workload; closure}
let create_benchmarks ~rng_state ~bench_num config =
List.repeat bench_num (benchmark rng_state config)
let () =
Registration.register_for_codegen
(Namespace.basename name)
(Model.For_codegen hash_skip_list_cell_model)
end
let () = Registration_helpers.register (module Next)
let () = Registration_helpers.register (module Hash_cell)
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2022 Nomadic Labs, <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
alpha_context.ml | type t = Raw_context.t
type context = t
module type BASIC_DATA = sig
type t
include Compare.S with type t := t
val encoding: t Data_encoding.t
val pp: Format.formatter -> t -> unit
end
module Tez = Tez_repr
module Period = Period_repr
module Timestamp = struct
include Time_repr
let current = Raw_context.current_timestamp
end
include Operation_repr
module Operation = struct
type 'kind t = 'kind operation = {
shell: Operation.shell_header ;
protocol_data: 'kind protocol_data ;
}
type packed = packed_operation
let unsigned_encoding = unsigned_operation_encoding
include Operation_repr
end
module Block_header = Block_header_repr
module Vote = struct
include Vote_repr
include Vote_storage
end
module Raw_level = Raw_level_repr
module Cycle = Cycle_repr
module Script_int = Script_int_repr
module Script_timestamp = struct
include Script_timestamp_repr
let now ctxt =
Raw_context.current_timestamp ctxt
|> Timestamp.to_seconds
|> of_int64
end
module Script = struct
include Michelson_v1_primitives
include Script_repr
let force_decode ctxt lexpr =
Lwt.return
(Script_repr.force_decode lexpr >>? fun (v, cost) ->
Raw_context.consume_gas ctxt cost >|? fun ctxt ->
(v, ctxt))
let force_bytes ctxt lexpr =
Lwt.return
(Script_repr.force_bytes lexpr >>? fun (b, cost) ->
Raw_context.consume_gas ctxt cost >|? fun ctxt ->
(b, ctxt))
end
module Fees = Fees_storage
type public_key = Signature.Public_key.t
type public_key_hash = Signature.Public_key_hash.t
type signature = Signature.t
module Constants = struct
include Constants_repr
include Constants_storage
end
module Voting_period = Voting_period_repr
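(* Gas accounting: re-export [Gas_limit_repr] together with the gas-tracking
   primitives implemented in [Raw_context]. *)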
module Gas = struct
include Gas_limit_repr
type error += Gas_limit_too_high = Raw_context.Gas_limit_too_high
let check_limit = Raw_context.check_gas_limit
let set_limit = Raw_context.set_gas_limit
let set_unlimited = Raw_context.set_gas_unlimited
let consume = Raw_context.consume_gas
let check_enough = Raw_context.check_enough_gas
let level = Raw_context.gas_level
let consumed = Raw_context.gas_consumed
let block_level = Raw_context.block_gas_level
end
module Level = struct
include Level_repr
include Level_storage
end
module Contract = struct
include Contract_repr
include Contract_storage
let originate c contract ~balance ~manager ?script ~delegate
~spendable ~delegatable =
originate c contract ~balance ~manager ?script ~delegate
~spendable ~delegatable
let init_origination_nonce = Raw_context.init_origination_nonce
let unset_origination_nonce = Raw_context.unset_origination_nonce
end
module Delegate = Delegate_storage
module Roll = struct
include Roll_repr
include Roll_storage
end
module Nonce = Nonce_storage
module Seed = struct
include Seed_repr
include Seed_storage
end
module Fitness = struct
include Fitness_repr
include Fitness
type fitness = t
include Fitness_storage
end
module Bootstrap = Bootstrap_storage
module Commitment = struct
include Commitment_repr
include Commitment_storage
end
module Global = struct
let get_last_block_priority = Storage.Last_block_priority.get
let set_last_block_priority = Storage.Last_block_priority.set
end
let prepare_first_block = Init_storage.prepare_first_block
let prepare = Init_storage.prepare
let finalize ?commit_message:message c =
let fitness = Fitness.from_int64 (Fitness.current c) in
let context = Raw_context.recover c in
{ Updater.context ; fitness ; message ; max_operations_ttl = 60 ;
last_allowed_fork_level =
Raw_level.to_int32 @@ Level.last_allowed_fork_level c;
}
let activate = Raw_context.activate
let fork_test_chain = Raw_context.fork_test_chain
let record_endorsement = Raw_context.record_endorsement
let allowed_endorsements = Raw_context.allowed_endorsements
let init_endorsements = Raw_context.init_endorsements
let reset_internal_nonce = Raw_context.reset_internal_nonce
let fresh_internal_nonce = Raw_context.fresh_internal_nonce
let record_internal_nonce = Raw_context.record_internal_nonce
let internal_nonce_already_recorded = Raw_context.internal_nonce_already_recorded
let add_deposit = Raw_context.add_deposit
let add_fees = Raw_context.add_fees
let add_rewards = Raw_context.add_rewards
let get_deposits = Raw_context.get_deposits
let get_fees = Raw_context.get_fees
let get_rewards = Raw_context.get_rewards
let description = Raw_context.description
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
bignum_bench.mli | (*_ Deliberately empty. *)
| (*_ Deliberately empty. *)
|
context.mli |
(**
This module allows the creation of Sapling transactions: shield, unshield and
transfer.
Because Sapling uses a UTXO model, the client needs to maintain locally the
set of unspent outputs for each viewing key, for each smart contract. This
operation is called scanning.
This local cache is updated by downloading from the node only the difference
since the last scanned state.
*)
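(* Typical flow for a client (illustrative summary of the interface below):
   register a viewing key with [Client_state.register], refresh the local
   cache with [Client_state.sync_and_scan], then craft a transaction with
   [shield], [unshield] or [transfer] and pass it to the smart contract via
   [sapling_transaction_as_arg]. *)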
open Tezos_sapling.Core.Client
module Tez : module type of Protocol.Alpha_context.Tez
(** This module is used to represent any shielded token to avoid confusing it
with Tez. *)
module Shielded_tez : sig
type t
val encoding : t Data_encoding.t
val pp : Format.formatter -> t -> unit
val zero : t
val of_mutez : int64 -> t option
val to_mutez : t -> int64
val of_tez : Tez.t -> t
val ( +? ) : t -> t -> t tzresult
end
(** Convert a Sapling transaction to a suitable argument for the Smart Contract. *)
val sapling_transaction_as_arg : UTXO.transaction -> string
(** Account corresponding to a contract and a viewing key *)
module Account : sig
type t
val balance : t -> Shielded_tez.t
val pp_unspent : Format.formatter -> t -> unit
end
(** State of a contract, potentially involving several viewing keys *)
module Contract_state : sig
type t
val find_account : Viewing_key.t -> t -> Account.t option
end
module Client_state : sig
type t
val find :
Protocol_client_context.full ->
Protocol.Alpha_context.Contract.t ->
t ->
Contract_state.t tzresult Lwt.t
val register :
Protocol_client_context.full ->
force:bool ->
default_memo_size:int option ->
Protocol.Alpha_context.Contract.t ->
Viewing_key.t ->
unit tzresult Lwt.t
(** Synchronise our local state with the blockchain's.
The state must be recent enough to craft correct transactions.
The limit enforced by the protocol is 120 blocks.
Also scans, i.e. checks for incoming payments and adds
them to our balance.
**)
val sync_and_scan :
Protocol_client_context.full ->
Protocol.Alpha_context.Contract.t ->
Contract_state.t tzresult Lwt.t
end
(** [shield ~message ~dst tez cstate anti-replay] returns a transaction
shielding [tez] tez to a sapling address [dst] using a sapling
storage [cstate] and the anti-replay string. *)
val shield :
#Client_context.full ->
dst:Viewing_key.address ->
?message:bytes ->
Tez.t ->
Contract_state.t ->
string ->
UTXO.transaction tzresult Lwt.t
(** [unshield ~src_name ~src ~dst ~backdst stez cstate storage] returns
a transaction unshielding [stez] shielded tokens from a sapling wallet
[src] to a transparent tezos address [dst], sending the change back to
[backdst] and using a Sapling storage [cstate] and an anti-replay string.
The transaction is refused if there is an insufficient amount of shielded
tez in the wallet [src]; the error is raised with [src_name].
*)
val unshield :
src:Spending_key.t ->
bound_data:string ->
backdst:Viewing_key.address ->
Shielded_tez.t ->
Contract_state.t ->
string ->
UTXO.transaction tzresult
(** [transfer ~message ~src ~dst ~backdst amount cstate anti-replay] creates a
Sapling transaction of [amount] shielded tez from Sapling wallet [src] to
Sapling address [dst], sending the change to [backdst], using a Sapling
storage [cstate] and an anti-replay string.
[~message] is a message that will be uploaded encrypted on chain. *)
val transfer :
#Client_context.full ->
src:Spending_key.t ->
dst:Viewing_key.address ->
backdst:Viewing_key.address ->
?message:bytes ->
Shielded_tez.t ->
Contract_state.t ->
string ->
UTXO.transaction tzresult Lwt.t
| (* The MIT License (MIT)
*
* Copyright (c) 2019-2020 Nomadic Labs <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE. *) |
dune |
(rule
(action (write-file b "It should work as well!")))
(alias
(name infer)
(deps b)
(action (cat %{deps})))
| |
tests_common.ml |
(* Functions for test stub generation. *)
open Ctypes
let filenames argv =
let usage = "arguments: [--ml-file $filename] [--c-file $filename]" in
let ml_filename = ref ""
and c_filename = ref ""
and c_struct_filename = ref "" in
let spec = Arg.([("--ml-file",
Set_string ml_filename, "ML filename");
("--c-file",
Set_string c_filename, "C filename");
("--c-struct-file",
Set_string c_struct_filename, "C struct filename");]) in
let no_positional_args _ =
prerr_endline "No positional arguments" in
begin
Arg.parse spec no_positional_args usage;
(!ml_filename, !c_filename, !c_struct_filename)
end
module Foreign_binder : Cstubs.FOREIGN
with type 'a result = 'a
and type 'a return = 'a =
struct
type 'a fn = 'a Ctypes.fn
type 'a return = 'a
let (@->) = Ctypes.(@->)
let returning = Ctypes.returning
type 'a result = 'a
let foreign name fn = Foreign.foreign name fn
let foreign_value name fn = Foreign.foreign_value name fn
end
module type STUBS = functor (F : Cstubs.FOREIGN) -> sig end
let with_open_formatter filename f =
let out = open_out filename in
let fmt = Format.formatter_of_out_channel out in
let close_channel () = close_out out in
try
let rv = f fmt in
close_channel ();
rv
with e ->
close_channel ();
raise e
let header = "#include \"clib/test_functions.h\""
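(* [run ?concurrency ?errno ?cheader argv ?structs specs] generates the test
   stubs: the ML bindings go to [--ml-file], the C stubs (prefixed with
   [header] and [cheader]) to [--c-file], and, when [structs] is given, the
   struct stubs to [--c-struct-file]. *)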
let run ?concurrency ?errno ?(cheader="") argv ?structs specs =
let ml_filename, c_filename, c_struct_filename = filenames argv
in
if ml_filename <> "" then
with_open_formatter ml_filename
(fun fmt -> Cstubs.write_ml ?concurrency ?errno
fmt ~prefix:"cstubs_tests" specs);
if c_filename <> "" then
with_open_formatter c_filename
(fun fmt ->
Format.fprintf fmt "%s@\n%s@\n" header cheader;
Cstubs.write_c ?concurrency ?errno
fmt ~prefix:"cstubs_tests" specs);
begin match structs, c_struct_filename with
| None, _ -> ()
| Some _, "" -> ()
| Some specs, c_filename ->
with_open_formatter c_filename
(fun fmt ->
Format.fprintf fmt "%s@\n%s@\n" header cheader;
Cstubs_structs.write_c fmt specs)
end
| (*
* Copyright (c) 2014 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*) |
tty.c |
#include <assert.h>
#include <io.h>
#include <string.h>
#include <stdlib.h>
#if defined(_MSC_VER) && _MSC_VER < 1600
# include "uv/stdint-msvc2008.h"
#else
# include <stdint.h>
#endif
#ifndef COMMON_LVB_REVERSE_VIDEO
# define COMMON_LVB_REVERSE_VIDEO 0x4000
#endif
#include "uv.h"
#include "internal.h"
#include "handle-inl.h"
#include "stream-inl.h"
#include "req-inl.h"
#ifndef InterlockedOr
# define InterlockedOr _InterlockedOr
#endif
#define UNICODE_REPLACEMENT_CHARACTER (0xfffd)
#define ANSI_NORMAL 0x0000
#define ANSI_ESCAPE_SEEN 0x0002
#define ANSI_CSI 0x0004
#define ANSI_ST_CONTROL 0x0008
#define ANSI_IGNORE 0x0010
#define ANSI_IN_ARG 0x0020
#define ANSI_IN_STRING 0x0040
#define ANSI_BACKSLASH_SEEN 0x0080
#define ANSI_EXTENSION 0x0100
#define ANSI_DECSCUSR 0x0200
#define MAX_INPUT_BUFFER_LENGTH 8192
#define MAX_CONSOLE_CHAR 8192
#ifndef ENABLE_VIRTUAL_TERMINAL_PROCESSING
#define ENABLE_VIRTUAL_TERMINAL_PROCESSING 0x0004
#endif
#define CURSOR_SIZE_SMALL 25
#define CURSOR_SIZE_LARGE 100
static void uv__tty_capture_initial_style(
CONSOLE_SCREEN_BUFFER_INFO* screen_buffer_info,
CONSOLE_CURSOR_INFO* cursor_info);
static void uv__tty_update_virtual_window(CONSOLE_SCREEN_BUFFER_INFO* info);
static int uv__cancel_read_console(uv_tty_t* handle);
/* Null uv_buf_t */
static const uv_buf_t uv_null_buf_ = { 0, NULL };
enum uv__read_console_status_e {
NOT_STARTED,
IN_PROGRESS,
TRAP_REQUESTED,
COMPLETED
};
static volatile LONG uv__read_console_status = NOT_STARTED;
static volatile LONG uv__restore_screen_state;
static CONSOLE_SCREEN_BUFFER_INFO uv__saved_screen_state;
/*
* The console virtual window.
*
* Normally cursor movement in windows is relative to the console screen buffer,
* e.g. the application is allowed to overwrite the 'history'. This is very
* inconvenient, it makes absolute cursor movement pretty useless. There is
* also the concept of 'client rect' which is defined by the actual size of
* the console window and the scroll position of the screen buffer, but it's
* very volatile because it changes when the user scrolls.
*
* To make cursor movement behave sensibly we define a virtual window to which
* cursor movement is confined. The virtual window is always as wide as the
 * console screen buffer, but its height is defined by the size of the
* console window. The top of the virtual window aligns with the position
* of the caret when the first stdout/err handle is created, unless that would
 * mean that it would extend beyond the bottom of the screen buffer - in that
 * case it's located as far down as possible.
*
* When the user writes a long text or many newlines, such that the output
* reaches beyond the bottom of the virtual window, the virtual window is
* shifted downwards, but not resized.
*
* Since all tty i/o happens on the same console, this window is shared
* between all stdout/stderr handles.
*/
static int uv_tty_virtual_offset = -1;
static int uv_tty_virtual_height = -1;
static int uv_tty_virtual_width = -1;
/* The console window size
 * We keep this separate from uv_tty_virtual_*. We use those values only to
 * handle signalling SIGWINCH.
*/
static HANDLE uv__tty_console_handle = INVALID_HANDLE_VALUE;
static int uv__tty_console_height = -1;
static int uv__tty_console_width = -1;
static HANDLE uv__tty_console_resized = INVALID_HANDLE_VALUE;
static uv_mutex_t uv__tty_console_resize_mutex;
static DWORD WINAPI uv__tty_console_resize_message_loop_thread(void* param);
static void CALLBACK uv__tty_console_resize_event(HWINEVENTHOOK hWinEventHook,
DWORD event,
HWND hwnd,
LONG idObject,
LONG idChild,
DWORD dwEventThread,
DWORD dwmsEventTime);
static DWORD WINAPI uv__tty_console_resize_watcher_thread(void* param);
static void uv__tty_console_signal_resize(void);
/* We use a semaphore rather than a mutex or critical section because in some
   cases (uv__cancel_read_console) we need to take the lock in the main thread
   and release it in another thread. Using a semaphore ensures that in such a
   scenario the main thread will still block when trying to acquire the lock. */
static uv_sem_t uv_tty_output_lock;
static WORD uv_tty_default_text_attributes =
FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE;
static char uv_tty_default_fg_color = 7;
static char uv_tty_default_bg_color = 0;
static char uv_tty_default_fg_bright = 0;
static char uv_tty_default_bg_bright = 0;
static char uv_tty_default_inverse = 0;
static CONSOLE_CURSOR_INFO uv_tty_default_cursor_info;
/* Determine whether or not ANSI support is enabled. */
static BOOL uv__need_check_vterm_state = TRUE;
static uv_tty_vtermstate_t uv__vterm_state = UV_TTY_UNSUPPORTED;
static void uv__determine_vterm_state(HANDLE handle);
void uv__console_init(void) {
if (uv_sem_init(&uv_tty_output_lock, 1))
abort();
uv__tty_console_handle = CreateFileW(L"CONOUT$",
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_WRITE,
0,
OPEN_EXISTING,
0,
0);
if (uv__tty_console_handle != INVALID_HANDLE_VALUE) {
CONSOLE_SCREEN_BUFFER_INFO sb_info;
QueueUserWorkItem(uv__tty_console_resize_message_loop_thread,
NULL,
WT_EXECUTELONGFUNCTION);
uv_mutex_init(&uv__tty_console_resize_mutex);
if (GetConsoleScreenBufferInfo(uv__tty_console_handle, &sb_info)) {
uv__tty_console_width = sb_info.dwSize.X;
uv__tty_console_height = sb_info.srWindow.Bottom - sb_info.srWindow.Top + 1;
}
}
}
int uv_tty_init(uv_loop_t* loop, uv_tty_t* tty, uv_file fd, int unused) {
BOOL readable;
DWORD NumberOfEvents;
HANDLE handle;
CONSOLE_SCREEN_BUFFER_INFO screen_buffer_info;
CONSOLE_CURSOR_INFO cursor_info;
(void)unused;
uv__once_init();
handle = (HANDLE) uv__get_osfhandle(fd);
if (handle == INVALID_HANDLE_VALUE)
return UV_EBADF;
if (fd <= 2) {
/* In order to avoid closing a stdio file descriptor 0-2, duplicate the
* underlying OS handle and forget about the original fd.
* We could also opt to use the original OS handle and just never close it,
* but then there would be no reliable way to cancel pending read operations
* upon close.
*/
if (!DuplicateHandle(INVALID_HANDLE_VALUE,
handle,
INVALID_HANDLE_VALUE,
&handle,
0,
FALSE,
DUPLICATE_SAME_ACCESS))
return uv_translate_sys_error(GetLastError());
fd = -1;
}
readable = GetNumberOfConsoleInputEvents(handle, &NumberOfEvents);
if (!readable) {
/* Obtain the screen buffer info with the output handle. */
if (!GetConsoleScreenBufferInfo(handle, &screen_buffer_info)) {
return uv_translate_sys_error(GetLastError());
}
/* Obtain the cursor info with the output handle. */
if (!GetConsoleCursorInfo(handle, &cursor_info)) {
return uv_translate_sys_error(GetLastError());
}
/* Obtain the tty_output_lock because the virtual window state is shared
* between all uv_tty_t handles. */
uv_sem_wait(&uv_tty_output_lock);
if (uv__need_check_vterm_state)
uv__determine_vterm_state(handle);
/* Remember the original console text attributes and cursor info. */
uv__tty_capture_initial_style(&screen_buffer_info, &cursor_info);
uv__tty_update_virtual_window(&screen_buffer_info);
uv_sem_post(&uv_tty_output_lock);
}
uv__stream_init(loop, (uv_stream_t*) tty, UV_TTY);
uv__connection_init((uv_stream_t*) tty);
tty->handle = handle;
tty->u.fd = fd;
tty->reqs_pending = 0;
tty->flags |= UV_HANDLE_BOUND;
if (readable) {
/* Initialize TTY input specific fields. */
tty->flags |= UV_HANDLE_TTY_READABLE | UV_HANDLE_READABLE;
/* TODO: remove me in v2.x. */
tty->tty.rd.unused_ = NULL;
tty->tty.rd.read_line_buffer = uv_null_buf_;
tty->tty.rd.read_raw_wait = NULL;
/* Init keycode-to-vt100 mapper state. */
tty->tty.rd.last_key_len = 0;
tty->tty.rd.last_key_offset = 0;
tty->tty.rd.last_utf16_high_surrogate = 0;
memset(&tty->tty.rd.last_input_record, 0, sizeof tty->tty.rd.last_input_record);
} else {
/* TTY output specific fields. */
tty->flags |= UV_HANDLE_WRITABLE;
/* Init utf8-to-utf16 conversion state. */
tty->tty.wr.utf8_bytes_left = 0;
tty->tty.wr.utf8_codepoint = 0;
/* Initialize eol conversion state */
tty->tty.wr.previous_eol = 0;
/* Init ANSI parser state. */
tty->tty.wr.ansi_parser_state = ANSI_NORMAL;
}
return 0;
}
/* Set the default console text attributes based on how the console was
* configured when libuv started.
*/
static void uv__tty_capture_initial_style(
CONSOLE_SCREEN_BUFFER_INFO* screen_buffer_info,
CONSOLE_CURSOR_INFO* cursor_info) {
static int style_captured = 0;
/* Only do this once.
Assumption: Caller has acquired uv_tty_output_lock. */
if (style_captured)
return;
/* Save raw win32 attributes. */
uv_tty_default_text_attributes = screen_buffer_info->wAttributes;
/* Convert black text on black background to use white text. */
if (uv_tty_default_text_attributes == 0)
uv_tty_default_text_attributes = 7;
/* Convert Win32 attributes to ANSI colors. */
uv_tty_default_fg_color = 0;
uv_tty_default_bg_color = 0;
uv_tty_default_fg_bright = 0;
uv_tty_default_bg_bright = 0;
uv_tty_default_inverse = 0;
if (uv_tty_default_text_attributes & FOREGROUND_RED)
uv_tty_default_fg_color |= 1;
if (uv_tty_default_text_attributes & FOREGROUND_GREEN)
uv_tty_default_fg_color |= 2;
if (uv_tty_default_text_attributes & FOREGROUND_BLUE)
uv_tty_default_fg_color |= 4;
if (uv_tty_default_text_attributes & BACKGROUND_RED)
uv_tty_default_bg_color |= 1;
if (uv_tty_default_text_attributes & BACKGROUND_GREEN)
uv_tty_default_bg_color |= 2;
if (uv_tty_default_text_attributes & BACKGROUND_BLUE)
uv_tty_default_bg_color |= 4;
if (uv_tty_default_text_attributes & FOREGROUND_INTENSITY)
uv_tty_default_fg_bright = 1;
if (uv_tty_default_text_attributes & BACKGROUND_INTENSITY)
uv_tty_default_bg_bright = 1;
if (uv_tty_default_text_attributes & COMMON_LVB_REVERSE_VIDEO)
uv_tty_default_inverse = 1;
/* Save the cursor size and the cursor state. */
uv_tty_default_cursor_info = *cursor_info;
style_captured = 1;
}
int uv_tty_set_mode(uv_tty_t* tty, uv_tty_mode_t mode) {
DWORD flags;
unsigned char was_reading;
uv_alloc_cb alloc_cb;
uv_read_cb read_cb;
int err;
if (!(tty->flags & UV_HANDLE_TTY_READABLE)) {
return UV_EINVAL;
}
if (!!mode == !!(tty->flags & UV_HANDLE_TTY_RAW)) {
return 0;
}
switch (mode) {
case UV_TTY_MODE_NORMAL:
flags = ENABLE_ECHO_INPUT | ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT;
break;
case UV_TTY_MODE_RAW:
flags = ENABLE_WINDOW_INPUT;
break;
case UV_TTY_MODE_IO:
return UV_ENOTSUP;
default:
return UV_EINVAL;
}
/* If currently reading, stop, and restart reading. */
if (tty->flags & UV_HANDLE_READING) {
was_reading = 1;
alloc_cb = tty->alloc_cb;
read_cb = tty->read_cb;
err = uv__tty_read_stop(tty);
if (err) {
return uv_translate_sys_error(err);
}
} else {
was_reading = 0;
alloc_cb = NULL;
read_cb = NULL;
}
uv_sem_wait(&uv_tty_output_lock);
if (!SetConsoleMode(tty->handle, flags)) {
err = uv_translate_sys_error(GetLastError());
uv_sem_post(&uv_tty_output_lock);
return err;
}
uv_sem_post(&uv_tty_output_lock);
/* Update flag. */
tty->flags &= ~UV_HANDLE_TTY_RAW;
tty->flags |= mode ? UV_HANDLE_TTY_RAW : 0;
/* If we just stopped reading, restart. */
if (was_reading) {
err = uv__tty_read_start(tty, alloc_cb, read_cb);
if (err) {
return uv_translate_sys_error(err);
}
}
return 0;
}
int uv_tty_get_winsize(uv_tty_t* tty, int* width, int* height) {
CONSOLE_SCREEN_BUFFER_INFO info;
if (!GetConsoleScreenBufferInfo(tty->handle, &info)) {
return uv_translate_sys_error(GetLastError());
}
uv_sem_wait(&uv_tty_output_lock);
uv__tty_update_virtual_window(&info);
uv_sem_post(&uv_tty_output_lock);
*width = uv_tty_virtual_width;
*height = uv_tty_virtual_height;
return 0;
}
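/* Wait callback for raw reads: runs on a threadpool wait thread when console
* input is signaled; unregister the wait and post the request back to the loop. */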
static void CALLBACK uv_tty_post_raw_read(void* data, BOOLEAN didTimeout) {
uv_loop_t* loop;
uv_tty_t* handle;
uv_req_t* req;
assert(data);
assert(!didTimeout);
req = (uv_req_t*) data;
handle = (uv_tty_t*) req->data;
loop = handle->loop;
UnregisterWait(handle->tty.rd.read_raw_wait);
handle->tty.rd.read_raw_wait = NULL;
SET_REQ_SUCCESS(req);
POST_COMPLETION_FOR_REQ(loop, req);
}
static void uv__tty_queue_read_raw(uv_loop_t* loop, uv_tty_t* handle) {
uv_read_t* req;
BOOL r;
assert(handle->flags & UV_HANDLE_READING);
assert(!(handle->flags & UV_HANDLE_READ_PENDING));
assert(handle->handle && handle->handle != INVALID_HANDLE_VALUE);
handle->tty.rd.read_line_buffer = uv_null_buf_;
req = &handle->read_req;
memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
r = RegisterWaitForSingleObject(&handle->tty.rd.read_raw_wait,
handle->handle,
uv_tty_post_raw_read,
(void*) req,
INFINITE,
WT_EXECUTEINWAITTHREAD | WT_EXECUTEONLYONCE);
if (!r) {
handle->tty.rd.read_raw_wait = NULL;
SET_REQ_ERROR(req, GetLastError());
uv__insert_pending_req(loop, (uv_req_t*)req);
}
handle->flags |= UV_HANDLE_READ_PENDING;
handle->reqs_pending++;
}
static DWORD CALLBACK uv_tty_line_read_thread(void* data) {
uv_loop_t* loop;
uv_tty_t* handle;
uv_req_t* req;
DWORD bytes, read_bytes;
WCHAR utf16[MAX_INPUT_BUFFER_LENGTH / 3];
DWORD chars, read_chars;
LONG status;
COORD pos;
BOOL read_console_success;
assert(data);
req = (uv_req_t*) data;
handle = (uv_tty_t*) req->data;
loop = handle->loop;
assert(handle->tty.rd.read_line_buffer.base != NULL);
assert(handle->tty.rd.read_line_buffer.len > 0);
/* ReadConsole can't handle big buffers. */
if (handle->tty.rd.read_line_buffer.len < MAX_INPUT_BUFFER_LENGTH) {
bytes = handle->tty.rd.read_line_buffer.len;
} else {
bytes = MAX_INPUT_BUFFER_LENGTH;
}
/* At last, unicode! One utf-16 codeunit never takes more than 3 utf-8
* codeunits to encode. */
chars = bytes / 3;
status = InterlockedExchange(&uv__read_console_status, IN_PROGRESS);
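/* If a cancellation trap was set before ReadConsole started, complete the
* request immediately with zero bytes read. */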
if (status == TRAP_REQUESTED) {
SET_REQ_SUCCESS(req);
InterlockedExchange(&uv__read_console_status, COMPLETED);
req->u.io.overlapped.InternalHigh = 0;
POST_COMPLETION_FOR_REQ(loop, req);
return 0;
}
read_console_success = ReadConsoleW(handle->handle,
(void*) utf16,
chars,
&read_chars,
NULL);
if (read_console_success) {
read_bytes = WideCharToMultiByte(CP_UTF8,
0,
utf16,
read_chars,
handle->tty.rd.read_line_buffer.base,
bytes,
NULL,
NULL);
SET_REQ_SUCCESS(req);
req->u.io.overlapped.InternalHigh = read_bytes;
} else {
SET_REQ_ERROR(req, GetLastError());
}
status = InterlockedExchange(&uv__read_console_status, COMPLETED);
if (status == TRAP_REQUESTED) {
/* If we canceled the read by sending a VK_RETURN event, restore the
screen state to undo the visual effect of the VK_RETURN */
if (read_console_success && InterlockedOr(&uv__restore_screen_state, 0)) {
HANDLE active_screen_buffer;
active_screen_buffer = CreateFileA("conout$",
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
NULL,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL,
NULL);
if (active_screen_buffer != INVALID_HANDLE_VALUE) {
pos = uv__saved_screen_state.dwCursorPosition;
/* If the cursor was at the bottom line of the screen buffer, the
VK_RETURN would have caused the buffer contents to scroll up by one
line. The right position to reset the cursor to is therefore one line
higher */
if (pos.Y == uv__saved_screen_state.dwSize.Y - 1)
pos.Y--;
SetConsoleCursorPosition(active_screen_buffer, pos);
CloseHandle(active_screen_buffer);
}
}
uv_sem_post(&uv_tty_output_lock);
}
POST_COMPLETION_FOR_REQ(loop, req);
return 0;
}
static void uv__tty_queue_read_line(uv_loop_t* loop, uv_tty_t* handle) {
uv_read_t* req;
BOOL r;
assert(handle->flags & UV_HANDLE_READING);
assert(!(handle->flags & UV_HANDLE_READ_PENDING));
assert(handle->handle && handle->handle != INVALID_HANDLE_VALUE);
req = &handle->read_req;
memset(&req->u.io.overlapped, 0, sizeof(req->u.io.overlapped));
handle->tty.rd.read_line_buffer = uv_buf_init(NULL, 0);
handle->alloc_cb((uv_handle_t*) handle, 8192, &handle->tty.rd.read_line_buffer);
if (handle->tty.rd.read_line_buffer.base == NULL ||
handle->tty.rd.read_line_buffer.len == 0) {
handle->read_cb((uv_stream_t*) handle,
UV_ENOBUFS,
&handle->tty.rd.read_line_buffer);
return;
}
assert(handle->tty.rd.read_line_buffer.base != NULL);
/* Reset flags. No locking is required since there cannot be a line read
in progress. We are also relying on the memory barrier provided by
QueueUserWorkItem. */
uv__restore_screen_state = FALSE;
uv__read_console_status = NOT_STARTED;
r = QueueUserWorkItem(uv_tty_line_read_thread,
(void*) req,
WT_EXECUTELONGFUNCTION);
if (!r) {
SET_REQ_ERROR(req, GetLastError());
uv__insert_pending_req(loop, (uv_req_t*)req);
}
handle->flags |= UV_HANDLE_READ_PENDING;
handle->reqs_pending++;
}
static void uv__tty_queue_read(uv_loop_t* loop, uv_tty_t* handle) {
if (handle->flags & UV_HANDLE_TTY_RAW) {
uv__tty_queue_read_raw(loop, handle);
} else {
uv__tty_queue_read_line(loop, handle);
}
}
static const char* get_vt100_fn_key(DWORD code, char shift, char ctrl,
size_t* len) {
#define VK_CASE(vk, normal_str, shift_str, ctrl_str, shift_ctrl_str) \
case (vk): \
if (shift && ctrl) { \
*len = sizeof shift_ctrl_str; \
return "\033" shift_ctrl_str; \
} else if (shift) { \
*len = sizeof shift_str ; \
return "\033" shift_str; \
} else if (ctrl) { \
*len = sizeof ctrl_str; \
return "\033" ctrl_str; \
} else { \
*len = sizeof normal_str; \
return "\033" normal_str; \
}
switch (code) {
/* These mappings are the same as Cygwin's. Unmodified and alt-modified
* keypad keys comply with linux console, modifiers comply with xterm
* modifier usage. F1. f12 and shift-f1. f10 comply with linux console, f6.
* f12 with and without modifiers comply with rxvt. */
VK_CASE(VK_INSERT, "[2~", "[2;2~", "[2;5~", "[2;6~")
VK_CASE(VK_END, "[4~", "[4;2~", "[4;5~", "[4;6~")
VK_CASE(VK_DOWN, "[B", "[1;2B", "[1;5B", "[1;6B")
VK_CASE(VK_NEXT, "[6~", "[6;2~", "[6;5~", "[6;6~")
VK_CASE(VK_LEFT, "[D", "[1;2D", "[1;5D", "[1;6D")
VK_CASE(VK_CLEAR, "[G", "[1;2G", "[1;5G", "[1;6G")
VK_CASE(VK_RIGHT, "[C", "[1;2C", "[1;5C", "[1;6C")
VK_CASE(VK_UP, "[A", "[1;2A", "[1;5A", "[1;6A")
VK_CASE(VK_HOME, "[1~", "[1;2~", "[1;5~", "[1;6~")
VK_CASE(VK_PRIOR, "[5~", "[5;2~", "[5;5~", "[5;6~")
VK_CASE(VK_DELETE, "[3~", "[3;2~", "[3;5~", "[3;6~")
VK_CASE(VK_NUMPAD0, "[2~", "[2;2~", "[2;5~", "[2;6~")
VK_CASE(VK_NUMPAD1, "[4~", "[4;2~", "[4;5~", "[4;6~")
VK_CASE(VK_NUMPAD2, "[B", "[1;2B", "[1;5B", "[1;6B")
VK_CASE(VK_NUMPAD3, "[6~", "[6;2~", "[6;5~", "[6;6~")
VK_CASE(VK_NUMPAD4, "[D", "[1;2D", "[1;5D", "[1;6D")
VK_CASE(VK_NUMPAD5, "[G", "[1;2G", "[1;5G", "[1;6G")
VK_CASE(VK_NUMPAD6, "[C", "[1;2C", "[1;5C", "[1;6C")
VK_CASE(VK_NUMPAD7, "[A", "[1;2A", "[1;5A", "[1;6A")
VK_CASE(VK_NUMPAD8, "[1~", "[1;2~", "[1;5~", "[1;6~")
VK_CASE(VK_NUMPAD9, "[5~", "[5;2~", "[5;5~", "[5;6~")
VK_CASE(VK_DECIMAL, "[3~", "[3;2~", "[3;5~", "[3;6~")
VK_CASE(VK_F1, "[[A", "[23~", "[11^", "[23^" )
VK_CASE(VK_F2, "[[B", "[24~", "[12^", "[24^" )
VK_CASE(VK_F3, "[[C", "[25~", "[13^", "[25^" )
VK_CASE(VK_F4, "[[D", "[26~", "[14^", "[26^" )
VK_CASE(VK_F5, "[[E", "[28~", "[15^", "[28^" )
VK_CASE(VK_F6, "[17~", "[29~", "[17^", "[29^" )
VK_CASE(VK_F7, "[18~", "[31~", "[18^", "[31^" )
VK_CASE(VK_F8, "[19~", "[32~", "[19^", "[32^" )
VK_CASE(VK_F9, "[20~", "[33~", "[20^", "[33^" )
VK_CASE(VK_F10, "[21~", "[34~", "[21^", "[34^" )
VK_CASE(VK_F11, "[23~", "[23$", "[23^", "[23@" )
VK_CASE(VK_F12, "[24~", "[24$", "[24^", "[24@" )
default:
*len = 0;
return NULL;
}
#undef VK_CASE
}
void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
uv_req_t* req) {
/* Shortcut for handle->tty.rd.last_input_record.Event.KeyEvent. */
#define KEV handle->tty.rd.last_input_record.Event.KeyEvent
DWORD records_left, records_read;
uv_buf_t buf;
off_t buf_used;
assert(handle->type == UV_TTY);
assert(handle->flags & UV_HANDLE_TTY_READABLE);
handle->flags &= ~UV_HANDLE_READ_PENDING;
if (!(handle->flags & UV_HANDLE_READING) ||
!(handle->flags & UV_HANDLE_TTY_RAW)) {
goto out;
}
if (!REQ_SUCCESS(req)) {
/* An error occurred while waiting for the event. */
if ((handle->flags & UV_HANDLE_READING)) {
handle->flags &= ~UV_HANDLE_READING;
handle->read_cb((uv_stream_t*)handle,
uv_translate_sys_error(GET_REQ_ERROR(req)),
&uv_null_buf_);
}
goto out;
}
/* Fetch the number of events */
if (!GetNumberOfConsoleInputEvents(handle->handle, &records_left)) {
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(loop, handle);
handle->read_cb((uv_stream_t*)handle,
uv_translate_sys_error(GetLastError()),
&uv_null_buf_);
goto out;
}
/* Windows sends a lot of events that we're not interested in, so buf will be
* allocated on demand, when there's actually something to emit. */
buf = uv_null_buf_;
buf_used = 0;
while ((records_left > 0 || handle->tty.rd.last_key_len > 0) &&
(handle->flags & UV_HANDLE_READING)) {
if (handle->tty.rd.last_key_len == 0) {
/* Read the next input record */
if (!ReadConsoleInputW(handle->handle,
&handle->tty.rd.last_input_record,
1,
&records_read)) {
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(loop, handle);
handle->read_cb((uv_stream_t*) handle,
uv_translate_sys_error(GetLastError()),
&buf);
goto out;
}
records_left--;
/* We might not be subscribed to EVENT_CONSOLE_LAYOUT or we might be
* running under some TTY emulator that does not send those events. */
if (handle->tty.rd.last_input_record.EventType == WINDOW_BUFFER_SIZE_EVENT) {
uv__tty_console_signal_resize();
}
/* Ignore other events that are not key events. */
if (handle->tty.rd.last_input_record.EventType != KEY_EVENT) {
continue;
}
/* Ignore keyup events, unless the left alt key was held and a valid
* unicode character was emitted. */
if (!KEV.bKeyDown &&
(KEV.wVirtualKeyCode != VK_MENU ||
KEV.uChar.UnicodeChar == 0)) {
continue;
}
/* Ignore keypresses to numpad number keys if the left alt is held
* because the user is composing a character, or windows is simulating this.
*/
if ((KEV.dwControlKeyState & LEFT_ALT_PRESSED) &&
!(KEV.dwControlKeyState & ENHANCED_KEY) &&
(KEV.wVirtualKeyCode == VK_INSERT ||
KEV.wVirtualKeyCode == VK_END ||
KEV.wVirtualKeyCode == VK_DOWN ||
KEV.wVirtualKeyCode == VK_NEXT ||
KEV.wVirtualKeyCode == VK_LEFT ||
KEV.wVirtualKeyCode == VK_CLEAR ||
KEV.wVirtualKeyCode == VK_RIGHT ||
KEV.wVirtualKeyCode == VK_HOME ||
KEV.wVirtualKeyCode == VK_UP ||
KEV.wVirtualKeyCode == VK_PRIOR ||
KEV.wVirtualKeyCode == VK_NUMPAD0 ||
KEV.wVirtualKeyCode == VK_NUMPAD1 ||
KEV.wVirtualKeyCode == VK_NUMPAD2 ||
KEV.wVirtualKeyCode == VK_NUMPAD3 ||
KEV.wVirtualKeyCode == VK_NUMPAD4 ||
KEV.wVirtualKeyCode == VK_NUMPAD5 ||
KEV.wVirtualKeyCode == VK_NUMPAD6 ||
KEV.wVirtualKeyCode == VK_NUMPAD7 ||
KEV.wVirtualKeyCode == VK_NUMPAD8 ||
KEV.wVirtualKeyCode == VK_NUMPAD9)) {
continue;
}
if (KEV.uChar.UnicodeChar != 0) {
int prefix_len, char_len;
/* Character key pressed */
if (KEV.uChar.UnicodeChar >= 0xD800 &&
KEV.uChar.UnicodeChar < 0xDC00) {
/* UTF-16 high surrogate */
handle->tty.rd.last_utf16_high_surrogate = KEV.uChar.UnicodeChar;
continue;
}
/* Prefix with \033 if alt was held, but alt was not used as part of a
* compose sequence. */
if ((KEV.dwControlKeyState & (LEFT_ALT_PRESSED | RIGHT_ALT_PRESSED))
&& !(KEV.dwControlKeyState & (LEFT_CTRL_PRESSED |
RIGHT_CTRL_PRESSED)) && KEV.bKeyDown) {
handle->tty.rd.last_key[0] = '\033';
prefix_len = 1;
} else {
prefix_len = 0;
}
if (KEV.uChar.UnicodeChar >= 0xDC00 &&
KEV.uChar.UnicodeChar < 0xE000) {
/* UTF-16 surrogate pair */
WCHAR utf16_buffer[2];
utf16_buffer[0] = handle->tty.rd.last_utf16_high_surrogate;
utf16_buffer[1] = KEV.uChar.UnicodeChar;
char_len = WideCharToMultiByte(CP_UTF8,
0,
utf16_buffer,
2,
&handle->tty.rd.last_key[prefix_len],
sizeof handle->tty.rd.last_key,
NULL,
NULL);
} else {
/* Single UTF-16 character */
char_len = WideCharToMultiByte(CP_UTF8,
0,
&KEV.uChar.UnicodeChar,
1,
&handle->tty.rd.last_key[prefix_len],
sizeof handle->tty.rd.last_key,
NULL,
NULL);
}
/* Whatever happened, the last character wasn't a high surrogate. */
handle->tty.rd.last_utf16_high_surrogate = 0;
/* If the utf16 character(s) couldn't be converted something must be
* wrong. */
if (!char_len) {
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(loop, handle);
handle->read_cb((uv_stream_t*) handle,
uv_translate_sys_error(GetLastError()),
&buf);
goto out;
}
handle->tty.rd.last_key_len = (unsigned char) (prefix_len + char_len);
handle->tty.rd.last_key_offset = 0;
continue;
} else {
/* Function key pressed */
const char* vt100;
size_t prefix_len, vt100_len;
vt100 = get_vt100_fn_key(KEV.wVirtualKeyCode,
!!(KEV.dwControlKeyState & SHIFT_PRESSED),
!!(KEV.dwControlKeyState & (
LEFT_CTRL_PRESSED |
RIGHT_CTRL_PRESSED)),
&vt100_len);
/* If we were unable to map to a vt100 sequence, just ignore. */
if (!vt100) {
continue;
}
/* Prefix with \033 when the alt key was held. */
if (KEV.dwControlKeyState & (LEFT_ALT_PRESSED | RIGHT_ALT_PRESSED)) {
handle->tty.rd.last_key[0] = '\033';
prefix_len = 1;
} else {
prefix_len = 0;
}
/* Copy the vt100 sequence to the handle buffer. */
assert(prefix_len + vt100_len < sizeof handle->tty.rd.last_key);
memcpy(&handle->tty.rd.last_key[prefix_len], vt100, vt100_len);
handle->tty.rd.last_key_len = (unsigned char) (prefix_len + vt100_len);
handle->tty.rd.last_key_offset = 0;
continue;
}
} else {
/* Copy any bytes left from the last keypress to the user buffer. */
if (handle->tty.rd.last_key_offset < handle->tty.rd.last_key_len) {
/* Allocate a buffer if needed */
if (buf_used == 0) {
buf = uv_buf_init(NULL, 0);
handle->alloc_cb((uv_handle_t*) handle, 1024, &buf);
if (buf.base == NULL || buf.len == 0) {
handle->read_cb((uv_stream_t*) handle, UV_ENOBUFS, &buf);
goto out;
}
assert(buf.base != NULL);
}
buf.base[buf_used++] = handle->tty.rd.last_key[handle->tty.rd.last_key_offset++];
/* If the buffer is full, emit it */
if ((size_t) buf_used == buf.len) {
handle->read_cb((uv_stream_t*) handle, buf_used, &buf);
buf = uv_null_buf_;
buf_used = 0;
}
continue;
}
/* Apply wRepeatCount from the last input record. */
if (--KEV.wRepeatCount > 0) {
handle->tty.rd.last_key_offset = 0;
continue;
}
handle->tty.rd.last_key_len = 0;
continue;
}
}
/* Send the buffer back to the user */
if (buf_used > 0) {
handle->read_cb((uv_stream_t*) handle, buf_used, &buf);
}
out:
/* Wait for more input events. */
if ((handle->flags & UV_HANDLE_READING) &&
!(handle->flags & UV_HANDLE_READ_PENDING)) {
uv__tty_queue_read(loop, handle);
}
DECREASE_PENDING_REQ_COUNT(handle);
#undef KEV
}
void uv_process_tty_read_line_req(uv_loop_t* loop, uv_tty_t* handle,
uv_req_t* req) {
uv_buf_t buf;
assert(handle->type == UV_TTY);
assert(handle->flags & UV_HANDLE_TTY_READABLE);
buf = handle->tty.rd.read_line_buffer;
handle->flags &= ~UV_HANDLE_READ_PENDING;
handle->tty.rd.read_line_buffer = uv_null_buf_;
if (!REQ_SUCCESS(req)) {
/* Read was not successful */
if (handle->flags & UV_HANDLE_READING) {
/* Real error */
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(loop, handle);
handle->read_cb((uv_stream_t*) handle,
uv_translate_sys_error(GET_REQ_ERROR(req)),
&buf);
}
} else {
if (!(handle->flags & UV_HANDLE_CANCELLATION_PENDING) &&
req->u.io.overlapped.InternalHigh != 0) {
/* Read successful. TODO: read unicode, convert to utf-8 */
DWORD bytes = req->u.io.overlapped.InternalHigh;
handle->read_cb((uv_stream_t*) handle, bytes, &buf);
}
handle->flags &= ~UV_HANDLE_CANCELLATION_PENDING;
}
/* Wait for more input events. */
if ((handle->flags & UV_HANDLE_READING) &&
!(handle->flags & UV_HANDLE_READ_PENDING)) {
uv__tty_queue_read(loop, handle);
}
DECREASE_PENDING_REQ_COUNT(handle);
}
void uv__process_tty_read_req(uv_loop_t* loop, uv_tty_t* handle,
uv_req_t* req) {
assert(handle->type == UV_TTY);
assert(handle->flags & UV_HANDLE_TTY_READABLE);
/* If the read_line_buffer member is zero, it must have been a raw read.
* Otherwise it was a line-buffered read. FIXME: This is quite obscure. Use a
* flag or something. */
if (handle->tty.rd.read_line_buffer.len == 0) {
uv_process_tty_read_raw_req(loop, handle, req);
} else {
uv_process_tty_read_line_req(loop, handle, req);
}
}
int uv__tty_read_start(uv_tty_t* handle, uv_alloc_cb alloc_cb,
uv_read_cb read_cb) {
uv_loop_t* loop = handle->loop;
if (!(handle->flags & UV_HANDLE_TTY_READABLE)) {
return ERROR_INVALID_PARAMETER;
}
handle->flags |= UV_HANDLE_READING;
INCREASE_ACTIVE_COUNT(loop, handle);
handle->read_cb = read_cb;
handle->alloc_cb = alloc_cb;
/* If reading was stopped and then started again, there could still be a read
* request pending. */
if (handle->flags & UV_HANDLE_READ_PENDING) {
return 0;
}
/* Maybe the user stopped reading half-way while processing key events.
* Short-circuit if this could be the case. */
if (handle->tty.rd.last_key_len > 0) {
SET_REQ_SUCCESS(&handle->read_req);
uv__insert_pending_req(handle->loop, (uv_req_t*) &handle->read_req);
/* Make sure no attempt is made to insert it again until it's handled. */
handle->flags |= UV_HANDLE_READ_PENDING;
handle->reqs_pending++;
return 0;
}
uv__tty_queue_read(loop, handle);
return 0;
}
int uv__tty_read_stop(uv_tty_t* handle) {
INPUT_RECORD record;
DWORD written, err;
handle->flags &= ~UV_HANDLE_READING;
DECREASE_ACTIVE_COUNT(handle->loop, handle);
if (!(handle->flags & UV_HANDLE_READ_PENDING))
return 0;
if (handle->flags & UV_HANDLE_TTY_RAW) {
/* Cancel raw read. Write some bullshit event to force the console wait to
* return. */
memset(&record, 0, sizeof record);
record.EventType = FOCUS_EVENT;
if (!WriteConsoleInputW(handle->handle, &record, 1, &written)) {
return GetLastError();
}
} else if (!(handle->flags & UV_HANDLE_CANCELLATION_PENDING)) {
/* Cancel line-buffered read if not already pending */
err = uv__cancel_read_console(handle);
if (err)
return err;
handle->flags |= UV_HANDLE_CANCELLATION_PENDING;
}
return 0;
}
static int uv__cancel_read_console(uv_tty_t* handle) {
HANDLE active_screen_buffer = INVALID_HANDLE_VALUE;
INPUT_RECORD record;
DWORD written;
DWORD err = 0;
LONG status;
assert(!(handle->flags & UV_HANDLE_CANCELLATION_PENDING));
/* Hold the output lock during the cancellation, to ensure that further
writes don't interfere with the screen state. It will be the ReadConsole
thread's responsibility to release the lock. */
uv_sem_wait(&uv_tty_output_lock);
status = InterlockedExchange(&uv__read_console_status, TRAP_REQUESTED);
if (status != IN_PROGRESS) {
/* Either we have managed to set a trap for the other thread before
ReadConsole is called, or ReadConsole has returned because the user
has pressed ENTER. In either case, there is nothing else to do. */
uv_sem_post(&uv_tty_output_lock);
return 0;
}
/* Save screen state before sending the VK_RETURN event */
active_screen_buffer = CreateFileA("conout$",
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
NULL,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL,
NULL);
if (active_screen_buffer != INVALID_HANDLE_VALUE &&
GetConsoleScreenBufferInfo(active_screen_buffer,
&uv__saved_screen_state)) {
InterlockedOr(&uv__restore_screen_state, 1);
}
/* Write enter key event to force the console wait to return. */
record.EventType = KEY_EVENT;
record.Event.KeyEvent.bKeyDown = TRUE;
record.Event.KeyEvent.wRepeatCount = 1;
record.Event.KeyEvent.wVirtualKeyCode = VK_RETURN;
record.Event.KeyEvent.wVirtualScanCode =
MapVirtualKeyW(VK_RETURN, MAPVK_VK_TO_VSC);
record.Event.KeyEvent.uChar.UnicodeChar = L'\r';
record.Event.KeyEvent.dwControlKeyState = 0;
if (!WriteConsoleInputW(handle->handle, &record, 1, &written))
err = GetLastError();
if (active_screen_buffer != INVALID_HANDLE_VALUE)
CloseHandle(active_screen_buffer);
return err;
}
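/* Recompute the virtual window: the region of the screen buffer that cursor
* movement escape codes are interpreted against. */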
static void uv__tty_update_virtual_window(CONSOLE_SCREEN_BUFFER_INFO* info) {
uv_tty_virtual_width = info->dwSize.X;
uv_tty_virtual_height = info->srWindow.Bottom - info->srWindow.Top + 1;
/* Recompute virtual window offset row. */
if (uv_tty_virtual_offset == -1) {
uv_tty_virtual_offset = info->dwCursorPosition.Y;
} else if (uv_tty_virtual_offset < info->dwCursorPosition.Y -
uv_tty_virtual_height + 1) {
/* If we suddenly find the cursor outside of the virtual window, it must have
* somehow scrolled. Update the virtual window offset. */
uv_tty_virtual_offset = info->dwCursorPosition.Y -
uv_tty_virtual_height + 1;
}
if (uv_tty_virtual_offset + uv_tty_virtual_height > info->dwSize.Y) {
uv_tty_virtual_offset = info->dwSize.Y - uv_tty_virtual_height;
}
if (uv_tty_virtual_offset < 0) {
uv_tty_virtual_offset = 0;
}
}
static COORD uv__tty_make_real_coord(uv_tty_t* handle,
CONSOLE_SCREEN_BUFFER_INFO* info, int x, unsigned char x_relative, int y,
unsigned char y_relative) {
COORD result;
uv__tty_update_virtual_window(info);
/* Adjust y position */
if (y_relative) {
y = info->dwCursorPosition.Y + y;
} else {
y = uv_tty_virtual_offset + y;
}
/* Clip y to virtual client rectangle */
if (y < uv_tty_virtual_offset) {
y = uv_tty_virtual_offset;
} else if (y >= uv_tty_virtual_offset + uv_tty_virtual_height) {
y = uv_tty_virtual_offset + uv_tty_virtual_height - 1;
}
/* Adjust x */
if (x_relative) {
x = info->dwCursorPosition.X + x;
}
/* Clip x */
if (x < 0) {
x = 0;
} else if (x >= uv_tty_virtual_width) {
x = uv_tty_virtual_width - 1;
}
result.X = (unsigned short) x;
result.Y = (unsigned short) y;
return result;
}
static int uv__tty_emit_text(uv_tty_t* handle, WCHAR buffer[], DWORD length,
DWORD* error) {
DWORD written;
if (*error != ERROR_SUCCESS) {
return -1;
}
if (!WriteConsoleW(handle->handle,
(void*) buffer,
length,
&written,
NULL)) {
*error = GetLastError();
return -1;
}
return 0;
}
static int uv__tty_move_caret(uv_tty_t* handle, int x, unsigned char x_relative,
int y, unsigned char y_relative, DWORD* error) {
CONSOLE_SCREEN_BUFFER_INFO info;
COORD pos;
if (*error != ERROR_SUCCESS) {
return -1;
}
retry:
if (!GetConsoleScreenBufferInfo(handle->handle, &info)) {
*error = GetLastError();
}
pos = uv__tty_make_real_coord(handle, &info, x, x_relative, y, y_relative);
if (!SetConsoleCursorPosition(handle->handle, pos)) {
if (GetLastError() == ERROR_INVALID_PARAMETER) {
/* The console may be resized - retry */
goto retry;
} else {
*error = GetLastError();
return -1;
}
}
return 0;
}
static int uv__tty_reset(uv_tty_t* handle, DWORD* error) {
const COORD origin = {0, 0};
const WORD char_attrs = uv_tty_default_text_attributes;
CONSOLE_SCREEN_BUFFER_INFO screen_buffer_info;
DWORD count, written;
if (*error != ERROR_SUCCESS) {
return -1;
}
/* Reset original text attributes. */
if (!SetConsoleTextAttribute(handle->handle, char_attrs)) {
*error = GetLastError();
return -1;
}
/* Move the cursor position to (0, 0). */
if (!SetConsoleCursorPosition(handle->handle, origin)) {
*error = GetLastError();
return -1;
}
/* Clear the screen buffer. */
retry:
if (!GetConsoleScreenBufferInfo(handle->handle, &screen_buffer_info)) {
*error = GetLastError();
return -1;
}
count = screen_buffer_info.dwSize.X * screen_buffer_info.dwSize.Y;
if (!(FillConsoleOutputCharacterW(handle->handle,
L'\x20',
count,
origin,
&written) &&
FillConsoleOutputAttribute(handle->handle,
char_attrs,
written,
origin,
&written))) {
if (GetLastError() == ERROR_INVALID_PARAMETER) {
/* The console may be resized - retry */
goto retry;
} else {
*error = GetLastError();
return -1;
}
}
/* Move the virtual window up to the top. */
uv_tty_virtual_offset = 0;
uv__tty_update_virtual_window(&screen_buffer_info);
/* Reset the cursor size and the cursor state. */
if (!SetConsoleCursorInfo(handle->handle, &uv_tty_default_cursor_info)) {
*error = GetLastError();
return -1;
}
return 0;
}
static int uv__tty_clear(uv_tty_t* handle, int dir, char entire_screen,
DWORD* error) {
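/* dir follows the CSI J/K argument: 0 clears from the cursor forward, 1 clears
* from the start back to the cursor, 2 clears everything. entire_screen selects
* whole-screen ('J') vs single-line ('K') behavior. */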
CONSOLE_SCREEN_BUFFER_INFO info;
COORD start, end;
DWORD count, written;
int x1, x2, y1, y2;
int x1r, x2r, y1r, y2r;
if (*error != ERROR_SUCCESS) {
return -1;
}
if (dir == 0) {
/* Clear from current position */
x1 = 0;
x1r = 1;
} else {
/* Clear from column 0 */
x1 = 0;
x1r = 0;
}
if (dir == 1) {
/* Clear to current position */
x2 = 0;
x2r = 1;
} else {
/* Clear to end of row. We pretend the console is 65536 characters wide;
* uv__tty_make_real_coord will clip it to the actual console width. */
x2 = 0xffff;
x2r = 0;
}
if (!entire_screen) {
/* Stay on our own row */
y1 = y2 = 0;
y1r = y2r = 1;
} else {
/* Apply columns direction to row */
y1 = x1;
y1r = x1r;
y2 = x2;
y2r = x2r;
}
retry:
if (!GetConsoleScreenBufferInfo(handle->handle, &info)) {
*error = GetLastError();
return -1;
}
start = uv__tty_make_real_coord(handle, &info, x1, x1r, y1, y1r);
end = uv__tty_make_real_coord(handle, &info, x2, x2r, y2, y2r);
count = (end.Y * info.dwSize.X + end.X) -
(start.Y * info.dwSize.X + start.X) + 1;
if (!(FillConsoleOutputCharacterW(handle->handle,
L'\x20',
count,
start,
&written) &&
FillConsoleOutputAttribute(handle->handle,
info.wAttributes,
written,
start,
&written))) {
if (GetLastError() == ERROR_INVALID_PARAMETER) {
/* The console may be resized - retry */
goto retry;
} else {
*error = GetLastError();
return -1;
}
}
return 0;
}
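/* Swap the foreground and background color bits of info.wAttributes; used when
* COMMON_LVB_REVERSE_VIDEO (inverse video) is in effect. */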
#define FLIP_FGBG \
do { \
WORD fg = info.wAttributes & 0xF; \
WORD bg = info.wAttributes & 0xF0; \
info.wAttributes &= 0xFF00; \
info.wAttributes |= fg << 4; \
info.wAttributes |= bg >> 4; \
} while (0)
static int uv__tty_set_style(uv_tty_t* handle, DWORD* error) {
unsigned short argc = handle->tty.wr.ansi_csi_argc;
unsigned short* argv = handle->tty.wr.ansi_csi_argv;
int i;
CONSOLE_SCREEN_BUFFER_INFO info;
char fg_color = -1, bg_color = -1;
char fg_bright = -1, bg_bright = -1;
char inverse = -1;
if (argc == 0) {
/* Reset mode */
fg_color = uv_tty_default_fg_color;
bg_color = uv_tty_default_bg_color;
fg_bright = uv_tty_default_fg_bright;
bg_bright = uv_tty_default_bg_bright;
inverse = uv_tty_default_inverse;
}
for (i = 0; i < argc; i++) {
short arg = argv[i];
if (arg == 0) {
/* Reset mode */
fg_color = uv_tty_default_fg_color;
bg_color = uv_tty_default_bg_color;
fg_bright = uv_tty_default_fg_bright;
bg_bright = uv_tty_default_bg_bright;
inverse = uv_tty_default_inverse;
} else if (arg == 1) {
/* Foreground bright on */
fg_bright = 1;
} else if (arg == 2) {
/* Both bright off */
fg_bright = 0;
bg_bright = 0;
} else if (arg == 5) {
/* Background bright on */
bg_bright = 1;
} else if (arg == 7) {
/* Inverse: on */
inverse = 1;
} else if (arg == 21 || arg == 22) {
/* Foreground bright off */
fg_bright = 0;
} else if (arg == 25) {
/* Background bright off */
bg_bright = 0;
} else if (arg == 27) {
/* Inverse: off */
inverse = 0;
} else if (arg >= 30 && arg <= 37) {
/* Set foreground color */
fg_color = arg - 30;
} else if (arg == 39) {
/* Default text color */
fg_color = uv_tty_default_fg_color;
fg_bright = uv_tty_default_fg_bright;
} else if (arg >= 40 && arg <= 47) {
/* Set background color */
bg_color = arg - 40;
} else if (arg == 49) {
/* Default background color */
bg_color = uv_tty_default_bg_color;
bg_bright = uv_tty_default_bg_bright;
} else if (arg >= 90 && arg <= 97) {
/* Set bold foreground color */
fg_bright = 1;
fg_color = arg - 90;
} else if (arg >= 100 && arg <= 107) {
/* Set bold background color */
bg_bright = 1;
bg_color = arg - 100;
}
}
if (fg_color == -1 && bg_color == -1 && fg_bright == -1 &&
bg_bright == -1 && inverse == -1) {
/* Nothing changed */
return 0;
}
if (!GetConsoleScreenBufferInfo(handle->handle, &info)) {
*error = GetLastError();
return -1;
}
if ((info.wAttributes & COMMON_LVB_REVERSE_VIDEO) > 0) {
FLIP_FGBG;
}
if (fg_color != -1) {
info.wAttributes &= ~(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE);
if (fg_color & 1) info.wAttributes |= FOREGROUND_RED;
if (fg_color & 2) info.wAttributes |= FOREGROUND_GREEN;
if (fg_color & 4) info.wAttributes |= FOREGROUND_BLUE;
}
if (fg_bright != -1) {
if (fg_bright) {
info.wAttributes |= FOREGROUND_INTENSITY;
} else {
info.wAttributes &= ~FOREGROUND_INTENSITY;
}
}
if (bg_color != -1) {
info.wAttributes &= ~(BACKGROUND_RED | BACKGROUND_GREEN | BACKGROUND_BLUE);
if (bg_color & 1) info.wAttributes |= BACKGROUND_RED;
if (bg_color & 2) info.wAttributes |= BACKGROUND_GREEN;
if (bg_color & 4) info.wAttributes |= BACKGROUND_BLUE;
}
if (bg_bright != -1) {
if (bg_bright) {
info.wAttributes |= BACKGROUND_INTENSITY;
} else {
info.wAttributes &= ~BACKGROUND_INTENSITY;
}
}
if (inverse != -1) {
if (inverse) {
info.wAttributes |= COMMON_LVB_REVERSE_VIDEO;
} else {
info.wAttributes &= ~COMMON_LVB_REVERSE_VIDEO;
}
}
if ((info.wAttributes & COMMON_LVB_REVERSE_VIDEO) > 0) {
FLIP_FGBG;
}
if (!SetConsoleTextAttribute(handle->handle, info.wAttributes)) {
*error = GetLastError();
return -1;
}
return 0;
}
static int uv__tty_save_state(uv_tty_t* handle, unsigned char save_attributes,
DWORD* error) {
CONSOLE_SCREEN_BUFFER_INFO info;
if (*error != ERROR_SUCCESS) {
return -1;
}
if (!GetConsoleScreenBufferInfo(handle->handle, &info)) {
*error = GetLastError();
return -1;
}
uv__tty_update_virtual_window(&info);
handle->tty.wr.saved_position.X = info.dwCursorPosition.X;
handle->tty.wr.saved_position.Y = info.dwCursorPosition.Y -
uv_tty_virtual_offset;
handle->flags |= UV_HANDLE_TTY_SAVED_POSITION;
if (save_attributes) {
handle->tty.wr.saved_attributes = info.wAttributes &
(FOREGROUND_INTENSITY | BACKGROUND_INTENSITY);
handle->flags |= UV_HANDLE_TTY_SAVED_ATTRIBUTES;
}
return 0;
}
static int uv__tty_restore_state(uv_tty_t* handle,
unsigned char restore_attributes, DWORD* error) {
CONSOLE_SCREEN_BUFFER_INFO info;
WORD new_attributes;
if (*error != ERROR_SUCCESS) {
return -1;
}
if (handle->flags & UV_HANDLE_TTY_SAVED_POSITION) {
if (uv__tty_move_caret(handle,
handle->tty.wr.saved_position.X,
0,
handle->tty.wr.saved_position.Y,
0,
error) != 0) {
return -1;
}
}
if (restore_attributes &&
(handle->flags & UV_HANDLE_TTY_SAVED_ATTRIBUTES)) {
if (!GetConsoleScreenBufferInfo(handle->handle, &info)) {
*error = GetLastError();
return -1;
}
new_attributes = info.wAttributes;
new_attributes &= ~(FOREGROUND_INTENSITY | BACKGROUND_INTENSITY);
new_attributes |= handle->tty.wr.saved_attributes;
if (!SetConsoleTextAttribute(handle->handle, new_attributes)) {
*error = GetLastError();
return -1;
}
}
return 0;
}
static int uv__tty_set_cursor_visibility(uv_tty_t* handle,
BOOL visible,
DWORD* error) {
CONSOLE_CURSOR_INFO cursor_info;
if (!GetConsoleCursorInfo(handle->handle, &cursor_info)) {
*error = GetLastError();
return -1;
}
cursor_info.bVisible = visible;
if (!SetConsoleCursorInfo(handle->handle, &cursor_info)) {
*error = GetLastError();
return -1;
}
return 0;
}
static int uv__tty_set_cursor_shape(uv_tty_t* handle, int style, DWORD* error) {
CONSOLE_CURSOR_INFO cursor_info;
if (!GetConsoleCursorInfo(handle->handle, &cursor_info)) {
*error = GetLastError();
return -1;
}
if (style == 0) {
cursor_info.dwSize = uv_tty_default_cursor_info.dwSize;
} else if (style <= 2) {
cursor_info.dwSize = CURSOR_SIZE_LARGE;
} else {
cursor_info.dwSize = CURSOR_SIZE_SMALL;
}
if (!SetConsoleCursorInfo(handle->handle, &cursor_info)) {
*error = GetLastError();
return -1;
}
return 0;
}
static int uv__tty_write_bufs(uv_tty_t* handle,
const uv_buf_t bufs[],
unsigned int nbufs,
DWORD* error) {
/* We can only write 8k characters at a time. Windows can't handle many more
* characters in a single console write anyway. */
WCHAR utf16_buf[MAX_CONSOLE_CHAR];
DWORD utf16_buf_used = 0;
unsigned int i;
#define FLUSH_TEXT() \
do { \
if (utf16_buf_used > 0) { \
uv__tty_emit_text(handle, utf16_buf, utf16_buf_used, error); \
utf16_buf_used = 0; \
} \
} while (0)
#define ENSURE_BUFFER_SPACE(wchars_needed) \
if (wchars_needed > ARRAY_SIZE(utf16_buf) - utf16_buf_used) { \
FLUSH_TEXT(); \
}
/* Cache for fast access */
unsigned char utf8_bytes_left = handle->tty.wr.utf8_bytes_left;
unsigned int utf8_codepoint = handle->tty.wr.utf8_codepoint;
unsigned char previous_eol = handle->tty.wr.previous_eol;
unsigned short ansi_parser_state = handle->tty.wr.ansi_parser_state;
/* Store the error here. If we encounter an error, stop trying to do i/o but
* keep parsing the buffer so we leave the parser in a consistent state. */
*error = ERROR_SUCCESS;
uv_sem_wait(&uv_tty_output_lock);
for (i = 0; i < nbufs; i++) {
uv_buf_t buf = bufs[i];
unsigned int j;
for (j = 0; j < buf.len; j++) {
unsigned char c = buf.base[j];
/* Run the character through the utf8 decoder. We happily accept non-shortest-form
* encodings and invalid code points - there's no real harm
* that can be done. */
if (utf8_bytes_left == 0) {
/* Read utf-8 start byte */
DWORD first_zero_bit;
unsigned char not_c = ~c;
#ifdef _MSC_VER /* msvc */
if (_BitScanReverse(&first_zero_bit, not_c)) {
#else /* assume gcc */
if (c != 0) {
first_zero_bit = (sizeof(int) * 8) - 1 - __builtin_clz(not_c);
#endif
if (first_zero_bit == 7) {
/* Ascii - pass right through */
utf8_codepoint = (unsigned int) c;
} else if (first_zero_bit <= 5) {
/* Multibyte sequence */
utf8_codepoint = (0xff >> (8 - first_zero_bit)) & c;
utf8_bytes_left = (char) (6 - first_zero_bit);
} else {
/* Invalid continuation */
utf8_codepoint = UNICODE_REPLACEMENT_CHARACTER;
}
} else {
/* 0xff -- invalid */
utf8_codepoint = UNICODE_REPLACEMENT_CHARACTER;
}
} else if ((c & 0xc0) == 0x80) {
/* Valid continuation of utf-8 multibyte sequence */
utf8_bytes_left--;
utf8_codepoint <<= 6;
utf8_codepoint |= ((unsigned int) c & 0x3f);
} else {
/* Start byte where continuation was expected. */
utf8_bytes_left = 0;
utf8_codepoint = UNICODE_REPLACEMENT_CHARACTER;
/* Patch buf offset so this character will be parsed again as a start
* byte. */
j--;
}
/* Maybe we need to parse more bytes to find a character. */
if (utf8_bytes_left != 0) {
continue;
}
/* Parse vt100/ansi escape codes */
if (uv__vterm_state == UV_TTY_SUPPORTED) {
/* Pass through escape codes if conhost supports them. */
} else if (ansi_parser_state == ANSI_NORMAL) {
switch (utf8_codepoint) {
case '\033':
ansi_parser_state = ANSI_ESCAPE_SEEN;
continue;
case 0233:
ansi_parser_state = ANSI_CSI;
handle->tty.wr.ansi_csi_argc = 0;
continue;
}
} else if (ansi_parser_state == ANSI_ESCAPE_SEEN) {
switch (utf8_codepoint) {
case '[':
ansi_parser_state = ANSI_CSI;
handle->tty.wr.ansi_csi_argc = 0;
continue;
case '^':
case '_':
case 'P':
case ']':
/* Not supported, but we'll have to parse until we see a stop code,
* e.g. ESC \ or BEL. */
ansi_parser_state = ANSI_ST_CONTROL;
continue;
case '\033':
/* Ignore double escape. */
continue;
case 'c':
/* Full console reset. */
FLUSH_TEXT();
uv__tty_reset(handle, error);
ansi_parser_state = ANSI_NORMAL;
continue;
case '7':
/* Save the cursor position and text attributes. */
FLUSH_TEXT();
uv__tty_save_state(handle, 1, error);
ansi_parser_state = ANSI_NORMAL;
continue;
case '8':
/* Restore the cursor position and text attributes */
FLUSH_TEXT();
uv__tty_restore_state(handle, 1, error);
ansi_parser_state = ANSI_NORMAL;
continue;
default:
if (utf8_codepoint >= '@' && utf8_codepoint <= '_') {
/* Single-char control. */
ansi_parser_state = ANSI_NORMAL;
continue;
} else {
/* Invalid - proceed as normal. */
ansi_parser_state = ANSI_NORMAL;
}
}
} else if (ansi_parser_state == ANSI_IGNORE) {
/* We're ignoring this command. Stop only on command character. */
if (utf8_codepoint >= '@' && utf8_codepoint <= '~') {
ansi_parser_state = ANSI_NORMAL;
}
continue;
} else if (ansi_parser_state == ANSI_DECSCUSR) {
/* So far we've seen the sequence `ESC [ arg space`, and we're waiting for
* the final command byte. */
if (utf8_codepoint >= '@' && utf8_codepoint <= '~') {
/* Command byte */
if (utf8_codepoint == 'q') {
/* Change the cursor shape */
int style = handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1;
if (style >= 0 && style <= 6) {
FLUSH_TEXT();
uv__tty_set_cursor_shape(handle, style, error);
}
}
/* Sequence ended - go back to normal state. */
ansi_parser_state = ANSI_NORMAL;
continue;
}
/* Unexpected character, but sequence hasn't ended yet. Ignore the rest
* of the sequence. */
ansi_parser_state = ANSI_IGNORE;
} else if (ansi_parser_state & ANSI_CSI) {
/* So far we've seen `ESC [`, and we may or may not have already parsed
* some of the arguments that follow. */
if (utf8_codepoint >= '0' && utf8_codepoint <= '9') {
/* Parse a numerical argument. */
if (!(ansi_parser_state & ANSI_IN_ARG)) {
/* We were not currently parsing a number, add a new one. */
/* Check that there are not too many arguments. */
if (handle->tty.wr.ansi_csi_argc >=
ARRAY_SIZE(handle->tty.wr.ansi_csi_argv)) {
ansi_parser_state = ANSI_IGNORE;
continue;
}
ansi_parser_state |= ANSI_IN_ARG;
handle->tty.wr.ansi_csi_argc++;
handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1] =
(unsigned short) utf8_codepoint - '0';
continue;
} else {
/* We were already parsing a number. Parse next digit. */
uint32_t value = 10 *
handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1];
/* Check for overflow. */
if (value > UINT16_MAX) {
ansi_parser_state = ANSI_IGNORE;
continue;
}
handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1] =
(unsigned short) value + (utf8_codepoint - '0');
continue;
}
} else if (utf8_codepoint == ';') {
/* Denotes the end of an argument. */
if (ansi_parser_state & ANSI_IN_ARG) {
ansi_parser_state &= ~ANSI_IN_ARG;
continue;
} else {
/* If ANSI_IN_ARG is not set, add another argument and default
* it to 0. */
/* Check for too many arguments */
if (handle->tty.wr.ansi_csi_argc >=
ARRAY_SIZE(handle->tty.wr.ansi_csi_argv)) {
ansi_parser_state = ANSI_IGNORE;
continue;
}
handle->tty.wr.ansi_csi_argc++;
handle->tty.wr.ansi_csi_argv[handle->tty.wr.ansi_csi_argc - 1] = 0;
continue;
}
} else if (utf8_codepoint == '?' &&
!(ansi_parser_state & ANSI_IN_ARG) &&
!(ansi_parser_state & ANSI_EXTENSION) &&
handle->tty.wr.ansi_csi_argc == 0) {
/* Pass through '?' if it is the first character after CSI */
/* This is an extension character from the VT100 codeset */
/* that is supported and used by most ANSI terminals today. */
ansi_parser_state |= ANSI_EXTENSION;
continue;
} else if (utf8_codepoint == ' ' &&
!(ansi_parser_state & ANSI_EXTENSION)) {
/* We expect a command byte to follow after this space. The only
* command that we currently support is 'set cursor style'. */
ansi_parser_state = ANSI_DECSCUSR;
continue;
} else if (utf8_codepoint >= '@' && utf8_codepoint <= '~') {
/* Command byte */
if (ansi_parser_state & ANSI_EXTENSION) {
/* Sequence is `ESC [ ? args command`. */
switch (utf8_codepoint) {
case 'l':
/* Hide the cursor */
if (handle->tty.wr.ansi_csi_argc == 1 &&
handle->tty.wr.ansi_csi_argv[0] == 25) {
FLUSH_TEXT();
uv__tty_set_cursor_visibility(handle, 0, error);
}
break;
case 'h':
/* Show the cursor */
if (handle->tty.wr.ansi_csi_argc == 1 &&
handle->tty.wr.ansi_csi_argv[0] == 25) {
FLUSH_TEXT();
uv__tty_set_cursor_visibility(handle, 1, error);
}
break;
}
} else {
/* Sequence is `ESC [ args command`. */
int x, y, d;
switch (utf8_codepoint) {
case 'A':
/* cursor up */
FLUSH_TEXT();
y = -(handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1);
uv__tty_move_caret(handle, 0, 1, y, 1, error);
break;
case 'B':
/* cursor down */
FLUSH_TEXT();
y = handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1;
uv__tty_move_caret(handle, 0, 1, y, 1, error);
break;
case 'C':
/* cursor forward */
FLUSH_TEXT();
x = handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1;
uv__tty_move_caret(handle, x, 1, 0, 1, error);
break;
case 'D':
/* cursor back */
FLUSH_TEXT();
x = -(handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1);
uv__tty_move_caret(handle, x, 1, 0, 1, error);
break;
case 'E':
/* cursor next line */
FLUSH_TEXT();
y = handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1;
uv__tty_move_caret(handle, 0, 0, y, 1, error);
break;
case 'F':
/* cursor previous line */
FLUSH_TEXT();
y = -(handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 1);
uv__tty_move_caret(handle, 0, 0, y, 1, error);
break;
case 'G':
/* cursor horizontal move absolute */
FLUSH_TEXT();
x = (handle->tty.wr.ansi_csi_argc >= 1 &&
handle->tty.wr.ansi_csi_argv[0])
? handle->tty.wr.ansi_csi_argv[0] - 1 : 0;
uv__tty_move_caret(handle, x, 0, 0, 1, error);
break;
case 'H':
case 'f':
/* cursor move absolute */
FLUSH_TEXT();
y = (handle->tty.wr.ansi_csi_argc >= 1 &&
handle->tty.wr.ansi_csi_argv[0])
? handle->tty.wr.ansi_csi_argv[0] - 1 : 0;
x = (handle->tty.wr.ansi_csi_argc >= 2 &&
handle->tty.wr.ansi_csi_argv[1])
? handle->tty.wr.ansi_csi_argv[1] - 1 : 0;
uv__tty_move_caret(handle, x, 0, y, 0, error);
break;
case 'J':
/* Erase screen */
FLUSH_TEXT();
d = handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 0;
if (d >= 0 && d <= 2) {
uv__tty_clear(handle, d, 1, error);
}
break;
case 'K':
/* Erase line */
FLUSH_TEXT();
d = handle->tty.wr.ansi_csi_argc
? handle->tty.wr.ansi_csi_argv[0] : 0;
if (d >= 0 && d <= 2) {
uv__tty_clear(handle, d, 0, error);
}
break;
case 'm':
/* Set style */
FLUSH_TEXT();
uv__tty_set_style(handle, error);
break;
case 's':
/* Save the cursor position. */
FLUSH_TEXT();
uv__tty_save_state(handle, 0, error);
break;
case 'u':
/* Restore the cursor position */
FLUSH_TEXT();
uv__tty_restore_state(handle, 0, error);
break;
}
}
/* Sequence ended - go back to normal state. */
ansi_parser_state = ANSI_NORMAL;
continue;
} else {
/* We don't support commands that use private mode characters or
* intermediaries. Ignore the rest of the sequence. */
ansi_parser_state = ANSI_IGNORE;
continue;
}
} else if (ansi_parser_state & ANSI_ST_CONTROL) {
/* Unsupported control code.
* Ignore everything until we see `BEL` or `ESC \`. */
if (ansi_parser_state & ANSI_IN_STRING) {
if (!(ansi_parser_state & ANSI_BACKSLASH_SEEN)) {
if (utf8_codepoint == '"') {
ansi_parser_state &= ~ANSI_IN_STRING;
} else if (utf8_codepoint == '\\') {
ansi_parser_state |= ANSI_BACKSLASH_SEEN;
}
} else {
ansi_parser_state &= ~ANSI_BACKSLASH_SEEN;
}
} else {
if (utf8_codepoint == '\007' || (utf8_codepoint == '\\' &&
(ansi_parser_state & ANSI_ESCAPE_SEEN))) {
/* End of sequence */
ansi_parser_state = ANSI_NORMAL;
} else if (utf8_codepoint == '\033') {
/* Escape character */
ansi_parser_state |= ANSI_ESCAPE_SEEN;
} else if (utf8_codepoint == '"') {
/* String starting */
ansi_parser_state |= ANSI_IN_STRING;
ansi_parser_state &= ~ANSI_ESCAPE_SEEN;
ansi_parser_state &= ~ANSI_BACKSLASH_SEEN;
} else {
ansi_parser_state &= ~ANSI_ESCAPE_SEEN;
}
}
continue;
} else {
/* Inconsistent state */
abort();
}
if (utf8_codepoint == 0x0a || utf8_codepoint == 0x0d) {
/* EOL conversion - emit \r\n when we see \n. */
if (utf8_codepoint == 0x0a && previous_eol != 0x0d) {
/* \n was not preceded by \r; print \r\n. */
ENSURE_BUFFER_SPACE(2);
utf16_buf[utf16_buf_used++] = L'\r';
utf16_buf[utf16_buf_used++] = L'\n';
} else if (utf8_codepoint == 0x0d && previous_eol == 0x0a) {
/* \n was followed by \r; do not print the \r, since the source was
* either \r\n\r (so the second \r is redundant) or was \n\r (so the
* \n was processed by the last case and an \r automatically
* inserted). */
} else {
/* \r without \n; print \r as-is. */
ENSURE_BUFFER_SPACE(1);
utf16_buf[utf16_buf_used++] = (WCHAR) utf8_codepoint;
}
previous_eol = (char) utf8_codepoint;
} else if (utf8_codepoint <= 0xffff) {
/* Encode character into utf-16 buffer. */
ENSURE_BUFFER_SPACE(1);
utf16_buf[utf16_buf_used++] = (WCHAR) utf8_codepoint;
previous_eol = 0;
} else {
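/* Code point outside the Basic Multilingual Plane: encode it as a UTF-16
* surrogate pair. */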
ENSURE_BUFFER_SPACE(2);
utf8_codepoint -= 0x10000;
utf16_buf[utf16_buf_used++] = (WCHAR) (utf8_codepoint / 0x400 + 0xD800);
utf16_buf[utf16_buf_used++] = (WCHAR) (utf8_codepoint % 0x400 + 0xDC00);
previous_eol = 0;
}
}
}
/* Flush remaining characters */
FLUSH_TEXT();
/* Copy cached values back to struct. */
handle->tty.wr.utf8_bytes_left = utf8_bytes_left;
handle->tty.wr.utf8_codepoint = utf8_codepoint;
handle->tty.wr.previous_eol = previous_eol;
handle->tty.wr.ansi_parser_state = ansi_parser_state;
uv_sem_post(&uv_tty_output_lock);
if (*error == STATUS_SUCCESS) {
return 0;
} else {
return -1;
}
#undef FLUSH_TEXT
}
int uv__tty_write(uv_loop_t* loop,
uv_write_t* req,
uv_tty_t* handle,
const uv_buf_t bufs[],
unsigned int nbufs,
uv_write_cb cb) {
DWORD error;
UV_REQ_INIT(req, UV_WRITE);
req->handle = (uv_stream_t*) handle;
req->cb = cb;
handle->reqs_pending++;
handle->stream.conn.write_reqs_pending++;
REGISTER_HANDLE_REQ(loop, handle, req);
req->u.io.queued_bytes = 0;
if (!uv__tty_write_bufs(handle, bufs, nbufs, &error)) {
SET_REQ_SUCCESS(req);
} else {
SET_REQ_ERROR(req, error);
}
uv__insert_pending_req(loop, (uv_req_t*) req);
return 0;
}
int uv__tty_try_write(uv_tty_t* handle,
const uv_buf_t bufs[],
unsigned int nbufs) {
DWORD error;
if (handle->stream.conn.write_reqs_pending > 0)
return UV_EAGAIN;
if (uv__tty_write_bufs(handle, bufs, nbufs, &error))
return uv_translate_sys_error(error);
return uv__count_bufs(bufs, nbufs);
}
void uv__process_tty_write_req(uv_loop_t* loop, uv_tty_t* handle,
uv_write_t* req) {
int err;
handle->write_queue_size -= req->u.io.queued_bytes;
UNREGISTER_HANDLE_REQ(loop, handle, req);
if (req->cb) {
err = GET_REQ_ERROR(req);
req->cb(req, uv_translate_sys_error(err));
}
handle->stream.conn.write_reqs_pending--;
if (handle->stream.conn.write_reqs_pending == 0)
if (handle->flags & UV_HANDLE_SHUTTING)
uv__process_tty_shutdown_req(loop,
handle,
handle->stream.conn.shutdown_req);
DECREASE_PENDING_REQ_COUNT(handle);
}
void uv__tty_close(uv_tty_t* handle) {
assert(handle->u.fd == -1 || handle->u.fd > 2);
if (handle->flags & UV_HANDLE_READING)
uv__tty_read_stop(handle);
if (handle->u.fd == -1)
CloseHandle(handle->handle);
else
close(handle->u.fd);
handle->u.fd = -1;
handle->handle = INVALID_HANDLE_VALUE;
handle->flags &= ~(UV_HANDLE_READABLE | UV_HANDLE_WRITABLE);
uv__handle_closing(handle);
if (handle->reqs_pending == 0)
uv__want_endgame(handle->loop, (uv_handle_t*) handle);
}
void uv__process_tty_shutdown_req(uv_loop_t* loop, uv_tty_t* stream, uv_shutdown_t* req) {
assert(stream->stream.conn.write_reqs_pending == 0);
assert(req);
stream->stream.conn.shutdown_req = NULL;
stream->flags &= ~UV_HANDLE_SHUTTING;
UNREGISTER_HANDLE_REQ(loop, stream, req);
/* TTY shutdown is really just a no-op */
if (req->cb) {
if (stream->flags & UV_HANDLE_CLOSING) {
req->cb(req, UV_ECANCELED);
} else {
req->cb(req, 0);
}
}
DECREASE_PENDING_REQ_COUNT(stream);
}
void uv__tty_endgame(uv_loop_t* loop, uv_tty_t* handle) {
assert(handle->flags & UV_HANDLE_CLOSING);
assert(handle->reqs_pending == 0);
/* The wait handle used for raw reading should be unregistered when the
* wait callback runs. */
assert(!(handle->flags & UV_HANDLE_TTY_READABLE) ||
handle->tty.rd.read_raw_wait == NULL);
assert(!(handle->flags & UV_HANDLE_CLOSED));
uv__handle_close(handle);
}
/*
* uv__process_tty_accept_req() is a stub to keep DELEGATE_STREAM_REQ working
* TODO: find a way to remove it
*/
void uv__process_tty_accept_req(uv_loop_t* loop, uv_tty_t* handle,
uv_req_t* raw_req) {
abort();
}
/*
* uv__process_tty_connect_req() is a stub to keep DELEGATE_STREAM_REQ working
* TODO: find a way to remove it
*/
void uv__process_tty_connect_req(uv_loop_t* loop, uv_tty_t* handle,
uv_connect_t* req) {
abort();
}
int uv_tty_reset_mode(void) {
/* Not necessary to do anything. */
return 0;
}
/* Determine whether or not this version of windows supports
* proper ANSI color codes. Should be supported as of windows
* 10 version 1511, build number 10.0.10586.
*/
static void uv__determine_vterm_state(HANDLE handle) {
DWORD dwMode = 0;
uv__need_check_vterm_state = FALSE;
if (!GetConsoleMode(handle, &dwMode)) {
return;
}
dwMode |= ENABLE_VIRTUAL_TERMINAL_PROCESSING;
if (!SetConsoleMode(handle, dwMode)) {
return;
}
uv__vterm_state = UV_TTY_SUPPORTED;
}
static DWORD WINAPI uv__tty_console_resize_message_loop_thread(void* param) {
NTSTATUS status;
ULONG_PTR conhost_pid;
MSG msg;
if (pSetWinEventHook == NULL || pNtQueryInformationProcess == NULL)
return 0;
status = pNtQueryInformationProcess(GetCurrentProcess(),
ProcessConsoleHostProcess,
&conhost_pid,
sizeof(conhost_pid),
NULL);
if (!NT_SUCCESS(status)) {
/* We couldn't retrieve our console host process, probably because this
* is a 32-bit process running on 64-bit Windows. Fall back to receiving
* console events from the input stream only. */
return 0;
}
/* Ensure the PID is a multiple of 4, which is required by SetWinEventHook */
conhost_pid &= ~(ULONG_PTR)0x3;
uv__tty_console_resized = CreateEvent(NULL, TRUE, FALSE, NULL);
if (uv__tty_console_resized == NULL)
return 0;
if (QueueUserWorkItem(uv__tty_console_resize_watcher_thread,
NULL,
WT_EXECUTELONGFUNCTION) == 0)
return 0;
if (!pSetWinEventHook(EVENT_CONSOLE_LAYOUT,
EVENT_CONSOLE_LAYOUT,
NULL,
uv__tty_console_resize_event,
(DWORD)conhost_pid,
0,
WINEVENT_OUTOFCONTEXT))
return 0;
while (GetMessage(&msg, NULL, 0, 0)) {
TranslateMessage(&msg);
DispatchMessage(&msg);
}
return 0;
}
static void CALLBACK uv__tty_console_resize_event(HWINEVENTHOOK hWinEventHook,
DWORD event,
HWND hwnd,
LONG idObject,
LONG idChild,
DWORD dwEventThread,
DWORD dwmsEventTime) {
SetEvent(uv__tty_console_resized);
}
static DWORD WINAPI uv__tty_console_resize_watcher_thread(void* param) {
for (;;) {
/* Make sure to not overwhelm the system with resize events */
Sleep(33);
WaitForSingleObject(uv__tty_console_resized, INFINITE);
uv__tty_console_signal_resize();
ResetEvent(uv__tty_console_resized);
}
return 0;
}
static void uv__tty_console_signal_resize(void) {
CONSOLE_SCREEN_BUFFER_INFO sb_info;
int width, height;
if (!GetConsoleScreenBufferInfo(uv__tty_console_handle, &sb_info))
return;
width = sb_info.dwSize.X;
height = sb_info.srWindow.Bottom - sb_info.srWindow.Top + 1;
uv_mutex_lock(&uv__tty_console_resize_mutex);
assert(uv__tty_console_width != -1 && uv__tty_console_height != -1);
if (width != uv__tty_console_width || height != uv__tty_console_height) {
uv__tty_console_width = width;
uv__tty_console_height = height;
uv_mutex_unlock(&uv__tty_console_resize_mutex);
uv__signal_dispatch(SIGWINCH);
} else {
uv_mutex_unlock(&uv__tty_console_resize_mutex);
}
}
void uv_tty_set_vterm_state(uv_tty_vtermstate_t state) {
uv_sem_wait(&uv_tty_output_lock);
uv__need_check_vterm_state = FALSE;
uv__vterm_state = state;
uv_sem_post(&uv_tty_output_lock);
}
int uv_tty_get_vterm_state(uv_tty_vtermstate_t* state) {
uv_sem_wait(&uv_tty_output_lock);
*state = uv__vterm_state;
uv_sem_post(&uv_tty_output_lock);
return 0;
}
| /* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/ |
client_proto_main.ml | open Protocol
open Protocol_client_context
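(* Preapply a block carrying the genesis [command] to obtain its shell header,
forge and sign it with [sk], then inject it into the node. *)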
let bake cctxt ?timestamp block command sk =
let timestamp =
match timestamp with
| Some t ->
t
| None ->
Time.System.(to_protocol (Tezos_stdlib_unix.Systime_os.now ()))
in
let protocol_data = {command; signature = Signature.zero} in
Genesis_block_services.Helpers.Preapply.block
cctxt
~block
~timestamp
~protocol_data
[]
>>=? fun (shell_header, _) ->
let blk = Data.Command.forge shell_header command in
Shell_services.Chain.chain_id cctxt ~chain:`Main ()
>>=? fun chain_id ->
Client_keys.append cctxt sk ~watermark:(Block_header chain_id) blk
>>=? fun signed_blk -> Shell_services.Injection.block cctxt signed_blk []
let int64_parameter =
Clic.parameter (fun _ p ->
try return (Int64.of_string p) with _ -> failwith "Cannot read int64")
let file_parameter =
Clic.parameter (fun _ p ->
if not (Sys.file_exists p) then failwith "File doesn't exist: '%s'" p
else return p)
let fitness_from_int64 fitness =
(* definition taken from src/proto_alpha/lib_protocol/src/constants_repr.ml *)
let version_number = "\000" in
(* definitions taken from src/proto_alpha/lib_protocol/src/fitness_repr.ml *)
let int64_to_bytes i =
let b = Bytes.create 8 in
TzEndian.set_int64 b 0 i ; b
in
[Bytes.of_string version_number; int64_to_bytes fitness]
let timestamp_arg =
Clic.arg
~long:"timestamp"
~placeholder:"date"
~doc:"Set the timestamp of the block (and initial time of the chain)"
(Clic.parameter (fun _ t ->
match Time.System.of_notation_opt t with
| None ->
Error_monad.failwith
"Could not parse value provided to -timestamp option"
| Some t ->
return t))
let test_delay_arg =
Clic.default_arg
~long:"delay"
~placeholder:"time"
~doc:"Set the life span of the test chain (in seconds)"
~default:(Int64.to_string (Int64.mul 24L 3600L))
(Clic.parameter (fun _ t ->
match Int64.of_string_opt t with
| None ->
Error_monad.failwith
"Could not parse value provided to -delay option"
| Some t ->
return t))
let proto_param ~name ~desc t =
Clic.param
~name
~desc
(Clic.parameter (fun _ str -> Lwt.return (Protocol_hash.of_b58check str)))
t
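(* Client commands for the genesis protocol: activate a new protocol on the
main chain, or fork a test chain running a given protocol. *)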
let commands () =
let open Clic in
let args =
args1
(arg
~long:"timestamp"
~placeholder:"date"
~doc:"Set the timestamp of the block (and initial time of the chain)"
(parameter (fun _ t ->
match Time.Protocol.of_notation t with
| None ->
Error_monad.failwith
"Could not parse value provided to -timestamp option"
| Some t ->
return t)))
in
[ command
~desc:"Activate a protocol"
args
( prefixes ["activate"; "protocol"]
@@ proto_param ~name:"version" ~desc:"Protocol version (b58check)"
@@ prefixes ["with"; "fitness"]
@@ param
~name:"fitness"
~desc:"Hardcoded fitness of the first block (integer)"
int64_parameter
@@ prefixes ["and"; "key"]
@@ Client_keys.Secret_key.source_param
~name:"password"
~desc:"Activator's key"
@@ prefixes ["and"; "parameters"]
@@ param
~name:"parameters"
~desc:"Protocol parameters (as JSON file)"
file_parameter
@@ stop )
(fun timestamp
hash
fitness
sk
param_json_file
(cctxt : Client_context.full) ->
let fitness = fitness_from_int64 fitness in
Tezos_stdlib_unix.Lwt_utils_unix.Json.read_file param_json_file
>>=? fun json ->
let protocol_parameters =
Data_encoding.Binary.to_bytes_exn Data_encoding.json json
in
bake
cctxt
?timestamp
cctxt#block
(Activate {protocol = hash; fitness; protocol_parameters})
sk
>>=? fun hash ->
cctxt#answer "Injected %a" Block_hash.pp_short hash
>>= fun () -> return_unit);
command
~desc:"Fork a test protocol"
(args2 timestamp_arg test_delay_arg)
( prefixes ["fork"; "test"; "protocol"]
@@ proto_param ~name:"version" ~desc:"Protocol version (b58check)"
@@ prefixes ["with"; "fitness"]
@@ param
~name:"fitness"
~desc:
"Hardcoded fitness of the first block of the testchain (integer)"
int64_parameter
@@ prefixes ["and"; "key"]
@@ Client_keys.Secret_key.source_param
~name:"password"
~desc:"Activator's key"
@@ prefixes ["and"; "parameters"]
@@ param
~name:"parameters"
~desc:"Testchain protocol parameters (as JSON file)"
file_parameter
@@ stop )
(fun (timestamp, delay) hash fitness sk param_json_file cctxt ->
let fitness = fitness_from_int64 fitness in
Tezos_stdlib_unix.Lwt_utils_unix.Json.read_file param_json_file
>>=? fun json ->
let protocol_parameters =
Data_encoding.Binary.to_bytes_exn Data_encoding.json json
in
let timestamp = Option.map ~f:Time.System.to_protocol timestamp in
bake
cctxt
?timestamp
cctxt#block
(Activate_testchain
{protocol = hash; fitness; protocol_parameters; delay})
sk
>>=? fun hash ->
cctxt#answer "Injected %a" Block_hash.pp_short hash
>>= fun () -> return_unit) ]
let () = Client_commands.register Protocol.hash @@ fun _network -> commands ()
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
test_dkml_install_api.ml |
open Dkml_install_register
open More_testables
let test_add_once () =
Alcotest.(check unit)
"no errors" ()
(let reg = Component_registry.get () in
Component_registry.add_component reg
(module struct
include Dkml_install_api.Default_component_config
let component_name = "add-once"
end))
let test_add_twice () =
Alcotest.(check string_starts_with)
"fail to add same component name" "FATAL [debe504f]"
(let reg = Component_registry.get () in
Component_registry.add_component ~raise_on_error:true reg
(module struct
include Dkml_install_api.Default_component_config
let component_name = "add-twice"
end);
try
Component_registry.add_component ~raise_on_error:true reg
(module struct
include Dkml_install_api.Default_component_config
let component_name = "add-twice"
end);
"Was supposed to raise an exception, but didn't"
with Invalid_argument msg -> msg)
let () =
let open Alcotest in
run "Dkml_install_api"
[
( "basic",
[
test_case "Add once" `Quick test_add_once;
test_case "Add twice" `Quick test_add_twice;
] );
]
| |
repository_model.ml |
module QuestionRow = struct
type t =
{ id : string
; label : string option
; help_text : string option
; text : string
; default_value : string option
; validation_regex : string
; question_type : string
; max_file_size : int option
; mime_types : string option
; options : string option
}
[@@deriving make, show, eq]
let of_question question =
let open Model.Question in
let question_row =
{ id = ""
; label = Some ""
; help_text = Some ""
; text = ""
; default_value = None
; validation_regex = ""
; question_type = ""
; max_file_size = None
; mime_types = None
; options = None
}
in
match question with
| Country (id, label, help_text, text, _) ->
{ question_row with id; label; help_text; text; question_type = "country" }
| File (id, label, help_text, text, mime_types, max_file_size, _) ->
{ question_row with
id
; label
; help_text
; text
; question_type = "file"
; max_file_size = Some max_file_size
; mime_types = Some (mime_types |> String.concat ",")
}
| Date (id, label, help_text, text, _) ->
{ question_row with id; label; help_text; text; question_type = "date" }
| Select (id, label, help_text, text, possible_options, _) ->
{ question_row with
id
; label
; help_text
; text
; question_type = "select"
; options = Some (possible_options |> String.concat ",")
}
| Text (id, label, help_text, text, default, regex, _) ->
{ question_row with
id
; label
; help_text
; text
; default_value = default
; validation_regex = regex
; question_type = "text"
}
| Year (id, label, help_text, text, _) ->
{ question_row with id; label; help_text; text; question_type = "year" }
| YesNo (id, label, help_text, text, _) ->
{ question_row with id; label; help_text; text; question_type = "y/n" }
;;
let t =
let encode m =
Ok
( m.id
, ( m.label
, ( m.help_text
, ( m.text
, ( m.default_value
, ( m.validation_regex
, (m.question_type, (m.max_file_size, (m.mime_types, m.options))) ) ) )
) ) )
in
let decode
( id
, ( label
, ( help_text
, ( text
, ( default_value
, ( validation_regex
, (question_type, (max_file_size, (mime_types, options))) ) ) ) ) ) )
=
Ok
{ id
; label
; help_text
; text
; default_value
; validation_regex
; question_type
; max_file_size
; mime_types
; options
}
in
let open Caqti_type in
custom
~encode
~decode
(tup2
string
(tup2
(option string)
(tup2
(option string)
(tup2
string
(tup2
(option string)
(tup2
string
(tup2
string
(tup2 (option int) (tup2 (option string) (option string))))))))))
;;
end
module QuestionAnswerRow = struct
type t =
{ question_uuid : string
; question_label : string option
; question_help_text : string option
; question_text : string
; question_required : int
; question_default_value : string option
; question_validation_regex : string
; question_type : string
; question_max_file_size : int option
; question_mime_types : string option
; question_options : string option
; answer_uuid : string option
; answer_text : string option
; answer_asset_uuid : string option
; answer_asset_filename : string option
; answer_asset_size : int option
; answer_asset_mime : string option
}
[@@deriving make, show, eq]
let int_to_bool n = n != 0
let row_to_question_answer_input row =
match row with
| { question_uuid
; question_label
; question_help_text
; question_text
; question_default_value = default_value
; question_validation_regex = regex
; question_type = "text"
; question_required = required
; answer_uuid = Some _
; answer_text = Some text
; _
} ->
Ok
( Model.Question.Text
( question_uuid
, question_label
, question_help_text
, question_text
, default_value
, regex
, int_to_bool required )
, Some (Model.AnswerInput.Text text) )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_default_value = default_value
; question_validation_regex = regex
; question_type = "text"
; question_required = required
; answer_uuid = None
; _
} ->
Ok
( Model.Question.Text
( question_uuid
, question_label
, question_help_text
, question_text
, default_value
, regex
, int_to_bool required )
, None )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_type = "y/n"
; question_required = required
; answer_uuid = Some _
; answer_text = Some text
; _
} ->
Ok
( Model.Question.YesNo
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, Some (Model.AnswerInput.Text text) )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "y/n"
; answer_uuid = None
; _
} ->
Ok
( Model.Question.YesNo
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, None )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "date"
; answer_uuid = Some _
; answer_text = Some text
; _
} ->
Ok
( Model.Question.Date
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, Some (Model.AnswerInput.Text text) )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "date"
; answer_uuid = None
; _
} ->
Ok
( Model.Question.Date
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, None )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "country"
; answer_uuid = Some _
; answer_text = Some text
; _
} ->
Ok
( Model.Question.Country
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, Some (Model.AnswerInput.Text text) )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "country"
; answer_uuid = None
; _
} ->
Ok
( Model.Question.Country
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, None )
| { question_uuid
; question_label = label
; question_help_text = help_text
; question_text = text
; question_required = required
; question_type = "file"
; question_max_file_size = Some max_file_size
; question_mime_types = Some supported_mime_types
; answer_uuid = Some _
; answer_asset_uuid = Some asset_id
; answer_asset_filename = Some filename
; answer_asset_size = Some size
; answer_asset_mime = Some mime
; _
} ->
Ok
( Model.Question.File
( question_uuid
, label
, help_text
, text
, Str.split (Str.regexp_string ",") supported_mime_types
, max_file_size
, int_to_bool required )
, Some (Model.AnswerInput.Asset (Some asset_id, filename, size, mime, "")) )
| { question_uuid
; question_label = label
; question_help_text = help_text
; question_text = text
; question_required = required
; question_type = "file"
; question_max_file_size = Some max_file_size
; question_mime_types = Some supported_mime_types
; answer_asset_uuid = None
; _
} ->
Ok
( Model.Question.File
( question_uuid
, label
, help_text
, text
, Str.split (Str.regexp_string ",") supported_mime_types
, max_file_size
, int_to_bool required )
, None )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "year"
; answer_uuid = Some _
; answer_text = Some text
; _
} ->
Ok
( Model.Question.Year
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, Some (Model.AnswerInput.Text text) )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "year"
; answer_uuid = None
; _
} ->
Ok
( Model.Question.Year
( question_uuid
, question_label
, question_help_text
, question_text
, int_to_bool required )
, None )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "select"
; question_options = Some options
; answer_uuid = Some _
; answer_text = Some text
; _
} ->
Ok
( Model.Question.Select
( question_uuid
, question_label
, question_help_text
, question_text
, Str.split (Str.regexp_string ",") options
, int_to_bool required )
, Some (Model.AnswerInput.Text text) )
| { question_uuid
; question_label
; question_help_text
; question_text
; question_required = required
; question_type = "select"
; question_options = Some options
; answer_uuid = None
; _
} ->
Ok
( Model.Question.Select
( question_uuid
, question_label
, question_help_text
, question_text
, Str.split (Str.regexp_string ",") options
, int_to_bool required )
, None )
| { question_type = type_; question_uuid = id; _ } ->
let msg =
Caml.Format.asprintf
"Invalid question type encountered %s for question with id %s"
type_
id
in
Logs.err (fun m -> m "%s" msg);
Error msg
;;
let to_question_answer_input rows = rows |> CCResult.map_l row_to_question_answer_input
let t =
let encode m =
Ok
( m.question_uuid
, ( m.question_label
, ( m.question_help_text
, ( m.question_text
, ( m.question_required
, ( m.question_default_value
, ( m.question_validation_regex
, ( m.question_type
, ( m.question_max_file_size
, ( m.question_mime_types
, ( m.question_options
, ( m.answer_uuid
, ( m.answer_text
, ( m.answer_asset_uuid
, ( m.answer_asset_filename
, (m.answer_asset_size, m.answer_asset_mime) ) ) ) )
) ) ) ) ) ) ) ) ) ) )
in
let decode
( question_uuid
, ( question_label
, ( question_help_text
, ( question_text
, ( question_required
, ( question_default_value
, ( question_validation_regex
, ( question_type
, ( question_max_file_size
, ( question_mime_types
, ( question_options
, ( answer_uuid
, ( answer_text
, ( answer_asset_uuid
, ( answer_asset_filename
, (answer_asset_size, answer_asset_mime) ) ) ) ) ) )
) ) ) ) ) ) ) ) )
=
Ok
{ question_uuid
; question_label
; question_help_text
; question_text
; question_default_value
; question_validation_regex
; question_type
; question_max_file_size
; question_mime_types
; question_options
; question_required
; answer_uuid
; answer_text
; answer_asset_uuid
; answer_asset_filename
; answer_asset_size
; answer_asset_mime
}
in
let open Caqti_type in
custom
~encode
~decode
(tup2
string
(tup2
(option string)
(tup2
(option string)
(tup2
string
(tup2
int
(tup2
(option string)
(tup2
string
(tup2
string
(tup2
(option int)
(tup2
(option string)
(tup2
(option string)
(tup2
(option string)
(tup2
(option string)
(tup2
(option string)
(tup2
(option string)
(tup2 (option int) (option string)))))))))))))))))
;;
end
module QuestionnaireRow = struct
type t =
{ uuid : string
; template_uuid : string
; template_label : string
; template_description : string option
}
[@@deriving make, show, eq]
let to_questionnaire row question_rows =
let questions =
QuestionAnswerRow.to_question_answer_input question_rows |> CCResult.get_or_failwith
in
Model.Questionnaire.make
~uuid:row.uuid
~template_uuid:row.template_uuid
~label:row.template_label
~description:(row.template_description |> Option.value ~default:"")
~questions
()
;;
let t =
let encode m =
Ok (m.uuid, m.template_uuid, m.template_label, m.template_description)
in
let decode (uuid, template_uuid, template_label, template_description) =
Ok { uuid; template_uuid; template_label; template_description }
in
let open Caqti_type in
custom ~encode ~decode (tup4 string string string (option string))
;;
end
module AnswerRow = struct
type t =
{ uuid : string
; text : string option
; asset : string option
}
[@@deriving make, fields]
let asset_exn answer =
match answer |> asset with
| None ->
      failwith (Caml.Format.asprintf "Answer with id %s has no asset" (uuid answer))
| Some answer -> answer
;;
let t =
let encode m = Ok (m.uuid, m.text, m.asset) in
let decode (uuid, text, asset) = Ok { uuid; text; asset } in
let open Caqti_type in
custom ~encode ~decode (tup3 string (option string) (option string))
;;
end
| |
test_blit_intf.ml | (** Produce unit tests for blittable values. *)
open! Base
open! Blit
module type Elt = sig
type t
val equal : t -> t -> bool
(** [of_bool] is used to generate two distinct values of type [t], used in unit tests.
It is required that [of_bool false <> of_bool true]. *)
val of_bool : bool -> t
end
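(* A minimal [Elt] instance, given here only as an illustrative sketch (it is
   not part of this interface): any type works provided the two generated
   values differ, i.e. [of_bool false <> of_bool true].
   {[
     module Char_elt = struct
       type t = char
       let equal = Char.equal
       let of_bool b = if b then 'a' else 'b'
     end
   ]} *)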
module type Elt1 = sig
type 'a t
val equal : bool t -> bool t -> bool
val of_bool : bool -> bool t
end
module type Sequence = sig
type t
type elt
val create : len:int -> t
val length : t -> int
val get : t -> int -> elt
val set : t -> int -> elt -> unit
end
type 'a poly = 'a
module type Sequence1 = sig
type 'a t
(** [Make1*] guarantees to only call [create_like ~len t] with [len > 0] if [length t >
0]. *)
val length : _ t -> int
(** [create_bool], [get], and [set] are just used for unit tests. [z] is needed for
[Flat_tuple_array], [elt] is needed for [Option_array]. *)
type 'a z
type 'a elt
val create_bool : len:int -> bool z t
val get : 'a z t -> int -> 'a elt
val set : 'a z t -> int -> 'a elt -> unit
end
module type Test_blit = sig
module type Elt = Elt
module type Elt1 = Elt1
module type Sequence = Sequence
module type Sequence1 = Sequence1
module Test
(Elt : Elt)
(Sequence : Sequence with type elt := Elt.t)
(Tested : S with type t := Sequence.t) : sig end
module Test_distinct
(Elt : Elt)
(Src : Sequence with type elt := Elt.t)
(Dst : Sequence with type elt := Elt.t)
(Tested : S_distinct with type src := Src.t with type dst := Dst.t) : sig end
module Test1
(Sequence : Sequence1 with type 'a elt := 'a poly)
(Tested : S1 with type 'a t := 'a Sequence.t) : sig end
module Test1_generic
(Elt : Elt1)
(Sequence : Sequence1 with type 'a elt := 'a Elt.t)
(Tested : S1 with type 'a t := 'a Sequence.t) : sig end
(** [Make_and_test] uses the [Blit.Make] functor and the [Test] functor. *)
module Make_and_test
(Elt : Elt) (Sequence : sig
include Sequence with type elt := Elt.t
val unsafe_blit : (t, t) blit
end) : S with type t := Sequence.t
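(* Hedged usage sketch: the module names below are hypothetical, not provided
   by this library.
   {[
     module Bytes_test = Make_and_test (Char_elt) (Bytes_sequence)
   ]}
   where [Bytes_sequence] implements [Sequence] over [Bytes.t] together with
   [unsafe_blit], yielding a unit-tested [S] implementation for that type. *)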
module Make_distinct_and_test
(Elt : Elt)
(Src : Sequence with type elt := Elt.t) (Dst : sig
include Sequence with type elt := Elt.t
val unsafe_blit : (Src.t, t) blit
end) : S_distinct with type src := Src.t with type dst := Dst.t
module Make1_and_test (Sequence : sig
include Blit.Sequence1
include Sequence1 with type 'a t := 'a t with type 'a elt := 'a poly
end) : S1 with type 'a t := 'a Sequence.t
module Make1_generic_and_test
(Elt : Elt1) (Sequence : sig
include Blit.Sequence1
include Sequence1 with type 'a t := 'a t with type 'a elt := 'a Elt.t
end) : S1 with type 'a t := 'a Sequence.t
end
| (** Produce unit tests for blittable values. *)
|
RPC_legacy.mli | (** Legacy node RPCs. *)
(** THIS MODULE IS DEPRECATED: ITS FUNCTIONS SHOULD BE PORTED TO THE NEW RPC
ENGINE (IN [RPC.ml], USING MODULE [RPC_core]). *)
(** In all RPCs, default [chain] is "main" and default [block] is
"head~2" to pick the finalized branch for Tenderbake. *)
(** {2 Protocol RPCs} *)
type ctxt_type = Bytes | Json
module Seed : sig
val get_seed :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
Client.t ->
string Lwt.t
val get_seed_status :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
Client.t ->
JSON.t Lwt.t
end
module Script_cache : sig
(** Call RPC /chain/[chain]/blocks/[block]/context/cache/contracts/all *)
val get_cached_contracts :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
Client.t ->
JSON.t Lwt.t
end
module Tx_rollup : sig
(** Call RPC /chain/[chain]/blocks/[block]/context/tx_rollup/[tx_rollup_id]/state *)
val get_state :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
rollup:string ->
Client.t ->
JSON.t Runnable.process
(** Call RPC /chain/[chain]/blocks/[block]/context/tx_rollup/[tx_rollup_id]/inbox/[level] *)
val get_inbox :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
rollup:string ->
level:int ->
Client.t ->
JSON.t Runnable.process
(** Call RPC /chain/[chain]/blocks/[block]/context/tx_rollup/[rollup_hash]/commitment/[level] *)
val get_commitment :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
rollup:string ->
level:int ->
Client.t ->
JSON.t Runnable.process
(** Call RPC /chain/[chain]/blocks/[block]/context/[rollup_hash]/pending_bonded_commitments *)
val get_pending_bonded_commitments :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
rollup:string ->
pkh:string ->
Client.t ->
JSON.t Runnable.process
module Forge : sig
module Inbox : sig
val message_hash :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
val merkle_tree_hash :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
val merkle_tree_path :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
end
module Commitment : sig
val merkle_tree_hash :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
val merkle_tree_path :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
val message_result_hash :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
end
module Withdraw : sig
val withdraw_list_hash :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
data:Client.data ->
Client.t ->
JSON.t Runnable.process
end
end
end
val raw_bytes :
?endpoint:Client.endpoint ->
?hooks:Process.hooks ->
?chain:string ->
?block:string ->
?path:string list ->
Client.t ->
JSON.t Lwt.t
module Curl : sig
(** [get url] returns a runnable requesting [url] with curl.
The response is parsed and returned as JSON.
Fails if [curl] is not found in path.
*)
val get : ?args:string list -> string -> JSON.t Runnable.process
(** Same as [get] but does not parse the returned value *)
val get_raw : ?args:string list -> string -> string Runnable.process
(** [post url data] returns a runnable posting [data] to [url] with curl.
The response is parsed and returned as JSON.
Fails if [curl] is not found in path. *)
val post : ?args:string list -> string -> JSON.t -> JSON.t Runnable.process
end
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2020 Nomadic Labs <[email protected]> *)
(* Copyright (c) 2022 TriliTech <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
bitv.ml | (*i $Id: bitv.ml,v 1.26 2012/08/14 07:26:00 filliatr Exp $ i*)
(*s Bit vectors. The interface and part of the code are borrowed from the
[Array] module of the ocaml standard library (but things are simplified
here since we can always initialize a bit vector). This module also
provides bitwise operations. *)
(*s We represent a bit vector by a vector of integers (field [bits]),
and we keep the information of the size of the bit vector since it
    cannot be recovered from the size of the array (field [length]). *)
type t = {
length : int;
bits : int array }
let length v = v.length
(*s Each element of the array is an integer containing [bpi] bits, where
[bpi] is determined according to the machine word size. Since we do not
    use the sign bit, [bpi] is 30 on a 32-bit machine and 62 on a 64-bit
    machine. We maintain the following invariant:
{\em The unused bits of the last integer are always
zeros.} This is ensured by [create] and maintained in other functions
    using [normalize]. [bit_j], [bit_not_j], [low_mask] and [high_mask]
are arrays used to extract and mask bits in a single integer. *)
let bpi = Sys.word_size - 2
let bit_j = Array.init bpi (fun j -> 1 lsl j)
let bit_not_j = Array.init bpi (fun j -> max_int - bit_j.(j))
let low_mask = Array.make (succ bpi) 0
let _ =
for i = 1 to bpi do low_mask.(i) <- low_mask.(i-1) lor bit_j.(pred i) done
let keep_lowest_bits a j = a land low_mask.(j)
let high_mask = Array.init (succ bpi) (fun j -> low_mask.(j) lsl (bpi-j))
let keep_highest_bits a j = a land high_mask.(j)
let max_length = Sys.max_array_length * bpi
let exceeds_max_length n =
let s = n / bpi in
(if n mod bpi = 0 then s else s + 1) > Sys.max_array_length
(*s Creating and normalizing a bit vector is easy: it is just a matter of
taking care of the invariant. Copy is immediate. *)
let create n b =
if n < 0 || exceeds_max_length n then invalid_arg "Bitv.create";
let initv = if b then max_int else 0 in
let r = n mod bpi in
if r = 0 then
{ length = n; bits = Array.make (n / bpi) initv }
else begin
let s = n / bpi in
let b = Array.make (succ s) initv in
b.(s) <- b.(s) land low_mask.(r);
{ length = n; bits = b }
end
let normalize v =
let r = v.length mod bpi in
if r > 0 then
let b = v.bits in
let s = Array.length b in
b.(s-1) <- b.(s-1) land low_mask.(r)
let copy v = { length = v.length; bits = Array.copy v.bits }
(*s Access and assignment. The [n]th bit of a bit vector is the [j]th
bit of the [i]th integer, where [i = n / bpi] and [j = n mod
    bpi]. Both [i] and [j] are computed by the function [pos].
Accessing a bit is testing whether the result of the corresponding
mask operation is non-zero, and assigning it is done with a
    bitwise operation: an {\em or} with [bit_j] to set it, and an {\em
and} with [bit_not_j] to unset it. *)
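(* For example, on a 64-bit machine [bpi] = [Sys.word_size - 2] = 62, so bit
   number 100 lives in integer [i] = 100 / 62 = 1 at offset
   [j] = 100 mod 62 = 38, i.e. [pos 100] is [(1, 38)]. *)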
let pos n = n / bpi, n mod bpi
let unsafe_get v n =
let (i,j) = pos n in
((Array.unsafe_get v.bits i) land (Array.unsafe_get bit_j j)) > 0
let unsafe_set v n b =
let (i,j) = pos n in
if b then
Array.unsafe_set v.bits i
((Array.unsafe_get v.bits i) lor (Array.unsafe_get bit_j j))
else
Array.unsafe_set v.bits i
((Array.unsafe_get v.bits i) land (Array.unsafe_get bit_not_j j))
(*s The corresponding safe operations test the validity of the access. *)
let get v n =
if n < 0 || n >= v.length then invalid_arg "Bitv.get";
let (i,j) = pos n in
((Array.unsafe_get v.bits i) land (Array.unsafe_get bit_j j)) > 0
let set v n b =
if n < 0 || n >= v.length then invalid_arg "Bitv.set";
let (i,j) = pos n in
if b then
Array.unsafe_set v.bits i
((Array.unsafe_get v.bits i) lor (Array.unsafe_get bit_j j))
else
Array.unsafe_set v.bits i
((Array.unsafe_get v.bits i) land (Array.unsafe_get bit_not_j j))
(*s [init] is implemented naively using [unsafe_set]. *)
let init n f =
let v = create n false in
for i = 0 to pred n do
unsafe_set v i (f i)
done;
v
(*s Handling bits by packets is the key for efficiency of functions
[append], [concat], [sub] and [blit].
We start by a very general function [blit_bits a i m v n] which blits
the bits [i] to [i+m-1] of a native integer [a]
onto the bit vector [v] at index [n]. It assumes that [i..i+m-1] and
[n..n+m-1] are respectively valid subparts of [a] and [v].
It is optimized when the bits fit the lowest boundary of an integer
(case [j == 0]). *)
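(* For example, with [a = 0b101101], [i = 1] and [m = 3], the selected bits of
   [a] are (from least significant) 0, 1, 1; they end up at positions [n],
   [n+1] and [n+2] of [v]. *)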
let blit_bits a i m v n =
let (i',j) = pos n in
if j == 0 then
Array.unsafe_set v i'
((keep_lowest_bits (a lsr i) m) lor
(keep_highest_bits (Array.unsafe_get v i') (bpi - m)))
else
let d = m + j - bpi in
if d > 0 then begin
Array.unsafe_set v i'
(((keep_lowest_bits (a lsr i) (bpi - j)) lsl j) lor
(keep_lowest_bits (Array.unsafe_get v i') j));
Array.unsafe_set v (succ i')
((keep_lowest_bits (a lsr (i + bpi - j)) d) lor
(keep_highest_bits (Array.unsafe_get v (succ i')) (bpi - d)))
end else
Array.unsafe_set v i'
(((keep_lowest_bits (a lsr i) m) lsl j) lor
((Array.unsafe_get v i') land (low_mask.(j) lor high_mask.(-d))))
(*s [blit_int] implements [blit_bits] in the particular case when
[i=0] and [m=bpi] i.e. when we blit all the bits of [a]. *)
let blit_int a v n =
let (i,j) = pos n in
if j == 0 then
Array.unsafe_set v i a
else begin
Array.unsafe_set v i
( (keep_lowest_bits (Array.unsafe_get v i) j) lor
((keep_lowest_bits a (bpi - j)) lsl j));
Array.unsafe_set v (succ i)
((keep_highest_bits (Array.unsafe_get v (succ i)) (bpi - j)) lor
(a lsr (bpi - j)))
end
(*s When blitting a subpart of a bit vector into another bit vector, there
are two possible cases: (1) all the bits are contained in a single integer
of the first bit vector, and a single call to [blit_bits] is the
only thing to do, or (2) the source bits overlap on several integers of
the source array, and then we do a loop of [blit_int], with two calls
to [blit_bits] for the two bounds. *)
let unsafe_blit v1 ofs1 v2 ofs2 len =
if len > 0 then
let (bi,bj) = pos ofs1 in
let (ei,ej) = pos (ofs1 + len - 1) in
if bi == ei then
blit_bits (Array.unsafe_get v1 bi) bj len v2 ofs2
else begin
blit_bits (Array.unsafe_get v1 bi) bj (bpi - bj) v2 ofs2;
let n = ref (ofs2 + bpi - bj) in
for i = succ bi to pred ei do
blit_int (Array.unsafe_get v1 i) v2 !n;
n := !n + bpi
done;
blit_bits (Array.unsafe_get v1 ei) 0 (succ ej) v2 !n
end
let blit v1 ofs1 v2 ofs2 len =
if len < 0 || ofs1 < 0 || ofs1 + len > v1.length
|| ofs2 < 0 || ofs2 + len > v2.length
then invalid_arg "Bitv.blit";
unsafe_blit v1.bits ofs1 v2.bits ofs2 len
(*s Extracting the subvector [ofs..ofs+len-1] of [v] is just creating a
new vector of length [len] and blitting the subvector of [v] inside. *)
let sub v ofs len =
if ofs < 0 || len < 0 || ofs + len > v.length then invalid_arg "Bitv.sub";
let r = create len false in
unsafe_blit v.bits ofs r.bits 0 len;
r
(*s The concatenation of two bit vectors [v1] and [v2] is obtained by
creating a vector for the result and blitting inside the two vectors.
[v1] is copied directly. *)
let append v1 v2 =
let l1 = v1.length
and l2 = v2.length in
let r = create (l1 + l2) false in
let b1 = v1.bits in
let b2 = v2.bits in
let b = r.bits in
for i = 0 to Array.length b1 - 1 do
Array.unsafe_set b i (Array.unsafe_get b1 i)
done;
unsafe_blit b2 0 b l1 l2;
r
(*s The concatenation of a list of bit vectors is obtained by iterating
[unsafe_blit]. *)
let concat vl =
let size = List.fold_left (fun sz v -> sz + v.length) 0 vl in
let res = create size false in
let b = res.bits in
let pos = ref 0 in
List.iter
(fun v ->
let n = v.length in
unsafe_blit v.bits 0 b !pos n;
pos := !pos + n)
vl;
res
(*s Filling is a particular case of blitting with a source made of all
    ones or all zeros. Thus we instantiate [unsafe_blit], with 0 and
[max_int]. *)
let blit_zeros v ofs len =
if len > 0 then
let (bi,bj) = pos ofs in
let (ei,ej) = pos (ofs + len - 1) in
if bi == ei then
blit_bits 0 bj len v ofs
else begin
blit_bits 0 bj (bpi - bj) v ofs;
let n = ref (ofs + bpi - bj) in
for _i = succ bi to pred ei do
blit_int 0 v !n;
n := !n + bpi
done;
blit_bits 0 0 (succ ej) v !n
end
let blit_ones v ofs len =
if len > 0 then
let (bi,bj) = pos ofs in
let (ei,ej) = pos (ofs + len - 1) in
if bi == ei then
blit_bits max_int bj len v ofs
else begin
blit_bits max_int bj (bpi - bj) v ofs;
let n = ref (ofs + bpi - bj) in
for _i = succ bi to pred ei do
blit_int max_int v !n;
n := !n + bpi
done;
blit_bits max_int 0 (succ ej) v !n
end
let fill v ofs len b =
if ofs < 0 || len < 0 || ofs + len > v.length then invalid_arg "Bitv.fill";
if b then blit_ones v.bits ofs len else blit_zeros v.bits ofs len
(*s All the iterators are implemented as for traditional arrays, using
[unsafe_get]. For [iter] and [map], we do not precompute [(f
true)] and [(f false)] since [f] may have side-effects. *)
let iter f v =
for i = 0 to v.length - 1 do f (unsafe_get v i) done
let map f v =
let l = v.length in
let r = create l false in
for i = 0 to l - 1 do
unsafe_set r i (f (unsafe_get v i))
done;
r
let iteri f v =
for i = 0 to v.length - 1 do f i (unsafe_get v i) done
let mapi f v =
let l = v.length in
let r = create l false in
for i = 0 to l - 1 do
unsafe_set r i (f i (unsafe_get v i))
done;
r
let fold_left f x v =
let r = ref x in
for i = 0 to v.length - 1 do
r := f !r (unsafe_get v i)
done;
!r
let fold_right f v x =
let r = ref x in
for i = v.length - 1 downto 0 do
r := f (unsafe_get v i) !r
done;
!r
let foldi_left f x v =
let r = ref x in
for i = 0 to v.length - 1 do
r := f !r i (unsafe_get v i)
done;
!r
let foldi_right f v x =
let r = ref x in
for i = v.length - 1 downto 0 do
r := f i (unsafe_get v i) !r
done;
!r
(*s Population count *)
let rec naive_pop x =
assert (x < 0x10000);
if x = 0 then 0 else 1 + naive_pop (x - (x land -x))
let pop16 = Array.init 0x10000 naive_pop
let pop16 n = Array.unsafe_get pop16 n
let popi x = match Sys.word_size with
| 32 -> pop16 (x land 0xffff) + pop16 ((x lsr 16) land 0xffff)
| 64 -> pop16 (x land 0xffff) + pop16 ((x lsr 16) land 0xffff)
+ pop16 ((x lsr 32) land 0xffff) + pop16 ((x lsr 48) land 0xffff)
| _ -> assert false
let pop v =
Array.fold_left (fun acc n -> acc + popi n) 0 v.bits
(*s Number of trailing zeros (on a 32-bit machine) *)
let hash32 x = ((0x34ca8b09 * x) land 0x3fffffff) lsr 24
let ntz_arr32 = Array.make 64 0
let () = for i = 0 to 30 do ntz_arr32.(hash32 (1 lsl i)) <- i done
let ntz32 x = if x == 0 then 31 else ntz_arr32.(hash32 (x land (-x)))
let iteri_true_ntz32 f v =
Array.iteri
(fun i n ->
let i_bpi = i * bpi in
let rec visit x =
if x != 0 then begin
let b = x land (-x) in
f (i_bpi + ntz32 b);
visit (x - b)
end
in
visit n)
v.bits
let martin_constant = (0x03f79d71b lsl 28) lor 0x4ca8b09 (*0x03f79d71b4ca8b09*)
let hash64 x = ((martin_constant * x) land max_int) lsr 56
let ntz_arr64 = Array.make 64 0
let () = if Sys.word_size >= 64 then
for i = 0 to 62 do ntz_arr64.(hash64 (1 lsl i)) <- i done
let ntz64 x = if x == 0 then 63 else ntz_arr64.(hash64 (x land (-x)))
let iteri_true_ntz64 f v =
Array.iteri
(fun i n ->
let i_bpi = i * bpi in
let rec visit x =
if x != 0 then begin
let b = x land (-x) in
f (i_bpi + ntz64 b);
visit (x - b)
end
in
visit n)
v.bits
let iteri_true = match Sys.word_size with
| 32 -> iteri_true_ntz32
| 64 -> iteri_true_ntz64
| _ -> assert false
(*s Bitwise operations. It is straightforward, since bitwise operations
can be realized by the corresponding bitwise operations over integers.
    However, one has to take care of normalizing the result of [bw_not]
which introduces ones in highest significant positions. *)
let bw_and v1 v2 =
let l = v1.length in
if l <> v2.length then invalid_arg "Bitv.bw_and";
let b1 = v1.bits
and b2 = v2.bits in
let n = Array.length b1 in
let a = Array.make n 0 in
for i = 0 to n - 1 do
a.(i) <- b1.(i) land b2.(i)
done;
{ length = l; bits = a }
let bw_or v1 v2 =
let l = v1.length in
if l <> v2.length then invalid_arg "Bitv.bw_or";
let b1 = v1.bits
and b2 = v2.bits in
let n = Array.length b1 in
let a = Array.make n 0 in
for i = 0 to n - 1 do
a.(i) <- b1.(i) lor b2.(i)
done;
{ length = l; bits = a }
let bw_xor v1 v2 =
let l = v1.length in
if l <> v2.length then invalid_arg "Bitv.bw_xor";
let b1 = v1.bits
and b2 = v2.bits in
let n = Array.length b1 in
let a = Array.make n 0 in
for i = 0 to n - 1 do
a.(i) <- b1.(i) lxor b2.(i)
done;
{ length = l; bits = a }
let bw_not v =
let b = v.bits in
let n = Array.length b in
let a = Array.make n 0 in
for i = 0 to n - 1 do
a.(i) <- max_int land (lnot b.(i))
done;
let r = { length = v.length; bits = a } in
normalize r;
r
(*s Shift operations. It is easy to reuse [unsafe_blit], although it is
    probably slightly less efficient than an ad-hoc piece of code. *)
let rec shiftl v d =
if d == 0 then
copy v
else if d < 0 then
shiftr v (-d)
else begin
let n = v.length in
let r = create n false in
if d < n then unsafe_blit v.bits 0 r.bits d (n - d);
r
end
and shiftr v d =
if d == 0 then
copy v
else if d < 0 then
shiftl v (-d)
else begin
let n = v.length in
let r = create n false in
if d < n then unsafe_blit v.bits d r.bits 0 (n - d);
r
end
(*s Rotate operations. It is easy to reuse [unsafe_blit], although it is
probably slightly less efficient than an ad-hoc piece of code. *)
let rec rotatel v d =
if d < 0 then
rotater v (-d)
else
let n = v.length in
let d = d mod n in
if d == 0 then
copy v
else begin
let r = create n false in
unsafe_blit v.bits 0 r.bits d (n - d); (* shiftl *)
unsafe_blit v.bits (n - d) r.bits 0 d; (* wraparound ms to ls *)
r
end
and rotater v d =
if d < 0 then
rotatel v (-d)
else
let n = v.length in
let d = d mod n in
if d == 0 then
copy v
else begin
let r = create n false in
unsafe_blit v.bits d r.bits 0 (n - d); (* shiftr *)
unsafe_blit v.bits 0 r.bits (n - d) d; (* wraparound ls to ms *)
r
end
(*s Testing for all zeros and all ones. *)
let all_zeros v =
let b = v.bits in
let n = Array.length b in
let rec test i =
(i == n) || ((Array.unsafe_get b i == 0) && test (succ i))
in
test 0
let all_ones v =
let b = v.bits in
let n = Array.length b in
let rec test i =
if i == n - 1 then
let m = v.length mod bpi in
(Array.unsafe_get b i) == (if m == 0 then max_int else low_mask.(m))
else
((Array.unsafe_get b i) == max_int) && test (succ i)
in
test 0
(*s Conversions to and from strings. *)
module S(I : sig val least_first : bool end) = struct
let to_string v =
let n = v.length in
let s = Bytes.make n '0' in
for i = 0 to n - 1 do
if unsafe_get v i then Bytes.set s (if I.least_first then i else n-1-i) '1'
done;
Bytes.unsafe_to_string s
let print fmt v = Format.pp_print_string fmt (to_string v)
let of_string s =
let n = String.length s in
let v = create n false in
for i = 0 to n - 1 do
let c = String.unsafe_get s i in
if c = '1' then
unsafe_set v (if I.least_first then i else n-1-i) true
else
if c <> '0' then invalid_arg "Bitv.of_string"
done;
v
end
module L = S(struct let least_first = true end)
module M = S(struct let least_first = false end)
(*s Input/output in a machine-independent format. *)
let bytes_of_int x =
Bytes.init 8 (fun i -> Char.chr ((x lsr (8 * i)) land 0xFF))
let int_of_bytes b =
assert (Bytes.length b = 8);
let rec build x i =
if i < 0 then x
else build ((x lsl 8) lor Char.code (Bytes.get b i)) (pred i)
in
build 0 7
let to_bin write v =
let len = length v in
let rec loop i pow byte =
let byte = if unsafe_get v i then byte lor pow else byte in
if i = len - 1 then
write byte
else if i mod 8 = 7 then begin
write byte;
loop (i + 1) 1 0
end else
loop (i + 1) (pow * 2) byte
in
bytes_of_int len
|> Bytes.iter (fun b -> Char.code b |> write);
if len > 0 then loop 0 1 0
let output_bin out_ch v =
let write = output_byte out_ch in
to_bin write v
let to_bytes t =
let buf = Buffer.create 0 in
let write i = Buffer.add_char buf (Char.chr i) in
to_bin write t;
Buffer.to_bytes buf
let of_bin read =
let len = Bytes.init 8 (fun _ -> read () |> Char.chr) |> int_of_bytes in
let bits = create len false in
let rec loop i byte =
if i < len then begin
let byte = if i mod 8 = 0 then read () else byte in
if byte land 1 = 1 then unsafe_set bits i true;
loop (i+1) (byte / 2)
end
in
if len > 0 then loop 0 0;
bits
let input_bin in_ch =
let read () = input_byte in_ch in
of_bin read
let of_bytes b =
let read =
let p = ref 0 in
fun () ->
let ret = Bytes.get b !p |> Char.code in
incr p;
ret
in
of_bin read
(* Iteration on all bit vectors of length [n] using a Gray code. *)
let first_set v n =
let rec lookup i =
if i = n then raise Not_found ;
if unsafe_get v i then i else lookup (i + 1)
in
lookup 0
let gray_iter f n =
let bv = create n false in
let rec iter () =
f bv;
unsafe_set bv 0 (not (unsafe_get bv 0));
f bv;
let pos = succ (first_set bv n) in
if pos < n then begin
unsafe_set bv pos (not (unsafe_get bv pos));
iter ()
end
in
if n > 0 then iter ()
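(* Usage sketch (illustrative only, not part of the original interface):
     gray_iter (fun v -> print_endline (L.to_string v)) 2
   visits each of the four 2-bit vectors exactly once, in Gray-code order;
   with [L.to_string] (least significant bit first) it prints 00, 10, 11, 01. *)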
(*s Coercions to/from lists of integers *)
let of_list l =
let n = List.fold_left max 0 l in
let b = create (succ n) false in
let add_element i =
(* negative numbers are invalid *)
if i < 0 then invalid_arg "Bitv.of_list";
unsafe_set b i true
in
List.iter add_element l;
b
let of_list_with_length l len =
let b = create len false in
let add_element i =
if i < 0 || i >= len then invalid_arg "Bitv.of_list_with_length";
unsafe_set b i true
in
List.iter add_element l;
b
let to_list b =
let n = length b in
let rec make i acc =
if i < 0 then acc
else make (pred i) (if unsafe_get b i then i :: acc else acc)
in
make (pred n) []
(*s To/from integers. *)
(* [int] *)
let of_int_us i =
{ length = bpi; bits = [| i land max_int |] }
let to_int_us v =
if v.length < bpi then invalid_arg "Bitv.to_int_us";
v.bits.(0)
let of_int_s i =
{ length = succ bpi; bits = [| i land max_int; (i lsr bpi) land 1 |] }
let to_int_s v =
if v.length < succ bpi then invalid_arg "Bitv.to_int_s";
v.bits.(0) lor (v.bits.(1) lsl bpi)
(* [Int32] *)
let of_int32_us i = match Sys.word_size with
| 32 -> { length = 31;
bits = [| (Int32.to_int i) land max_int;
let hi = Int32.shift_right_logical i 30 in
(Int32.to_int hi) land 1 |] }
| 64 -> { length = 31; bits = [| (Int32.to_int i) land 0x7fffffff |] }
| _ -> assert false
let to_int32_us v =
if v.length < 31 then invalid_arg "Bitv.to_int32_us";
match Sys.word_size with
| 32 ->
Int32.logor (Int32.of_int v.bits.(0))
(Int32.shift_left (Int32.of_int (v.bits.(1) land 1)) 30)
| 64 ->
Int32.of_int (v.bits.(0) land 0x7fffffff)
| _ -> assert false
(* this is 0xffffffff (ocaml >= 3.08 checks for literal overflow) *)
let ffffffff = (0xffff lsl 16) lor 0xffff
let of_int32_s i = match Sys.word_size with
| 32 -> { length = 32;
bits = [| (Int32.to_int i) land max_int;
let hi = Int32.shift_right_logical i 30 in
(Int32.to_int hi) land 3 |] }
| 64 -> { length = 32; bits = [| (Int32.to_int i) land ffffffff |] }
| _ -> assert false
let to_int32_s v =
if v.length < 32 then invalid_arg "Bitv.to_int32_s";
match Sys.word_size with
| 32 ->
Int32.logor (Int32.of_int v.bits.(0))
(Int32.shift_left (Int32.of_int (v.bits.(1) land 3)) 30)
| 64 ->
Int32.of_int (v.bits.(0) land ffffffff)
| _ -> assert false
(* [Int64] *)
let of_int64_us i = match Sys.word_size with
| 32 -> { length = 63;
bits = [| (Int64.to_int i) land max_int;
(let mi = Int64.shift_right_logical i 30 in
(Int64.to_int mi) land max_int);
let hi = Int64.shift_right_logical i 60 in
(Int64.to_int hi) land 7 |] }
| 64 -> { length = 63;
bits = [| (Int64.to_int i) land max_int;
let hi = Int64.shift_right_logical i 62 in
(Int64.to_int hi) land 1 |] }
| _ -> assert false
let to_int64_us v =
if v.length < 63 then invalid_arg "Bitv.to_int64_us";
match Sys.word_size with
| 32 ->
Int64.logor (Int64.of_int v.bits.(0))
(Int64.logor (Int64.shift_left (Int64.of_int v.bits.(1)) 30)
(Int64.shift_left (Int64.of_int (v.bits.(2) land 7)) 60))
| 64 ->
Int64.logor (Int64.of_int v.bits.(0))
(Int64.shift_left (Int64.of_int (v.bits.(1) land 1)) 62)
| _ ->
assert false
let of_int64_s i = match Sys.word_size with
| 32 -> { length = 64;
bits = [| (Int64.to_int i) land max_int;
(let mi = Int64.shift_right_logical i 30 in
(Int64.to_int mi) land max_int);
let hi = Int64.shift_right_logical i 60 in
(Int64.to_int hi) land 15 |] }
| 64 -> { length = 64;
bits = [| (Int64.to_int i) land max_int;
let hi = Int64.shift_right_logical i 62 in
(Int64.to_int hi) land 3 |] }
| _ -> assert false
let to_int64_s v =
if v.length < 64 then invalid_arg "Bitv.to_int64_s";
match Sys.word_size with
| 32 ->
Int64.logor (Int64.of_int v.bits.(0))
(Int64.logor (Int64.shift_left (Int64.of_int v.bits.(1)) 30)
(Int64.shift_left (Int64.of_int (v.bits.(2) land 15)) 60))
| 64 ->
Int64.logor (Int64.of_int v.bits.(0))
(Int64.shift_left (Int64.of_int (v.bits.(1) land 3)) 62)
| _ -> assert false
(* [Nativeint] *)
let select_of f32 f64 = match Sys.word_size with
| 32 -> (fun i -> f32 (Nativeint.to_int32 i))
| 64 -> (fun i -> f64 (Int64.of_nativeint i))
| _ -> assert false
let of_nativeint_s = select_of of_int32_s of_int64_s
let of_nativeint_us = select_of of_int32_us of_int64_us
let select_to f32 f64 = match Sys.word_size with
| 32 -> (fun i -> Nativeint.of_int32 (f32 i))
| 64 -> (fun i -> Int64.to_nativeint (f64 i))
| _ -> assert false
let to_nativeint_s = select_to to_int32_s to_int64_s
let to_nativeint_us = select_to to_int32_us to_int64_us
| (**************************************************************************)
(* *)
(* Copyright (C) Jean-Christophe Filliatre *)
(* *)
(* This software is free software; you can redistribute it and/or *)
(* modify it under the terms of the GNU Library General Public *)
(* License version 2, with the special exception on linking *)
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
|
dune |
(menhir (modules duration_parser))
(ocamllex (modules duration_lexer date_lexer))
(library
(name odate)
(public_name odate)
(wrapped false)
(libraries unix))
| |
dune |
(rule (copy# ../../async/test/channel_test.ml channel_test.ml))
(rule (copy# ../../async/test/close_test.ml close_test.ml))
(rule (copy# ../../async/test/connection_test.ml connection_test.ml))
(rule (copy# ../../async/test/connection_fail_test.ml connection_fail_test.ml))
(rule (copy# ../../async/test/connect_uri_test.ml connect_uri_test.ml))
(rule (copy# ../../async/test/exchange_test.ml exchange_test.ml))
(rule (copy# ../../async/test/mandatory_test.ml mandatory_test.ml))
(rule (copy# ../../async/test/queue_test.ml queue_test.ml))
(rule (copy# ../../async/test/queue_declare_test.ml queue_declare_test.ml))
(rule (copy# ../../async/test/repeat.ml repeat.ml))
(rule (copy# ../../async/test/rpc_async_test.ml rpc_async_test.ml))
(rule (copy# ../../async/test/rpc_test.ml rpc_test.ml))
(rule (copy# ../../async/test/vhost_test.ml vhost_test.ml))
(rule (copy# ../../async/test/with_confirm_test.ml with_confirm_test.ml))
(rule (copy# ../../async/test/queue_cancel_test.ml queue_cancel_test.ml))
(executables
(names channel_test
close_test
connection_test
connection_fail_test
connect_uri_test
exchange_test
mandatory_test
queue_test
queue_declare_test
repeat
rpc_async_test
rpc_test
vhost_test
with_confirm_test
queue_cancel_test)
(libraries amqp-client-lwt)
)
(alias
(name integration)
(action (run ./channel_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./connection_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./connection_fail_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./connect_uri_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./exchange_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./mandatory_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./queue_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./queue_declare_test.exe))
(package amqp-client-lwt)
)
;(alias
; ((name integration)
; (action (run ./repeat.exe))
; (package amqp-client-lwt)
;))
(alias
(name integration)
(action (run ./rpc_async_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./rpc_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./vhost_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./with_confirm_test.exe))
(package amqp-client-lwt)
)
(alias
(name integration)
(action (run ./queue_cancel_test.exe))
(package amqp-client-lwt)
)
| |
fixed-reuse.c | /* SPDX-License-Identifier: MIT */
/*
* Description: link <open file><read from file><close file> with an existing
* file present in the opened slot, verifying that we get the new file
* rather than the old one.
*
*/
#include <errno.h>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include "liburing.h"
#include "helpers.h"
#define MAX_FILES 8
#define FNAME1 ".slot.reuse.1"
#define FNAME2 ".slot.reuse.2"
#define PAT1 0xaa
#define PAT2 0x55
#define BSIZE 4096
static int test(struct io_uring *ring)
{
struct io_uring_cqe *cqe;
struct io_uring_sqe *sqe;
char buf[BSIZE];
int ret, i;
/* open FNAME1 in slot 0 */
sqe = io_uring_get_sqe(ring);
io_uring_prep_openat_direct(sqe, AT_FDCWD, FNAME1, O_RDONLY, 0, 0);
sqe->user_data = 1;
ret = io_uring_submit(ring);
if (ret != 1) {
fprintf(stderr, "sqe submit failed: %d\n", ret);
goto err;
}
ret = io_uring_wait_cqe(ring, &cqe);
if (ret < 0) {
fprintf(stderr, "wait completion %d\n", ret);
goto err;
}
if (cqe->res != 0) {
		fprintf(stderr, "open res %d\n", cqe->res);
goto err;
}
io_uring_cqe_seen(ring, cqe);
/*
* Now open FNAME2 in that same slot, verifying we get data from
* FNAME2 and not FNAME1.
*/
sqe = io_uring_get_sqe(ring);
io_uring_prep_openat_direct(sqe, AT_FDCWD, FNAME2, O_RDONLY, 0, 0);
sqe->flags |= IOSQE_IO_LINK;
sqe->user_data = 2;
sqe = io_uring_get_sqe(ring);
io_uring_prep_read(sqe, 0, buf, sizeof(buf), 0);
sqe->flags |= IOSQE_FIXED_FILE;
sqe->flags |= IOSQE_IO_LINK;
sqe->user_data = 3;
sqe = io_uring_get_sqe(ring);
io_uring_prep_close_direct(sqe, 0);
sqe->user_data = 4;
ret = io_uring_submit(ring);
if (ret != 3) {
fprintf(stderr, "sqe submit failed: %d\n", ret);
goto err;
}
for (i = 0; i < 3; i++) {
ret = io_uring_wait_cqe(ring, &cqe);
if (ret < 0) {
fprintf(stderr, "wait completion %d\n", ret);
goto err;
}
switch (cqe->user_data) {
case 2:
if (cqe->res) {
fprintf(stderr, "bad open %d\n", cqe->res);
goto err;
}
break;
case 3:
if (cqe->res != sizeof(buf)) {
fprintf(stderr, "bad read %d\n", cqe->res);
goto err;
}
break;
case 4:
if (cqe->res) {
fprintf(stderr, "bad close %d\n", cqe->res);
goto err;
}
break;
}
io_uring_cqe_seen(ring, cqe);
}
for (i = 0; i < sizeof(buf); i++) {
if (buf[i] == PAT2)
continue;
fprintf(stderr, "Bad pattern %x at %d\n", buf[i], i);
goto err;
}
return 0;
err:
return 1;
}
int main(int argc, char *argv[])
{
struct io_uring ring;
struct io_uring_params p = { };
int ret, files[MAX_FILES];
if (argc > 1)
return T_EXIT_SKIP;
ret = io_uring_queue_init_params(8, &ring, &p);
if (ret) {
fprintf(stderr, "ring setup failed: %d\n", ret);
return T_EXIT_FAIL;
}
if (!(p.features & IORING_FEAT_CQE_SKIP))
return T_EXIT_SKIP;
memset(files, -1, sizeof(files));
ret = io_uring_register_files(&ring, files, ARRAY_SIZE(files));
if (ret) {
fprintf(stderr, "Failed registering files\n");
return T_EXIT_FAIL;
}
t_create_file_pattern(FNAME1, 4096, PAT1);
t_create_file_pattern(FNAME2, 4096, PAT2);
ret = test(&ring);
if (ret) {
fprintf(stderr, "test failed\n");
goto err;
}
unlink(FNAME1);
unlink(FNAME2);
return T_EXIT_PASS;
err:
unlink(FNAME1);
unlink(FNAME2);
return T_EXIT_FAIL;
}
| /* SPDX-License-Identifier: MIT */
/* |
set.c |
#include "padic.h"
void padic_set(padic_t rop, const padic_t op, const padic_ctx_t ctx)
{
fmpz_set(padic_unit(rop), padic_unit(op));
padic_val(rop) = padic_val(op);
_padic_reduce(rop, ctx);
}
| /*
Copyright (C) 2011, 2012 Sebastian Pancratz
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
randtest_bounds.c |
#include "fmpz_mpoly.h"
void fmpz_mpoly_randtest_bounds(fmpz_mpoly_t A, flint_rand_t state,
slong length, flint_bitcnt_t coeff_bits, ulong * exp_bounds,
const fmpz_mpoly_ctx_t ctx)
{
slong i, j, nvars = ctx->minfo->nvars;
ulong * exp;
TMP_INIT;
TMP_START;
exp = (ulong *) TMP_ALLOC(nvars*sizeof(ulong));
fmpz_mpoly_zero(A, ctx);
for (i = 0; i < length; i++)
{
for (j = 0; j < nvars; j++)
exp[j] = n_randint(state, exp_bounds[j]);
_fmpz_mpoly_push_exp_ui(A, exp, ctx);
fmpz_randtest(A->coeffs + A->length - 1, state, coeff_bits);
}
TMP_END;
fmpz_mpoly_sort_terms(A, ctx);
fmpz_mpoly_combine_like_terms(A, ctx);
}
| /*
Copyright (C) 2018 Daniel Schultz
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
callback.mli |
val register : string -> 'a -> unit
val register_exception : string -> exn -> unit
| |
level_repr.mli | type t = private {
level: Raw_level_repr.t (** The level of the block relative to genesis. This
is also the Shell's notion of level. *);
level_position: int32 (** The level of the block relative to the block that
starts protocol alpha. This is specific to the
protocol alpha. Other protocols might or might not
include a similar notion. *);
cycle: Cycle_repr.t (** The current cycle's number. Note that cycles are a
protocol-specific notion. As a result, the cycle
number starts at 0 with the first block of protocol
alpha. *);
cycle_position: int32 (** The current level of the block relative to the first
block of the current cycle. *);
voting_period: Voting_period_repr.t ;
voting_period_position: int32 ;
expected_commitment: bool ;
}
(* Note that the type `t` above must respect some invariants (hence the
`private` annotation). Notably:
level_position = cycle * blocks_per_cycle + cycle_position
*)
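(* For instance (numbers purely illustrative, not constants fixed by this
   interface): with blocks_per_cycle = 4096, a block whose level_position is
   8200 has cycle = 2 and cycle_position = 8, since 2 * 4096 + 8 = 8200. *)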
type level = t
include Compare.S with type t := level
val encoding: level Data_encoding.t
val pp: Format.formatter -> level -> unit
val pp_full: Format.formatter -> level -> unit
val root: Raw_level_repr.t -> level
val from_raw:
first_level:Raw_level_repr.t ->
blocks_per_cycle:int32 ->
blocks_per_voting_period:int32 ->
blocks_per_commitment:int32 ->
Raw_level_repr.t -> level
val diff: level -> level -> int32
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
constants_repr.ml | let version_number = "\000"
let proof_of_work_nonce_size = 8
let nonce_length = 32
let max_revelations_per_block = 32
let max_proposals_per_delegate = 20
let max_operation_data_length = 16 * 1024 (* 16kB *)
type fixed = {
proof_of_work_nonce_size : int ;
nonce_length : int ;
max_revelations_per_block : int ;
max_operation_data_length : int ;
max_proposals_per_delegate : int ;
}
let fixed_encoding =
let open Data_encoding in
conv
(fun c ->
(c.proof_of_work_nonce_size,
c.nonce_length,
c.max_revelations_per_block,
c.max_operation_data_length,
c.max_proposals_per_delegate))
(fun (proof_of_work_nonce_size,
nonce_length,
max_revelations_per_block,
max_operation_data_length,
max_proposals_per_delegate) ->
{ proof_of_work_nonce_size ;
nonce_length ;
max_revelations_per_block ;
max_operation_data_length ;
max_proposals_per_delegate ;
} )
(obj5
(req "proof_of_work_nonce_size" uint8)
(req "nonce_length" uint8)
(req "max_revelations_per_block" uint8)
(req "max_operation_data_length" int31)
(req "max_proposals_per_delegate" uint8))
let fixed = {
proof_of_work_nonce_size ;
nonce_length ;
max_revelations_per_block ;
max_operation_data_length ;
max_proposals_per_delegate ;
}
type parametric = {
preserved_cycles: int ;
blocks_per_cycle: int32 ;
blocks_per_commitment: int32 ;
blocks_per_roll_snapshot: int32 ;
blocks_per_voting_period: int32 ;
time_between_blocks: Period_repr.t list ;
endorsers_per_block: int ;
hard_gas_limit_per_operation: Z.t ;
hard_gas_limit_per_block: Z.t ;
proof_of_work_threshold: int64 ;
tokens_per_roll: Tez_repr.t ;
michelson_maximum_type_size: int;
seed_nonce_revelation_tip: Tez_repr.t ;
origination_size: int ;
block_security_deposit: Tez_repr.t ;
endorsement_security_deposit: Tez_repr.t ;
block_reward: Tez_repr.t ;
endorsement_reward: Tez_repr.t ;
cost_per_byte: Tez_repr.t ;
hard_storage_limit_per_operation: Z.t ;
test_chain_duration: int64 ; (* in seconds *)
}
let default = {
preserved_cycles = 5 ;
blocks_per_cycle = 4096l ;
blocks_per_commitment = 32l ;
blocks_per_roll_snapshot = 256l ;
blocks_per_voting_period = 32768l ;
time_between_blocks =
List.map Period_repr.of_seconds_exn [ 60L ; 75L ] ;
endorsers_per_block = 32 ;
hard_gas_limit_per_operation = Z.of_int 800_000 ;
hard_gas_limit_per_block = Z.of_int 8_000_000 ;
proof_of_work_threshold =
Int64.(sub (shift_left 1L 46) 1L) ;
tokens_per_roll =
Tez_repr.(mul_exn one 8_000) ;
michelson_maximum_type_size = 1000 ;
seed_nonce_revelation_tip = begin
match Tez_repr.(one /? 8L) with
| Ok c -> c
| Error _ -> assert false
end ;
origination_size = 257 ;
block_security_deposit = Tez_repr.(mul_exn one 512) ;
endorsement_security_deposit = Tez_repr.(mul_exn one 64) ;
block_reward = Tez_repr.(mul_exn one 16) ;
endorsement_reward = Tez_repr.(mul_exn one 2) ;
hard_storage_limit_per_operation = Z.of_int 60_000 ;
cost_per_byte = Tez_repr.of_mutez_exn 1_000L ;
test_chain_duration = Int64.mul 32768L 60L;
}
let parametric_encoding =
let open Data_encoding in
conv
(fun c ->
(( c.preserved_cycles,
c.blocks_per_cycle,
c.blocks_per_commitment,
c.blocks_per_roll_snapshot,
c.blocks_per_voting_period,
c.time_between_blocks,
c.endorsers_per_block,
c.hard_gas_limit_per_operation,
c.hard_gas_limit_per_block),
((c.proof_of_work_threshold,
c.tokens_per_roll,
c.michelson_maximum_type_size,
c.seed_nonce_revelation_tip,
c.origination_size,
c.block_security_deposit,
c.endorsement_security_deposit,
c.block_reward),
(c.endorsement_reward,
c.cost_per_byte,
c.hard_storage_limit_per_operation,
c.test_chain_duration))) )
(fun (( preserved_cycles,
blocks_per_cycle,
blocks_per_commitment,
blocks_per_roll_snapshot,
blocks_per_voting_period,
time_between_blocks,
endorsers_per_block,
hard_gas_limit_per_operation,
hard_gas_limit_per_block),
((proof_of_work_threshold,
tokens_per_roll,
michelson_maximum_type_size,
seed_nonce_revelation_tip,
origination_size,
block_security_deposit,
endorsement_security_deposit,
block_reward),
(endorsement_reward,
cost_per_byte,
hard_storage_limit_per_operation,
test_chain_duration))) ->
{ preserved_cycles ;
blocks_per_cycle ;
blocks_per_commitment ;
blocks_per_roll_snapshot ;
blocks_per_voting_period ;
time_between_blocks ;
endorsers_per_block ;
hard_gas_limit_per_operation ;
hard_gas_limit_per_block ;
proof_of_work_threshold ;
tokens_per_roll ;
michelson_maximum_type_size ;
seed_nonce_revelation_tip ;
origination_size ;
block_security_deposit ;
endorsement_security_deposit ;
block_reward ;
endorsement_reward ;
cost_per_byte ;
hard_storage_limit_per_operation ;
test_chain_duration ;
} )
(merge_objs
(obj9
(req "preserved_cycles" uint8)
(req "blocks_per_cycle" int32)
(req "blocks_per_commitment" int32)
(req "blocks_per_roll_snapshot" int32)
(req "blocks_per_voting_period" int32)
(req "time_between_blocks" (list Period_repr.encoding))
(req "endorsers_per_block" uint16)
(req "hard_gas_limit_per_operation" z)
(req "hard_gas_limit_per_block" z))
(merge_objs
(obj8
(req "proof_of_work_threshold" int64)
(req "tokens_per_roll" Tez_repr.encoding)
(req "michelson_maximum_type_size" uint16)
(req "seed_nonce_revelation_tip" Tez_repr.encoding)
(req "origination_size" int31)
(req "block_security_deposit" Tez_repr.encoding)
(req "endorsement_security_deposit" Tez_repr.encoding)
(req "block_reward" Tez_repr.encoding))
(obj4
(req "endorsement_reward" Tez_repr.encoding)
(req "cost_per_byte" Tez_repr.encoding)
(req "hard_storage_limit_per_operation" z)
(req "test_chain_duration" int64))))
type t = {
fixed : fixed ;
parametric : parametric ;
}
let encoding =
let open Data_encoding in
conv
(fun { fixed ; parametric } -> (fixed, parametric))
(fun (fixed , parametric) -> { fixed ; parametric })
(merge_objs fixed_encoding parametric_encoding)
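(* Usage sketch, not part of the original module: assuming the standard
   [Data_encoding.Json.construct] function is available in this compilation
   environment, the combined [encoding] can serialize a full constants value,
   e.g. to expose the current constants over an RPC. *)
let _sketch_constants_to_json () : Data_encoding.json =
  Data_encoding.Json.construct encoding { fixed ; parametric = default }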
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
_heapqmodule.c.h | /*[clinic input]
preserve
[clinic start generated code]*/
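/* Note: this header is generated by Python's Argument Clinic; the wrappers
   below only unpack and validate arguments before delegating to the
   corresponding *_impl functions defined in _heapqmodule.c, so hand edits
   would be overwritten the next time clinic runs. */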
PyDoc_STRVAR(_heapq_heappush__doc__,
"heappush($module, heap, item, /)\n"
"--\n"
"\n"
"Push item onto heap, maintaining the heap invariant.");
#define _HEAPQ_HEAPPUSH_METHODDEF \
{"heappush", _PyCFunction_CAST(_heapq_heappush), METH_FASTCALL, _heapq_heappush__doc__},
static PyObject *
_heapq_heappush_impl(PyObject *module, PyObject *heap, PyObject *item);
static PyObject *
_heapq_heappush(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
PyObject *heap;
PyObject *item;
if (!_PyArg_CheckPositional("heappush", nargs, 2, 2)) {
goto exit;
}
if (!PyList_Check(args[0])) {
_PyArg_BadArgument("heappush", "argument 1", "list", args[0]);
goto exit;
}
heap = args[0];
item = args[1];
return_value = _heapq_heappush_impl(module, heap, item);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq_heappop__doc__,
"heappop($module, heap, /)\n"
"--\n"
"\n"
"Pop the smallest item off the heap, maintaining the heap invariant.");
#define _HEAPQ_HEAPPOP_METHODDEF \
{"heappop", (PyCFunction)_heapq_heappop, METH_O, _heapq_heappop__doc__},
static PyObject *
_heapq_heappop_impl(PyObject *module, PyObject *heap);
static PyObject *
_heapq_heappop(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
PyObject *heap;
if (!PyList_Check(arg)) {
_PyArg_BadArgument("heappop", "argument", "list", arg);
goto exit;
}
heap = arg;
return_value = _heapq_heappop_impl(module, heap);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq_heapreplace__doc__,
"heapreplace($module, heap, item, /)\n"
"--\n"
"\n"
"Pop and return the current smallest value, and add the new item.\n"
"\n"
"This is more efficient than heappop() followed by heappush(), and can be\n"
"more appropriate when using a fixed-size heap. Note that the value\n"
"returned may be larger than item! That constrains reasonable uses of\n"
"this routine unless written as part of a conditional replacement:\n"
"\n"
" if item > heap[0]:\n"
" item = heapreplace(heap, item)");
#define _HEAPQ_HEAPREPLACE_METHODDEF \
{"heapreplace", _PyCFunction_CAST(_heapq_heapreplace), METH_FASTCALL, _heapq_heapreplace__doc__},
static PyObject *
_heapq_heapreplace_impl(PyObject *module, PyObject *heap, PyObject *item);
static PyObject *
_heapq_heapreplace(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
PyObject *heap;
PyObject *item;
if (!_PyArg_CheckPositional("heapreplace", nargs, 2, 2)) {
goto exit;
}
if (!PyList_Check(args[0])) {
_PyArg_BadArgument("heapreplace", "argument 1", "list", args[0]);
goto exit;
}
heap = args[0];
item = args[1];
return_value = _heapq_heapreplace_impl(module, heap, item);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq_heappushpop__doc__,
"heappushpop($module, heap, item, /)\n"
"--\n"
"\n"
"Push item on the heap, then pop and return the smallest item from the heap.\n"
"\n"
"The combined action runs more efficiently than heappush() followed by\n"
"a separate call to heappop().");
#define _HEAPQ_HEAPPUSHPOP_METHODDEF \
{"heappushpop", _PyCFunction_CAST(_heapq_heappushpop), METH_FASTCALL, _heapq_heappushpop__doc__},
static PyObject *
_heapq_heappushpop_impl(PyObject *module, PyObject *heap, PyObject *item);
static PyObject *
_heapq_heappushpop(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
PyObject *heap;
PyObject *item;
if (!_PyArg_CheckPositional("heappushpop", nargs, 2, 2)) {
goto exit;
}
if (!PyList_Check(args[0])) {
_PyArg_BadArgument("heappushpop", "argument 1", "list", args[0]);
goto exit;
}
heap = args[0];
item = args[1];
return_value = _heapq_heappushpop_impl(module, heap, item);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq_heapify__doc__,
"heapify($module, heap, /)\n"
"--\n"
"\n"
"Transform list into a heap, in-place, in O(len(heap)) time.");
#define _HEAPQ_HEAPIFY_METHODDEF \
{"heapify", (PyCFunction)_heapq_heapify, METH_O, _heapq_heapify__doc__},
static PyObject *
_heapq_heapify_impl(PyObject *module, PyObject *heap);
static PyObject *
_heapq_heapify(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
PyObject *heap;
if (!PyList_Check(arg)) {
_PyArg_BadArgument("heapify", "argument", "list", arg);
goto exit;
}
heap = arg;
return_value = _heapq_heapify_impl(module, heap);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq__heappop_max__doc__,
"_heappop_max($module, heap, /)\n"
"--\n"
"\n"
"Maxheap variant of heappop.");
#define _HEAPQ__HEAPPOP_MAX_METHODDEF \
{"_heappop_max", (PyCFunction)_heapq__heappop_max, METH_O, _heapq__heappop_max__doc__},
static PyObject *
_heapq__heappop_max_impl(PyObject *module, PyObject *heap);
static PyObject *
_heapq__heappop_max(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
PyObject *heap;
if (!PyList_Check(arg)) {
_PyArg_BadArgument("_heappop_max", "argument", "list", arg);
goto exit;
}
heap = arg;
return_value = _heapq__heappop_max_impl(module, heap);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq__heapreplace_max__doc__,
"_heapreplace_max($module, heap, item, /)\n"
"--\n"
"\n"
"Maxheap variant of heapreplace.");
#define _HEAPQ__HEAPREPLACE_MAX_METHODDEF \
{"_heapreplace_max", _PyCFunction_CAST(_heapq__heapreplace_max), METH_FASTCALL, _heapq__heapreplace_max__doc__},
static PyObject *
_heapq__heapreplace_max_impl(PyObject *module, PyObject *heap,
PyObject *item);
static PyObject *
_heapq__heapreplace_max(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
PyObject *heap;
PyObject *item;
if (!_PyArg_CheckPositional("_heapreplace_max", nargs, 2, 2)) {
goto exit;
}
if (!PyList_Check(args[0])) {
_PyArg_BadArgument("_heapreplace_max", "argument 1", "list", args[0]);
goto exit;
}
heap = args[0];
item = args[1];
return_value = _heapq__heapreplace_max_impl(module, heap, item);
exit:
return return_value;
}
PyDoc_STRVAR(_heapq__heapify_max__doc__,
"_heapify_max($module, heap, /)\n"
"--\n"
"\n"
"Maxheap variant of heapify.");
#define _HEAPQ__HEAPIFY_MAX_METHODDEF \
{"_heapify_max", (PyCFunction)_heapq__heapify_max, METH_O, _heapq__heapify_max__doc__},
static PyObject *
_heapq__heapify_max_impl(PyObject *module, PyObject *heap);
static PyObject *
_heapq__heapify_max(PyObject *module, PyObject *arg)
{
PyObject *return_value = NULL;
PyObject *heap;
if (!PyList_Check(arg)) {
_PyArg_BadArgument("_heapify_max", "argument", "list", arg);
goto exit;
}
heap = arg;
return_value = _heapq__heapify_max_impl(module, heap);
exit:
return return_value;
}
/*[clinic end generated code: output=9a22715a8bf0c91d input=a9049054013a1b77]*/
| /*[clinic input]
preserve
[clinic start generated code]*/ |
Parse_dart_tree_sitter.ml | module CST = Tree_sitter_dart.CST
module H = Parse_tree_sitter_helpers
module G = AST_generic
module R = Raw_tree
open AST_generic
(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)
(* Dart parser using tree-sitter-lang/semgrep-dart and converting
* directly to AST_generic.ml
*
*)
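(* Note: the map_* functions below build Raw_tree values (R.Token, R.Case,
 * R.Tuple, R.Option, R.List), which act as a generic intermediate
 * representation rather than full AST_generic nodes. *)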
(*****************************************************************************)
(* Helpers *)
(*****************************************************************************)
type env = unit H.env
let _todo (_env : env) _ = failwith "not implemented"
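(* Convert a concrete tree-sitter token into a raw-tree leaf, keeping the
   token text together with its location information via [H.str]. *)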
let token (env : env) (tok : Tree_sitter_run.Token.t) = R.Token (H.str env tok)
let _map_trailing_comma env x =
match x with
| Some tok -> Some ((* "," *) token env tok)
| None -> None
(*****************************************************************************)
(* Boilerplate converter *)
(*****************************************************************************)
(* This was started by copying tree-sitter-lang/semgrep-dart/Boilerplate.ml *)
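(* Usage sketch (hypothetical driver, not part of the original file):
 * assuming the [Parse.file] entry point generated by ocaml-tree-sitter, a
 * caller would first obtain a CST like this and then thread an [env] built
 * with Parse_tree_sitter_helpers through the map_* functions below. *)
let _sketch_parse_dart_file (file : string) :
    CST.program Tree_sitter_run.Parsing_result.t =
  Tree_sitter_dart.Parse.file file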
let _map_break_builtin (env : env) (tok : CST.break_builtin) =
(* break_builtin *) token env tok
let map_anon_choice_async_725f72f (env : env)
(x : CST.anon_choice_async_725f72f) =
match x with
| `Async tok -> R.Case ("Async", (* "async" *) token env tok)
| `Asyn tok -> R.Case ("Asyn", (* "async*" *) token env tok)
| `Sync tok -> R.Case ("Sync", (* "sync*" *) token env tok)
let map_bitwise_operator (env : env) (x : CST.bitwise_operator) =
match x with
| `AMP tok -> R.Case ("AMP", (* "&" *) token env tok)
| `HAT tok -> R.Case ("HAT", (* "^" *) token env tok)
| `BAR tok -> R.Case ("BAR", (* "|" *) token env tok)
let _map_increment_operator (env : env) (tok : CST.increment_operator) =
(* increment_operator *) token env tok
let _map_pat_05bf793 (env : env) (tok : CST.pat_05bf793) =
(* pattern [^*]*\*+([^/*][^*]*\*+)* *) token env tok
let map_tok_is (env : env) (tok : CST.tok_is) = (* tok_is *) token env tok
let _map_as_operator (env : env) (tok : CST.as_operator) =
(* as_operator *) token env tok
let _map_template_chars_double_single (env : env)
(tok : CST.template_chars_double_single) =
(* template_chars_double_single *) token env tok
let _map_documentation_block_comment (env : env)
(tok : CST.documentation_block_comment) =
(* documentation_block_comment *) token env tok
let _map_pat_d6c261f (env : env) (tok : CST.pat_d6c261f) =
(* pattern ([^/\n].*\
)? *)
token env tok
let _map_template_chars_double (env : env) (tok : CST.template_chars_double) =
(* template_chars_double *) token env tok
let _map_block_comment (env : env) (tok : CST.block_comment) =
(* block_comment *) token env tok
let _map_pat_4fd4a56 (env : env) (tok : CST.pat_4fd4a56) =
(* pattern .* *) token env tok
let _map_void_type (env : env) (tok : CST.void_type) =
(* void_type *) token env tok
let _map_assert_builtin (env : env) (tok : CST.assert_builtin) =
(* assert_builtin *) token env tok
let map_shift_operator_ (env : env) (x : CST.shift_operator_) =
match x with
| `LTLT tok -> R.Case ("LTLT", (* "<<" *) token env tok)
| `GTGT tok -> R.Case ("GTGT", (* ">>" *) token env tok)
| `GTGTGT tok -> R.Case ("GTGTGT", (* ">>>" *) token env tok)
let _map_const_builtin (env : env) (tok : CST.const_builtin) =
(* const_builtin *) token env tok
let _map_final_builtin (env : env) (tok : CST.final_builtin) =
(* final_builtin *) token env tok
let map_multiplicative_operator_ (env : env) (x : CST.multiplicative_operator_)
=
match x with
| `STAR tok -> R.Case ("STAR", (* "*" *) token env tok)
| `SLASH tok -> R.Case ("SLASH", (* "/" *) token env tok)
| `PERC tok -> R.Case ("PERC", (* "%" *) token env tok)
| `TILDESLASH tok -> R.Case ("TILDESLASH", (* "~/" *) token env tok)
let _map_template_chars_raw_slash (env : env)
(tok : CST.template_chars_raw_slash) =
(* template_chars_raw_slash *) token env tok
let map_semicolon (env : env) (v1 : CST.semicolon) = (* ";" *) token env v1
let map_relational_operator (env : env) (x : CST.relational_operator) =
match x with
| `LT tok -> R.Case ("LT", (* "<" *) token env tok)
| `GT tok -> R.Case ("GT", (* ">" *) token env tok)
| `LTEQ tok -> R.Case ("LTEQ", (* "<=" *) token env tok)
| `GTEQ tok -> R.Case ("GTEQ", (* ">=" *) token env tok)
let _map_decimal_floating_point_literal (env : env)
(tok : CST.decimal_floating_point_literal) =
(* decimal_floating_point_literal *) token env tok
(* This one we should probably de-inline *)
let _map_identifier (env : env) (tok : CST.identifier) =
(* pattern [a-zA-Z_$][\w$]* *) token env tok
let _map_unused_escape_sequence (env : env) (tok : CST.unused_escape_sequence) =
(* unused_escape_sequence *) token env tok
let _map_identifier_dollar_escaped (env : env)
(tok : CST.identifier_dollar_escaped) =
(* pattern ([a-zA-Z_]|(\\\$))([\w]|(\\\$))* *) token env tok
let map_pat_a3d33dc (env : env) (tok : CST.pat_a3d33dc) =
(* pattern [^a-zA-Z_{] *) token env tok
let _map_equality_operator (env : env) (tok : CST.equality_operator) =
(* equality_operator *) token env tok
let _map_hex_integer_literal (env : env) (tok : CST.hex_integer_literal) =
(* hex_integer_literal *) token env tok
let map_pat_0017fb0 (env : env) (tok : CST.pat_0017fb0) =
(* pattern .+ *) token env tok
let _map_template_chars_single (env : env) (tok : CST.template_chars_single) =
(* template_chars_single *) token env tok
let _map_template_chars_single_single (env : env)
(tok : CST.template_chars_single_single) =
(* template_chars_single_single *) token env tok
let map_assignment_operator (env : env) (x : CST.assignment_operator) =
match x with
| `EQ tok -> R.Case ("EQ", (* "=" *) token env tok)
| `PLUSEQ tok -> R.Case ("PLUSEQ", (* "+=" *) token env tok)
| `DASHEQ tok -> R.Case ("DASHEQ", (* "-=" *) token env tok)
| `STAREQ tok -> R.Case ("STAREQ", (* "*=" *) token env tok)
| `SLASHEQ tok -> R.Case ("SLASHEQ", (* "/=" *) token env tok)
| `PERCEQ tok -> R.Case ("PERCEQ", (* "%=" *) token env tok)
| `TILDESLASHEQ tok -> R.Case ("TILDESLASHEQ", (* "~/=" *) token env tok)
| `LTLTEQ tok -> R.Case ("LTLTEQ", (* "<<=" *) token env tok)
| `GTGTEQ tok -> R.Case ("GTGTEQ", (* ">>=" *) token env tok)
| `GTGTGTEQ tok -> R.Case ("GTGTGTEQ", (* ">>>=" *) token env tok)
| `AMPEQ tok -> R.Case ("AMPEQ", (* "&=" *) token env tok)
| `HATEQ tok -> R.Case ("HATEQ", (* "^=" *) token env tok)
| `BAREQ tok -> R.Case ("BAREQ", (* "|=" *) token env tok)
| `QMARKQMARKEQ tok -> R.Case ("QMARKQMARKEQ", (* "??=" *) token env tok)
let _map_decimal_integer_literal (env : env) (tok : CST.decimal_integer_literal)
=
(* decimal_integer_literal *) token env tok
let _map_additive_operator_ (env : env) (tok : CST.additive_operator_) =
(* additive_operator_ *) token env tok
let _map_case_builtin (env : env) (tok : CST.case_builtin) =
(* case_builtin *) token env tok
let map_bitwise_operator_ (env : env) (x : CST.bitwise_operator_) =
map_bitwise_operator env x
let map_shift_operator (env : env) (x : CST.shift_operator) =
map_shift_operator_ env x
let map_final_or_const (env : env) (x : CST.final_or_const) =
match x with
| `Final_buil tok -> R.Case ("Final_buil", (* final_builtin *) token env tok)
| `Const_buil tok -> R.Case ("Const_buil", (* const_builtin *) token env tok)
let map_multiplicative_operator (env : env) (x : CST.multiplicative_operator) =
map_multiplicative_operator_ env x
(*
let rec map_module_name (env : env) (x : CST.module_name) =
(match x with
| `Id tok -> R.Case ("Id",
(* pattern [a-zA-Z_$][\w$]* *) token env tok
)
| `Module_name_DOT_id (v1, v2, v3) -> R.Case ("Module_name_DOT_id",
let v1 = map_module_name env v1 in
let v2 = (* "." *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
R.Tuple [v1; v2; v3]
)
)
*)
let map_identifier_list_ (env : env) ((v1, v2) : CST.identifier_list_) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
let rec map_ambiguous_name (env : env) (x : CST.ambiguous_name) =
match x with
| `Id tok -> R.Case ("Id", (* pattern [a-zA-Z_$][\w$]* *) token env tok)
| `Scoped_id (v1, v2, v3) ->
R.Case
( "Scoped_id",
let v1 = map_ambiguous_name env v1 in
let v2 = (* "." *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
R.Tuple [ v1; v2; v3 ] )
let map_catch_clause (env : env) ((v1, v2, v3, v4, v5) : CST.catch_clause) =
let v1 = (* "catch" *) token env v1 in
let v2 = (* "(" *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
let v4 =
match v4 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "," *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v5 = (* ")" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ]
let map_identifier_list (env : env) ((v1, v2) : CST.identifier_list) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
let map_dot_identifier (env : env) ((v1, v2) : CST.dot_identifier) =
let v1 = (* "." *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ]
let map_label (env : env) ((v1, v2) : CST.label) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 = (* ":" *) token env v2 in
R.Tuple [ v1; v2 ]
let map_type_dot_identifier (env : env) ((v1, v2) : CST.type_dot_identifier) =
let v1 = (* "." *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ]
let map_sub_string_test (env : env) ((v1, v2) : CST.sub_string_test) =
let v1 = (* "$" *) token env v1 in
let v2 = map_pat_a3d33dc env v2 in
R.Tuple [ v1; v2 ]
let map_script_tag (env : env) ((v1, v2, v3) : CST.script_tag) =
let v1 = (* "#!" *) token env v1 in
let v2 = map_pat_0017fb0 env v2 in
let v3 = (* "\n" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_external_and_static (env : env) ((v1, v2) : CST.external_and_static) =
let v1 = (* "external" *) token env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "static" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
let map_is_operator (env : env) ((v1, v2) : CST.is_operator) =
let v1 = map_tok_is env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "!" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
let map_prefix_operator (env : env) (x : CST.prefix_operator) =
match x with
| `Minus_op tok -> R.Case ("Minus_op", (* "-" *) token env tok)
| `Nega_op tok -> R.Case ("Nega_op", (* "!" *) token env tok)
| `Tilde_op tok -> R.Case ("Tilde_op", (* "~" *) token env tok)
let map_combinator (env : env) (x : CST.combinator) =
match x with
| `Show_id_list (v1, v2) ->
R.Case
( "Show_id_list",
let v1 = (* "show" *) token env v1 in
let v2 = map_identifier_list env v2 in
R.Tuple [ v1; v2 ] )
| `Hide_id_list (v1, v2) ->
R.Case
( "Hide_id_list",
let v1 = (* "hide" *) token env v1 in
let v2 = map_identifier_list env v2 in
R.Tuple [ v1; v2 ] )
let map_qualified (env : env) ((v1, v2) : CST.qualified) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_dot_identifier env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
let map_dotted_identifier_list (env : env)
((v1, v2) : CST.dotted_identifier_list) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 = R.List (List.map (map_type_dot_identifier env) v2) in
R.Tuple [ v1; v2 ]
let map_type_name (env : env) ((v1, v2) : CST.type_name) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_dot_identifier env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
let map_raw_string_literal_double_quotes (env : env)
((v1, v2, v3) : CST.raw_string_literal_double_quotes) =
let v1 = (* "r\"" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_double_single tok ->
R.Case
( "Temp_chars_double_single",
(* template_chars_double_single *) token env tok )
| `SQUOT tok -> R.Case ("SQUOT", (* "'" *) token env tok)
| `Temp_chars_raw_slash tok ->
R.Case
( "Temp_chars_raw_slash",
(* template_chars_raw_slash *) token env tok )
| `Unused_esc_seq tok ->
R.Case
("Unused_esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `DOLLAR tok -> R.Case ("DOLLAR", (* "$" *) token env tok))
v2)
in
let v3 = (* "\"" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_raw_string_literal_single_quotes_multiple (env : env)
((v1, v2, v3) : CST.raw_string_literal_single_quotes_multiple) =
let v1 = (* "r'''" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_single tok ->
R.Case
("Temp_chars_single", (* template_chars_single *) token env tok)
| `DQUOT tok -> R.Case ("DQUOT", (* "\"" *) token env tok)
| `SQUOT tok -> R.Case ("SQUOT", (* "'" *) token env tok)
| `Temp_chars_raw_slash tok ->
R.Case
( "Temp_chars_raw_slash",
(* template_chars_raw_slash *) token env tok )
| `Unused_esc_seq tok ->
R.Case
("Unused_esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `DOLLAR tok -> R.Case ("DOLLAR", (* "$" *) token env tok))
v2)
in
let v3 = (* "'''" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_raw_string_literal_double_quotes_multiple (env : env)
((v1, v2, v3) : CST.raw_string_literal_double_quotes_multiple) =
let v1 = (* "r\"\"\"" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_double tok ->
R.Case
("Temp_chars_double", (* template_chars_double *) token env tok)
| `SQUOT tok -> R.Case ("SQUOT", (* "'" *) token env tok)
| `Temp_chars_raw_slash tok ->
R.Case
( "Temp_chars_raw_slash",
(* template_chars_raw_slash *) token env tok )
| `DQUOT tok -> R.Case ("DQUOT", (* "\"" *) token env tok)
| `Unused_esc_seq tok ->
R.Case
("Unused_esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `DOLLAR tok -> R.Case ("DOLLAR", (* "$" *) token env tok))
v2)
in
let v3 = (* "\"\"\"" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_raw_string_literal_single_quotes (env : env)
((v1, v2, v3) : CST.raw_string_literal_single_quotes) =
let v1 = (* "r'" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_single_single tok ->
R.Case
( "Temp_chars_single_single",
(* template_chars_single_single *) token env tok )
| `DQUOT tok -> R.Case ("DQUOT", (* "\"" *) token env tok)
| `Temp_chars_raw_slash tok ->
R.Case
( "Temp_chars_raw_slash",
(* template_chars_raw_slash *) token env tok )
| `Unused_esc_seq tok ->
R.Case
("Unused_esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `DOLLAR tok -> R.Case ("DOLLAR", (* "$" *) token env tok))
v2)
in
let v3 = (* "'" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_binary_operator (env : env) (x : CST.binary_operator) =
match x with
| `Mult_op x -> R.Case ("Mult_op", map_multiplicative_operator env x)
| `Addi_op tok -> R.Case ("Addi_op", (* additive_operator_ *) token env tok)
| `Shift_op x -> R.Case ("Shift_op", map_shift_operator env x)
| `Rela_op x -> R.Case ("Rela_op", map_relational_operator env x)
| `EQEQ tok -> R.Case ("EQEQ", (* "==" *) token env tok)
| `Bitw_op_ x -> R.Case ("Bitw_op_", map_bitwise_operator_ env x)
let rec map_additive_expression (env : env) (x : CST.additive_expression) =
match x with
| `Real_exp_rep1_addi_op_real_exp (v1, v2) ->
R.Case
( "Real_exp_rep1_addi_op_real_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* additive_operator_ *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Super_rep1_addi_op_real_exp (v1, v2) ->
R.Case
( "Super_rep1_addi_op_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* additive_operator_ *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
and map_annotation_ (env : env) (x : CST.annotation_) =
match x with
| `Marker_anno (v1, v2) ->
R.Case
( "Marker_anno",
let v1 = (* "@" *) token env v1 in
let v2 = map_ambiguous_name env v2 in
R.Tuple [ v1; v2 ] )
| `Anno (v1, v2, v3) ->
R.Case
( "Anno",
let v1 = (* "@" *) token env v1 in
let v2 = map_ambiguous_name env v2 in
let v3 = map_arguments env v3 in
R.Tuple [ v1; v2; v3 ] )
and map_anon_arg_rep_COMMA_arg_eb223b2 (env : env)
((v1, v2) : CST.anon_arg_rep_COMMA_arg_eb223b2) =
let v1 = map_argument env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
and map_anon_elem_rep_COMMA_elem_opt_COMMA_4ec364f (env : env)
((v1, v2, v3) : CST.anon_elem_rep_COMMA_elem_opt_COMMA_4ec364f) =
let v1 = map_element env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_element env v2 in
R.Tuple [ v1; v2 ])
v2)
in
let v3 =
match v3 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ]
and map_argument (env : env) (x : CST.argument) = map_expression env x
and map_argument_list (env : env) (x : CST.argument_list) =
match x with
| `Named_arg_rep_COMMA_named_arg (v1, v2) ->
R.Case
( "Named_arg_rep_COMMA_named_arg",
let v1 = map_named_argument env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_named_argument env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Arg_rep_COMMA_arg_rep_COMMA_named_arg_rep_COMMA_named_arg (v1, v2, v3) ->
R.Case
( "Arg_rep_COMMA_arg_rep_COMMA_named_arg_rep_COMMA_named_arg",
let v1 = map_argument env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ])
v2)
in
let v3 =
R.List
(List.map
(fun (v1, v2, v3) ->
let v1 = (* "," *) token env v1 in
let v2 = map_named_argument env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_named_argument env v2 in
R.Tuple [ v1; v2 ])
v3)
in
R.Tuple [ v1; v2; v3 ])
v3)
in
R.Tuple [ v1; v2; v3 ] )
and map_argument_part (env : env) ((v1, v2) : CST.argument_part) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_arguments env x))
| None -> R.Option None
in
let v2 = map_arguments env v2 in
R.Tuple [ v1; v2 ]
and map_arguments (env : env) ((v1, v2, v3) : CST.arguments) =
let v1 = (* "(" *) token env v1 in
let v2 =
match v2 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = map_argument_list env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v3 = (* ")" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_assertion (env : env) ((v1, v2, v3, v4, v5, v6) : CST.assertion) =
let v1 = (* assert_builtin *) token env v1 in
let v2 = (* "(" *) token env v2 in
let v3 = map_argument env v3 in
let v4 =
match v4 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "," *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v5 =
match v5 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v6 = (* ")" *) token env v6 in
R.Tuple [ v1; v2; v3; v4; v5; v6 ]
and map_assignable_expression (env : env) (x : CST.assignable_expression) =
match x with
| `Prim_assi_sele_part (v1, v2) ->
R.Case
( "Prim_assi_sele_part",
let v1 = map_primary env v1 in
let v2 = map_assignable_selector_part env v2 in
R.Tuple [ v1; v2 ] )
| `Super_unco_assi_sele (v1, v2) ->
R.Case
( "Super_unco_assi_sele",
let v1 = (* "super" *) token env v1 in
let v2 = map_unconditional_assignable_selector env v2 in
R.Tuple [ v1; v2 ] )
| `Cons_invo_assi_sele_part (v1, v2) ->
R.Case
( "Cons_invo_assi_sele_part",
let v1 = map_constructor_invocation env v1 in
let v2 = map_assignable_selector_part env v2 in
R.Tuple [ v1; v2 ] )
| `Id tok -> R.Case ("Id", (* pattern [a-zA-Z_$][\w$]* *) token env tok)
and map_assignable_selector (env : env) (x : CST.assignable_selector) =
match x with
| `Unco_assi_sele x ->
R.Case ("Unco_assi_sele", map_unconditional_assignable_selector env x)
| `Cond_assi_sele (v1, v2) ->
R.Case
( "Cond_assi_sele",
let v1 = (* "?." *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ] )
and map_assignable_selector_part (env : env)
((v1, v2) : CST.assignable_selector_part) =
let v1 = R.List (List.map (map_selector env) v1) in
let v2 = map_assignable_selector env v2 in
R.Tuple [ v1; v2 ]
and map_assignment_expression (env : env)
((v1, v2, v3) : CST.assignment_expression) =
let v1 = map_assignable_expression env v1 in
let v2 = map_assignment_operator env v2 in
let v3 = map_argument env v3 in
R.Tuple [ v1; v2; v3 ]
and map_bitwise_and_expression (env : env) (x : CST.bitwise_and_expression) =
match x with
| `Real_exp_rep1_AMP_real_exp (v1, v2) ->
R.Case
( "Real_exp_rep1_AMP_real_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "&" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Super_rep1_AMP_real_exp (v1, v2) ->
R.Case
( "Super_rep1_AMP_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "&" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
and map_bitwise_or_expression (env : env) (x : CST.bitwise_or_expression) =
match x with
| `Real_exp_rep1_BAR_real_exp (v1, v2) ->
R.Case
( "Real_exp_rep1_BAR_real_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "|" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Super_rep1_BAR_real_exp (v1, v2) ->
R.Case
( "Super_rep1_BAR_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "|" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
and map_bitwise_xor_expression (env : env) (x : CST.bitwise_xor_expression) =
match x with
| `Real_exp_rep1_HAT_real_exp (v1, v2) ->
R.Case
( "Real_exp_rep1_HAT_real_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "^" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Super_rep1_HAT_real_exp (v1, v2) ->
R.Case
( "Super_rep1_HAT_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "^" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
and map_block (env : env) ((v1, v2, v3) : CST.block) =
let v1 = (* "{" *) token env v1 in
let v2 = R.List (List.map (map_statement env) v2) in
let v3 = (* "}" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_cascade_assignment_section (env : env)
((v1, v2) : CST.cascade_assignment_section) =
let v1 = map_assignment_operator env v1 in
let v2 = map_expression_without_cascade env v2 in
R.Tuple [ v1; v2 ]
and map_cascade_section (env : env) ((v1, v2, v3, v4, v5) : CST.cascade_section)
=
let v1 =
match v1 with
| `DOTDOT tok -> R.Case ("DOTDOT", (* ".." *) token env tok)
| `QMARKDOTDOT tok -> R.Case ("QMARKDOTDOT", (* "?.." *) token env tok)
in
let v2 = map_cascade_selector env v2 in
let v3 = R.List (List.map (map_argument_part env) v3) in
let v4 = R.List (List.map (map_cascade_subsection env) v4) in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_cascade_assignment_section env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
and map_cascade_selector (env : env) (x : CST.cascade_selector) =
match x with
| `Opt_null_type_LBRACK_exp_RBRACK (v1, v2, v3, v4) ->
R.Case
( "Opt_null_type_LBRACK_exp_RBRACK",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
let v2 = (* "[" *) token env v2 in
let v3 = map_argument env v3 in
let v4 = (* "]" *) token env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Id tok -> R.Case ("Id", (* pattern [a-zA-Z_$][\w$]* *) token env tok)
and map_cascade_subsection (env : env) ((v1, v2) : CST.cascade_subsection) =
let v1 = map_assignable_selector env v1 in
let v2 = R.List (List.map (map_argument_part env) v2) in
R.Tuple [ v1; v2 ]
and map_constructor_invocation (env : env)
((v1, v2, v3, v4, v5) : CST.constructor_invocation) =
let v1 = map_type_name env v1 in
let v2 = map_type_arguments env v2 in
let v3 = (* "." *) token env v3 in
let v4 = (* pattern [a-zA-Z_$][\w$]* *) token env v4 in
let v5 = map_arguments env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ]
and map_constructor_param (env : env)
((v1, v2, v3, v4, v5) : CST.constructor_param) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_final_const_var_or_type env x))
| None -> R.Option None
in
let v2 = (* "this" *) token env v2 in
let v3 = (* "." *) token env v3 in
let v4 = (* pattern [a-zA-Z_$][\w$]* *) token env v4 in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_formal_parameter_part env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
and map_declared_identifier (env : env)
((v1, v2, v3, v4) : CST.declared_identifier) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "covariant" *) token env tok))
| None -> R.Option None
in
let v3 = map_final_const_var_or_type env v3 in
let v4 = (* pattern [a-zA-Z_$][\w$]* *) token env v4 in
R.Tuple [ v1; v2; v3; v4 ]
and map_default_formal_parameter (env : env)
((v1, v2) : CST.default_formal_parameter) =
let v1 = map_formal_parameter env v1 in
let v2 =
match v2 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "=" *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
and map_default_named_parameter (env : env) (x : CST.default_named_parameter) =
match x with
| `Opt_requ_formal_param_opt_EQ_exp (v1, v2, v3) ->
R.Case
( "Opt_requ_formal_param_opt_EQ_exp",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "required" *) token env tok))
| None -> R.Option None
in
let v2 = map_formal_parameter env v2 in
let v3 =
match v3 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "=" *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ] )
| `Opt_requ_formal_param_opt_COLON_exp (v1, v2, v3) ->
R.Case
( "Opt_requ_formal_param_opt_COLON_exp",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "required" *) token env tok))
| None -> R.Option None
in
let v2 = map_formal_parameter env v2 in
let v3 =
match v3 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* ":" *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ] )
and map_element (env : env) (x : CST.element) =
match x with
| `Exp x -> R.Case ("Exp", map_argument env x)
| `Pair (v1, v2, v3) ->
R.Case
( "Pair",
let v1 = map_argument env v1 in
let v2 = (* ":" *) token env v2 in
let v3 = map_argument env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Spread_elem (v1, v2, v3) ->
R.Case
( "Spread_elem",
let v1 = (* "..." *) token env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
let v3 = map_argument env v3 in
R.Tuple [ v1; v2; v3 ] )
| `If_elem (v1, v2, v3, v4) ->
R.Case
( "If_elem",
let v1 = (* "if" *) token env v1 in
let v2 = map_parenthesized_expression env v2 in
let v3 = map_element env v3 in
let v4 =
match v4 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "else" *) token env v1 in
let v2 = map_element env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4 ] )
| `For_elem (v1, v2, v3, v4, v5, v6) ->
R.Case
( "For_elem",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "await" *) token env tok))
| None -> R.Option None
in
let v2 = (* "for" *) token env v2 in
let v3 = (* "(" *) token env v3 in
let v4 = map_for_loop_parts env v4 in
let v5 = (* ")" *) token env v5 in
let v6 = map_element env v6 in
R.Tuple [ v1; v2; v3; v4; v5; v6 ] )
and map_equality_expression (env : env) (x : CST.equality_expression) =
match x with
| `Real_exp_equa_op_real_exp (v1, v2, v3) ->
R.Case
( "Real_exp_equa_op_real_exp",
let v1 = map_real_expression env v1 in
let v2 = (* equality_operator *) token env v2 in
let v3 = map_real_expression env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Super_equa_op_real_exp (v1, v2, v3) ->
R.Case
( "Super_equa_op_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 = (* equality_operator *) token env v2 in
let v3 = map_real_expression env v3 in
R.Tuple [ v1; v2; v3 ] )
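(* Note: the Semg_ellips and Deep_ellips cases below correspond to the
 * semgrep pattern constructs "..." and "<... e ...>", used when parsing
 * semgrep patterns rather than plain Dart source. *)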
and map_expression (env : env) (x : CST.expression) =
match x with
| `Choice_assign_exp x ->
R.Case
( "Choice_assign_exp",
match x with
| `Assign_exp x ->
R.Case ("Assign_exp", map_assignment_expression env x)
| `Throw_exp x -> R.Case ("Throw_exp", map_throw_expression env x)
| `Real_exp_rep_casc_sect (v1, v2) ->
R.Case
( "Real_exp_rep_casc_sect",
let v1 = map_real_expression env v1 in
let v2 = R.List (List.map (map_cascade_section env) v2) in
R.Tuple [ v1; v2 ] ) )
| `Semg_ellips tok -> R.Case ("Semg_ellips", (* "..." *) token env tok)
| `Deep_ellips (v1, v2, v3) ->
R.Case
( "Deep_ellips",
let v1 = (* "<..." *) token env v1 in
let v2 = map_argument env v2 in
let v3 = (* "...>" *) token env v3 in
R.Tuple [ v1; v2; v3 ] )
and map_expression_statement (env : env) (x : CST.expression_statement) =
match x with
| `Exp_semi (v1, v2) ->
R.Case
( "Exp_semi",
let v1 = map_argument env v1 in
let v2 = map_semicolon env v2 in
R.Tuple [ v1; v2 ] )
| `Semg_ellips tok -> R.Case ("Semg_ellips", (* "..." *) token env tok)
and map_expression_without_cascade (env : env)
(x : CST.expression_without_cascade) =
match x with
| `Assign_exp_with_casc (v1, v2, v3) ->
R.Case
( "Assign_exp_with_casc",
let v1 = map_assignable_expression env v1 in
let v2 = map_assignment_operator env v2 in
let v3 = map_expression_without_cascade env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Real_exp x -> R.Case ("Real_exp", map_real_expression env x)
| `Throw_exp_with_casc (v1, v2) ->
R.Case
( "Throw_exp_with_casc",
let v1 = (* "throw" *) token env v1 in
let v2 = map_expression_without_cascade env v2 in
R.Tuple [ v1; v2 ] )
and map_final_const_var_or_type (env : env) (x : CST.final_const_var_or_type) =
match x with
| `Opt_late_buil_final_buil_opt_type (v1, v2, v3) ->
R.Case
( "Opt_late_buil_final_buil_opt_type",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "late" *) token env tok))
| None -> R.Option None
in
let v2 = (* final_builtin *) token env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ] )
| `Const_buil_opt_type (v1, v2) ->
R.Case
( "Const_buil_opt_type",
let v1 = (* const_builtin *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
| `Opt_late_buil_var_or_type (v1, v2) ->
R.Case
( "Opt_late_buil_var_or_type",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "late" *) token env tok))
| None -> R.Option None
in
let v2 = map_var_or_type env v2 in
R.Tuple [ v1; v2 ] )
and map_finally_clause (env : env) ((v1, v2) : CST.finally_clause) =
let v1 = (* "finally" *) token env v1 in
let v2 = map_block env v2 in
R.Tuple [ v1; v2 ]
and map_for_loop_parts (env : env) (x : CST.for_loop_parts) =
match x with
| `Choice_decl_id_in_exp (v1, v2, v3) ->
R.Case
( "Choice_decl_id_in_exp",
let v1 =
match v1 with
| `Decl_id x -> R.Case ("Decl_id", map_declared_identifier env x)
| `Id tok ->
R.Case ("Id", (* pattern [a-zA-Z_$][\w$]* *) token env tok)
in
let v2 = (* "in" *) token env v2 in
let v3 = map_argument env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_choice_local_var_decl_opt_exp_semi_opt_exp_rep_COMMA_exp
(v1, v2, v3, v4) ->
R.Case
( "Opt_choice_local_var_decl_opt_exp_semi_opt_exp_rep_COMMA_exp",
let v1 =
match v1 with
| Some x ->
R.Option
(Some
(match x with
| `Local_var_decl x ->
R.Case
( "Local_var_decl",
map_local_variable_declaration env x )
| `Opt_exp_rep_COMMA_exp_semi (v1, v2) ->
R.Case
( "Opt_exp_rep_COMMA_exp_semi",
let v1 =
match v1 with
| Some x ->
R.Option
(Some
(map_anon_arg_rep_COMMA_arg_eb223b2 env
x))
| None -> R.Option None
in
let v2 = map_semicolon env v2 in
R.Tuple [ v1; v2 ] )))
| None -> R.Option None
in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_argument env x))
| None -> R.Option None
in
let v3 = map_semicolon env v3 in
let v4 =
match v4 with
| Some x ->
R.Option (Some (map_anon_arg_rep_COMMA_arg_eb223b2 env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4 ] )
and map_formal_parameter (env : env) (x : CST.formal_parameter) =
match x with
| `Semg_ellips tok -> R.Case ("Semg_ellips", (* "..." *) token env tok)
| `Normal_formal_param (v1, v2) ->
R.Case
( "Normal_formal_param",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 =
match v2 with
| `Func_formal_param x ->
R.Case ("Func_formal_param", map_function_formal_parameter env x)
| `Simple_formal_param x ->
R.Case ("Simple_formal_param", map_simple_formal_parameter env x)
| `Cons_param x -> R.Case ("Cons_param", map_constructor_param env x)
| `Super_formal_param x ->
R.Case ("Super_formal_param", map_super_formal_parameter env x)
in
R.Tuple [ v1; v2 ] )
and map_formal_parameter_list (env : env) (x : CST.formal_parameter_list) =
map_strict_formal_parameter_list env x
and map_formal_parameter_part (env : env) ((v1, v2) : CST.formal_parameter_part)
=
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v2 = map_formal_parameter_list env v2 in
R.Tuple [ v1; v2 ]
and map_function_body (env : env) (x : CST.function_body) =
match x with
| `Opt_async_EQGT_exp_semi (v1, v2, v3, v4) ->
R.Case
( "Opt_async_EQGT_exp_semi",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "async" *) token env tok))
| None -> R.Option None
in
let v2 = (* "=>" *) token env v2 in
let v3 = map_argument env v3 in
let v4 = map_semicolon env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Opt_choice_async_blk (v1, v2) ->
R.Case
( "Opt_choice_async_blk",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_anon_choice_async_725f72f env x))
| None -> R.Option None
in
let v2 = map_block env v2 in
R.Tuple [ v1; v2 ] )
and map_function_expression_body (env : env) (x : CST.function_expression_body)
=
match x with
| `Opt_async_EQGT_exp (v1, v2, v3) ->
R.Case
( "Opt_async_EQGT_exp",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "async" *) token env tok))
| None -> R.Option None
in
let v2 = (* "=>" *) token env v2 in
let v3 = map_argument env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_choice_async_blk (v1, v2) ->
R.Case
( "Opt_choice_async_blk",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_anon_choice_async_725f72f env x))
| None -> R.Option None
in
let v2 = map_block env v2 in
R.Tuple [ v1; v2 ] )
and map_function_formal_parameter (env : env)
((v1, v2, v3, v4, v5) : CST.function_formal_parameter) =
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "covariant" *) token env tok))
| None -> R.Option None
in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
let v4 = map_formal_parameter_part env v4 in
let v5 =
match v5 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
and map_function_signature (env : env)
((v1, v2, v3, v4) : CST.function_signature) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v2 =
match v2 with
| `Get tok -> R.Case ("Get", (* "get" *) token env tok)
| `Set tok -> R.Case ("Set", (* "set" *) token env tok)
| `Id tok -> R.Case ("Id", (* pattern [a-zA-Z_$][\w$]* *) token env tok)
in
let v3 = map_formal_parameter_part env v3 in
let v4 =
match v4 with
| Some x -> R.Option (Some (map_native env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4 ]
and map_function_type (env : env) (x : CST.function_type) =
match x with
| `Func_type_tails x ->
R.Case ("Func_type_tails", map_function_type_tails env x)
| `Type_not_func_func_type_tails (v1, v2) ->
R.Case
( "Type_not_func_func_type_tails",
let v1 = map_type_not_function env v1 in
let v2 = map_function_type_tails env v2 in
R.Tuple [ v1; v2 ] )
and map_function_type_tail (env : env)
((v1, v2, v3, v4, v5) : CST.function_type_tail) =
let v1 = (* "Function" *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v3 =
match v3 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
let v4 =
match v4 with
| Some x -> R.Option (Some (map_parameter_type_list env x))
| None -> R.Option None
in
let v5 =
match v5 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
and map_function_type_tails (env : env) (xs : CST.function_type_tails) =
R.List (List.map (map_function_type_tail env) xs)
and map_if_null_expression_ (env : env) (xs : CST.if_null_expression_) =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "??" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
xs)
and map_initialized_identifier (env : env)
((v1, v2) : CST.initialized_identifier) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
match v2 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "=" *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
and map_initialized_variable_definition (env : env)
((v1, v2, v3) : CST.initialized_variable_definition) =
let v1 = map_declared_identifier env v1 in
let v2 =
match v2 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "=" *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_initialized_identifier env v2 in
R.Tuple [ v1; v2 ])
v3)
in
R.Tuple [ v1; v2; v3 ]
and map_interface_type_list (env : env) ((v1, v2) : CST.interface_type_list) =
let v1 = map_type_ env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_type_ env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
and map_lambda_expression (env : env) ((v1, v2) : CST.lambda_expression) =
let v1 = map_function_signature env v1 in
let v2 = map_function_body env v2 in
R.Tuple [ v1; v2 ]
and map_literal (env : env) (x : CST.literal) =
match x with
| `Deci_int_lit tok ->
R.Case ("Deci_int_lit", (* decimal_integer_literal *) token env tok)
| `Hex_int_lit tok ->
R.Case ("Hex_int_lit", (* hex_integer_literal *) token env tok)
| `Deci_floa_point_lit tok ->
R.Case
( "Deci_floa_point_lit",
(* decimal_floating_point_literal *) token env tok )
| `True tok -> R.Case ("True", (* "true" *) token env tok)
| `False tok -> R.Case ("False", (* "false" *) token env tok)
| `Str_lit x -> R.Case ("Str_lit", map_uri env x)
| `Null_lit tok -> R.Case ("Null_lit", (* "null" *) token env tok)
| `Symb_lit (v1, v2) ->
R.Case
( "Symb_lit",
let v1 = (* "#" *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ] )
| `List_lit (v1, v2, v3, v4, v5) ->
R.Case
( "List_lit",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* const_builtin *) token env tok))
| None -> R.Option None
in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_arguments env x))
| None -> R.Option None
in
let v3 = (* "[" *) token env v3 in
let v4 =
match v4 with
| Some x ->
R.Option
(Some (map_anon_elem_rep_COMMA_elem_opt_COMMA_4ec364f env x))
| None -> R.Option None
in
let v5 = (* "]" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Set_or_map_lit (v1, v2, v3, v4, v5) ->
R.Case
( "Set_or_map_lit",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* const_builtin *) token env tok))
| None -> R.Option None
in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_arguments env x))
| None -> R.Option None
in
let v3 = (* "{" *) token env v3 in
let v4 =
match v4 with
| Some x ->
R.Option
(Some (map_anon_elem_rep_COMMA_elem_opt_COMMA_4ec364f env x))
| None -> R.Option None
in
let v5 = (* "}" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
and map_local_variable_declaration (env : env)
((v1, v2) : CST.local_variable_declaration) =
let v1 = map_initialized_variable_definition env v1 in
let v2 = map_semicolon env v2 in
R.Tuple [ v1; v2 ]
and map_metadata (env : env) (xs : CST.metadata) =
R.List (List.map (map_annotation_ env) xs)
and map_multiplicative_expression (env : env)
(x : CST.multiplicative_expression) =
match x with
| `Un_exp_rep1_mult_op_un_exp (v1, v2) ->
R.Case
( "Un_exp_rep1_mult_op_un_exp",
let v1 = map_unary_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = map_multiplicative_operator env v1 in
let v2 = map_unary_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Super_rep1_mult_op_un_exp (v1, v2) ->
R.Case
( "Super_rep1_mult_op_un_exp",
let v1 = (* "super" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = map_multiplicative_operator env v1 in
let v2 = map_unary_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
and map_named_argument (env : env) ((v1, v2) : CST.named_argument) =
let v1 = map_label env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]
and map_named_parameter_type (env : env)
((v1, v2, v3) : CST.named_parameter_type) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "required" *) token env tok))
| None -> R.Option None
in
let v3 = map_typed_identifier env v3 in
R.Tuple [ v1; v2; v3 ]
and map_native (env : env) ((v1, v2) : CST.native) =
let v1 = (* "native" *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_uri env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
and map_normal_formal_parameters (env : env)
((v1, v2) : CST.normal_formal_parameters) =
let v1 = map_formal_parameter env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_formal_parameter env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
and map_normal_parameter_type (env : env) ((v1, v2) : CST.normal_parameter_type)
=
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 =
match v2 with
| `Typed_id x -> R.Case ("Typed_id", map_typed_identifier env x)
| `Type x -> R.Case ("Type", map_type_ env x)
in
R.Tuple [ v1; v2 ]
and map_on_part (env : env) (x : CST.on_part) =
match x with
| `Catch_clause_blk (v1, v2) ->
R.Case
( "Catch_clause_blk",
let v1 = map_catch_clause env v1 in
let v2 = map_block env v2 in
R.Tuple [ v1; v2 ] )
| `On_type_not_void_opt_catch_clause_blk (v1, v2, v3, v4) ->
R.Case
( "On_type_not_void_opt_catch_clause_blk",
let v1 = (* "on" *) token env v1 in
let v2 = map_type_not_void env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_catch_clause env x))
| None -> R.Option None
in
let v4 = map_block env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
and map_optional_formal_parameters (env : env)
(x : CST.optional_formal_parameters) =
match x with
| `Opt_post_formal_params (v1, v2, v3, v4, v5) ->
R.Case
( "Opt_post_formal_params",
let v1 = (* "[" *) token env v1 in
let v2 = map_default_formal_parameter env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_default_formal_parameter env v2 in
R.Tuple [ v1; v2 ])
v3)
in
let v4 =
match v4 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v5 = (* "]" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Named_formal_params (v1, v2, v3, v4, v5) ->
R.Case
( "Named_formal_params",
let v1 = (* "{" *) token env v1 in
let v2 = map_default_named_parameter env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_default_named_parameter env v2 in
R.Tuple [ v1; v2 ])
v3)
in
let v4 =
match v4 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v5 = (* "}" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
and map_optional_parameter_types (env : env) (x : CST.optional_parameter_types)
=
match x with
| `Opt_posi_param_types (v1, v2, v3, v4, v5) ->
R.Case
( "Opt_posi_param_types",
let v1 = (* "[" *) token env v1 in
let v2 = map_normal_parameter_type env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_normal_parameter_type env v2 in
R.Tuple [ v1; v2 ])
v3)
in
let v4 =
match v4 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v5 = (* "]" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Named_param_types (v1, v2, v3, v4, v5) ->
R.Case
( "Named_param_types",
let v1 = (* "{" *) token env v1 in
let v2 = map_named_parameter_type env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_named_parameter_type env v2 in
R.Tuple [ v1; v2 ])
v3)
in
let v4 =
match v4 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v5 = (* "}" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
and map_parameter_type_list (env : env) ((v1, v2, v3) : CST.parameter_type_list)
=
let v1 = (* "(" *) token env v1 in
let v2 =
match v2 with
| Some x ->
R.Option
(Some
(match x with
| `Normal_param_type_rep_COMMA_normal_param_type_opt_COMMA
(v1, v2, v3) ->
R.Case
( "Normal_param_type_rep_COMMA_normal_param_type_opt_COMMA",
let v1 = map_normal_parameter_type env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_normal_parameter_type env v2 in
R.Tuple [ v1; v2 ])
v2)
in
let v3 =
match v3 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ] )
| `Normal_param_type_rep_COMMA_normal_param_type_COMMA_opt_param_types
(v1, v2, v3, v4) ->
R.Case
( "Normal_param_type_rep_COMMA_normal_param_type_COMMA_opt_param_types",
let v1 = map_normal_parameter_type env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_normal_parameter_type env v2 in
R.Tuple [ v1; v2 ])
v2)
in
let v3 = (* "," *) token env v3 in
let v4 = map_optional_parameter_types env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Opt_param_types x ->
R.Case ("Opt_param_types", map_optional_parameter_types env x)))
| None -> R.Option None
in
let v3 = (* ")" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_parenthesized_expression (env : env)
((v1, v2, v3) : CST.parenthesized_expression) =
let v1 = (* "(" *) token env v1 in
let v2 = map_argument env v2 in
let v3 = (* ")" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_postfix_expression (env : env) (x : CST.postfix_expression) =
match x with
| `Prim_rep_sele (v1, v2) ->
R.Case
( "Prim_rep_sele",
let v1 = map_primary env v1 in
let v2 = R.List (List.map (map_selector env) v2) in
R.Tuple [ v1; v2 ] )
| `Post_exp_ x -> R.Case ("Post_exp_", map_postfix_expression_ env x)
and map_postfix_expression_ (env : env) (x : CST.postfix_expression_) =
match x with
| `Assi_exp_post_op (v1, v2) ->
R.Case
( "Assi_exp_post_op",
let v1 = map_assignable_expression env v1 in
let v2 = (* increment_operator *) token env v2 in
R.Tuple [ v1; v2 ] )
| `Cons_invo_rep_sele (v1, v2) ->
R.Case
( "Cons_invo_rep_sele",
let v1 = map_constructor_invocation env v1 in
let v2 = R.List (List.map (map_selector env) v2) in
R.Tuple [ v1; v2 ] )
and map_primary (env : env) (x : CST.primary) =
match x with
| `Lit x -> R.Case ("Lit", map_literal env x)
| `Func_exp (v1, v2) ->
R.Case
( "Func_exp",
let v1 = map_formal_parameter_part env v1 in
let v2 = map_function_expression_body env v2 in
R.Tuple [ v1; v2 ] )
| `Id tok -> R.Case ("Id", (* pattern [a-zA-Z_$][\w$]* *) token env tok)
| `New_exp (v1, v2, v3, v4) ->
R.Case
( "New_exp",
let v1 = (* "new" *) token env v1 in
let v2 = map_type_not_void env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_dot_identifier env x))
| None -> R.Option None
in
let v4 = map_arguments env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Const_obj_exp (v1, v2, v3, v4) ->
R.Case
( "Const_obj_exp",
let v1 = (* const_builtin *) token env v1 in
let v2 = map_type_not_void env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_dot_identifier env x))
| None -> R.Option None
in
let v4 = map_arguments env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `LPAR_exp_RPAR x ->
R.Case ("LPAR_exp_RPAR", map_parenthesized_expression env x)
| `This tok -> R.Case ("This", (* "this" *) token env tok)
| `Super_unco_assi_sele (v1, v2) ->
R.Case
( "Super_unco_assi_sele",
let v1 = (* "super" *) token env v1 in
let v2 = map_unconditional_assignable_selector env v2 in
R.Tuple [ v1; v2 ] )
and map_real_expression (env : env) (x : CST.real_expression) =
match x with
| `Cond_exp (v1, v2, v3, v4, v5) ->
R.Case
( "Cond_exp",
let v1 = map_real_expression env v1 in
let v2 = (* "?" *) token env v2 in
let v3 = map_expression_without_cascade env v3 in
let v4 = (* ":" *) token env v4 in
let v5 = map_expression_without_cascade env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Logi_or_exp (v1, v2) ->
R.Case
( "Logi_or_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "||" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `If_null_exp (v1, v2) ->
R.Case
( "If_null_exp",
let v1 = map_real_expression env v1 in
let v2 = map_if_null_expression_ env v2 in
R.Tuple [ v1; v2 ] )
| `Addi_exp x -> R.Case ("Addi_exp", map_additive_expression env x)
| `Mult_exp x -> R.Case ("Mult_exp", map_multiplicative_expression env x)
| `Rela_exp x -> R.Case ("Rela_exp", map_relational_expression env x)
| `Equa_exp x -> R.Case ("Equa_exp", map_equality_expression env x)
| `Logi_and_exp (v1, v2) ->
R.Case
( "Logi_and_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "&&" *) token env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Bitw_and_exp x -> R.Case ("Bitw_and_exp", map_bitwise_and_expression env x)
| `Bitw_or_exp x -> R.Case ("Bitw_or_exp", map_bitwise_or_expression env x)
| `Bitw_xor_exp x -> R.Case ("Bitw_xor_exp", map_bitwise_xor_expression env x)
| `Shift_exp x -> R.Case ("Shift_exp", map_shift_expression env x)
| `Type_cast_exp (v1, v2) ->
R.Case
( "Type_cast_exp",
let v1 = map_real_expression env v1 in
let v2 = map_type_cast env v2 in
R.Tuple [ v1; v2 ] )
| `Type_test_exp (v1, v2) ->
R.Case
( "Type_test_exp",
let v1 = map_real_expression env v1 in
let v2 = map_type_test env v2 in
R.Tuple [ v1; v2 ] )
| `Un_exp x -> R.Case ("Un_exp", map_unary_expression env x)
and map_relational_expression (env : env) (x : CST.relational_expression) =
match x with
| `Real_exp_rela_op_real_exp (v1, v2, v3) ->
R.Case
( "Real_exp_rela_op_real_exp",
let v1 = map_real_expression env v1 in
let v2 = map_relational_operator env v2 in
let v3 = map_real_expression env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Super_rela_op_real_exp (v1, v2, v3) ->
R.Case
( "Super_rela_op_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 = map_relational_operator env v2 in
let v3 = map_real_expression env v3 in
R.Tuple [ v1; v2; v3 ] )
and map_selector (env : env) (x : CST.selector) =
match x with
| `Excl_op tok -> R.Case ("Excl_op", (* "!" *) token env tok)
| `Assi_sele x -> R.Case ("Assi_sele", map_assignable_selector env x)
| `Arg_part x -> R.Case ("Arg_part", map_argument_part env x)
and map_shift_expression (env : env) (x : CST.shift_expression) =
match x with
| `Real_exp_rep1_shift_op_real_exp (v1, v2) ->
R.Case
( "Real_exp_rep1_shift_op_real_exp",
let v1 = map_real_expression env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = map_shift_operator env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
| `Super_rep1_shift_op_real_exp (v1, v2) ->
R.Case
( "Super_rep1_shift_op_real_exp",
let v1 = (* "super" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = map_shift_operator env v1 in
let v2 = map_real_expression env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ] )
and map_simple_formal_parameter (env : env) (x : CST.simple_formal_parameter) =
match x with
| `Decl_id x -> R.Case ("Decl_id", map_declared_identifier env x)
| `Opt_cova_id (v1, v2) ->
R.Case
( "Opt_cova_id",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "covariant" *) token env tok))
| None -> R.Option None
in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ] )
and map_statement (env : env) (x : CST.statement) =
match x with
| `Blk x -> R.Case ("Blk", map_block env x)
| `Local_func_decl (v1, v2) ->
R.Case
( "Local_func_decl",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = map_lambda_expression env v2 in
R.Tuple [ v1; v2 ] )
| `Local_var_decl x ->
R.Case ("Local_var_decl", map_local_variable_declaration env x)
| `For_stmt (v1, v2, v3, v4, v5, v6) ->
R.Case
( "For_stmt",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "await" *) token env tok))
| None -> R.Option None
in
let v2 = (* "for" *) token env v2 in
let v3 = (* "(" *) token env v3 in
let v4 = map_for_loop_parts env v4 in
let v5 = (* ")" *) token env v5 in
let v6 = map_statement env v6 in
R.Tuple [ v1; v2; v3; v4; v5; v6 ] )
| `While_stmt (v1, v2, v3) ->
R.Case
( "While_stmt",
let v1 = (* "while" *) token env v1 in
let v2 = map_parenthesized_expression env v2 in
let v3 = map_statement env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Do_stmt (v1, v2, v3, v4, v5) ->
R.Case
( "Do_stmt",
let v1 = (* "do" *) token env v1 in
let v2 = map_statement env v2 in
let v3 = (* "while" *) token env v3 in
let v4 = map_parenthesized_expression env v4 in
let v5 = map_semicolon env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Switch_stmt (v1, v2, v3) ->
R.Case
( "Switch_stmt",
let v1 = (* "switch" *) token env v1 in
let v2 = map_parenthesized_expression env v2 in
let v3 = map_switch_block env v3 in
R.Tuple [ v1; v2; v3 ] )
| `If_stmt (v1, v2, v3, v4) ->
R.Case
( "If_stmt",
let v1 = (* "if" *) token env v1 in
let v2 = map_parenthesized_expression env v2 in
let v3 = map_statement env v3 in
let v4 =
match v4 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "else" *) token env v1 in
let v2 = map_statement env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4 ] )
| `Try_stmt (v1, v2) ->
R.Case
( "Try_stmt",
let v1 = map_try_head env v1 in
let v2 =
match v2 with
| `Fina_clause x -> R.Case ("Fina_clause", map_finally_clause env x)
| `Rep1_on_part_opt_fina_clause (v1, v2) ->
R.Case
( "Rep1_on_part_opt_fina_clause",
let v1 = R.List (List.map (map_on_part env) v1) in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_finally_clause env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
in
R.Tuple [ v1; v2 ] )
| `Brk_stmt (v1, v2, v3) ->
R.Case
( "Brk_stmt",
let v1 = (* break_builtin *) token env v1 in
let v2 =
match v2 with
| Some tok ->
R.Option (Some ((* pattern [a-zA-Z_$][\w$]* *) token env tok))
| None -> R.Option None
in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Cont_stmt (v1, v2, v3) ->
R.Case
( "Cont_stmt",
let v1 = (* "continue" *) token env v1 in
let v2 =
match v2 with
| Some tok ->
R.Option (Some ((* pattern [a-zA-Z_$][\w$]* *) token env tok))
| None -> R.Option None
in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Ret_stmt (v1, v2, v3) ->
R.Case
( "Ret_stmt",
let v1 = (* "return" *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_argument env x))
| None -> R.Option None
in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Yield_stmt (v1, v2, v3) ->
R.Case
( "Yield_stmt",
let v1 = (* "yield" *) token env v1 in
let v2 = map_argument env v2 in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Yield_each_stmt (v1, v2, v3, v4) ->
R.Case
( "Yield_each_stmt",
let v1 = (* "yield" *) token env v1 in
let v2 = (* "*" *) token env v2 in
let v3 = map_argument env v3 in
let v4 = map_semicolon env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Exp_stmt x -> R.Case ("Exp_stmt", map_expression_statement env x)
| `Assert_stmt (v1, v2) ->
R.Case
( "Assert_stmt",
let v1 = map_assertion env v1 in
let v2 = (* ";" *) token env v2 in
R.Tuple [ v1; v2 ] )
and map_strict_formal_parameter_list (env : env)
(x : CST.strict_formal_parameter_list) =
match x with
| `LPAR_RPAR (v1, v2) ->
R.Case
( "LPAR_RPAR",
let v1 = (* "(" *) token env v1 in
let v2 = (* ")" *) token env v2 in
R.Tuple [ v1; v2 ] )
| `LPAR_normal_formal_params_opt_COMMA_RPAR (v1, v2, v3, v4) ->
R.Case
( "LPAR_normal_formal_params_opt_COMMA_RPAR",
let v1 = (* "(" *) token env v1 in
let v2 = map_normal_formal_parameters env v2 in
let v3 =
match v3 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v4 = (* ")" *) token env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `LPAR_normal_formal_params_COMMA_opt_formal_params_RPAR (v1, v2, v3, v4, v5)
->
R.Case
( "LPAR_normal_formal_params_COMMA_opt_formal_params_RPAR",
let v1 = (* "(" *) token env v1 in
let v2 = map_normal_formal_parameters env v2 in
let v3 = (* "," *) token env v3 in
let v4 = map_optional_formal_parameters env v4 in
let v5 = (* ")" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `LPAR_opt_formal_params_RPAR (v1, v2, v3) ->
R.Case
( "LPAR_opt_formal_params_RPAR",
let v1 = (* "(" *) token env v1 in
let v2 = map_optional_formal_parameters env v2 in
let v3 = (* ")" *) token env v3 in
R.Tuple [ v1; v2; v3 ] )
and map_string_literal (env : env) (xs : CST.string_literal) =
R.List
(List.map
(fun x ->
match x with
| `Str_lit_double_quotes x ->
R.Case
("Str_lit_double_quotes", map_string_literal_double_quotes env x)
| `Str_lit_single_quotes x ->
R.Case
("Str_lit_single_quotes", map_string_literal_single_quotes env x)
| `Str_lit_double_quotes_mult x ->
R.Case
( "Str_lit_double_quotes_mult",
map_string_literal_double_quotes_multiple env x )
| `Str_lit_single_quotes_mult x ->
R.Case
( "Str_lit_single_quotes_mult",
map_string_literal_single_quotes_multiple env x )
| `Raw_str_lit_double_quotes x ->
R.Case
( "Raw_str_lit_double_quotes",
map_raw_string_literal_double_quotes env x )
| `Raw_str_lit_single_quotes x ->
R.Case
( "Raw_str_lit_single_quotes",
map_raw_string_literal_single_quotes env x )
| `Raw_str_lit_double_quotes_mult x ->
R.Case
( "Raw_str_lit_double_quotes_mult",
map_raw_string_literal_double_quotes_multiple env x )
| `Raw_str_lit_single_quotes_mult x ->
R.Case
( "Raw_str_lit_single_quotes_mult",
map_raw_string_literal_single_quotes_multiple env x ))
xs)
and map_string_literal_double_quotes (env : env)
((v1, v2, v3) : CST.string_literal_double_quotes) =
let v1 = (* "\"" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_double_single tok ->
R.Case
( "Temp_chars_double_single",
(* template_chars_double_single *) token env tok )
| `SQUOT tok -> R.Case ("SQUOT", (* "'" *) token env tok)
| `Esc_seq tok ->
R.Case ("Esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `Temp_subs x ->
R.Case ("Temp_subs", map_template_substitution env x))
v2)
in
let v3 = (* "\"" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_string_literal_double_quotes_multiple (env : env)
((v1, v2, v3) : CST.string_literal_double_quotes_multiple) =
let v1 = (* "\"\"\"" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_double tok ->
R.Case
("Temp_chars_double", (* template_chars_double *) token env tok)
| `SQUOT tok -> R.Case ("SQUOT", (* "'" *) token env tok)
| `DQUOT tok -> R.Case ("DQUOT", (* "\"" *) token env tok)
| `Esc_seq tok ->
R.Case ("Esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `Temp_subs x ->
R.Case ("Temp_subs", map_template_substitution env x))
v2)
in
let v3 = (* "\"\"\"" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_string_literal_single_quotes (env : env)
((v1, v2, v3) : CST.string_literal_single_quotes) =
let v1 = (* "'" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_single_single tok ->
R.Case
( "Temp_chars_single_single",
(* template_chars_single_single *) token env tok )
| `DQUOT tok -> R.Case ("DQUOT", (* "\"" *) token env tok)
| `Esc_seq tok ->
R.Case ("Esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `Temp_subs x ->
R.Case ("Temp_subs", map_template_substitution env x))
v2)
in
let v3 = (* "'" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_string_literal_single_quotes_multiple (env : env)
((v1, v2, v3) : CST.string_literal_single_quotes_multiple) =
let v1 = (* "'''" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Temp_chars_single tok ->
R.Case
("Temp_chars_single", (* template_chars_single *) token env tok)
| `DQUOT tok -> R.Case ("DQUOT", (* "\"" *) token env tok)
| `SQUOT tok -> R.Case ("SQUOT", (* "'" *) token env tok)
| `Esc_seq tok ->
R.Case ("Esc_seq", (* unused_escape_sequence *) token env tok)
| `Sub_str_test x ->
R.Case ("Sub_str_test", map_sub_string_test env x)
| `Temp_subs x ->
R.Case ("Temp_subs", map_template_substitution env x))
v2)
in
let v3 = (* "'''" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_super_formal_parameter (env : env)
((v1, v2, v3, v4, v5) : CST.super_formal_parameter) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_final_const_var_or_type env x))
| None -> R.Option None
in
let v2 = (* "super" *) token env v2 in
let v3 = (* "." *) token env v3 in
let v4 = (* pattern [a-zA-Z_$][\w$]* *) token env v4 in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_formal_parameter_part env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
and map_switch_block (env : env) ((v1, v2, v3) : CST.switch_block) =
let v1 = (* "{" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Switch_label x -> R.Case ("Switch_label", map_switch_label env x)
| `Stmt x -> R.Case ("Stmt", map_statement env x))
v2)
in
let v3 = (* "}" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
and map_switch_label (env : env) ((v1, v2) : CST.switch_label) =
let v1 = R.List (List.map (map_label env) v1) in
let v2 =
match v2 with
| `Case_buil_exp_COLON (v1, v2, v3) ->
R.Case
( "Case_buil_exp_COLON",
let v1 = (* case_builtin *) token env v1 in
let v2 = map_argument env v2 in
let v3 = (* ":" *) token env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Defa_COLON (v1, v2) ->
R.Case
( "Defa_COLON",
let v1 = (* "default" *) token env v1 in
let v2 = (* ":" *) token env v2 in
R.Tuple [ v1; v2 ] )
in
R.Tuple [ v1; v2 ]
and map_template_substitution (env : env) ((v1, v2) : CST.template_substitution)
=
let v1 = (* "$" *) token env v1 in
let v2 =
match v2 with
| `LCURL_exp_RCURL (v1, v2, v3) ->
R.Case
( "LCURL_exp_RCURL",
let v1 = (* "{" *) token env v1 in
let v2 = map_argument env v2 in
let v3 = (* "}" *) token env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Id_dollar_esca tok ->
R.Case
( "Id_dollar_esca",
(* pattern ([a-zA-Z_]|(\\\$))([\w]|(\\\$))* *) token env tok )
in
R.Tuple [ v1; v2 ]
and map_throw_expression (env : env) ((v1, v2) : CST.throw_expression) =
let v1 = (* "throw" *) token env v1 in
let v2 = map_argument env v2 in
R.Tuple [ v1; v2 ]
and map_try_head (env : env) ((v1, v2) : CST.try_head) =
let v1 = (* "try" *) token env v1 in
let v2 = map_block env v2 in
R.Tuple [ v1; v2 ]
and map_type_ (env : env) (x : CST.type_) =
match x with
| `Func_type_opt_null_type (v1, v2) ->
R.Case
( "Func_type_opt_null_type",
let v1 = map_function_type env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
| `Type_not_func x -> R.Case ("Type_not_func", map_type_not_function env x)
and map_type_arguments (env : env) (x : CST.type_arguments) =
match x with
| `LT_opt_type_rep_COMMA_type_GT (v1, v2, v3) ->
R.Case
( "LT_opt_type_rep_COMMA_type_GT",
let v1 = (* "<" *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_interface_type_list env x))
| None -> R.Option None
in
let v3 = (* ">" *) token env v3 in
R.Tuple [ v1; v2; v3 ] )
and map_type_bound (env : env) ((v1, v2) : CST.type_bound) =
let v1 = (* "extends" *) token env v1 in
let v2 = map_type_not_void env v2 in
R.Tuple [ v1; v2 ]
and map_type_cast (env : env) ((v1, v2) : CST.type_cast) =
let v1 = (* as_operator *) token env v1 in
let v2 = map_type_not_void env v2 in
R.Tuple [ v1; v2 ]
and map_type_not_function (env : env) (x : CST.type_not_function) =
match x with
| `Type_not_void_not_func x ->
R.Case ("Type_not_void_not_func", map_type_not_void_not_function env x)
| `Void_type tok -> R.Case ("Void_type", (* void_type *) token env tok)
and map_type_not_void (env : env) (x : CST.type_not_void) =
match x with
| `Func_type_opt_null_type (v1, v2) ->
R.Case
( "Func_type_opt_null_type",
let v1 = map_function_type env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
| `Type_not_void_not_func x ->
R.Case ("Type_not_void_not_func", map_type_not_void_not_function env x)
and map_type_not_void_not_function (env : env)
(x : CST.type_not_void_not_function) =
match x with
| `Type_name_opt_type_args_opt_null_type (v1, v2, v3) ->
R.Case
( "Type_name_opt_type_args_opt_null_type",
let v1 = map_type_name env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_arguments env x))
| None -> R.Option None
in
let v3 =
match v3 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ] )
| `Func_buil_id_opt_null_type (v1, v2) ->
R.Case
( "Func_buil_id_opt_null_type",
let v1 = (* "Function" *) token env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
and map_type_parameter (env : env) ((v1, v2, v3, v4) : CST.type_parameter) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
let v3 =
match v3 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
let v4 =
match v4 with
| Some x -> R.Option (Some (map_type_bound env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4 ]
and map_type_parameters (env : env) ((v1, v2, v3, v4) : CST.type_parameters) =
let v1 = (* "<" *) token env v1 in
let v2 = map_type_parameter env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_type_parameter env v2 in
R.Tuple [ v1; v2 ])
v3)
in
let v4 = (* ">" *) token env v4 in
R.Tuple [ v1; v2; v3; v4 ]
and map_type_test (env : env) ((v1, v2) : CST.type_test) =
let v1 = map_is_operator env v1 in
let v2 = map_type_not_void env v2 in
R.Tuple [ v1; v2 ]
and map_typed_identifier (env : env) ((v1, v2) : CST.typed_identifier) =
let v1 = map_type_ env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ]
and map_unary_expression (env : env) (x : CST.unary_expression) =
match x with
| `Post_exp x -> R.Case ("Post_exp", map_postfix_expression env x)
| `Un_exp_ x -> R.Case ("Un_exp_", map_unary_expression_ env x)
and map_unary_expression_ (env : env) (x : CST.unary_expression_) =
match x with
| `Prefix_op_un_exp (v1, v2) ->
R.Case
( "Prefix_op_un_exp",
let v1 = map_prefix_operator env v1 in
let v2 = map_unary_expression env v2 in
R.Tuple [ v1; v2 ] )
| `Await_exp (v1, v2) ->
R.Case
( "Await_exp",
let v1 = (* "await" *) token env v1 in
let v2 = map_unary_expression env v2 in
R.Tuple [ v1; v2 ] )
| `Choice_minus_op_super (v1, v2) ->
R.Case
( "Choice_minus_op_super",
let v1 =
match v1 with
| `Minus_op tok -> R.Case ("Minus_op", (* "-" *) token env tok)
| `Tilde_op tok -> R.Case ("Tilde_op", (* "~" *) token env tok)
in
let v2 = (* "super" *) token env v2 in
R.Tuple [ v1; v2 ] )
| `Incr_op_assi_exp (v1, v2) ->
R.Case
( "Incr_op_assi_exp",
let v1 = (* increment_operator *) token env v1 in
let v2 = map_assignable_expression env v2 in
R.Tuple [ v1; v2 ] )
and map_unconditional_assignable_selector (env : env)
(x : CST.unconditional_assignable_selector) =
match x with
| `Opt_null_type_LBRACK_exp_RBRACK (v1, v2, v3, v4) ->
R.Case
( "Opt_null_type_LBRACK_exp_RBRACK",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "?" *) token env tok))
| None -> R.Option None
in
let v2 = (* "[" *) token env v2 in
let v3 = map_argument env v3 in
let v4 = (* "]" *) token env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `DOT_id x -> R.Case ("DOT_id", map_type_dot_identifier env x)
and map_uri (env : env) (x : CST.uri) = map_string_literal env x
and map_var_or_type (env : env) (x : CST.var_or_type) =
match x with
| `Type x -> R.Case ("Type", map_type_ env x)
| `Infe_type_opt_type (v1, v2) ->
R.Case
( "Infe_type_opt_type",
let v1 = (* "var" *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
let _map_throws (env : env) ((v1, v2, v3) : CST.throws) =
let v1 = (* "throws" *) token env v1 in
let v2 = map_type_ env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_type_ env v2 in
R.Tuple [ v1; v2 ])
v3)
in
R.Tuple [ v1; v2; v3 ]
let _map_explicit_constructor_invocation (env : env)
((v1, v2, v3) : CST.explicit_constructor_invocation) =
let v1 =
match v1 with
| `Opt_type_args_choice_this (v1, v2) ->
R.Case
( "Opt_type_args_choice_this",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_arguments env x))
| None -> R.Option None
in
let v2 =
match v2 with
| `This tok -> R.Case ("This", (* "this" *) token env tok)
| `Super tok -> R.Case ("Super", (* "super" *) token env tok)
in
R.Tuple [ v1; v2 ] )
| `Choice_choice_id_DOT_opt_type_args_super (v1, v2, v3, v4) ->
R.Case
( "Choice_choice_id_DOT_opt_type_args_super",
let v1 =
match v1 with
| `Choice_id x -> R.Case ("Choice_id", map_ambiguous_name env x)
| `Prim x -> R.Case ("Prim", map_primary env x)
in
let v2 = (* "." *) token env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_arguments env x))
| None -> R.Option None
in
let v4 = (* "super" *) token env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
in
let v2 = map_arguments env v2 in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ]
let map_part_directive (env : env) ((v1, v2, v3, v4) : CST.part_directive) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* "part" *) token env v2 in
let v3 = map_uri env v3 in
let v4 = map_semicolon env v4 in
R.Tuple [ v1; v2; v3; v4 ]
let map_initialized_identifier_list (env : env)
((v1, v2) : CST.initialized_identifier_list) =
let v1 = map_initialized_identifier env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_initialized_identifier env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
let map_type_not_void_list (env : env) ((v1, v2) : CST.type_not_void_list) =
let v1 = map_type_not_void env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_type_not_void env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
let map_enum_constant (env : env) ((v1, v2) : CST.enum_constant) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ]
let map_anon_choice_type_be0da33 (env : env) (x : CST.anon_choice_type_be0da33)
=
match x with
| `Type x -> R.Case ("Type", map_type_ env x)
| `Infe_type tok -> R.Case ("Infe_type", (* "var" *) token env tok)
let map_setter_signature (env : env)
((v1, v2, v3, v4, v5) : CST.setter_signature) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v2 = (* "set" *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
let v4 = map_formal_parameter_part env v4 in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_native env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
let map_operator_signature (env : env)
((v1, v2, v3, v4, v5) : CST.operator_signature) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v2 = (* "operator" *) token env v2 in
let v3 =
match v3 with
| `TILDE tok -> R.Case ("TILDE", (* "~" *) token env tok)
| `Bin_op x -> R.Case ("Bin_op", map_binary_operator env x)
| `LBRACKRBRACK tok -> R.Case ("LBRACKRBRACK", (* "[]" *) token env tok)
| `LBRACKRBRACKEQ tok -> R.Case ("LBRACKRBRACKEQ", (* "[]=" *) token env tok)
in
let v4 = map_formal_parameter_list env v4 in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_native env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5 ]
let _map_wildcard_bounds (env : env) (x : CST.wildcard_bounds) =
match x with
| `Extends_type (v1, v2) ->
R.Case
( "Extends_type",
let v1 = (* "extends" *) token env v1 in
let v2 = map_type_ env v2 in
R.Tuple [ v1; v2 ] )
| `Super_type (v1, v2) ->
R.Case
( "Super_type",
let v1 = (* "super" *) token env v1 in
let v2 = map_type_ env v2 in
R.Tuple [ v1; v2 ] )
let map_type_alias (env : env) (x : CST.type_alias) =
match x with
| `Type_type_name_opt_type_params_EQ_func_type_SEMI (v1, v2, v3, v4, v5, v6)
->
R.Case
( "Type_type_name_opt_type_params_EQ_func_type_SEMI",
let v1 = (* "typedef" *) token env v1 in
let v2 = map_type_name env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v4 = (* "=" *) token env v4 in
let v5 = map_function_type env v5 in
let v6 = (* ";" *) token env v6 in
R.Tuple [ v1; v2; v3; v4; v5; v6 ] )
| `Type_opt_type_type_name_formal_param_part_SEMI (v1, v2, v3, v4, v5) ->
R.Case
( "Type_opt_type_type_name_formal_param_part_SEMI",
let v1 = (* "typedef" *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v3 = map_type_name env v3 in
let v4 = map_formal_parameter_part env v4 in
let v5 = (* ";" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
let map_dimensions (env : env) (xs : CST.dimensions) =
R.List
(List.map
(fun (v1, v2, v3) ->
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* "[" *) token env v2 in
let v3 = (* "]" *) token env v3 in
R.Tuple [ v1; v2; v3 ])
xs)
let map_library_name (env : env) ((v1, v2, v3, v4) : CST.library_name) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* "library" *) token env v2 in
let v3 = map_dotted_identifier_list env v3 in
let v4 = map_semicolon env v4 in
R.Tuple [ v1; v2; v3; v4 ]
let map_getter_signature (env : env) ((v1, v2, v3, v4) : CST.getter_signature) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v2 = (* "get" *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
let v4 =
match v4 with
| Some x -> R.Option (Some (map_native env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4 ]
let map_constant_constructor_signature (env : env)
((v1, v2, v3) : CST.constant_constructor_signature) =
let v1 = (* const_builtin *) token env v1 in
let v2 = map_qualified env v2 in
let v3 = map_formal_parameter_list env v3 in
R.Tuple [ v1; v2; v3 ]
let map_factory_constructor_signature (env : env)
((v1, v2, v3, v4) : CST.factory_constructor_signature) =
let v1 = (* "factory" *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
let v3 = R.List (List.map (map_type_dot_identifier env) v3) in
let v4 = map_formal_parameter_list env v4 in
R.Tuple [ v1; v2; v3; v4 ]
let map_part_of_directive (env : env)
((v1, v2, v3, v4, v5) : CST.part_of_directive) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* "part" *) token env v2 in
let v3 = (* "of" *) token env v3 in
let v4 =
match v4 with
| `Dotted_id_list x ->
R.Case ("Dotted_id_list", map_dotted_identifier_list env x)
| `Uri x -> R.Case ("Uri", map_uri env x)
in
let v5 = map_semicolon env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ]
let map_static_final_declaration (env : env)
((v1, v2, v3) : CST.static_final_declaration) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 = (* "=" *) token env v2 in
let v3 = map_argument env v3 in
R.Tuple [ v1; v2; v3 ]
let map_constructor_signature (env : env)
((v1, v2, v3) : CST.constructor_signature) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_dot_identifier env x))
| None -> R.Option None
in
let v3 = map_formal_parameter_list env v3 in
R.Tuple [ v1; v2; v3 ]
let map_uri_test (env : env) ((v1, v2) : CST.uri_test) =
let v1 = map_dotted_identifier_list env v1 in
let v2 =
match v2 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "==" *) token env v1 in
let v2 = map_uri env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
R.Tuple [ v1; v2 ]
let map_interfaces (env : env) ((v1, v2) : CST.interfaces) =
let v1 = (* "implements" *) token env v1 in
let v2 = map_type_not_void_list env v2 in
R.Tuple [ v1; v2 ]
let map_mixins (env : env) ((v1, v2) : CST.mixins) =
let v1 = (* "with" *) token env v1 in
let v2 = map_type_not_void_list env v2 in
R.Tuple [ v1; v2 ]
let map_enum_body (env : env) ((v1, v2, v3, v4, v5) : CST.enum_body) =
let v1 = (* "{" *) token env v1 in
let v2 = map_enum_constant env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_enum_constant env v2 in
R.Tuple [ v1; v2 ])
v3)
in
let v4 =
match v4 with
| Some tok -> R.Option (Some ((* "," *) token env tok))
| None -> R.Option None
in
let v5 = (* "}" *) token env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ]
let _map_method_declarator (env : env) ((v1, v2, v3) : CST.method_declarator) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 = map_formal_parameter_list env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_dimensions env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ]
let map_static_final_declaration_list (env : env)
((v1, v2) : CST.static_final_declaration_list) =
let v1 = map_static_final_declaration env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_static_final_declaration env v2 in
R.Tuple [ v1; v2 ])
v2)
in
R.Tuple [ v1; v2 ]
let map_initializer_list_entry (env : env) (x : CST.initializer_list_entry) =
match x with
| `Super_opt_DOT_qual_args (v1, v2, v3) ->
R.Case
( "Super_opt_DOT_qual_args",
let v1 = (* "super" *) token env v1 in
let v2 =
match v2 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "." *) token env v1 in
let v2 = map_qualified env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v3 = map_arguments env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Field_init (v1, v2, v3, v4, v5) ->
R.Case
( "Field_init",
let v1 =
match v1 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "this" *) token env v1 in
let v2 = (* "." *) token env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
let v3 = (* "=" *) token env v3 in
let v4 = map_real_expression env v4 in
let v5 = R.List (List.map (map_cascade_section env) v5) in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Asse x -> R.Case ("Asse", map_assertion env x)
let map_configuration_uri (env : env)
((v1, v2, v3, v4, v5) : CST.configuration_uri) =
let v1 = (* "if" *) token env v1 in
let v2 = (* "(" *) token env v2 in
let v3 = map_uri_test env v3 in
let v4 = (* ")" *) token env v4 in
let v5 = map_uri env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ]
let map_mixin_application (env : env) ((v1, v2, v3) : CST.mixin_application) =
let v1 = map_type_not_void env v1 in
let v2 = map_mixins env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_interfaces env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ]
let map_superclass (env : env) (x : CST.superclass) =
match x with
| `Extends_type_not_void_opt_mixins (v1, v2, v3) ->
R.Case
( "Extends_type_not_void_opt_mixins",
let v1 = (* "extends" *) token env v1 in
let v2 = map_type_not_void env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_mixins env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3 ] )
| `Mixins x -> R.Case ("Mixins", map_mixins env x)
let map_enum_declaration (env : env) ((v1, v2, v3) : CST.enum_declaration) =
let v1 = (* "enum" *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
let v3 = map_enum_body env v3 in
R.Tuple [ v1; v2; v3 ]
let map_initializers (env : env) ((v1, v2, v3) : CST.initializers) =
let v1 = (* ":" *) token env v1 in
let v2 = map_initializer_list_entry env v2 in
let v3 =
R.List
(List.map
(fun (v1, v2) ->
let v1 = (* "," *) token env v1 in
let v2 = map_initializer_list_entry env v2 in
R.Tuple [ v1; v2 ])
v3)
in
R.Tuple [ v1; v2; v3 ]
let map_configurable_uri (env : env) ((v1, v2) : CST.configurable_uri) =
let v1 = map_uri env v1 in
let v2 = R.List (List.map (map_configuration_uri env) v2) in
R.Tuple [ v1; v2 ]
let map_mixin_application_class (env : env)
((v1, v2, v3, v4, v5) : CST.mixin_application_class) =
let v1 = (* pattern [a-zA-Z_$][\w$]* *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v3 = (* "=" *) token env v3 in
let v4 = map_mixin_application env v4 in
let v5 = map_semicolon env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ]
let map_method_signature (env : env) (x : CST.method_signature) =
match x with
| `Cons_sign_opt_initis (v1, v2) ->
R.Case
( "Cons_sign_opt_initis",
let v1 = map_constructor_signature env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_initializers env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
| `Fact_cons_sign x ->
R.Case ("Fact_cons_sign", map_factory_constructor_signature env x)
| `Opt_static_choice_func_sign (v1, v2) ->
R.Case
( "Opt_static_choice_func_sign",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "static" *) token env tok))
| None -> R.Option None
in
let v2 =
match v2 with
| `Func_sign x -> R.Case ("Func_sign", map_function_signature env x)
| `Getter_sign x ->
R.Case ("Getter_sign", map_getter_signature env x)
| `Setter_sign x ->
R.Case ("Setter_sign", map_setter_signature env x)
in
R.Tuple [ v1; v2 ] )
| `Op_sign x -> R.Case ("Op_sign", map_operator_signature env x)
let map_anon_choice_redi_3f8cf96 (env : env) (x : CST.anon_choice_redi_3f8cf96)
=
match x with
| `Redi (v1, v2, v3, v4) ->
R.Case
( "Redi",
let v1 = (* ":" *) token env v1 in
let v2 = (* "this" *) token env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_dot_identifier env x))
| None -> R.Option None
in
let v4 = map_arguments env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Initis x -> R.Case ("Initis", map_initializers env x)
let map_import_specification (env : env) (x : CST.import_specification) =
match x with
| `Import_conf_uri_opt_as_id_rep_comb_semi (v1, v2, v3, v4, v5) ->
R.Case
( "Import_conf_uri_opt_as_id_rep_comb_semi",
let v1 = (* "import" *) token env v1 in
let v2 = map_configurable_uri env v2 in
let v3 =
match v3 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "as" *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v4 = R.List (List.map (map_combinator env) v4) in
let v5 = map_semicolon env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Import_uri_defe_as_id_rep_comb_semi (v1, v2, v3, v4, v5, v6, v7) ->
R.Case
( "Import_uri_defe_as_id_rep_comb_semi",
let v1 = (* "import" *) token env v1 in
let v2 = map_uri env v2 in
let v3 = (* "deferred" *) token env v3 in
let v4 = (* "as" *) token env v4 in
let v5 = (* pattern [a-zA-Z_$][\w$]* *) token env v5 in
let v6 = R.List (List.map (map_combinator env) v6) in
let v7 = map_semicolon env v7 in
R.Tuple [ v1; v2; v3; v4; v5; v6; v7 ] )
let map_declaration_ (env : env) (x : CST.declaration_) =
match x with
| `Cst_cons_sign_opt_choice_redi (v1, v2) ->
R.Case
( "Cst_cons_sign_opt_choice_redi",
let v1 = map_constant_constructor_signature env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_anon_choice_redi_3f8cf96 env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
| `Cons_sign_opt_choice_redi (v1, v2) ->
R.Case
( "Cons_sign_opt_choice_redi",
let v1 = map_constructor_signature env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_anon_choice_redi_3f8cf96 env x))
| None -> R.Option None
in
R.Tuple [ v1; v2 ] )
| `Exte_opt_const_buil_fact_cons_sign (v1, v2, v3) ->
R.Case
( "Exte_opt_const_buil_fact_cons_sign",
let v1 = (* "external" *) token env v1 in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* const_builtin *) token env tok))
| None -> R.Option None
in
let v3 = map_factory_constructor_signature env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_const_buil_fact_cons_sign_native (v1, v2, v3) ->
R.Case
( "Opt_const_buil_fact_cons_sign_native",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* const_builtin *) token env tok))
| None -> R.Option None
in
let v2 = map_factory_constructor_signature env v2 in
let v3 = map_native env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Exte_cst_cons_sign (v1, v2) ->
R.Case
( "Exte_cst_cons_sign",
let v1 = (* "external" *) token env v1 in
let v2 = map_constant_constructor_signature env v2 in
R.Tuple [ v1; v2 ] )
| `Redi_fact_cons_sign (v1, v2, v3, v4, v5, v6, v7, v8) ->
R.Case
( "Redi_fact_cons_sign",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* const_builtin *) token env tok))
| None -> R.Option None
in
let v2 = (* "factory" *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
let v4 = R.List (List.map (map_type_dot_identifier env) v4) in
let v5 = map_formal_parameter_list env v5 in
let v6 = (* "=" *) token env v6 in
let v7 = map_type_not_void env v7 in
let v8 =
match v8 with
| Some x -> R.Option (Some (map_type_dot_identifier env x))
| None -> R.Option None
in
R.Tuple [ v1; v2; v3; v4; v5; v6; v7; v8 ] )
| `Exte_cons_sign (v1, v2) ->
R.Case
( "Exte_cons_sign",
let v1 = (* "external" *) token env v1 in
let v2 = map_constructor_signature env v2 in
R.Tuple [ v1; v2 ] )
| `Opt_exte_buil_opt_static_getter_sign (v1, v2, v3) ->
R.Case
( "Opt_exte_buil_opt_static_getter_sign",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "external" *) token env tok))
| None -> R.Option None
in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "static" *) token env tok))
| None -> R.Option None
in
let v3 = map_getter_signature env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_exte_and_static_setter_sign (v1, v2) ->
R.Case
( "Opt_exte_and_static_setter_sign",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_external_and_static env x))
| None -> R.Option None
in
let v2 = map_setter_signature env v2 in
R.Tuple [ v1; v2 ] )
| `Opt_exte_op_sign (v1, v2) ->
R.Case
( "Opt_exte_op_sign",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "external" *) token env tok))
| None -> R.Option None
in
let v2 = map_operator_signature env v2 in
R.Tuple [ v1; v2 ] )
| `Opt_exte_and_static_func_sign (v1, v2) ->
R.Case
( "Opt_exte_and_static_func_sign",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_external_and_static env x))
| None -> R.Option None
in
let v2 = map_function_signature env v2 in
R.Tuple [ v1; v2 ] )
| `Static_func_sign (v1, v2) ->
R.Case
( "Static_func_sign",
let v1 = (* "static" *) token env v1 in
let v2 = map_function_signature env v2 in
R.Tuple [ v1; v2 ] )
| `Static_choice_final_or_const_opt_type_static_final_decl_list (v1, v2) ->
R.Case
( "Static_choice_final_or_const_opt_type_static_final_decl_list",
let v1 = (* "static" *) token env v1 in
let v2 =
match v2 with
| `Final_or_const_opt_type_static_final_decl_list (v1, v2, v3) ->
R.Case
( "Final_or_const_opt_type_static_final_decl_list",
let v1 = map_final_or_const env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v3 = map_static_final_declaration_list env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Late_buil_choice_final_buil_opt_type_init_id_list (v1, v2) ->
R.Case
( "Late_buil_choice_final_buil_opt_type_init_id_list",
let v1 = (* "late" *) token env v1 in
let v2 =
match v2 with
| `Final_buil_opt_type_init_id_list (v1, v2, v3) ->
R.Case
( "Final_buil_opt_type_init_id_list",
let v1 = (* final_builtin *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v3 = map_initialized_identifier_list env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Choice_type_init_id_list (v1, v2) ->
R.Case
( "Choice_type_init_id_list",
let v1 = map_anon_choice_type_be0da33 env v1 in
let v2 = map_initialized_identifier_list env v2 in
R.Tuple [ v1; v2 ] )
in
R.Tuple [ v1; v2 ] )
| `Choice_type_init_id_list (v1, v2) ->
R.Case
( "Choice_type_init_id_list",
let v1 = map_anon_choice_type_be0da33 env v1 in
let v2 = map_initialized_identifier_list env v2 in
R.Tuple [ v1; v2 ] )
in
R.Tuple [ v1; v2 ] )
| `Cova_choice_late_buil_choice_final_buil_opt_type_id_list_ (v1, v2) ->
R.Case
( "Cova_choice_late_buil_choice_final_buil_opt_type_id_list_",
let v1 = (* "covariant" *) token env v1 in
let v2 =
match v2 with
| `Late_buil_choice_final_buil_opt_type_id_list_ (v1, v2) ->
R.Case
( "Late_buil_choice_final_buil_opt_type_id_list_",
let v1 = (* "late" *) token env v1 in
let v2 =
match v2 with
| `Final_buil_opt_type_id_list_ (v1, v2, v3) ->
R.Case
( "Final_buil_opt_type_id_list_",
let v1 = (* final_builtin *) token env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v3 = map_identifier_list_ env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Choice_type_init_id_list (v1, v2) ->
R.Case
( "Choice_type_init_id_list",
let v1 = map_anon_choice_type_be0da33 env v1 in
let v2 = map_initialized_identifier_list env v2 in
R.Tuple [ v1; v2 ] )
in
R.Tuple [ v1; v2 ] )
| `Choice_type_init_id_list (v1, v2) ->
R.Case
( "Choice_type_init_id_list",
let v1 = map_anon_choice_type_be0da33 env v1 in
let v2 = map_initialized_identifier_list env v2 in
R.Tuple [ v1; v2 ] )
in
R.Tuple [ v1; v2 ] )
| `Opt_late_buil_final_buil_opt_type_init_id_list (v1, v2, v3, v4) ->
R.Case
( "Opt_late_buil_final_buil_opt_type_init_id_list",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "late" *) token env tok))
| None -> R.Option None
in
let v2 = (* final_builtin *) token env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v4 = map_initialized_identifier_list env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Opt_late_buil_var_or_type_init_id_list (v1, v2, v3) ->
R.Case
( "Opt_late_buil_var_or_type_init_id_list",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "late" *) token env tok))
| None -> R.Option None
in
let v2 = map_var_or_type env v2 in
let v3 = map_initialized_identifier_list env v3 in
R.Tuple [ v1; v2; v3 ] )
let map_class_member_definition (env : env) (x : CST.class_member_definition) =
match x with
| `Decl__semi (v1, v2) ->
R.Case
( "Decl__semi",
let v1 = map_declaration_ env v1 in
let v2 = map_semicolon env v2 in
R.Tuple [ v1; v2 ] )
| `Meth_sign_func_body (v1, v2) ->
R.Case
( "Meth_sign_func_body",
let v1 = map_method_signature env v1 in
let v2 = map_function_body env v2 in
R.Tuple [ v1; v2 ] )
let map_extension_body (env : env) ((v1, v2, v3) : CST.extension_body) =
let v1 = (* "{" *) token env v1 in
let v2 =
R.List
(List.map
(fun x ->
match x with
| `Opt_meta_decl__semi (v1, v2, v3) ->
R.Case
( "Opt_meta_decl__semi",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = map_declaration_ env v2 in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_meta_meth_sign_func_body (v1, v2, v3) ->
R.Case
( "Opt_meta_meth_sign_func_body",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = map_method_signature env v2 in
let v3 = map_function_body env v3 in
R.Tuple [ v1; v2; v3 ] ))
v2)
in
let v3 = (* "}" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_import_or_export (env : env) (x : CST.import_or_export) =
match x with
| `Libr_import (v1, v2) ->
R.Case
( "Libr_import",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = map_import_specification env v2 in
R.Tuple [ v1; v2 ] )
| `Libr_export (v1, v2, v3, v4, v5) ->
R.Case
( "Libr_export",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = (* "export" *) token env v2 in
let v3 = map_configurable_uri env v3 in
let v4 = R.List (List.map (map_combinator env) v4) in
let v5 = map_semicolon env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
let map_class_body (env : env) ((v1, v2, v3) : CST.class_body) =
let v1 = (* "{" *) token env v1 in
let v2 =
R.List
(List.map
(fun (v1, v2) ->
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = map_class_member_definition env v2 in
R.Tuple [ v1; v2 ])
v2)
in
let v3 = (* "}" *) token env v3 in
R.Tuple [ v1; v2; v3 ]
let map_extension_declaration (env : env) (x : CST.extension_declaration) =
match x with
| `Exte_opt_id_opt_type_params_on_type_exte_body (v1, v2, v3, v4, v5, v6) ->
R.Case
( "Exte_opt_id_opt_type_params_on_type_exte_body",
let v1 = (* "extension" *) token env v1 in
let v2 =
match v2 with
| Some tok ->
R.Option (Some ((* pattern [a-zA-Z_$][\w$]* *) token env tok))
| None -> R.Option None
in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v4 = (* "on" *) token env v4 in
let v5 = map_type_ env v5 in
let v6 = map_extension_body env v6 in
R.Tuple [ v1; v2; v3; v4; v5; v6 ] )
let map_class_definition (env : env) (x : CST.class_definition) =
match x with
| `Opt_abst_class_id_opt_type_params_opt_supe_opt_inters_class_body
(v1, v2, v3, v4, v5, v6, v7) ->
R.Case
( "Opt_abst_class_id_opt_type_params_opt_supe_opt_inters_class_body",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "abstract" *) token env tok))
| None -> R.Option None
in
let v2 = (* "class" *) token env v2 in
let v3 = (* pattern [a-zA-Z_$][\w$]* *) token env v3 in
let v4 =
match v4 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_superclass env x))
| None -> R.Option None
in
let v6 =
match v6 with
| Some x -> R.Option (Some (map_interfaces env x))
| None -> R.Option None
in
let v7 = map_class_body env v7 in
R.Tuple [ v1; v2; v3; v4; v5; v6; v7 ] )
| `Opt_meta_opt_abst_class_mixin_app_class (v1, v2, v3, v4) ->
R.Case
( "Opt_meta_opt_abst_class_mixin_app_class",
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 =
match v2 with
| Some tok -> R.Option (Some ((* "abstract" *) token env tok))
| None -> R.Option None
in
let v3 = (* "class" *) token env v3 in
let v4 = map_mixin_application_class env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
let map_top_level_definition (env : env) (x : CST.top_level_definition) =
match x with
| `Class_defi x -> R.Case ("Class_defi", map_class_definition env x)
| `Enum_decl x -> R.Case ("Enum_decl", map_enum_declaration env x)
| `Exte_decl x -> R.Case ("Exte_decl", map_extension_declaration env x)
| `Mixin_decl (v1, v2, v3, v4, v5, v6) ->
R.Case
( "Mixin_decl",
let v1 = (* "mixin" *) token env v1 in
let v2 = (* pattern [a-zA-Z_$][\w$]* *) token env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_parameters env x))
| None -> R.Option None
in
let v4 =
match v4 with
| Some (v1, v2) ->
R.Option
(Some
(let v1 = (* "on" *) token env v1 in
let v2 = map_type_not_void_list env v2 in
R.Tuple [ v1; v2 ]))
| None -> R.Option None
in
let v5 =
match v5 with
| Some x -> R.Option (Some (map_interfaces env x))
| None -> R.Option None
in
let v6 = map_class_body env v6 in
R.Tuple [ v1; v2; v3; v4; v5; v6 ] )
| `Type_alias x -> R.Case ("Type_alias", map_type_alias env x)
| `Opt_exte_buil_func_sign_semi (v1, v2, v3) ->
R.Case
( "Opt_exte_buil_func_sign_semi",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "external" *) token env tok))
| None -> R.Option None
in
let v2 = map_function_signature env v2 in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_exte_buil_getter_sign_semi (v1, v2, v3) ->
R.Case
( "Opt_exte_buil_getter_sign_semi",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "external" *) token env tok))
| None -> R.Option None
in
let v2 = map_getter_signature env v2 in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Opt_exte_buil_setter_sign_semi (v1, v2, v3) ->
R.Case
( "Opt_exte_buil_setter_sign_semi",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "external" *) token env tok))
| None -> R.Option None
in
let v2 = map_setter_signature env v2 in
let v3 = map_semicolon env v3 in
R.Tuple [ v1; v2; v3 ] )
| `Func_sign_func_body x ->
R.Case ("Func_sign_func_body", map_lambda_expression env x)
| `Getter_sign_func_body (v1, v2) ->
R.Case
( "Getter_sign_func_body",
let v1 = map_getter_signature env v1 in
let v2 = map_function_body env v2 in
R.Tuple [ v1; v2 ] )
| `Setter_sign_func_body (v1, v2) ->
R.Case
( "Setter_sign_func_body",
let v1 = map_setter_signature env v1 in
let v2 = map_function_body env v2 in
R.Tuple [ v1; v2 ] )
| `Choice_final_buil_opt_type_static_final_decl_list_semi (v1, v2, v3, v4) ->
R.Case
( "Choice_final_buil_opt_type_static_final_decl_list_semi",
let v1 = map_final_or_const env v1 in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v3 = map_static_final_declaration_list env v3 in
let v4 = map_semicolon env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
| `Late_buil_final_buil_opt_type_init_id_list_semi (v1, v2, v3, v4, v5) ->
R.Case
( "Late_buil_final_buil_opt_type_init_id_list_semi",
let v1 = (* "late" *) token env v1 in
let v2 = (* final_builtin *) token env v2 in
let v3 =
match v3 with
| Some x -> R.Option (Some (map_type_ env x))
| None -> R.Option None
in
let v4 = map_initialized_identifier_list env v4 in
let v5 = map_semicolon env v5 in
R.Tuple [ v1; v2; v3; v4; v5 ] )
| `Opt_late_buil_choice_type_init_id_list_semi (v1, v2, v3, v4) ->
R.Case
( "Opt_late_buil_choice_type_init_id_list_semi",
let v1 =
match v1 with
| Some tok -> R.Option (Some ((* "late" *) token env tok))
| None -> R.Option None
in
let v2 = map_var_or_type env v2 in
let v3 = map_initialized_identifier_list env v3 in
let v4 = map_semicolon env v4 in
R.Tuple [ v1; v2; v3; v4 ] )
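(* [map_program] maps the root [program] node of the Dart CST: an optional
   script tag, an optional library name, the import/export, part and part-of
   directives, the top-level definitions (each with optional metadata), and
   any trailing top-level statements. *)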
let map_program (env : env) ((v1, v2, v3, v4, v5, v6, v7) : CST.program) =
let v1 =
match v1 with
| Some x -> R.Option (Some (map_script_tag env x))
| None -> R.Option None
in
let v2 =
match v2 with
| Some x -> R.Option (Some (map_library_name env x))
| None -> R.Option None
in
let v3 = R.List (List.map (map_import_or_export env) v3) in
let v4 = R.List (List.map (map_part_directive env) v4) in
let v5 = R.List (List.map (map_part_of_directive env) v5) in
let v6 =
R.List
(List.map
(fun (v1, v2) ->
let v1 =
match v1 with
| Some x -> R.Option (Some (map_metadata env x))
| None -> R.Option None
in
let v2 = map_top_level_definition env v2 in
R.Tuple [ v1; v2 ])
v6)
in
let v7 = R.List (List.map (map_statement env) v7) in
R.Tuple [ v1; v2; v3; v4; v5; v6; v7 ]
(*****************************************************************************)
(* Entry point *)
(*****************************************************************************)
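(* Descriptive note, not in the original file: [parse] runs the tree-sitter
   Dart parser on a file and maps the resulting CST with [map_program],
   wrapping everything through [H.wrap_parser]; [parse_pattern] does the same
   for an in-memory pattern string, with an empty position-conversion table
   since there is no file on disk. A hypothetical call would look like
   [let _res = parse "main.dart"], where "main.dart" is just a placeholder. *)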
let parse file =
H.wrap_parser
(fun () -> Tree_sitter_dart.Parse.file file)
(fun cst ->
let env = { H.file; conv = H.line_col_to_pos file; extra = () } in
let x = map_program env cst in
[ x |> G.raw |> G.exprstmt ])
let parse_pattern str =
H.wrap_parser
(fun () -> Tree_sitter_dart.Parse.string str)
(fun cst ->
let file = "<pattern>" in
let env = { H.file; conv = Hashtbl.create 0; extra = () } in
let e = map_program env cst in
(* this will be simplified if needed in Parse_pattern.normalize_any *)
Raw e)
| (* Yoann Padioleau
*
* Copyright (c) 2022 R2C
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation, with the
* special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
* LICENSE for more details.
*) |
verb.mli |
module Make
(Dest : OutManager.S) (Image : ImageManager.S)
(Scan : Latexscan.S) : Misc.Rien
| (***********************************************************************)
(* *)
(* HEVEA *)
(* *)
(* Luc Maranget, projet Moscova, INRIA Rocquencourt *)
(* *)
(* Copyright 2001 Institut National de Recherche en Informatique et *)
(* Automatique. Distributed only by permission. *)
(* *)
(* $Id: verb.mli,v 1.9 2001-05-25 12:37:32 maranget Exp $ *)
(***********************************************************************)
exception VError of string |
main.mli | (** Tezos Protocol Implementation - Protocol Signature Instance
This module is the entrypoint to the protocol for shells and other
embedders. This signature is an instance of
{{!Tezos_protocol_environment_sigs.V3.T.Updater.PROTOCOL} the
[Updater.PROTOCOL] signature} from the
{{:https://tezos.gitlab.io/shell/the_big_picture.html#the-economic-protocol-environment-and-compiler}
Protocol Environment}.
Each Protocol depends on a version of the Protocol Environment. For the
currently developed protocol, this is normally the latest version. You can
see {{!Tezos_protocol_environment_sigs} the full list of versions here}.
For details on how Protocol and Environment interact, see
{{:https://tezos.gitlab.io/shell/the_big_picture.html} this overview}.
*)
type validation_mode =
| Application of {
block_header : Alpha_context.Block_header.t;
fitness : Alpha_context.Fitness.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
predecessor_round : Alpha_context.Round.t;
predecessor_level : Alpha_context.Level.t;
}
| Partial_application of {
block_header : Alpha_context.Block_header.t;
fitness : Alpha_context.Fitness.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
(* Mempool only *)
| Partial_construction of {
predecessor : Block_hash.t;
predecessor_fitness : Fitness.t;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
(* Baker only *)
| Full_construction of {
predecessor : Block_hash.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
protocol_data_contents : Alpha_context.Block_header.contents;
level : Int32.t;
round : Alpha_context.Round.t;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
type validation_state = {
mode : validation_mode;
chain_id : Chain_id.t;
ctxt : Alpha_context.t;
op_count : int;
migration_balance_updates : Alpha_context.Receipt.balance_updates;
liquidity_baking_escape_ema : Int32.t;
implicit_operations_results :
Apply_results.packed_successful_manager_operation_result list;
}
type operation_data = Alpha_context.packed_protocol_data
type operation = Alpha_context.packed_operation = {
shell : Operation.shell_header;
protocol_data : operation_data;
}
val init_cache : Context.t -> Context.t Lwt.t
(** [check_manager_signature validation_state op raw_operation]
The function starts by retrieving the public key hash [pkh] of the manager
operation. In case the operation is batched, the function also checks that
the sources are all the same.
Once the [pkh] is retrieved, the function looks for its associated public
key. For that, the manager operation is inspected to check if it contains
a public key revelation. If not, the public key is searched in the context.
@return [Error Invalid_signature] if the signature check fails
@return [Error Unrevealed_manager_key] if the manager has not yet been
revealed
@return [Error Failure "get_manager_key"] if the key is not found in the
context
@return [Error Inconsistent_sources] if the operations in a batch are not
from the same manager *)
val check_manager_signature :
validation_state ->
'b Alpha_context.Kind.manager Alpha_context.contents_list ->
'a Alpha_context.operation ->
unit tzresult Lwt.t
(** [precheck_manager validation_state op] returns [()] if the manager operation
    [op] is solvable, and returns an error otherwise. An operation is solvable if
    it is well-formed and can pay the fees to be included in a block with either
    a success or a failure status.
    This function uses [Apply.precheck_manager_contents_list] but discards the
    resulting context and balance updates *)
val precheck_manager :
validation_state ->
'a Alpha_context.Kind.manager Alpha_context.contents_list ->
unit tzresult Lwt.t
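(* Illustrative only: a hedged sketch of how a caller might combine the two
   checks above when validating a manager operation. The names [vs],
   [contents] and [op] are hypothetical and not part of this interface.

     let validate_manager_operation vs contents op =
       check_manager_signature vs contents op >>=? fun () ->
       precheck_manager vs contents
*)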
include
Updater.PROTOCOL
with type block_header_data = Alpha_context.Block_header.protocol_data
and type block_header_metadata = Apply_results.block_metadata
and type block_header = Alpha_context.Block_header.t
and type operation_data := operation_data
and type operation_receipt = Apply_results.packed_operation_metadata
and type operation := operation
and type validation_state := validation_state
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
dune | (* -*- tuareg -*- *)
let bisect_ppx =
match Sys.getenv "BISECT_ENABLE" with
| "yes" -> "bisect_ppx"
| _ -> ""
| exception _ -> ""
let () = Jbuild_plugin.V1.send @@ {|
(library
(name ppx_lwt)
(public_name lwt_ppx)
(synopsis "Lwt PPX syntax extension")
(modules ppx_lwt)
(libraries ppxlib)
(ppx_runtime_libraries lwt)
(kind ppx_rewriter)
(preprocess (pps ppxlib.metaquot|} ^ bisect_ppx ^ {|))
(flags (:standard -w +A-4)))
|}
| (* -*- tuareg -*- *)
|
michelson_v1_printer.mli | open Protocol
open Alpha_context
open Tezos_micheline
val print_expr : Format.formatter -> Script_repr.expr -> unit
val print_expr_unwrapped : Format.formatter -> Script_repr.expr -> unit
val print_execution_trace :
Format.formatter ->
(Script.location * Gas.t * (Script.expr * string option) list) list ->
unit
val print_big_map_diff : Format.formatter -> Contract.big_map_diff -> unit
(** Insert the type map returned by the typechecker as comments in a
printable Micheline AST. *)
val inject_types :
Script_tc_errors.type_map ->
Michelson_v1_parser.parsed ->
Micheline_printer.node
(** Unexpand the macros and produce the result of parsing an
intermediate pretty printed source. Useful when working with
contracts extracted from the blockchain and not local files. *)
val unparse_toplevel :
?type_map:Script_tc_errors.type_map ->
Script.expr ->
Michelson_v1_parser.parsed
val unparse_expression : Script.expr -> Michelson_v1_parser.parsed
(** Unexpand the macros and produce the result of parsing an
    intermediate pretty printed source. Works on generic trees, for
programs that fail to be converted to a specific script version. *)
val unparse_invalid : string Micheline.canonical -> Michelson_v1_parser.parsed
val ocaml_constructor_of_prim : Michelson_v1_primitives.prim -> string
val micheline_string_of_expression : zero_loc:bool -> Script.expr -> string
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
migrate_402_405.ml | (**pp -syntax camlp5o $(IMPORT_OCAMLCFLAGS) *)
module SRC = All_ast.Ast_4_02
module DST = All_ast.Ast_4_05
let src_loc_none =
let open SRC.Lexing in
let open SRC.Location in
let loc = {
pos_fname = "";
pos_lnum = 1;
pos_bol = 0;
pos_cnum = -1;
} in
{ loc_start = loc; loc_end = loc; loc_ghost = true }
let dst_loc_none =
let open DST.Lexing in
let open DST.Location in
let loc = {
pos_fname = "";
pos_lnum = 1;
pos_bol = 0;
pos_cnum = -1;
} in
{ loc_start = loc; loc_end = loc; loc_ghost = true }
let wrap_loc inh v =
let loc = match inh with
None -> src_loc_none
| Some loc -> loc in
let open SRC.Location in
{ txt = v ; loc = loc }
let map_loc f v =
let open SRC.Location in
{ txt = f v.txt ; loc = v.loc }
let unwrap_loc v = v.SRC.Location.txt
let _migrate_list subrw0 __dt__ __inh__ l =
List.map (subrw0 __dt__ __inh__) l
let migrate_label_arg_label : 'a -> 'b -> SRC.Asttypes.label -> DST.Asttypes.arg_label =
fun __dt__ __inh__ x ->
if x <> "" then
if x.[0] = '?' then DST.Asttypes.Optional (String.sub x 1 (String.length x - 1))
else DST.Asttypes.Labelled x
else
DST.Asttypes.Nolabel
let migrate_Asttypes_constant_Parsetree_constant :
'a -> 'b -> SRC.Asttypes.constant -> DST.Parsetree.constant =
fun __dt__ __inh__ -> function
| SRC.Asttypes.Const_int x0 ->
DST.Parsetree.Pconst_integer (string_of_int x0, None)
| SRC.Asttypes.Const_char x0 ->
DST.Parsetree.Pconst_char x0
| SRC.Asttypes.Const_string (x0,x1) ->
DST.Parsetree.Pconst_string
(x0, x1)
| SRC.Asttypes.Const_float x0 ->
DST.Parsetree.Pconst_float (x0, None)
| SRC.Asttypes.Const_int32 x0 ->
DST.Parsetree.Pconst_integer (Int32.to_string x0, Some 'l')
| SRC.Asttypes.Const_int64 x0 ->
DST.Parsetree.Pconst_integer (Int64.to_string x0, Some 'L')
| SRC.Asttypes.Const_nativeint x0 ->
DST.Parsetree.Pconst_integer (Nativeint.to_string x0, Some 'n')
type lexing_position = [%import: All_ast.Ast_4_02.Lexing.position]
and location_t = [%import: All_ast.Ast_4_02.Location.t
[@with Lexing.position := lexing_position]
]
and 'a location_loc = [%import: 'a All_ast.Ast_4_02.Location.loc
[@with t := location_t]
]
and longident_t = [%import: All_ast.Ast_4_02.Longident.t
[@with t := longident_t]
]
and label = [%import: All_ast.Ast_4_02.Asttypes.label
]
and closed_flag = [%import: All_ast.Ast_4_02.Asttypes.closed_flag]
and rec_flag = [%import: All_ast.Ast_4_02.Asttypes.rec_flag]
and direction_flag = [%import: All_ast.Ast_4_02.Asttypes.direction_flag]
and private_flag = [%import: All_ast.Ast_4_02.Asttypes.private_flag]
and mutable_flag = [%import: All_ast.Ast_4_02.Asttypes.mutable_flag]
and virtual_flag = [%import: All_ast.Ast_4_02.Asttypes.virtual_flag]
and override_flag = [%import: All_ast.Ast_4_02.Asttypes.override_flag]
and variance = [%import: All_ast.Ast_4_02.Asttypes.variance]
and constant = [%import: All_ast.Ast_4_02.Asttypes.constant]
and location_stack = [%import: All_ast.Ast_4_02.Parsetree.location_stack
[@with Location.t := location_t]
]
and attribute = [%import: All_ast.Ast_4_02.Parsetree.attribute
[@with Asttypes.loc := location_loc]
]
and extension = [%import: All_ast.Ast_4_02.Parsetree.extension
[@with Asttypes.loc := location_loc]
]
and attributes = [%import: All_ast.Ast_4_02.Parsetree.attributes]
and payload = [%import: All_ast.Ast_4_02.Parsetree.payload]
and core_type = [%import: All_ast.Ast_4_02.Parsetree.core_type
[@with Location.t := location_t]
]
and core_type_desc = [%import: All_ast.Ast_4_02.Parsetree.core_type_desc
[@with Longident.t := longident_t
; Asttypes.loc := location_loc
; Asttypes.closed_flag := closed_flag
; Asttypes.label := label
]
]
and package_type = [%import: All_ast.Ast_4_02.Parsetree.package_type
[@with Longident.t := longident_t
; Asttypes.loc := location_loc
]
]
and row_field = [%import: All_ast.Ast_4_02.Parsetree.row_field
[@with
Asttypes.label := label
]
]
and pattern = [%import: All_ast.Ast_4_02.Parsetree.pattern
[@with Location.t := location_t]
]
and pattern_desc = [%import: All_ast.Ast_4_02.Parsetree.pattern_desc
[@with Longident.t := longident_t ;
Asttypes.loc := location_loc ;
Asttypes.constant := constant ;
Asttypes.label := label ;
Asttypes.closed_flag := closed_flag
]
]
and expression = [%import: All_ast.Ast_4_02.Parsetree.expression
[@with Location.t := location_t]
]
and expression_desc = [%import: All_ast.Ast_4_02.Parsetree.expression_desc
[@with Longident.t := longident_t ;
Asttypes.loc := location_loc ;
Asttypes.label := label ;
Asttypes.rec_flag := rec_flag ;
Asttypes.override_flag := override_flag ;
Asttypes.direction_flag := direction_flag ;
Asttypes.constant := constant
]
]
and case = [%import: All_ast.Ast_4_02.Parsetree.case]
and value_description = [%import: All_ast.Ast_4_02.Parsetree.value_description
[@with Location.t := location_t ;
Asttypes.loc := location_loc
]
]
and type_declaration = [%import: All_ast.Ast_4_02.Parsetree.type_declaration
[@with Location.t := location_t
; Asttypes.loc := location_loc
; Asttypes.variance := variance
; Asttypes.private_flag := private_flag
]
]
and type_kind = [%import: All_ast.Ast_4_02.Parsetree.type_kind]
and label_declaration = [%import: All_ast.Ast_4_02.Parsetree.label_declaration
[@with Location.t := location_t
; Asttypes.loc := location_loc
; Asttypes.mutable_flag := mutable_flag
]
]
and constructor_declaration = [%import: All_ast.Ast_4_02.Parsetree.constructor_declaration
[@with Location.t := location_t ;
Asttypes.loc := location_loc
]
]
and type_extension = [%import: All_ast.Ast_4_02.Parsetree.type_extension
[@with Longident.t := longident_t
; Asttypes.loc := location_loc
; Asttypes.variance := variance
; Asttypes.private_flag := private_flag
]
]
and extension_constructor = [%import: All_ast.Ast_4_02.Parsetree.extension_constructor
[@with Location.t := location_t ;
Asttypes.loc := location_loc
]
]
and extension_constructor_kind = [%import: All_ast.Ast_4_02.Parsetree.extension_constructor_kind
[@with Longident.t := longident_t ;
Asttypes.loc := location_loc
]
]
and class_type = [%import: All_ast.Ast_4_02.Parsetree.class_type
[@with Location.t := location_t]
]
and class_type_desc = [%import: All_ast.Ast_4_02.Parsetree.class_type_desc
[@with Longident.t := longident_t
; Asttypes.loc := location_loc
; Asttypes.label := label
]
]
and class_signature = [%import: All_ast.Ast_4_02.Parsetree.class_signature]
and class_type_field = [%import: All_ast.Ast_4_02.Parsetree.class_type_field
[@with Location.t := location_t]
]
and class_type_field_desc = [%import: All_ast.Ast_4_02.Parsetree.class_type_field_desc
[@with
Asttypes.private_flag := private_flag
; Asttypes.mutable_flag := mutable_flag
; Asttypes.virtual_flag := virtual_flag
]
]
and 'a class_infos = [%import: 'a All_ast.Ast_4_02.Parsetree.class_infos
[@with Location.t := location_t
; Asttypes.loc := location_loc
; Asttypes.variance := variance
; Asttypes.virtual_flag := virtual_flag
]
]
and class_description = [%import: All_ast.Ast_4_02.Parsetree.class_description]
and class_type_declaration = [%import: All_ast.Ast_4_02.Parsetree.class_type_declaration]
and class_expr = [%import: All_ast.Ast_4_02.Parsetree.class_expr
[@with Location.t := location_t]
]
and class_expr_desc = [%import: All_ast.Ast_4_02.Parsetree.class_expr_desc
[@with Longident.t := longident_t
; Asttypes.loc := location_loc
; Asttypes.rec_flag := rec_flag
; Asttypes.label := label
]
]
and class_structure = [%import: All_ast.Ast_4_02.Parsetree.class_structure]
and class_field = [%import: All_ast.Ast_4_02.Parsetree.class_field
[@with Location.t := location_t]
]
and class_field_desc = [%import: All_ast.Ast_4_02.Parsetree.class_field_desc
[@with Asttypes.loc := location_loc
; Asttypes.override_flag := override_flag
; Asttypes.mutable_flag := mutable_flag
; Asttypes.private_flag := private_flag
]
]
and class_field_kind = [%import: All_ast.Ast_4_02.Parsetree.class_field_kind
[@with Asttypes.override_flag := override_flag
]
]
and class_declaration = [%import: All_ast.Ast_4_02.Parsetree.class_declaration]
and module_type = [%import: All_ast.Ast_4_02.Parsetree.module_type
[@with Location.t := location_t]
]
and module_type_desc = [%import: All_ast.Ast_4_02.Parsetree.module_type_desc
[@with Longident.t := longident_t ;
Asttypes.loc := location_loc
]
]
and signature = [%import: All_ast.Ast_4_02.Parsetree.signature]
and signature_item = [%import: All_ast.Ast_4_02.Parsetree.signature_item
[@with Location.t := location_t]
]
and signature_item_desc = [%import: All_ast.Ast_4_02.Parsetree.signature_item_desc]
and module_declaration = [%import: All_ast.Ast_4_02.Parsetree.module_declaration
[@with Location.t := location_t ;
Asttypes.loc := location_loc
]
]
and module_type_declaration = [%import: All_ast.Ast_4_02.Parsetree.module_type_declaration
[@with Location.t := location_t ;
Asttypes.loc := location_loc
]
]
and open_description = [%import: All_ast.Ast_4_02.Parsetree.open_description
[@with Location.t := location_t
; Longident.t := longident_t
; Asttypes.loc := location_loc
; Asttypes.override_flag := override_flag
]
]
and 'a include_infos = [%import: 'a All_ast.Ast_4_02.Parsetree.include_infos
[@with Location.t := location_t]
]
and include_description = [%import: All_ast.Ast_4_02.Parsetree.include_description]
and include_declaration = [%import: All_ast.Ast_4_02.Parsetree.include_declaration]
and with_constraint = [%import: All_ast.Ast_4_02.Parsetree.with_constraint
[@with Longident.t := longident_t
; Asttypes.loc := location_loc
]
]
and module_expr = [%import: All_ast.Ast_4_02.Parsetree.module_expr
[@with Location.t := location_t]
]
and module_expr_desc = [%import: All_ast.Ast_4_02.Parsetree.module_expr_desc
[@with Longident.t := longident_t ;
Asttypes.loc := location_loc
]
]
and structure = [%import: All_ast.Ast_4_02.Parsetree.structure]
and structure_item = [%import: All_ast.Ast_4_02.Parsetree.structure_item
[@with Location.t := location_t]
]
and structure_item_desc = [%import: All_ast.Ast_4_02.Parsetree.structure_item_desc
[@with Location.t := location_t
; Longident.t := longident_t
; Asttypes.loc := location_loc
; Asttypes.rec_flag := rec_flag
]
]
and value_binding = [%import: All_ast.Ast_4_02.Parsetree.value_binding
[@with Location.t := location_t
; Asttypes.loc := location_loc
]
]
and module_binding = [%import: All_ast.Ast_4_02.Parsetree.module_binding
[@with Location.t := location_t
; Asttypes.loc := location_loc
]
]
and out_ident = [%import: All_ast.Ast_4_02.Outcometree.out_ident]
and out_value = [%import: All_ast.Ast_4_02.Outcometree.out_value]
and out_type = [%import: All_ast.Ast_4_02.Outcometree.out_type]
and out_variant = [%import: All_ast.Ast_4_02.Outcometree.out_variant]
and out_class_type = [%import: All_ast.Ast_4_02.Outcometree.out_class_type]
and out_class_sig_item = [%import: All_ast.Ast_4_02.Outcometree.out_class_sig_item]
and out_module_type = [%import: All_ast.Ast_4_02.Outcometree.out_module_type]
and out_sig_item = [%import: All_ast.Ast_4_02.Outcometree.out_sig_item]
and out_type_decl = [%import: All_ast.Ast_4_02.Outcometree.out_type_decl
[@with Asttypes.private_flag := private_flag]
]
and out_extension_constructor = [%import: All_ast.Ast_4_02.Outcometree.out_extension_constructor
[@with Asttypes.private_flag := private_flag]
]
and out_type_extension = [%import: All_ast.Ast_4_02.Outcometree.out_type_extension
[@with Asttypes.private_flag := private_flag]
]
and out_rec_status = [%import: All_ast.Ast_4_02.Outcometree.out_rec_status]
and out_ext_status = [%import: All_ast.Ast_4_02.Outcometree.out_ext_status]
and out_phrase = [%import: All_ast.Ast_4_02.Outcometree.out_phrase]
[@@deriving migrate
{ inherit_type = [%typ: location_t option]
; dispatch_type = dispatch_table_t
; dispatch_table_constructor = make_dt
; default_dispatchers = [
{
srcmod = All_ast.Ast_4_02
; dstmod = DST
; types = [
lexing_position
; location_t
; location_loc
; longident_t
]
}
; {
srcmod = All_ast.Ast_4_02.Asttypes
; dstmod = DST.Asttypes
; types = [
closed_flag
; direction_flag
; label
; mutable_flag
; override_flag
; private_flag
; rec_flag
; variance
; virtual_flag
]
}
; {
srcmod = All_ast.Ast_4_02.Parsetree
; dstmod = DST.Parsetree
; types = [
attribute
; attributes
; case
; class_declaration
; class_description
; class_expr
; class_field
; class_field_kind
; class_infos
; class_signature
; class_structure
; class_type
; class_type_declaration
; class_type_field
; core_type
; expression
; extension
; extension_constructor
; include_declaration
; include_description
; include_infos
; label_declaration
; location_stack
; module_binding
; module_declaration
; module_expr
; module_expr_desc
; module_type
; module_type_declaration
; module_type_desc
; open_description
; package_type
; pattern
; pattern_desc
; payload
; row_field
; signature
; signature_item
; structure
; structure_item
; type_declaration
; type_extension
; type_kind
; value_binding
; value_description
; with_constraint
]
; inherit_code = {
class_expr = Some pcl_loc
; class_field = Some pcf_loc
; class_infos = Some pci_loc
; class_type_field = Some pctf_loc
; class_type = Some pcty_loc
; core_type = Some ptyp_loc
; expression = Some pexp_loc
; extension_constructor = Some pext_loc
; include_infos = Some pincl_loc
; label_declaration = Some pld_loc
; module_binding = Some pmb_loc
; module_declaration = Some pmd_loc
; module_expr = Some pmod_loc
; module_type_declaration = Some pmtd_loc
; module_type = Some pmty_loc
; open_description = Some popen_loc
; pattern = Some ppat_loc
; signature_item = Some psig_loc
; structure_item = Some pstr_loc
; type_declaration = Some ptype_loc
; value_binding = Some pvb_loc
; value_description = Some pval_loc
}
}
; {
srcmod = All_ast.Ast_4_02.Outcometree
; dstmod = DST.Outcometree
; types = [
out_class_sig_item
; out_class_type
; out_extension_constructor
; out_ext_status
; out_ident
; out_module_type
; out_phrase
; out_rec_status
; out_type
; out_type_extension
; out_value
]
}
]
; dispatchers = {
migrate_option = {
srctype = [%typ: 'a option]
; dsttype = [%typ: 'b option]
; subs = [ ([%typ: 'a], [%typ: 'b]) ]
; code = (fun subrw __dt__ __inh__ x -> Option.map (subrw __dt__ __inh__) x)
}
; migrate_constant = {
srctype = [%typ: constant]
; dsttype = [%typ: DST.Parsetree.constant]
; code = migrate_Asttypes_constant_Parsetree_constant
}
; migrate_list = {
srctype = [%typ: 'a list]
; dsttype = [%typ: 'b list]
; code = _migrate_list
; subs = [ ([%typ: 'a], [%typ: 'b]) ]
}
; migrate_core_type_desc = {
srctype = [%typ: core_type_desc]
; dsttype = [%typ: DST.Parsetree.core_type_desc]
; custom_branches_code = function
Ptyp_arrow (v_0, v_1, v_2) ->
let open DST.Parsetree in
Ptyp_arrow
(migrate_label_arg_label __dt__ __inh__ v_0,
__dt__.migrate_core_type __dt__ __inh__ v_1,
__dt__.migrate_core_type __dt__ __inh__ v_2)
| Ptyp_object (v_0, v_1) ->
let open DST.Parsetree in
Ptyp_object
(List.map (fun (v_0, v_1, v_2) ->
(__dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v_0),
__dt__.migrate_attributes __dt__ __inh__ v_1,
__dt__.migrate_core_type __dt__ __inh__ v_2)) v_0,
__dt__.migrate_closed_flag __dt__ __inh__ v_1)
| Ptyp_poly (v_0, v_1) ->
let open DST.Parsetree in
Ptyp_poly
(List.map (fun v_0 ->
__dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v_0)) v_0,
__dt__.migrate_core_type __dt__ __inh__ v_1)
}
; migrate_expression_desc = {
srctype = [%typ: expression_desc]
; dsttype = [%typ: DST.Parsetree.expression_desc]
; custom_branches_code = function
Pexp_fun (v_0, v_1, v_2, v_3) ->
let open DST.Parsetree in
Pexp_fun
(migrate_label_arg_label __dt__ __inh__ v_0,
__dt__.migrate_option __dt__.migrate_expression __dt__ __inh__ v_1,
__dt__.migrate_pattern __dt__ __inh__ v_2,
__dt__.migrate_expression __dt__ __inh__ v_3)
| Pexp_apply (v_0, v_1) ->
let open DST.Parsetree in
Pexp_apply
(__dt__.migrate_expression __dt__ __inh__ v_0,
List.map (fun (v_0, v_1) ->
migrate_label_arg_label __dt__ __inh__ v_0,
__dt__.migrate_expression __dt__ __inh__ v_1) v_1)
| Pexp_send (v_0, v_1) ->
let open DST.Parsetree in
Pexp_send
(__dt__.migrate_expression __dt__ __inh__ v_0,
__dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v_1))
| Pexp_newtype (v_0, v_1) ->
let open DST.Parsetree in
Pexp_newtype
(__dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v_0),
__dt__.migrate_expression __dt__ __inh__ v_1)
}
; migrate_constructor_declaration = {
srctype = [%typ: constructor_declaration]
; dsttype = [%typ: DST.Parsetree.constructor_declaration]
; inherit_code = Some pcd_loc
; skip_fields = [ pcd_args ]
; custom_fields_code = {
pcd_args =
DST.Parsetree.Pcstr_tuple (List.map (__dt__.migrate_core_type __dt__ __inh__) pcd_args)
}
}
; migrate_extension_constructor_kind = {
srctype = [%typ: extension_constructor_kind]
; dsttype = [%typ: DST.Parsetree.extension_constructor_kind]
; custom_branches_code = function
Pext_decl (v_0, v_1) ->
let open DST.Parsetree in
Pext_decl
(DST.Parsetree.Pcstr_tuple (List.map (__dt__.migrate_core_type __dt__ __inh__) v_0),
Option.map (__dt__.migrate_core_type __dt__ __inh__) v_1)
}
; migrate_class_type_desc = {
srctype = [%typ: class_type_desc]
; dsttype = [%typ: DST.Parsetree.class_type_desc]
; custom_branches_code = function
Pcty_arrow (v_0, v_1, v_2) ->
let open DST.Parsetree in
Pcty_arrow
(migrate_label_arg_label __dt__ __inh__ v_0,
__dt__.migrate_core_type __dt__ __inh__ v_1,
__dt__.migrate_class_type __dt__ __inh__ v_2)
}
; migrate_class_type_field_desc = {
srctype = [%typ: class_type_field_desc]
; dsttype = [%typ: DST.Parsetree.class_type_field_desc]
; custom_branches_code = function
| Pctf_val v_0 ->
let open DST.Parsetree in
Pctf_val
((fun (v_0, v_1, v_2, v_3) ->
__dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v_0),
__dt__.migrate_mutable_flag __dt__ __inh__ v_1,
__dt__.migrate_virtual_flag __dt__ __inh__ v_2,
__dt__.migrate_core_type __dt__ __inh__ v_3)
v_0)
| Pctf_method v_0 ->
let open DST.Parsetree in
Pctf_method
((fun (v_0, v_1, v_2, v_3) ->
__dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v_0),
__dt__.migrate_private_flag __dt__ __inh__ v_1,
__dt__.migrate_virtual_flag __dt__ __inh__ v_2,
__dt__.migrate_core_type __dt__ __inh__ v_3) v_0)
}
; migrate_class_expr_desc = {
srctype = [%typ: class_expr_desc]
; dsttype = [%typ: DST.Parsetree.class_expr_desc]
; custom_branches_code = function
Pcl_fun (v_0, v_1, v_2, v_3) ->
let open DST.Parsetree in
Pcl_fun
(migrate_label_arg_label __dt__ __inh__ v_0,
Option.map (__dt__.migrate_expression __dt__ __inh__) v_1,
__dt__.migrate_pattern __dt__ __inh__ v_2,
__dt__.migrate_class_expr __dt__ __inh__ v_3)
| Pcl_apply (v_0, v_1) ->
let open DST.Parsetree in
Pcl_apply
(__dt__.migrate_class_expr __dt__ __inh__ v_0,
List.map (fun (v_0, v_1) ->
migrate_label_arg_label __dt__ __inh__ v_0,
__dt__.migrate_expression __dt__ __inh__ v_1)
v_1)
}
; migrate_class_field_desc = {
srctype = [%typ: class_field_desc]
; dsttype = [%typ: DST.Parsetree.class_field_desc]
; custom_branches_code = function
Pcf_inherit (v_0, v_1, v_2) ->
let open DST.Parsetree in
Pcf_inherit
(__dt__.migrate_override_flag __dt__ __inh__ v_0,
__dt__.migrate_class_expr __dt__ __inh__ v_1,
Option.map (fun v -> __dt__.migrate_location_loc (fun _ _ x -> x) __dt__ __inh__ (wrap_loc __inh__ v)) v_2)
}
; migrate_signature_item_desc = {
srctype = [%typ: signature_item_desc]
; dsttype = [%typ: DST.Parsetree.signature_item_desc]
; custom_branches_code = function
Psig_type v_0 ->
let is_nonrec (attr,_) = attr.txt = "nonrec" in
let rf = if (List.exists (fun td ->
List.exists is_nonrec td.ptype_attributes) v_0) then
DST.Asttypes.Nonrecursive
else DST.Asttypes.Recursive in
let open DST.Parsetree in
Psig_type
(rf, List.map (__dt__.migrate_type_declaration __dt__ __inh__) v_0)
}
; migrate_structure_item_desc = {
srctype = [%typ: structure_item_desc]
; dsttype = [%typ: DST.Parsetree.structure_item_desc]
; custom_branches_code = function
Pstr_type v_0 ->
let is_nonrec (attr,_) = attr.txt = "nonrec" in
let rf = if (List.exists (fun td ->
List.exists is_nonrec td.ptype_attributes) v_0) then
DST.Asttypes.Nonrecursive
else DST.Asttypes.Recursive in
let open DST.Parsetree in
Pstr_type
(rf, List.map (__dt__.migrate_type_declaration __dt__ __inh__) v_0)
}
; migrate_printer = {
srctype = [%typ: (Format.formatter -> unit)]
; dsttype = [%typ: (Format.formatter -> unit)]
; code = fun _ _ x -> x
}
; migrate_exn = {
srctype = [%typ: exn]
; dsttype = [%typ: exn]
; code = fun _ _ x -> x
}
; migrate_out_variant = {
srctype = [%typ: out_variant]
; dsttype = [%typ: DST.Outcometree.out_variant]
; custom_branches_code = function
| Ovar_name (v_0, v_1) ->
let open DST.Outcometree in
Ovar_typ
(Otyp_constr
(__dt__.migrate_out_ident __dt__ __inh__ v_0,
List.map (__dt__.migrate_out_type __dt__ __inh__) v_1))
}
; migrate_out_sig_item = {
srctype = [%typ: out_sig_item]
; dsttype = [%typ: DST.Outcometree.out_sig_item]
; custom_branches_code = function
Osig_value (v_0, v_1, v_2) ->
let open DST.Outcometree in
Osig_value
{oval_name = v_0
; oval_type = __dt__.migrate_out_type __dt__ __inh__ v_1
; oval_prims = v_2
; oval_attributes = []}
}
; migrate_out_type_decl = {
srctype = [%typ: out_type_decl]
; dsttype = [%typ: DST.Outcometree.out_type_decl]
; custom_fields_code = {
otype_immediate = false
; otype_unboxed = false
}
}
}
}
]
| (**pp -syntax camlp5o $(IMPORT_OCAMLCFLAGS) *)
module SRC = All_ast.Ast_4_02 |
receipt_repr.mli | (** Places where tez can be found in the ledger's state. *)
type balance =
| Contract of Contract_repr.t
| Block_fees
| Deposits of Signature.Public_key_hash.t
| Nonce_revelation_rewards
| Double_signing_evidence_rewards
| Endorsing_rewards
| Baking_rewards
| Baking_bonuses
| Storage_fees
| Double_signing_punishments
| Lost_endorsing_rewards of Signature.Public_key_hash.t * bool * bool
| Liquidity_baking_subsidies
| Burned
| Commitments of Blinded_public_key_hash.t
| Bootstrap
| Invoice
| Initial_commitments
| Minted
| Frozen_bonds of Contract_repr.t * Bond_id_repr.t
| Tx_rollup_rejection_punishments
| Tx_rollup_rejection_rewards
(** Compares two balances. *)
val compare_balance : balance -> balance -> int
(** A credit or debit of tez to a balance. *)
type balance_update = Debited of Tez_repr.t | Credited of Tez_repr.t
(** An origin of a balance update *)
type update_origin =
| Block_application (** Update from a block application *)
| Protocol_migration (** Update from a protocol migration *)
| Subsidy (** Update from an inflationary subsidy *)
| Simulation (** Simulation of an operation **)
(** Compares two origins. *)
val compare_update_origin : update_origin -> update_origin -> int
(** A list of balance updates. Duplicates may happen.
For example, an entry of the form [(Rewards (b,c), Credited am, ...)]
indicates that the balance of frozen rewards has been increased by [am]
for baker [b] and cycle [c]. *)
type balance_updates = (balance * balance_update * update_origin) list
(** The property [Json.destruct (Json.construct balance_updates) = balance_updates]
does not always hold for [balance_updates_encoding] when [balance_updates]
contains entries of the form [(_, _ Tez_repr.zero, _)]. This is because the
[balance_update] [(_ Tez_repr.zero)] always decodes into [(Credited Tez_repr.zero)]. *)
val balance_updates_encoding : balance_updates Data_encoding.t
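(* Illustrative example of the caveat above, with hypothetical values: an
   entry [(Block_fees, Debited Tez_repr.zero, Block_application)] encodes and
   then decodes back as [(Block_fees, Credited Tez_repr.zero,
   Block_application)], so the round-trip property fails for that list even
   though both values denote the same zero-valued update. *)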
(** Group updates by (balance x origin), and remove zero-valued balances. *)
val group_balance_updates : balance_updates -> balance_updates tzresult
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* Copyright (c) 2020 Metastate AG <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
config.ml |
open Mirage
let ssh_key =
let doc = Key.Arg.info ~doc:"Private ssh key (rsa:<seed> or ed25519:<b64-key>)." ["ssh-key"] in
Key.(create "ssh-key" Arg.(opt (some string) None doc))
let authenticator =
let doc = Key.Arg.info ~doc:"Authenticator for SSH." ["authenticator"] in
Key.(create "authenticator" Arg.(opt (some string) None doc))
let tls_authenticator =
(* this will not look the same in the help printout *)
let doc = "TLS host authenticator. See git_http in lib/mirage/mirage.mli for a description of the format."
in
let doc = Key.Arg.info ~doc ["tls-authenticator"] in
Key.(create "tls-authenticator" Arg.(opt (some string) None doc))
let net = generic_stackv4v6 default_network
(* set ~tls to false to get a plain-http server *)
let http_srv = cohttp_server @@ conduit_direct ~tls:true net
(* TODO: make it possible to enable and disable schemes without providing a port *)
let http_port =
let doc = Key.Arg.info ~doc:"Listening HTTP port." ["http"] ~docv:"PORT" in
Key.(create "http_port" Arg.(opt (some int) None doc))
let https_port =
let doc = Key.Arg.info ~doc:"Listening HTTPS port." ["https"] ~docv:"PORT" in
Key.(create "https_port" Arg.(opt (some int) None doc))
let certs = generic_kv_ro ~key:Key.(value @@ kv_ro ()) "tls"
let zap = generic_kv_ro ~key:Key.(value @@ kv_ro ()) "caldavzap"
let admin_password =
let doc = Key.Arg.info ~doc:"Password for the administrator." ["admin-password"] ~docv:"STRING" in
Key.(create "admin_password" Arg.(opt (some string) None doc))
let remote =
let doc = Key.Arg.info ~doc:"Location of calendar data. Use suffix #foo to specify branch 'foo'." [ "remote" ] ~docv:"REMOTE" in
Key.(create "remote" Arg.(required string doc))
let tofu =
let doc = Key.Arg.info ~doc:"If a user does not exist, create them and give them a new calendar." [ "tofu" ] in
Key.(create "tofu" Arg.(flag doc))
let hostname =
let doc = Key.Arg.info ~doc:"Hostname to use." [ "host" ] ~docv:"STRING" in
Key.(create "hostname" Arg.(required string doc))
let apple_testable =
let doc = Key.Arg.info ~doc:"Configure the server to use with Apple CCS CalDAVtester." [ "apple-testable" ] in
Key.(create "apple_testable" Arg.(flag doc))
let main =
let direct_dependencies = [
package "uri" ;
package "caldav" ;
package ~min:"0.1.3" "icalendar" ;
package ~min:"0.8.7" "fmt";
package ~min:"0.0.3" "git-kv"
] in
let keys =
[ Key.v http_port ; Key.v https_port ;
Key.v admin_password ; Key.v remote ;
Key.v tofu ; Key.v hostname ;
Key.v apple_testable ]
in
foreign
~packages:direct_dependencies ~keys
"Unikernel.Main" (random @-> pclock @-> git_client @-> kv_ro @-> http @-> kv_ro @-> job)
let git_client =
let dns = generic_dns_client net in
let git = git_happy_eyeballs net dns (generic_happy_eyeballs net dns) in
let tcp = tcpv4v6_of_stackv4v6 net in
merge_git_clients (git_tcp tcp git)
(merge_git_clients (git_ssh ~key:ssh_key ~authenticator tcp git)
(git_http ~authenticator:tls_authenticator tcp git))
let () =
register "caldav" [main $ default_random $ default_posix_clock $ git_client $ certs $ http_srv $ zap ]
| |
mpipeline.ml |
open Std
let {Logger. log} = Logger.for_section "Pipeline"
let time_shift = ref 0.0
let timed_lazy r x =
lazy (
let start = Misc.time_spent () in
let time_shift0 = !time_shift in
let update () =
let delta = Misc.time_spent () -. start in
let shift = !time_shift -. time_shift0 in
time_shift := time_shift0 +. delta;
r := !r +. delta -. shift;
in
match Lazy.force x with
| x -> update (); x
| exception exn -> update (); Std.reraise exn
)
module Cache = struct
let cache = ref []
  (* Values from the configuration that are used as a key for the cache.
     These values should:
     - allow reuse to be maximized; associating a single typechecker instance
       to a filename and directory is natural, but keying also on verbosity
       makes no sense
     - prevent reuse in different environments (if there is a change in
       loadpath, a new typechecker should be produced).
     It would be better to guarantee that the typechecker is well-behaved
     when the loadpath changes (so that we could reuse the same instance and
     let the typechecker figure out which parts of its internal state should
     be invalidated).
     However, we have already had many bugs related to that. There are subtle
     changes in the type checker behavior across the different versions of
     OCaml. It is simpler to create new instances upfront.
  *)
let key config =
Mconfig.(
config.query.filename,
config.query.directory,
config.ocaml,
{config.merlin with log_file = None; log_sections = []}
)
let get config =
let title = "pop_cache" in
let key = key config in
match List.assoc key !cache with
| state ->
cache := (key, state) :: List.remove_assoc key !cache;
log ~title "found entry for this configuration";
state
| exception Not_found ->
log ~title "nothing cached for this configuration";
let state = Mocaml.new_state () in
cache := (key, state) :: List.take_n 5 !cache;
state
end
module Typer = struct
type t = {
errors : exn list lazy_t;
result : Mtyper.result;
}
end
module Ppx = struct
type t = {
config : Mconfig.t;
errors : exn list;
parsetree : Mreader.parsetree;
}
end
type t = {
config : Mconfig.t;
state : Mocaml.typer_state;
raw_source : Msource.t;
source : (Msource.t * Mreader.parsetree option) lazy_t;
reader : (Mreader.result * Mconfig.t) lazy_t;
ppx : Ppx.t lazy_t;
typer : Typer.t lazy_t;
pp_time : float ref;
reader_time : float ref;
ppx_time : float ref;
typer_time : float ref;
error_time : float ref;
}
let raw_source t = t.raw_source
let input_config t = t.config
let input_source t = fst (Lazy.force t.source)
let with_pipeline t f =
Mocaml.with_state t.state @@ fun () ->
Mreader.with_ambient_reader t.config (input_source t) f
let get_lexing_pos t pos =
Msource.get_lexing_pos
(input_source t) ~filename:(Mconfig.filename t.config) pos
let reader t = Lazy.force t.reader
let ppx t = Lazy.force t.ppx
let typer t = Lazy.force t.typer
let reader_config t = (snd (reader t))
let reader_parsetree t = (fst (reader t)).Mreader.parsetree
let reader_comments t = (fst (reader t)).Mreader.comments
let reader_lexer_keywords t = (fst (reader t)).Mreader.lexer_keywords
let reader_lexer_errors t = (fst (reader t)).Mreader.lexer_errors
let reader_parser_errors t = (fst (reader t)).Mreader.parser_errors
let reader_no_labels_for_completion t =
(fst (reader t)).Mreader.no_labels_for_completion
let ppx_parsetree t = (ppx t).Ppx.parsetree
let ppx_errors t = (ppx t).Ppx.errors
let final_config t = (ppx t).Ppx.config
let typer_result t = (typer t).Typer.result
let typer_errors t = Lazy.force (typer t).Typer.errors
let process
?state
?(pp_time=ref 0.0)
?(reader_time=ref 0.0)
?(ppx_time=ref 0.0)
?(typer_time=ref 0.0)
?(error_time=ref 0.0)
?for_completion
config raw_source =
let state = match state with
| None -> Cache.get config
| Some state -> state
in
let source = timed_lazy pp_time (lazy (
match Mconfig.(config.ocaml.pp) with
| None -> raw_source, None
| Some { workdir; workval } ->
let source = Msource.text raw_source in
match
Pparse.apply_pp
~workdir ~filename:Mconfig.(config.query.filename)
~source ~pp:workval
with
| `Source source -> Msource.make source, None
| (`Interface _ | `Implementation _) as ast ->
raw_source, Some ast
)) in
let reader = timed_lazy reader_time (lazy (
let lazy source = source in
let config = Mconfig.normalize config in
Mocaml.setup_reader_config config;
let result = Mreader.parse ?for_completion config source in
result, config
)) in
let ppx = timed_lazy ppx_time (lazy (
let lazy ({Mreader.parsetree; _}, config) = reader in
let caught = ref [] in
Msupport.catch_errors Mconfig.(config.ocaml.warnings) caught @@ fun () ->
let parsetree = Mppx.rewrite config parsetree in
{ Ppx. config; parsetree; errors = !caught }
)) in
let typer = timed_lazy typer_time (lazy (
let lazy { Ppx. config; parsetree; _ } = ppx in
Mocaml.setup_typer_config config;
let result = Mtyper.run config parsetree in
let errors = timed_lazy error_time (lazy (Mtyper.get_errors result)) in
{ Typer. errors; result }
)) in
{ config; state; raw_source; source; reader; ppx; typer;
pp_time; reader_time; ppx_time; typer_time; error_time }
let make config source =
process (Mconfig.normalize config) source
let for_completion position
{config; state; raw_source;
pp_time; reader_time; ppx_time; typer_time; error_time; _} =
process config raw_source ~for_completion:position
~state ~pp_time ~reader_time ~ppx_time ~typer_time ~error_time
let timing_information t = [
"pp" , !(t.pp_time);
"reader" , !(t.reader_time);
"ppx" , !(t.ppx_time);
"typer" , !(t.typer_time);
"error" , !(t.error_time);
]
| |
logs_cli.ml |
open Cmdliner
let strf = Format.asprintf
let level ?env ?docs () =
let vopts =
let doc = "Increase verbosity. Repeatable, but more than twice does
not bring more."
in
Arg.(value & flag_all & info ["v"; "verbose"] ~doc ?docs)
in
let verbosity =
let enum =
[ "warning", None; (* Hack for the option's absent rendering *)
"quiet", Some None;
"error", Some (Some Logs.Error);
"warning", Some (Some Logs.Warning);
"info", Some (Some Logs.Info);
"debug", Some (Some Logs.Debug); ]
in
let log_level = Arg.enum enum in
let enum_alts = Arg.doc_alts_enum List.(tl enum) in
let doc = strf "Be more or less verbose. $(docv) must be %s. Takes over
$(b,-v)." enum_alts
in
Arg.(value & opt log_level None &
info ["verbosity"] ?env ~docv:"LEVEL" ~doc ?docs)
in
let quiet =
let doc = "Be quiet. Takes over $(b,-v) and $(b,--verbosity)." in
Arg.(value & flag & info ["q"; "quiet"] ~doc ?docs)
in
let choose quiet verbosity vopts =
if quiet then None else match verbosity with
| Some verbosity -> verbosity
| None ->
match List.length vopts with
| 0 -> Some Logs.Warning
| 1 -> Some Logs.Info
      | _ -> Some Logs.Debug
in
Term.(const choose $ quiet $ verbosity $ vopts)
(*---------------------------------------------------------------------------
Copyright (c) 2015 The logs programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| (*---------------------------------------------------------------------------
Copyright (c) 2015 The logs programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
logs v0.7.0+dune2
---------------------------------------------------------------------------*) |
dune | ||
state_hash.ml | let random_state_hash = "\076\064\204" (* rng(53): never used... *)
include Blake2B.Make
(Base58)
(struct
let name = "random"
let title = "A random generation state"
let b58check_prefix = random_state_hash
let size = None
end)
let () = Base58.check_encoded_prefix b58check_encoding "rng" 53
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
testVariant.mli | (** Parse in-test variant info *)
module Make : functor
(Var:sig
module Opt:sig
include ParseTag.Opt
val compare : t -> t -> int
end
val info : MiscParser.info
val precision : Precision.t
val variant : Opt.t -> bool
val set_precision : Precision.t ref -> Opt.t -> bool
end) ->
sig
type t = Var.Opt.t
val precision : Precision.t
val variant : t -> bool
end
| (****************************************************************************)
(* the diy toolsuite *)
(* *)
(* Jade Alglave, University College London, UK. *)
(* Luc Maranget, INRIA Paris-Rocquencourt, France. *)
(* *)
(* Copyright 2020-present Institut National de Recherche en Informatique et *)
(* en Automatique and the authors. All rights reserved. *)
(* *)
(* This software is governed by the CeCILL-B license under French law and *)
(* abiding by the rules of distribution of free software. You can use, *)
(* modify and/ or redistribute the software under the terms of the CeCILL-B *)
(* license as circulated by CEA, CNRS and INRIA at the following URL *)
(* "http://www.cecill.info". We also give a copy in LICENSE.txt. *)
(****************************************************************************)
|
dune |
(library
(name vlib)
(virtual_modules vmod)) | |
omake_value_util.mli | (* module type PosSig = *)
(* sig *)
(* val loc_exp_pos : Lm_location.t -> Omake_value_type.pos *)
(* val loc_pos : *)
(* Lm_location.t -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val ast_exp_pos : Omake_ast.exp -> Omake_value_type.pos *)
(* val ir_exp_pos : Omake_ir.exp -> Omake_value_type.pos *)
(* val var_exp_pos : Omake_ir.var -> Omake_value_type.pos *)
(* val string_exp_pos : string -> Omake_value_type.pos *)
(* val value_exp_pos : Omake_value_type.value -> Omake_value_type.pos *)
(* val string_pos : string -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val pos_pos : *)
(* Omake_value_type.pos -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val int_pos : int -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val var_pos : *)
(* Omake_ir.var -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val error_pos : *)
(* Omake_value_type.omake_error -> *)
(* Omake_value_type.pos -> Omake_value_type.pos *)
(* val del_pos : *)
(* (Format.formatter -> unit) -> Lm_location.t -> Omake_value_type.pos *)
(* val del_exp_pos : *)
(* (Format.formatter -> unit) -> *)
(* Omake_value_type.pos -> Omake_value_type.pos *)
(* val loc_of_pos : Omake_value_type.pos -> Lm_location.t *)
(* val pp_print_pos : Format.formatter -> Omake_value_type.pos -> unit *)
(* end *)
val empty_obj : 'a Lm_symbol.SymbolTable.t
val class_sym : Lm_symbol.t
val venv_get_class :
Omake_value_type.t Lm_symbol.SymbolTable.t ->
Omake_value_type.obj Lm_symbol.SymbolTable.t
module ValueCompare :
sig
type t = Omake_value_type.t
val check_simple : Omake_value_type.pos -> Omake_value_type.t -> unit
val check :
Omake_value_type.pos ->
Omake_value_type.t -> Omake_value_type.t
val tag : Omake_value_type.t -> int
val compare : Omake_value_type.t -> Omake_value_type.t -> int
val compare_list :
Omake_value_type.t list -> Omake_value_type.t list -> int
end
module ValueTable :
sig
type key = ValueCompare.t
type 'a t = (ValueCompare.t, 'a) Lm_map.tree
val empty : 'a t
val is_empty : 'a t -> bool
val cardinal : 'a t -> int
val add : 'a t -> key -> 'a -> 'a t
val find : 'a t -> key -> 'a
val remove : 'a t -> key -> 'a t
val mem : 'a t -> key -> bool
val find_key : 'a t -> key -> key option
val iter : (key -> 'a -> unit) -> 'a t -> unit
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t
val fold : ('a -> key -> 'b -> 'a) -> 'a -> 'b t -> 'a
val fold_map : ('a -> key -> 'b -> 'a * 'c) -> 'a -> 'b t -> 'a * 'c t
val forall2 : ('a -> 'b -> bool) -> 'a t -> 'b t -> bool
val forall : (key -> 'a -> bool) -> 'a t -> bool
val exists : (key -> 'a -> bool) -> 'a t -> bool
val find_iter : (key -> 'a -> 'b option) -> 'a t -> 'b option
val isect_mem : 'a t -> (key -> bool) -> 'a t
val choose : 'a t -> key * 'a
val filter_add : 'a t -> key -> ('a option -> 'a) -> 'a t
val filter_remove : 'a t -> key -> ('a -> 'a option) -> 'a t
val replace : 'a t -> key -> ('a -> 'a) -> 'a t
val keys : 'a t -> key list
val data : 'a t -> 'a list
val add_list : 'a t -> (key * 'a) list -> 'a t
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val union : (key -> 'a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t
end
| (* module type PosSig = *)
(* sig *)
(* val loc_exp_pos : Lm_location.t -> Omake_value_type.pos *)
(* val loc_pos : *)
(* Lm_location.t -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val ast_exp_pos : Omake_ast.exp -> Omake_value_type.pos *)
(* val ir_exp_pos : Omake_ir.exp -> Omake_value_type.pos *)
(* val var_exp_pos : Omake_ir.var -> Omake_value_type.pos *)
(* val string_exp_pos : string -> Omake_value_type.pos *)
(* val value_exp_pos : Omake_value_type.value -> Omake_value_type.pos *)
(* val string_pos : string -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val pos_pos : *)
(* Omake_value_type.pos -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val int_pos : int -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val var_pos : *)
(* Omake_ir.var -> Omake_value_type.pos -> Omake_value_type.pos *)
(* val error_pos : *)
(* Omake_value_type.omake_error -> *)
(* Omake_value_type.pos -> Omake_value_type.pos *)
(* val del_pos : *)
(* (Format.formatter -> unit) -> Lm_location.t -> Omake_value_type.pos *)
(* val del_exp_pos : *)
(* (Format.formatter -> unit) -> *)
(* Omake_value_type.pos -> Omake_value_type.pos *)
(* val loc_of_pos : Omake_value_type.pos -> Lm_location.t *)
(* val pp_print_pos : Format.formatter -> Omake_value_type.pos -> unit *)
(* end *)
|
atomic_write_intf.ml |
module type S = sig
include Irmin.ATOMIC_WRITE_STORE
val flush : t -> unit
val clear_keep_generation : t -> unit Lwt.t
end
module type Persistent = sig
include S
val v : ?fresh:bool -> ?readonly:bool -> string -> t Lwt.t
end
module type Sigs = sig
module type S = S
module type Persistent = Persistent
module Make_persistent (_ : Version.S) (K : Irmin.Type.S) (V : Irmin.Hash.S) :
Persistent with type key = K.t and type value = V.t
module Closeable (AW : S) : sig
include
S
with type key = AW.key
and type value = AW.value
and type watch = AW.watch
val make_closeable : AW.t -> t
end
end
| (*
* Copyright (c) 2018-2021 Tarides <[email protected]>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*) |
amendment.ml | open Alpha_context
(** Returns the proposal submitted by the most delegates.
    Returns [None] in case of a tie, if the proposal quorum is below the
    required minimum, or if there are no proposals. *)
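(* Worked example with made-up numbers (not actual protocol constants): with
   [max_vote] = 1000 rolls and [min_proposal_quorum] = 5_00l (i.e. 5%),
   [min_vote_to_pass] = 5_00 * 1000 / 100_00 = 50 rolls. A single leading
   proposal backed by 60 rolls is selected, one backed by 40 rolls is not,
   and two proposals tied at 60 rolls yield [None]. *)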
let select_winning_proposal ctxt =
Vote.get_proposals ctxt
>>=? fun proposals ->
let merge proposal vote winners =
match winners with
| None ->
Some ([proposal], vote)
| Some (winners, winners_vote) as previous ->
if Compare.Int32.(vote = winners_vote) then
Some (proposal :: winners, winners_vote)
else if Compare.Int32.(vote > winners_vote) then Some ([proposal], vote)
else previous
in
match Protocol_hash.Map.fold merge proposals None with
| Some ([proposal], vote) ->
Vote.listing_size ctxt
>>=? fun max_vote ->
let min_proposal_quorum = Constants.min_proposal_quorum ctxt in
let min_vote_to_pass =
Int32.div (Int32.mul min_proposal_quorum max_vote) 100_00l
in
if Compare.Int32.(vote >= min_vote_to_pass) then return_some proposal
else return_none
| _ ->
return_none
(* in case of a tie, let's do nothing. *)
(** A proposal is approved if it has a supermajority and the participation
    reaches the current quorum.
    Supermajority means that the yays represent at least 8/10 of the cast
    votes.
    The participation is the ratio of all received votes, including passes,
    with respect to the number of possible votes.
    The participation EMA (exponential moving average) uses the last
    participation EMA and the current participation.
    The expected quorum is calculated using the last participation EMA, capped
    by the min/max quorum protocol constants. *)
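(* Worked example with made-up numbers (not actual protocol constants): with
   [ballots] = {yay = 500; nay = 100; pass = 50} and [maximum_vote] = 1000,
   we get [casted_votes] = 600, [all_votes] = 650, a supermajority threshold
   of 8 * 600 / 10 = 480 and [participation] = 650 * 100_00 / 1000 = 65_00
   (i.e. 65.00%). With [expected_quorum] = 58_00 the proposal is approved
   (65_00 >= 58_00 and 500 >= 480), and a previous [participation_ema] of
   58_00 is updated to (8 * 58_00 + 2 * 65_00) / 10 = 59_40. *)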
let check_approval_and_update_participation_ema ctxt =
Vote.get_ballots ctxt
>>=? fun ballots ->
Vote.listing_size ctxt
>>=? fun maximum_vote ->
Vote.get_participation_ema ctxt
>>=? fun participation_ema ->
Vote.get_current_quorum ctxt
>>=? fun expected_quorum ->
  (* Note on overflows: considering a maximum of 8e8 tokens, with a roll size
     as small as 1e3, there is a maximum of 8e5 rolls and thus votes.
     In [participation] an Int64 is used because in the worst case [all_votes]
     is 8e5 and, after the multiplication by 100_00, it reaches 8e9, which
     would overflow a signed Int32 (whose maximum is about 2e9). *)
let casted_votes = Int32.add ballots.yay ballots.nay in
let all_votes = Int32.add casted_votes ballots.pass in
let supermajority = Int32.div (Int32.mul 8l casted_votes) 10l in
let participation =
(* in centile of percentage *)
Int64.(
to_int32 (div (mul (of_int32 all_votes) 100_00L) (of_int32 maximum_vote)))
in
let outcome =
Compare.Int32.(
participation >= expected_quorum && ballots.yay >= supermajority)
in
let new_participation_ema =
Int32.(div (add (mul 8l participation_ema) (mul 2l participation)) 10l)
in
Vote.set_participation_ema ctxt new_participation_ema
>>=? fun ctxt -> return (ctxt, outcome)
(** Implements the state machine of the amendment procedure.
    Note that [freeze_listings], which computes the vote weight of each delegate,
is run at the beginning of each voting period.
*)
let start_new_voting_period ctxt =
Vote.get_current_period_kind ctxt
>>=? function
| Proposal -> (
select_winning_proposal ctxt
>>=? fun proposal ->
Vote.clear_proposals ctxt
>>= fun ctxt ->
Vote.clear_listings ctxt
>>=? fun ctxt ->
match proposal with
| None ->
Vote.freeze_listings ctxt >>=? fun ctxt -> return ctxt
| Some proposal ->
Vote.init_current_proposal ctxt proposal
>>=? fun ctxt ->
Vote.freeze_listings ctxt
>>=? fun ctxt ->
Vote.set_current_period_kind ctxt Testing_vote
>>=? fun ctxt -> return ctxt )
| Testing_vote ->
check_approval_and_update_participation_ema ctxt
>>=? fun (ctxt, approved) ->
Vote.clear_ballots ctxt
>>= fun ctxt ->
Vote.clear_listings ctxt
>>=? fun ctxt ->
if approved then
let expiration =
(* in two days maximum... *)
Time.add
(Timestamp.current ctxt)
(Constants.test_chain_duration ctxt)
in
Vote.get_current_proposal ctxt
>>=? fun proposal ->
fork_test_chain ctxt proposal expiration
>>= fun ctxt ->
Vote.set_current_period_kind ctxt Testing >>=? fun ctxt -> return ctxt
else
Vote.clear_current_proposal ctxt
>>=? fun ctxt ->
Vote.freeze_listings ctxt
>>=? fun ctxt ->
Vote.set_current_period_kind ctxt Proposal >>=? fun ctxt -> return ctxt
| Testing ->
Vote.freeze_listings ctxt
>>=? fun ctxt ->
Vote.set_current_period_kind ctxt Promotion_vote
>>=? fun ctxt -> return ctxt
| Promotion_vote ->
check_approval_and_update_participation_ema ctxt
>>=? fun (ctxt, approved) ->
( if approved then
Vote.get_current_proposal ctxt
>>=? fun proposal -> activate ctxt proposal >>= fun ctxt -> return ctxt
else return ctxt )
>>=? fun ctxt ->
Vote.clear_ballots ctxt
>>= fun ctxt ->
Vote.clear_listings ctxt
>>=? fun ctxt ->
Vote.clear_current_proposal ctxt
>>=? fun ctxt ->
Vote.freeze_listings ctxt
>>=? fun ctxt ->
Vote.set_current_period_kind ctxt Proposal >>=? fun ctxt -> return ctxt
type error +=
| (* `Branch *)
Invalid_proposal
| Unexpected_proposal
| Unauthorized_proposal
| Too_many_proposals
| Empty_proposal
| Unexpected_ballot
| Unauthorized_ballot
let () =
let open Data_encoding in
(* Invalid proposal *)
register_error_kind
`Branch
~id:"invalid_proposal"
~title:"Invalid proposal"
~description:"Ballot provided for a proposal that is not the current one."
~pp:(fun ppf () -> Format.fprintf ppf "Invalid proposal")
empty
(function Invalid_proposal -> Some () | _ -> None)
(fun () -> Invalid_proposal) ;
(* Unexpected proposal *)
register_error_kind
`Branch
~id:"unexpected_proposal"
~title:"Unexpected proposal"
~description:"Proposal recorded outside of a proposal period."
~pp:(fun ppf () -> Format.fprintf ppf "Unexpected proposal")
empty
(function Unexpected_proposal -> Some () | _ -> None)
(fun () -> Unexpected_proposal) ;
(* Unauthorized proposal *)
register_error_kind
`Branch
~id:"unauthorized_proposal"
~title:"Unauthorized proposal"
~description:
"The delegate provided for the proposal is not in the voting listings."
~pp:(fun ppf () -> Format.fprintf ppf "Unauthorized proposal")
empty
(function Unauthorized_proposal -> Some () | _ -> None)
(fun () -> Unauthorized_proposal) ;
(* Unexpected ballot *)
register_error_kind
`Branch
~id:"unexpected_ballot"
~title:"Unexpected ballot"
~description:"Ballot recorded outside of a voting period."
~pp:(fun ppf () -> Format.fprintf ppf "Unexpected ballot")
empty
(function Unexpected_ballot -> Some () | _ -> None)
(fun () -> Unexpected_ballot) ;
(* Unauthorized ballot *)
register_error_kind
`Branch
~id:"unauthorized_ballot"
~title:"Unauthorized ballot"
~description:
"The delegate provided for the ballot is not in the voting listings."
~pp:(fun ppf () -> Format.fprintf ppf "Unauthorized ballot")
empty
(function Unauthorized_ballot -> Some () | _ -> None)
(fun () -> Unauthorized_ballot) ;
(* Too many proposals *)
register_error_kind
`Branch
~id:"too_many_proposals"
~title:"Too many proposals"
~description:
"The delegate reached the maximum number of allowed proposals."
~pp:(fun ppf () -> Format.fprintf ppf "Too many proposals")
empty
(function Too_many_proposals -> Some () | _ -> None)
(fun () -> Too_many_proposals) ;
(* Empty proposal *)
register_error_kind
`Branch
~id:"empty_proposal"
~title:"Empty proposal"
~description:"Proposal lists cannot be empty."
~pp:(fun ppf () -> Format.fprintf ppf "Empty proposal")
empty
(function Empty_proposal -> Some () | _ -> None)
(fun () -> Empty_proposal)
(* @return [true] if and only if [List.length l] > [n], without computing the
   length *)
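(* For example, [longer_than [1; 2; 3] 2] is [true] and [longer_than [1; 2] 2]
   is [false]; the traversal stops as soon as the answer is known. *)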
let rec longer_than l n =
if Compare.Int.(n < 0) then assert false
else
match l with
| [] ->
false
| _ :: rest ->
if Compare.Int.(n = 0) then true
else (* n > 0 *)
longer_than rest (n - 1)
let record_proposals ctxt delegate proposals =
(match proposals with [] -> fail Empty_proposal | _ :: _ -> return_unit)
>>=? fun () ->
Vote.get_current_period_kind ctxt
>>=? function
| Proposal ->
Vote.in_listings ctxt delegate
>>= fun in_listings ->
if in_listings then
Vote.recorded_proposal_count_for_delegate ctxt delegate
>>=? fun count ->
fail_when
(longer_than proposals (Constants.max_proposals_per_delegate - count))
Too_many_proposals
>>=? fun () ->
fold_left_s
(fun ctxt proposal -> Vote.record_proposal ctxt proposal delegate)
ctxt
proposals
>>=? fun ctxt -> return ctxt
else fail Unauthorized_proposal
| Testing_vote | Testing | Promotion_vote ->
fail Unexpected_proposal
let record_ballot ctxt delegate proposal ballot =
Vote.get_current_period_kind ctxt
>>=? function
| Testing_vote | Promotion_vote ->
Vote.get_current_proposal ctxt
>>=? fun current_proposal ->
fail_unless
(Protocol_hash.equal proposal current_proposal)
Invalid_proposal
>>=? fun () ->
Vote.has_recorded_ballot ctxt delegate
>>= fun has_ballot ->
fail_when has_ballot Unauthorized_ballot
>>=? fun () ->
Vote.in_listings ctxt delegate
>>= fun in_listings ->
if in_listings then Vote.record_ballot ctxt delegate ballot
else fail Unauthorized_ballot
| Testing | Proposal ->
fail Unexpected_ballot
let last_of_a_voting_period ctxt l =
Compare.Int32.(
Int32.succ l.Level.voting_period_position
= Constants.blocks_per_voting_period ctxt)
let may_start_new_voting_period ctxt =
let level = Level.current ctxt in
if last_of_a_voting_period ctxt level then start_new_voting_period ctxt
else return ctxt
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
alpha_services.mli | open Alpha_context
module Seed : sig
val get: 'a #RPC_context.simple -> 'a -> Seed.seed shell_tzresult Lwt.t
end
module Nonce : sig
type info =
| Revealed of Nonce.t
| Missing of Nonce_hash.t
| Forgotten
val get:
'a #RPC_context.simple ->
'a -> Raw_level.t -> info shell_tzresult Lwt.t
end
module Contract = Contract_services
module Constants = Constants_services
module Delegate = Delegate_services
module Helpers = Helpers_services
module Forge = Helpers_services.Forge
module Parse = Helpers_services.Parse
val register: unit -> unit
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
GeneralMatrixMatrixTriangular.h |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2009-2010 Gael Guennebaud <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_GENERAL_MATRIX_MATRIX_TRIANGULAR_H
#define EIGEN_GENERAL_MATRIX_MATRIX_TRIANGULAR_H
namespace Eigen {
template<typename Scalar, typename Index, int StorageOrder, int UpLo, bool ConjLhs, bool ConjRhs>
struct selfadjoint_rank1_update;
namespace internal {
/**********************************************************************
* This file implements a general A * B product while
* evaluating only one triangular part of the product.
* This is a more general version of self adjoint product (C += A A^T)
* as the level 3 SYRK Blas routine.
**********************************************************************/
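// A usage sketch (relying on Eigen's public dense API, defined elsewhere;
// the code below is illustrative only):
//
//   Eigen::MatrixXd a(5, 3), b(3, 5), c(5, 5);
//   a.setRandom(); b.setRandom(); c.setZero();
//   c.triangularView<Eigen::Lower>() += a * b;        // only the lower half
//                                                     // of the product is built
//   c.selfadjointView<Eigen::Lower>().rankUpdate(a);  // C += A*A^T (SYRK-like)
//
// The first statement is routed through _assignProduct at the bottom of this
// file; the rank update is the self-adjoint special case mentioned above.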
// forward declarations (defined at the end of this file)
template<typename LhsScalar, typename RhsScalar, typename Index, int mr, int nr, bool ConjLhs, bool ConjRhs, int UpLo>
struct tribb_kernel;
/* Optimized matrix-matrix product evaluating only one triangular half */
template <typename Index,
typename LhsScalar, int LhsStorageOrder, bool ConjugateLhs,
typename RhsScalar, int RhsStorageOrder, bool ConjugateRhs,
int ResStorageOrder, int UpLo, int Version = Specialized>
struct general_matrix_matrix_triangular_product;
// as usual if the result is row major => we transpose the product
template <typename Index, typename LhsScalar, int LhsStorageOrder, bool ConjugateLhs,
typename RhsScalar, int RhsStorageOrder, bool ConjugateRhs, int UpLo, int Version>
struct general_matrix_matrix_triangular_product<Index,LhsScalar,LhsStorageOrder,ConjugateLhs,RhsScalar,RhsStorageOrder,ConjugateRhs,RowMajor,UpLo,Version>
{
typedef typename ScalarBinaryOpTraits<LhsScalar, RhsScalar>::ReturnType ResScalar;
static EIGEN_STRONG_INLINE void run(Index size, Index depth,const LhsScalar* lhs, Index lhsStride,
const RhsScalar* rhs, Index rhsStride, ResScalar* res, Index resStride,
const ResScalar& alpha, level3_blocking<RhsScalar,LhsScalar>& blocking)
{
general_matrix_matrix_triangular_product<Index,
RhsScalar, RhsStorageOrder==RowMajor ? ColMajor : RowMajor, ConjugateRhs,
LhsScalar, LhsStorageOrder==RowMajor ? ColMajor : RowMajor, ConjugateLhs,
ColMajor, UpLo==Lower?Upper:Lower>
::run(size,depth,rhs,rhsStride,lhs,lhsStride,res,resStride,alpha,blocking);
}
};
template <typename Index, typename LhsScalar, int LhsStorageOrder, bool ConjugateLhs,
typename RhsScalar, int RhsStorageOrder, bool ConjugateRhs, int UpLo, int Version>
struct general_matrix_matrix_triangular_product<Index,LhsScalar,LhsStorageOrder,ConjugateLhs,RhsScalar,RhsStorageOrder,ConjugateRhs,ColMajor,UpLo,Version>
{
typedef typename ScalarBinaryOpTraits<LhsScalar, RhsScalar>::ReturnType ResScalar;
static EIGEN_STRONG_INLINE void run(Index size, Index depth,const LhsScalar* _lhs, Index lhsStride,
const RhsScalar* _rhs, Index rhsStride, ResScalar* _res, Index resStride,
const ResScalar& alpha, level3_blocking<LhsScalar,RhsScalar>& blocking)
{
typedef gebp_traits<LhsScalar,RhsScalar> Traits;
typedef const_blas_data_mapper<LhsScalar, Index, LhsStorageOrder> LhsMapper;
typedef const_blas_data_mapper<RhsScalar, Index, RhsStorageOrder> RhsMapper;
typedef blas_data_mapper<typename Traits::ResScalar, Index, ColMajor> ResMapper;
LhsMapper lhs(_lhs,lhsStride);
RhsMapper rhs(_rhs,rhsStride);
ResMapper res(_res, resStride);
Index kc = blocking.kc();
Index mc = (std::min)(size,blocking.mc());
// !!! mc must be a multiple of nr:
if(mc > Traits::nr)
mc = (mc/Traits::nr)*Traits::nr;
std::size_t sizeA = kc*mc;
std::size_t sizeB = kc*size;
ei_declare_aligned_stack_constructed_variable(LhsScalar, blockA, sizeA, blocking.blockA());
ei_declare_aligned_stack_constructed_variable(RhsScalar, blockB, sizeB, blocking.blockB());
gemm_pack_lhs<LhsScalar, Index, LhsMapper, Traits::mr, Traits::LhsProgress, LhsStorageOrder> pack_lhs;
gemm_pack_rhs<RhsScalar, Index, RhsMapper, Traits::nr, RhsStorageOrder> pack_rhs;
gebp_kernel<LhsScalar, RhsScalar, Index, ResMapper, Traits::mr, Traits::nr, ConjugateLhs, ConjugateRhs> gebp;
tribb_kernel<LhsScalar, RhsScalar, Index, Traits::mr, Traits::nr, ConjugateLhs, ConjugateRhs, UpLo> sybb;
for(Index k2=0; k2<depth; k2+=kc)
{
const Index actual_kc = (std::min)(k2+kc,depth)-k2;
// note that the actual rhs is the transpose/adjoint of mat
pack_rhs(blockB, rhs.getSubMapper(k2,0), actual_kc, size);
for(Index i2=0; i2<size; i2+=mc)
{
const Index actual_mc = (std::min)(i2+mc,size)-i2;
pack_lhs(blockA, lhs.getSubMapper(i2, k2), actual_kc, actual_mc);
        // the selected actual_mc * size panel of res is split into three different parts:
// 1 - before the diagonal => processed with gebp or skipped
// 2 - the actual_mc x actual_mc symmetric block => processed with a special kernel
// 3 - after the diagonal => processed with gebp or skipped
if (UpLo==Lower)
gebp(res.getSubMapper(i2, 0), blockA, blockB, actual_mc, actual_kc,
(std::min)(size,i2), alpha, -1, -1, 0, 0);
sybb(_res+resStride*i2 + i2, resStride, blockA, blockB + actual_kc*i2, actual_mc, actual_kc, alpha);
if (UpLo==Upper)
{
Index j2 = i2+actual_mc;
gebp(res.getSubMapper(i2, j2), blockA, blockB+actual_kc*j2, actual_mc,
actual_kc, (std::max)(Index(0), size-j2), alpha, -1, -1, 0, 0);
}
}
}
}
};
// Optimized packed Block * packed Block product kernel evaluating only one given triangular part
// This kernel is built on top of the gebp kernel:
// - the current destination block is processed per panel of actual_mc x BlockSize
// where BlockSize is set to the minimal value allowing gebp to be as fast as possible
// - then, as usual, each panel is split into three parts along the diagonal,
// the sub blocks above and below the diagonal are processed as usual,
// while the triangular block overlapping the diagonal is evaluated into a
// small temporary buffer which is then accumulated into the result using a
// triangular traversal.
template<typename LhsScalar, typename RhsScalar, typename Index, int mr, int nr, bool ConjLhs, bool ConjRhs, int UpLo>
struct tribb_kernel
{
typedef gebp_traits<LhsScalar,RhsScalar,ConjLhs,ConjRhs> Traits;
typedef typename Traits::ResScalar ResScalar;
enum {
BlockSize = meta_least_common_multiple<EIGEN_PLAIN_ENUM_MAX(mr,nr),EIGEN_PLAIN_ENUM_MIN(mr,nr)>::ret
};
void operator()(ResScalar* _res, Index resStride, const LhsScalar* blockA, const RhsScalar* blockB, Index size, Index depth, const ResScalar& alpha)
{
typedef blas_data_mapper<ResScalar, Index, ColMajor> ResMapper;
ResMapper res(_res, resStride);
gebp_kernel<LhsScalar, RhsScalar, Index, ResMapper, mr, nr, ConjLhs, ConjRhs> gebp_kernel;
Matrix<ResScalar,BlockSize,BlockSize,ColMajor> buffer((internal::constructor_without_unaligned_array_assert()));
// let's process the block per panel of actual_mc x BlockSize,
// again, each is split into three parts, etc.
for (Index j=0; j<size; j+=BlockSize)
{
Index actualBlockSize = std::min<Index>(BlockSize,size - j);
const RhsScalar* actual_b = blockB+j*depth;
if(UpLo==Upper)
gebp_kernel(res.getSubMapper(0, j), blockA, actual_b, j, depth, actualBlockSize, alpha,
-1, -1, 0, 0);
// selfadjoint micro block
{
Index i = j;
buffer.setZero();
// 1 - apply the kernel on the temporary buffer
gebp_kernel(ResMapper(buffer.data(), BlockSize), blockA+depth*i, actual_b, actualBlockSize, depth, actualBlockSize, alpha,
-1, -1, 0, 0);
// 2 - triangular accumulation
for(Index j1=0; j1<actualBlockSize; ++j1)
{
ResScalar* r = &res(i, j + j1);
for(Index i1=UpLo==Lower ? j1 : 0;
UpLo==Lower ? i1<actualBlockSize : i1<=j1; ++i1)
r[i1] += buffer(i1,j1);
}
}
if(UpLo==Lower)
{
Index i = j+actualBlockSize;
gebp_kernel(res.getSubMapper(i, j), blockA+depth*i, actual_b, size-i,
depth, actualBlockSize, alpha, -1, -1, 0, 0);
}
}
}
};
} // end namespace internal
// high level API
template<typename MatrixType, typename ProductType, int UpLo, bool IsOuterProduct>
struct general_product_to_triangular_selector;
template<typename MatrixType, typename ProductType, int UpLo>
struct general_product_to_triangular_selector<MatrixType,ProductType,UpLo,true>
{
static void run(MatrixType& mat, const ProductType& prod, const typename MatrixType::Scalar& alpha, bool beta)
{
typedef typename MatrixType::Scalar Scalar;
typedef typename internal::remove_all<typename ProductType::LhsNested>::type Lhs;
typedef internal::blas_traits<Lhs> LhsBlasTraits;
typedef typename LhsBlasTraits::DirectLinearAccessType ActualLhs;
typedef typename internal::remove_all<ActualLhs>::type _ActualLhs;
typename internal::add_const_on_value_type<ActualLhs>::type actualLhs = LhsBlasTraits::extract(prod.lhs());
typedef typename internal::remove_all<typename ProductType::RhsNested>::type Rhs;
typedef internal::blas_traits<Rhs> RhsBlasTraits;
typedef typename RhsBlasTraits::DirectLinearAccessType ActualRhs;
typedef typename internal::remove_all<ActualRhs>::type _ActualRhs;
typename internal::add_const_on_value_type<ActualRhs>::type actualRhs = RhsBlasTraits::extract(prod.rhs());
Scalar actualAlpha = alpha * LhsBlasTraits::extractScalarFactor(prod.lhs().derived()) * RhsBlasTraits::extractScalarFactor(prod.rhs().derived());
if(!beta)
mat.template triangularView<UpLo>().setZero();
enum {
StorageOrder = (internal::traits<MatrixType>::Flags&RowMajorBit) ? RowMajor : ColMajor,
UseLhsDirectly = _ActualLhs::InnerStrideAtCompileTime==1,
UseRhsDirectly = _ActualRhs::InnerStrideAtCompileTime==1
};
internal::gemv_static_vector_if<Scalar,Lhs::SizeAtCompileTime,Lhs::MaxSizeAtCompileTime,!UseLhsDirectly> static_lhs;
ei_declare_aligned_stack_constructed_variable(Scalar, actualLhsPtr, actualLhs.size(),
(UseLhsDirectly ? const_cast<Scalar*>(actualLhs.data()) : static_lhs.data()));
if(!UseLhsDirectly) Map<typename _ActualLhs::PlainObject>(actualLhsPtr, actualLhs.size()) = actualLhs;
internal::gemv_static_vector_if<Scalar,Rhs::SizeAtCompileTime,Rhs::MaxSizeAtCompileTime,!UseRhsDirectly> static_rhs;
ei_declare_aligned_stack_constructed_variable(Scalar, actualRhsPtr, actualRhs.size(),
(UseRhsDirectly ? const_cast<Scalar*>(actualRhs.data()) : static_rhs.data()));
if(!UseRhsDirectly) Map<typename _ActualRhs::PlainObject>(actualRhsPtr, actualRhs.size()) = actualRhs;
selfadjoint_rank1_update<Scalar,Index,StorageOrder,UpLo,
LhsBlasTraits::NeedToConjugate && NumTraits<Scalar>::IsComplex,
RhsBlasTraits::NeedToConjugate && NumTraits<Scalar>::IsComplex>
::run(actualLhs.size(), mat.data(), mat.outerStride(), actualLhsPtr, actualRhsPtr, actualAlpha);
}
};
template<typename MatrixType, typename ProductType, int UpLo>
struct general_product_to_triangular_selector<MatrixType,ProductType,UpLo,false>
{
static void run(MatrixType& mat, const ProductType& prod, const typename MatrixType::Scalar& alpha, bool beta)
{
typedef typename internal::remove_all<typename ProductType::LhsNested>::type Lhs;
typedef internal::blas_traits<Lhs> LhsBlasTraits;
typedef typename LhsBlasTraits::DirectLinearAccessType ActualLhs;
typedef typename internal::remove_all<ActualLhs>::type _ActualLhs;
typename internal::add_const_on_value_type<ActualLhs>::type actualLhs = LhsBlasTraits::extract(prod.lhs());
typedef typename internal::remove_all<typename ProductType::RhsNested>::type Rhs;
typedef internal::blas_traits<Rhs> RhsBlasTraits;
typedef typename RhsBlasTraits::DirectLinearAccessType ActualRhs;
typedef typename internal::remove_all<ActualRhs>::type _ActualRhs;
typename internal::add_const_on_value_type<ActualRhs>::type actualRhs = RhsBlasTraits::extract(prod.rhs());
typename ProductType::Scalar actualAlpha = alpha * LhsBlasTraits::extractScalarFactor(prod.lhs().derived()) * RhsBlasTraits::extractScalarFactor(prod.rhs().derived());
if(!beta)
mat.template triangularView<UpLo>().setZero();
enum {
IsRowMajor = (internal::traits<MatrixType>::Flags&RowMajorBit) ? 1 : 0,
LhsIsRowMajor = _ActualLhs::Flags&RowMajorBit ? 1 : 0,
RhsIsRowMajor = _ActualRhs::Flags&RowMajorBit ? 1 : 0,
SkipDiag = (UpLo&(UnitDiag|ZeroDiag))!=0
};
Index size = mat.cols();
if(SkipDiag)
size--;
Index depth = actualLhs.cols();
typedef internal::gemm_blocking_space<IsRowMajor ? RowMajor : ColMajor,typename Lhs::Scalar,typename Rhs::Scalar,
MatrixType::MaxColsAtCompileTime, MatrixType::MaxColsAtCompileTime, _ActualRhs::MaxColsAtCompileTime> BlockingType;
BlockingType blocking(size, size, depth, 1, false);
internal::general_matrix_matrix_triangular_product<Index,
typename Lhs::Scalar, LhsIsRowMajor ? RowMajor : ColMajor, LhsBlasTraits::NeedToConjugate,
typename Rhs::Scalar, RhsIsRowMajor ? RowMajor : ColMajor, RhsBlasTraits::NeedToConjugate,
IsRowMajor ? RowMajor : ColMajor, UpLo&(Lower|Upper)>
::run(size, depth,
&actualLhs.coeffRef(SkipDiag&&(UpLo&Lower)==Lower ? 1 : 0,0), actualLhs.outerStride(),
&actualRhs.coeffRef(0,SkipDiag&&(UpLo&Upper)==Upper ? 1 : 0), actualRhs.outerStride(),
mat.data() + (SkipDiag ? (bool(IsRowMajor) != ((UpLo&Lower)==Lower) ? 1 : mat.outerStride() ) : 0), mat.outerStride(), actualAlpha, blocking);
}
};
template<typename MatrixType, unsigned int UpLo>
template<typename ProductType>
TriangularView<MatrixType,UpLo>& TriangularViewImpl<MatrixType,UpLo,Dense>::_assignProduct(const ProductType& prod, const Scalar& alpha, bool beta)
{
EIGEN_STATIC_ASSERT((UpLo&UnitDiag)==0, WRITING_TO_TRIANGULAR_PART_WITH_UNIT_DIAGONAL_IS_NOT_SUPPORTED);
eigen_assert(derived().nestedExpression().rows() == prod.rows() && derived().cols() == prod.cols());
general_product_to_triangular_selector<MatrixType, ProductType, UpLo, internal::traits<ProductType>::InnerSize==1>::run(derived().nestedExpression().const_cast_derived(), prod, alpha, beta);
return derived();
}
} // end namespace Eigen
#endif // EIGEN_GENERAL_MATRIX_MATRIX_TRIANGULAR_H
| |
term_printer.ml |
open Fmlib
open Common
open Term
module type GAMMA =
sig
type t
val is_valid_index: int -> t -> bool
val name_of_index: int -> t -> string
val push_local: string -> Term.typ -> t -> t
end
module Pretty (Gamma: GAMMA) (P: Pretty_printer.SIG) =
struct
open Gamma
type pr_result =
Operator.t option * P.t
type print0 = Term.t -> Gamma.t -> P.t
type print = Term.t -> Gamma.t -> pr_result
let pi_info (info: Pi_info.t): string * bool =
Pi_info.name info,
Pi_info.is_typed info
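    (* [split_pi t c] peels the leading non-arrow [Pi] binders of [t]; it
       returns, for each binder, its name, whether it is explicitly typed, its
       type and the context in which that type must be printed, together with
       the remaining inner term and the context extended with all binders. *)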
let rec split_pi
(t:Term.t)
(c:t)
: (string * bool * Term.typ * t) list * Term.t * t =
match t with
| Pi (tp, t, info) when not (Pi_info.is_arrow info) ->
let name, is_typed = pi_info info
in
let lst, t_inner, c_inner =
split_pi t (push_local name tp c)
in
(name, is_typed, tp, c) :: lst, t_inner, c_inner
| _ ->
[], t, c
let print_sort: Sort.t -> pr_result = function
| Proposition ->
None, P.string "Proposition"
| Any i ->
let str =
if i = 0 then
"Any"
else
"Any(" ^ string_of_int i ^ ")"
in
None,
P.string str
let print_value: Value.t -> pr_result = function
| Value.Int i ->
None,
P.string (string_of_int i)
| Value.Char i ->
None,
P.(char '\'' <+> char (Char.chr i) <+> char '\'')
| Value.String str ->
None,
P.(char '"' <+> string str <+> char '"')
| Value.Unary _ | Value.Binary _ ->
None,
P.(string "<function>")
let parenthesize
((lower,pr): Operator.t option * P.t)
(is_left: bool)
(upper: Operator.t)
: P.t
=
if Operator.needs_parens lower is_left upper then
P.(chain [char '('; pr; char ')'])
else
pr
let two_operands
(a: Term.t) (b:Term.t) (upper: Operator.t)
(print: print)
(c:t)
: P.t * P.t =
parenthesize (print a c) true upper,
parenthesize (print b c) false upper
let formal_argument
(name: string)
(typed: bool)
(tp: Term.typ)
(print: print0)
(c: Gamma.t)
: P.t
=
let open P in
if typed then
char '(' <+> string name <+> string ": " <+> print tp c <+> char ')'
else
string name
let print_definition
(name: string)
(exp: Term.t)
(raw_print: print0)
(c: Gamma.t)
: P.t
=
let open P in
let rec print exp c =
match exp with
| Lambda (tp, exp, info) ->
let name = Lambda_info.name info in
group space
<+> formal_argument
name
(Lambda_info.is_typed info)
tp
raw_print
c
<+> print exp (push_local name tp c)
| _ ->
(
match exp with
| Typed (exp, tp) ->
char ':'
<+> group space <+> raw_print tp c
<+> group space <+> string ":="
<+> group (
nest 4 (space <+> raw_print exp c)
)
| _ ->
group space <+> string ":="
<+> group (
nest 4 (space <+> raw_print exp c)
)
)
in
string name <+> print exp c
let rec print (t:Term.t) (c:Gamma.t): pr_result =
let raw_print t c =
snd (print t c)
in
let print_name_type name is_typed tp c =
let name = if name = "" then P.char '_' else P.string name
in
if is_typed then
P.(char '('
<+> name
<+> string ": "
<+> snd (print tp c)
<+> char ')')
else
name
in
match t with
| Sort s ->
print_sort s
| Value v ->
print_value v
| Variable i ->
None,
P.string
(if is_valid_index i c then
let name = name_of_index i c in
let len = String.length name in
assert (1 <= len);
let c0 = name.[0] in
if Operator.is_keyword_operator name then
"(" ^ name ^ ")"
else if Char.is_letter c0
|| c0 = '_'
|| (2 <= len && Char.is_digit name.[1])
|| name = "[]"
|| name = "()"
then
name
else
"(" ^ name ^ ")"
else
"<invalid " ^ string_of_int i ^ ">")
| Typed (e, tp) ->
let e_pr, tp_pr = two_operands e tp Operator.colon print c in
Some Operator.colon,
P.( group (
e_pr <+> char ':'
<+> nest 4
(space <+> tp_pr)
)
)
| Appl (f, operand2, Binary) ->
let rec find_operand1 f =
match f with
| Appl (f, operand1, Binary) ->
Some (f, operand1)
| Appl (f, _, Implicit ) ->
find_operand1 f
| _ ->
None
in
let rec find_operator f =
match f with
| Appl (f, _, Implicit) ->
find_operator f
| Variable i when is_valid_index i c ->
Some i
| _ ->
None
in
let res =
Option.(
find_operand1 f >>= fun (f, operand1) ->
find_operator f >>= fun operator ->
Some (operator, operand1))
in
(match res with
| None ->
print (Appl (f, operand2, Normal)) c
| Some (op_idx, operand1) ->
let op_string = name_of_index op_idx c in
let op_data = Operator.of_string op_string in
let a_pr, b_pr =
two_operands operand1 operand2 op_data print c
in
Some op_data,
P.(chain [a_pr;
group space;
string op_string;
char ' ';
b_pr])
)
| Appl (Variable i, arg, Unary) ->
let op_str = name_of_index i c in
assert (Operator.is_unary op_str);
let op_data = Operator.of_string op_str
in
Some op_data,
P.(
string op_str
<+> char ' '
<+>
parenthesize (print arg c) false op_data
)
| Appl (f, _, Implicit) ->
print f c
| Appl (f, a, _) ->
Some Operator.application,
P.( parenthesize (print f c) true Operator.application
<+> char ' '
<+> parenthesize (print a c) false Operator.application )
| Lambda _ as term ->
Some Operator.assign,
print_definition "\\" term raw_print c
| Pi (tp, rt, info)
when
Pi_info.is_arrow info
|| not (Term.has_variable 0 rt)
->
let c_inner = push_local "_" tp c
and op_data = Operator.of_string "->"
in
let tp_pr =
parenthesize (print tp c) true op_data
and rt_pr =
parenthesize (print rt c_inner) false op_data
in
Some op_data,
P.(chain [tp_pr;
group space;
string "->";
char ' ';
rt_pr])
| Pi (tp, t, info) ->
let nme, is_typed = pi_info info in
let lst, t_inner, c_inner =
split_pi t (push_local nme tp c) in
let lst = (nme, is_typed, tp, c) :: lst in
Some Operator.colon,
P.(
group (
string "all "
<+> nest_relative 0 (
list_separated
space
(List.map
(fun (nme, is_typed, tp, c) ->
print_name_type nme is_typed tp c
)
lst
)
<+> cut <+> string ": "
<+> raw_print t_inner c_inner
)
)
)
| Where (name, tp, exp, value) ->
let open P in
let rec print_where name tp exp defs c =
let c = push_local name tp c in
match exp with
| Where (name, tp, exp, value) ->
print_where
name
tp
exp
(print_definition name value raw_print c :: defs)
c
| _ ->
raw_print exp c, defs
in
let exp, defs =
print_where
name
tp
exp
[print_definition name value raw_print c] c
in
Some Operator.where,
exp <+> group space <+> string "where"
<+> group (
nest 4 (space <+> list_separated (line "; ") defs)
)
let print (t:Term.t) (c: Gamma.t): P.t =
snd (print t c)
end (* Pretty *)
module String_print (Gamma:GAMMA) =
struct
let string_of_term (t:Term.t) (c: Gamma.t): string =
let module PP = Pretty_printer.Pretty (String_printer) in
let module P = Pretty (Gamma) (PP) in
String_printer.run
(PP.run 0 70 70 (P.print t c))
end
let string_of_term (t:Term.t) (c: Gamma.t): string =
let module SP = String_print (Gamma) in
SP.string_of_term t c
| |
json.ml | include Yojson.Basic
exception ParsingFailure of string
let write_string (j : t) : string =
to_string j
let pretty_to_string (j : t) : string =
pretty_to_string j
let from_string (s : string) : t =
from_string s
let get (key : string) (json : t) : t option =
match json with
| `Assoc kv_lst ->
(match (List.assoc_opt key kv_lst) with
| Some v -> Some v
| None -> None)
| _ -> raise (ParsingFailure "Not an association list")
let to_list (json : t) : t list =
match json with
| `List j_lst -> j_lst
| _ -> raise (ParsingFailure "Not a list")
let to_int (json : t) : int =
match json with
| `Int k -> k
| _ -> let pp = pretty_to_string json in
raise (ParsingFailure ("Not an int:"^pp))
let to_bool (json : t) : bool =
match json with
| `Bool b -> b
| _ -> raise (ParsingFailure "Not a bool")
let to_string (json : t) : string =
match json with
| `String s -> s
| _ -> raise (ParsingFailure "Not a string")
let raise_opt err (x : 'a option) : 'a =
match x with
| None
| Some `Null -> raise (ParsingFailure err)
| Some x -> x
let default d (x : 'a option) : 'a =
match x with
| None
| Some `Null -> d
| Some x -> x
let get_err (key : string) (json : t) : t =
raise_opt ("Unbound key: `"^key^"`") (get key json)
let add_entry (k : string) (v : t) (json : t) : t =
match json with
| `Assoc kv_lst -> (`Assoc ((k,v)::kv_lst))
| _ -> raise (ParsingFailure "Not an association list")
let remove_entry (k : string) (json : t) : t =
match json with
| `Assoc kv_lst -> (`Assoc (List.remove_assoc k kv_lst))
| _ -> raise (ParsingFailure "Not an association list")
let to_file (path : Path.root) (json : t) : unit =
to_file (Path.string_of_root path) json
let from_file (path : Path.root) : t =
from_file (Path.string_of_root path)
| (******************************************************************************)
(* Metadb *)
(* Copyright (C) 2022 Nathan Guermond *)
(* *)
(* This program is free software: you can redistribute it and/or modify it *)
(* under the terms of the GNU General Public License as published by the Free *)
(* Software Foundation, either version 3 of the License, or (at your option) *)
(* any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *)
(* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *)
(* for more details. *)
(* *)
(* You should have received a copy of the GNU General Public License along *)
(* with this program. If not, see <https://www.gnu.org/licenses/>. *)
(* *)
(******************************************************************************)
|
mc13d.c | /* mc13d.c (permutations to block triangular form) */
/***********************************************************************
* This code is part of GLPK (GNU Linear Programming Kit).
*
* This code is the result of translation of the Fortran subroutines
* MC13D and MC13E associated with the following paper:
*
* I.S.Duff, J.K.Reid, Algorithm 529: Permutations to block triangular
* form, ACM Trans. on Math. Softw. 4 (1978), 189-192.
*
* Use of ACM Algorithms is subject to the ACM Software Copyright and
* License Agreement. See <http://www.acm.org/publications/policies>.
*
* The translation was made by Andrew Makhorin <[email protected]>.
*
* GLPK is free software: you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GLPK is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
* License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GLPK. If not, see <http://www.gnu.org/licenses/>.
***********************************************************************/
#include "mc13d.h"
/***********************************************************************
* NAME
*
* mc13d - permutations to block triangular form
*
* SYNOPSIS
*
* #include "mc13d.h"
* int mc13d(int n, const int icn[], const int ip[], const int lenr[],
* int ior[], int ib[], int lowl[], int numb[], int prev[]);
*
* DESCRIPTION
*
* Given the column numbers of the nonzeros in each row of the sparse
* matrix, the routine mc13d finds a symmetric permutation that makes
* the matrix block lower triangular.
*
* INPUT PARAMETERS
*
* n order of the matrix.
*
* icn array containing the column indices of the non-zeros. Those
* belonging to a single row must be contiguous but the ordering
* of column indices within each row is unimportant and wasted
* space between rows is permitted.
*
* ip ip[i], i = 1,2,...,n, is the position in array icn of the
* first column index of a non-zero in row i.
*
* lenr lenr[i], i = 1,2,...,n, is the number of non-zeros in row i.
*
* OUTPUT PARAMETERS
*
*  ior    ior[i], i = 1,2,...,n, gives the position in the original
* ordering of the row or column which is in position i in the
* permuted form.
*
* ib ib[i], i = 1,2,...,num, is the row number in the permuted
* matrix of the beginning of block i, 1 <= num <= n.
*
* WORKING ARRAYS
*
* arp working array of length [1+n], where arp[0] is not used.
* arp[i] is one less than the number of unsearched edges leaving
* node i. At the end of the algorithm it is set to a permutation
* which puts the matrix in block lower triangular form.
*
* ib working array of length [1+n], where ib[0] is not used.
* ib[i] is the position in the ordering of the start of the ith
* block. ib[n+1-i] holds the node number of the ith node on the
* stack.
*
* lowl working array of length [1+n], where lowl[0] is not used.
* lowl[i] is the smallest stack position of any node to which a
* path from node i has been found. It is set to n+1 when node i
* is removed from the stack.
*
* numb working array of length [1+n], where numb[0] is not used.
* numb[i] is the position of node i in the stack if it is on it,
* is the permuted order of node i for those nodes whose final
* position has been found and is otherwise zero.
*
* prev working array of length [1+n], where prev[0] is not used.
* prev[i] is the node at the end of the path when node i was
* placed on the stack.
*
* RETURNS
*
* The routine mc13d returns num, the number of blocks found. */
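/* Illustrative call (a sketch only, using the 1-based arrays documented
 * above): a 3x3 matrix with non-zeros at (1,1), (2,2), (2,3), (3,2), (3,3)
 * stored row by row.
 *
 * int icn[1+5]  = { 0, 1, 2, 3, 2, 3 };
 * int ip[1+3]   = { 0, 1, 2, 4 };
 * int lenr[1+3] = { 0, 1, 2, 2 };
 * int ior[1+3], ib[1+3], lowl[1+3], numb[1+3], prev[1+3];
 * int num = mc13d(3, icn, ip, lenr, ior, ib, lowl, numb, prev);
 *
 * Here num == 2: the diagonal blocks are {1} and {2,3}. */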
int mc13d(int n, const int icn[], const int ip[], const int lenr[],
int ior[], int ib[], int lowl[], int numb[], int prev[])
{ int *arp = ior;
int dummy, i, i1, i2, icnt, ii, isn, ist, ist1, iv, iw, j, lcnt,
nnm1, num, stp;
/* icnt is the number of nodes whose positions in final ordering
* have been found. */
icnt = 0;
/* num is the number of blocks that have been found. */
num = 0;
nnm1 = n + n - 1;
/* Initialization of arrays. */
for (j = 1; j <= n; j++)
{ numb[j] = 0;
arp[j] = lenr[j] - 1;
}
for (isn = 1; isn <= n; isn++)
{ /* Look for a starting node. */
if (numb[isn] != 0) continue;
iv = isn;
/* ist is the number of nodes on the stack ... it is the stack
* pointer. */
ist = 1;
/* Put node iv at beginning of stack. */
lowl[iv] = numb[iv] = 1;
ib[n] = iv;
/* The body of this loop puts a new node on the stack or
* backtracks. */
for (dummy = 1; dummy <= nnm1; dummy++)
{ i1 = arp[iv];
/* Have all edges leaving node iv been searched? */
if (i1 >= 0)
{ i2 = ip[iv] + lenr[iv] - 1;
i1 = i2 - i1;
/* Look at edges leaving node iv until one enters a new
* node or all edges are exhausted. */
for (ii = i1; ii <= i2; ii++)
{ iw = icn[ii];
/* Has node iw been on stack already? */
if (numb[iw] == 0) goto L70;
/* Update value of lowl[iv] if necessary. */
if (lowl[iw] < lowl[iv]) lowl[iv] = lowl[iw];
}
/* There are no more edges leaving node iv. */
arp[iv] = -1;
}
/* Is node iv the root of a block? */
if (lowl[iv] < numb[iv]) goto L60;
/* Order nodes in a block. */
num++;
ist1 = n + 1 - ist;
lcnt = icnt + 1;
/* Peel block off the top of the stack starting at the top
* and working down to the root of the block. */
for (stp = ist1; stp <= n; stp++)
{ iw = ib[stp];
lowl[iw] = n + 1;
numb[iw] = ++icnt;
if (iw == iv) break;
}
ist = n - stp;
ib[num] = lcnt;
/* Are there any nodes left on the stack? */
if (ist != 0) goto L60;
/* Have all the nodes been ordered? */
if (icnt < n) break;
goto L100;
L60: /* Backtrack to previous node on path. */
iw = iv;
iv = prev[iv];
/* Update value of lowl[iv] if necessary. */
if (lowl[iw] < lowl[iv]) lowl[iv] = lowl[iw];
continue;
L70: /* Put new node on the stack. */
arp[iv] = i2 - ii - 1;
prev[iw] = iv;
iv = iw;
lowl[iv] = numb[iv] = ++ist;
ib[n+1-ist] = iv;
}
}
L100: /* Put permutation in the required form. */
for (i = 1; i <= n; i++)
arp[numb[i]] = i;
return num;
}
/**********************************************************************/
#ifdef GLP_TEST
#include "env.h"
void test(int n, int ipp);
int main(void)
{ /* test program for routine mc13d */
test( 1, 0);
test( 2, 1);
test( 2, 2);
test( 3, 3);
test( 4, 4);
test( 5, 10);
test(10, 10);
test(10, 20);
test(20, 20);
test(20, 50);
test(50, 50);
test(50, 200);
return 0;
}
void fa01bs(int max, int *nrand);
void setup(int n, char a[1+50][1+50], int ip[], int icn[], int lenr[]);
void test(int n, int ipp)
{ int ip[1+50], icn[1+1000], ior[1+50], ib[1+51], iw[1+150],
lenr[1+50];
char a[1+50][1+50], hold[1+100];
int i, ii, iblock, ij, index, j, jblock, jj, k9, num;
xprintf("\n\n\nMatrix is of order %d and has %d off-diagonal non-"
"zeros\n", n, ipp);
for (j = 1; j <= n; j++)
{ for (i = 1; i <= n; i++)
a[i][j] = 0;
a[j][j] = 1;
}
for (k9 = 1; k9 <= ipp; k9++)
{ /* these statements should be replaced by calls to your
* favorite random number generator to place two pseudo-random
* numbers between 1 and n in the variables i and j */
for (;;)
{ fa01bs(n, &i);
fa01bs(n, &j);
if (!a[i][j]) break;
}
a[i][j] = 1;
}
/* setup converts matrix a[i,j] to required sparsity-oriented
* storage format */
setup(n, a, ip, icn, lenr);
num = mc13d(n, icn, ip, lenr, ior, ib, &iw[0], &iw[n], &iw[n+n]);
/* output reordered matrix with blocking to improve clarity */
xprintf("\nThe reordered matrix which has %d block%s is of the fo"
"rm\n", num, num == 1 ? "" : "s");
ib[num+1] = n + 1;
index = 100;
iblock = 1;
for (i = 1; i <= n; i++)
{ for (ij = 1; ij <= index; ij++)
hold[ij] = ' ';
if (i == ib[iblock])
{ xprintf("\n");
iblock++;
}
jblock = 1;
index = 0;
for (j = 1; j <= n; j++)
{ if (j == ib[jblock])
{ hold[++index] = ' ';
jblock++;
}
ii = ior[i];
jj = ior[j];
hold[++index] = (char)(a[ii][jj] ? 'X' : '0');
}
xprintf("%.*s\n", index, &hold[1]);
}
xprintf("\nThe starting point for each block is given by\n");
for (i = 1; i <= num; i++)
{ if ((i - 1) % 12 == 0) xprintf("\n");
xprintf(" %4d", ib[i]);
}
xprintf("\n");
return;
}
void setup(int n, char a[1+50][1+50], int ip[], int icn[], int lenr[])
{ int i, j, ind;
for (i = 1; i <= n; i++)
lenr[i] = 0;
ind = 1;
for (i = 1; i <= n; i++)
{ ip[i] = ind;
for (j = 1; j <= n; j++)
{ if (a[i][j])
{ lenr[i]++;
icn[ind++] = j;
}
}
}
return;
}
double g = 1431655765.0;
double fa01as(int i)
{ /* random number generator */
g = fmod(g * 9228907.0, 4294967296.0);
if (i >= 0)
return g / 4294967296.0;
else
return 2.0 * g / 4294967296.0 - 1.0;
}
void fa01bs(int max, int *nrand)
{ *nrand = (int)(fa01as(1) * (double)max) + 1;
return;
}
#endif
/* eof */
| /* mc13d.c (permutations to block triangular form) */
|
test_sc_rollup_wasm.ml | (** Testing
-------
Component: Rollup layer 1 logic
Invocation: dune exec \
src/proto_alpha/lib_protocol/test/unit/main.exe \
-- test "^\[Unit\] sc rollup wasm$"
Subject: Unit test for the Wasm PVM
*)
open Protocol
open Tezos_micheline.Micheline
open Michelson_v1_primitives
open Tezos_webassembly_interpreter
module Context = Tezos_context_memory.Context_binary
open Wasm_utils
module Proof_encoding =
Tezos_context_merkle_proof_encoding.Merkle_proof_encoding
module Wasm_context = struct
module Tree = struct
include Context.Tree
type tree = Context.tree
type t = Context.t
type key = string list
type value = bytes
end
type tree = Context.tree
type proof = Context.Proof.tree Context.Proof.t
let verify_proof p f =
Lwt.map Result.to_option (Context.verify_tree_proof p f)
let produce_proof context tree step =
let open Lwt_syntax in
let* context = Context.add_tree context [] tree in
let* (_hash : Context_hash.t) =
Context.commit ~time:Time.Protocol.epoch context
in
let index = Context.index context in
match Context.Tree.kinded_key tree with
| Some k ->
let* p = Context.produce_tree_proof index k step in
return (Some p)
| None -> return None
let kinded_hash_to_state_hash = function
| `Value hash | `Node hash ->
Sc_rollup_repr.State_hash.context_hash_to_state_hash hash
let proof_before proof = kinded_hash_to_state_hash proof.Context.Proof.before
let proof_after proof = kinded_hash_to_state_hash proof.Context.Proof.after
let proof_encoding = Proof_encoding.V2.Tree2.tree_proof_encoding
end
module Full_Wasm =
Sc_rollup_wasm.V2_0_0.Make (Environment.Wasm_2_0_0.Make) (Wasm_context)
let test_initial_state_hash_wasm_pvm () =
let open Alpha_context in
let open Lwt_result_syntax in
let empty = Sc_rollup_helpers.make_empty_tree () in
let*! state = Sc_rollup_helpers.Wasm_pvm.initial_state ~empty in
let*! hash = Sc_rollup_helpers.Wasm_pvm.state_hash state in
let expected = Sc_rollup.Wasm_2_0_0PVM.reference_initial_state_hash in
if Sc_rollup.State_hash.(hash = expected) then return_unit
else
failwith
"incorrect hash, expected %a, got %a"
Sc_rollup.State_hash.pp
expected
Sc_rollup.State_hash.pp
hash
let test_metadata_size () =
let address = Sc_rollup_repr.Address.of_bytes_exn (Bytes.make 20 '\000') in
let metadata =
Sc_rollup_metadata_repr.{address; origination_level = Raw_level_repr.root}
in
let bytes =
Data_encoding.Binary.to_bytes_exn Sc_rollup_metadata_repr.encoding metadata
in
assert (
Bytes.length bytes
= Tezos_scoru_wasm.Host_funcs.Internal_for_tests.metadata_size) ;
Lwt_result_syntax.return_unit
let test_l1_input_kind () =
let open Lwt_result_syntax in
let open Sc_rollup_inbox_message_repr in
let open Tezos_scoru_wasm in
let check_msg msg expected =
let*? msg = Environment.wrap_tzresult @@ serialize msg in
let msg = unsafe_to_string msg |> Pvm_input_kind.from_raw_input in
assert (msg = expected) ;
return_unit
in
let* () = check_msg (Internal Start_of_level) (Internal Start_of_level) in
let* () = check_msg (Internal End_of_level) (Internal End_of_level) in
let* () = check_msg (External "payload") External in
return_unit
let make_transaction value text contract =
let entrypoint = Entrypoint_repr.default in
let destination : Contract_hash.t =
Contract_hash.of_bytes_exn @@ Bytes.of_string contract
in
let unparsed_parameters =
strip_locations
@@ Prim
( 0,
I_TICKET,
[Prim (0, I_PAIR, [Int (0, Z.of_int32 value); String (1, text)], [])],
[] )
in
Sc_rollup_outbox_message_repr.{unparsed_parameters; entrypoint; destination}
let make_transactions () =
let l =
QCheck2.Gen.(
generate1
@@ list_size
(return 3)
(triple (string_size @@ return 20) int32 (small_string ~gen:char)))
in
List.map (fun (contract, i, s) -> make_transaction i s contract) l
(* This is a simple "echo kernel": it spits out the first four inputs (SOL,
   Info_per_level, input, EOL) it receives. It uses the [write_output] host
   function and is therefore used to test that function. *)
let test_output () =
let open Lwt_result_syntax in
let level_offset = 20 in
let dst = 60 in
let max_bytes = 3600 in
let dst_without_header = dst + 2 in
let modul =
Format.sprintf
{|
(module
(type $t0 (func (param i32 i32) (result i32)))
(type $t3 (func (param i32 i32 i32) (result i32)))
(import "smart_rollup_core" "read_input" (func $read_input (type $t3)))
(import "smart_rollup_core" "write_output" (func $write_output (type $t0)))
(memory 1)
(export "memory" (memory 0))
(func (export "kernel_run")
(local $size i32)
(local.set $size (call $read_input
(i32.const %d)
(i32.const %d)
(i32.const %d)))
(call $write_output (i32.const %d)
(i32.sub (local.get $size) (i32.const 2)))
(local.set $size (call $read_input
(i32.const %d)
(i32.const %d)
(i32.const %d)))
(call $write_output (i32.const %d)
(i32.sub (local.get $size) (i32.const 2)))
(local.set $size (call $read_input
(i32.const %d)
(i32.const %d)
(i32.const %d)))
(call $write_output (i32.const %d)
(i32.sub (local.get $size) (i32.const 2)))
(local.set $size (call $read_input
(i32.const %d)
(i32.const %d)
(i32.const %d)))
(call $write_output (i32.const %d)
(local.get $size))
drop)
)
|}
level_offset
dst
max_bytes
dst_without_header
level_offset
dst
max_bytes
dst_without_header
level_offset
dst
max_bytes
dst_without_header
level_offset
dst
max_bytes
dst_without_header
in
let*! dummy = Context.init "/tmp" in
let dummy_context = Context.empty dummy in
let (empty_tree : Wasm.tree) = Context.Tree.empty dummy_context in
let parsed = Parse.string_to_module modul in
let parsed =
match parsed.it with Script.Textual m -> m | _ -> assert false
in
let*! boot_sector = Encode.encode parsed in
let*! tree = Wasm.initial_state empty_tree in
let*! tree =
Wasm.install_boot_sector
~ticks_per_snapshot:Sc_rollup_wasm.V2_0_0.ticks_per_snapshot
~outbox_validity_period:Sc_rollup_wasm.V2_0_0.outbox_validity_period
~outbox_message_limit:Sc_rollup_wasm.V2_0_0.outbox_message_limit
boot_sector
tree
in
let*! tree =
Wasm.Internal_for_tests.set_max_nb_ticks (Z.of_int64 50_000_000L) tree
in
let transactions = make_transactions () in
let out =
Sc_rollup_outbox_message_repr.(Atomic_transaction_batch {transactions})
in
let string_input_message =
Data_encoding.Binary.to_string_exn
Sc_rollup_outbox_message_repr.encoding
out
in
let*! tree = eval_until_input_requested tree in
let*! tree = set_full_input_step [string_input_message] 0l tree in
let*! final_tree = eval_until_input_requested tree in
let*! output = Wasm.Internal_for_tests.get_output_buffer final_tree in
let* last_outbox_level =
match output.Tezos_webassembly_interpreter.Output_buffer.last_level with
| Some level -> return level
| None -> failwith "The PVM output buffer does not contain any outbox."
in
let*! last_outbox =
Tezos_webassembly_interpreter.Output_buffer.get_outbox
output
last_outbox_level
in
let* end_of_level_message_index =
match Output_buffer.get_outbox_last_message_index last_outbox with
| Some index -> return index
| None -> failwith "The PVM output buffer does not contain any outbox."
in
(* The last message in the outbox corresponds to EOL, due to the nature of the
kernel. As such we must take the one preceding it. *)
let message_index = Z.pred end_of_level_message_index in
let*! bytes_output_message =
Tezos_webassembly_interpreter.Output_buffer.(
get_message output {outbox_level = last_outbox_level; message_index})
in
assert (string_input_message = Bytes.to_string bytes_output_message) ;
let message =
Data_encoding.Binary.of_bytes_exn
Sc_rollup_outbox_message_repr.encoding
bytes_output_message
in
assert (message = out) ;
let*? outbox_level =
Environment.wrap_tzresult @@ Raw_level_repr.of_int32 last_outbox_level
in
let output = Sc_rollup_PVM_sig.{outbox_level; message_index; message} in
let*! pf = Full_Wasm.produce_output_proof dummy_context final_tree output in
match pf with
| Ok proof ->
let*! valid = Full_Wasm.verify_output_proof proof in
fail_unless valid (Exn (Failure "An output proof is not valid."))
| Error _ -> failwith "Error during proof generation"
let tests =
[
Tztest.tztest
"initial state hash for Wasm"
`Quick
test_initial_state_hash_wasm_pvm;
Tztest.tztest "size of a rollup metadata" `Quick test_metadata_size;
Tztest.tztest "l1 input kind" `Quick test_l1_input_kind;
Tztest.tztest "test output proofs" `Quick test_output;
]
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2022 Nomadic Labs <[email protected]> *)
(* Copyright (c) 2022 Trili Tech, <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
tools.ml |
(* Original author: Nicolas Pouillard *)
(* Tools *)
open My_std
open Format
open Log
open Pathname.Operators
open Tags.Operators
open Rule
let pp_l = List.print String.print
let tags_of_pathname p =
Configuration.tags_of_filename (Pathname.to_string p)
++("file:"^p)
++("extension:" ^ Pathname.get_extension p)
let opt_print elt ppf =
function
| Some x -> fprintf ppf "@[<2>Some@ %a@]" elt x
| None -> pp_print_string ppf "None"
let path_and_context_of_string s =
if Pathname.is_implicit s then
let b = Pathname.basename s in
let d = Pathname.dirname s in
if d <> Pathname.current_dir_name then
let () = Pathname.define_context d [d] in
[s]
else
let include_dirs = Pathname.include_dirs_of d in
List.map (fun include_dir -> include_dir/b) include_dirs
else [s]
| (***********************************************************************)
(* *)
(* ocamlbuild *)
(* *)
(* Nicolas Pouillard, Berke Durak, projet Gallium, INRIA Rocquencourt *)
(* *)
(* Copyright 2007 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. This file is distributed *)
(* under the terms of the GNU Library General Public License, with *)
(* the special exception on linking described in file ../LICENSE. *)
(* *)
(***********************************************************************)
|
ccl_factor.c |
#include "ccl_factor.h"
/*---------------------------------------------------------------------*/
/* Global Variables */
/*---------------------------------------------------------------------*/
/*---------------------------------------------------------------------*/
/* Forward Declarations */
/*---------------------------------------------------------------------*/
/*---------------------------------------------------------------------*/
/* Internal Functions */
/*---------------------------------------------------------------------*/
/*-----------------------------------------------------------------------
//
// Function: find_next_potential_eq_factor_partner()
//
// Given two positions, set pos2->literal to the next positive
// literal (at or including pos2->literal) distinct from
// pos1->literal.
//
// Global Variables:
//
// Side Effects :
//
/----------------------------------------------------------------------*/
Eqn_p find_next_potential_eq_factor_partner(ClausePos_p pos1,
ClausePos_p pos2)
{
Eqn_p lit;
lit = ClausePosFindPosLiteral(pos2, false);
if(lit==pos1->literal)
{
pos2->literal = pos2->literal->next;
lit = ClausePosFindPosLiteral(pos2, false);
}
return lit;
}
/*-----------------------------------------------------------------------
//
// Function: find_first_eq_factor_partner()
//
// Given the maximal positive literal described in pos1, set pos2 to
// the first potential partner for an equality factoring
// inference. Return the selected literal, or NULL if no exists.
//
// Global Variables: -
//
// Side Effects : Sets pos2
//
/----------------------------------------------------------------------*/
Eqn_p find_first_eq_factor_partner(ClausePos_p pos1, ClausePos_p pos2)
{
Eqn_p lit;
assert(pos1);
assert(pos2);
assert(pos1->clause);
assert(pos1->literal);
assert(EqnIsPositive(pos1->literal));
assert(EqnIsMaximal(pos1->literal));
pos2->clause = pos1->clause;
pos2->literal = pos1->clause->literals;
pos2->side = LeftSide;
PStackReset(pos2->pos);
lit = find_next_potential_eq_factor_partner(pos1, pos2);
return lit;
}
/*---------------------------------------------------------------------*/
/* Exported Functions */
/*---------------------------------------------------------------------*/
/*-----------------------------------------------------------------------
//
// Function: ComputeOrderedFactor()
//
// Given two positions in a clause, try to compute the ordered
// factor. Return it, if it exists, otherwise return NULL.
//
// Global Variables: -
//
// Side Effects : Creates clause, temporary bindings (will be
// backtracked).
//
/----------------------------------------------------------------------*/
Clause_p ComputeOrderedFactor(TB_p bank, OCB_p ocb, ClausePos_p pos1,
ClausePos_p pos2, VarBank_p freshvars)
{
Subst_p subst;
Clause_p new_clause = NULL;
Eqn_p new_literals;
bool unifies;
assert(pos1->clause == pos2->clause);
assert(pos1->literal != pos2->literal);
assert(pos1->side == LeftSide);
subst = SubstAlloc();
VarBankResetVCounts(freshvars);
if(pos2->side == RightSide)
{
EqnSwapSidesSimple(pos2->literal);
}
unifies = EqnUnifyDirected(pos1->literal, pos2->literal, subst);
if(pos2->side == RightSide)
{
EqnSwapSidesSimple(pos2->literal);
}
if(unifies)
{
if(EqnListEqnIsMaximal(ocb, pos1->clause->literals,
pos1->literal))
{
NormSubstEqnListExcept(pos1->clause->literals, pos2->literal,
subst, freshvars);
new_literals = EqnListCopyOptExcept(pos1->clause->literals,
pos2->literal);
EqnListRemoveResolved(&new_literals);
EqnListRemoveDuplicates(new_literals);
new_clause = ClauseAlloc(new_literals);
}
}
SubstDelete(subst);
return new_clause;
}
/*-----------------------------------------------------------------------
//
// Function: ClausePosFirstOrderedFactorLiterals()
//
// Given a clause, compute the first pair of literals were an
// ordered factor might be computed. See
// ClausePosNextFactorLiterals(). This works by setting an
// impossible initial state and searching for the next valid one...
//
// Global Variables: -
//
// Side Effects : Changes pos1, pos2
//
/----------------------------------------------------------------------*/
Eqn_p ClausePosFirstOrderedFactorLiterals(Clause_p clause, ClausePos_p
pos1, ClausePos_p pos2)
{
Eqn_p lit;
pos1->clause = clause;
pos1->literal = clause->literals;
pos1->side = LeftSide;
PStackReset(pos1->pos);
lit = ClausePosFindMaxLiteral(pos1, true);
if(lit)
{
pos2->clause = clause;
pos2->literal = pos1->literal;
pos2->side = RightSide;
PStackReset(pos2->pos);
return ClausePosNextOrderedFactorLiterals(pos1, pos2);
}
return NULL;
}
/*-----------------------------------------------------------------------
//
// Function: ClausePosNextOrderedFactorLiterals()
//
// Given a clause and two positions, set these position to
// the next valid combination for an ordered factor
// inference. Return the second literal, or NULL if no position pair
// exists. pos2->side is used to indicate wether the unification
// should take place as is or with one equation swapped.
//
//
// Global Variables: -
//
// Side Effects : Changes pos1, pos2
//
/----------------------------------------------------------------------*/
Eqn_p ClausePosNextOrderedFactorLiterals(ClausePos_p pos1, ClausePos_p
pos2)
{
Eqn_p lit;
if(pos2->side == LeftSide &&
(!EqnIsOriented(pos2->literal) ||
!EqnIsOriented(pos1->literal)))
{
pos2->side = RightSide;
return pos2->literal;
}
pos2->side = LeftSide;
pos2->literal = pos2->literal->next;
lit = ClausePosFindMaxLiteral(pos2, true);
while(!lit)
{
pos1->literal = pos1->literal->next;
lit = ClausePosFindMaxLiteral(pos1, true);
if(!lit)
{
break;
}
pos2->literal = pos1->literal->next;
lit = ClausePosFindMaxLiteral(pos2, true);
}
return lit;
}
/*-----------------------------------------------------------------------
//
// Function: ComputeEqualityFactor()
//
// Given two positions in a clause, try to compute the equality
// factor. Return it, if it exists, otherwise return NULL.
//
// Global Variables: -
//
// Side Effects : Creates clause, temporary bindings (will be
// backtracked).
//
/----------------------------------------------------------------------*/
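/* Schematically (as implemented below), writing s for the maximal side of
 * pos1's literal, t for its other side, u/v for the sides of pos2's literal
 * and sigma for the unifier computed by SubstMguComplete(s, u, subst):
 *
 *      s=t \/ u=v \/ R
 *   ------------------------  equality factoring
 *   (t!=v \/ u=v \/ R) sigma
 *
 * subject to the ordering and maximality checks performed in the code. */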
Clause_p ComputeEqualityFactor(TB_p bank, OCB_p ocb, ClausePos_p pos1,
ClausePos_p pos2, VarBank_p freshvars)
{
Term_p max_term, with_term, min_term, new_lside, new_rside;
Eqn_p new_condition, new_literals;
Subst_p subst = SubstAlloc();
Clause_p new_clause = NULL;
assert(EqnIsPositive(pos1->literal));
assert(EqnIsMaximal(pos1->literal));
assert(EqnIsPositive(pos2->literal));
assert(!EqnIsOriented(pos1->literal) || (pos1->side == LeftSide));
max_term = ClausePosGetSide(pos1);
with_term = ClausePosGetSide(pos2);
if((!TermIsVar(max_term)||EqnIsEquLit(pos2->literal))&&
(!TermIsVar(with_term)||EqnIsEquLit(pos1->literal))&&
SubstMguComplete(max_term, with_term, subst))
{
min_term = ClausePosGetOtherSide(pos1);
if(!TOGreater(ocb, min_term, max_term, DEREF_ALWAYS, DEREF_ALWAYS)
&&
EqnListEqnIsMaximal(ocb, pos1->clause->literals,
pos1->literal))
{
NormSubstEqnListExcept(pos1->clause->literals, pos2->literal,
subst, freshvars);
new_lside = TBInsertNoProps(bank, min_term, DEREF_ALWAYS);
new_rside = TBInsertNoProps(bank,
ClausePosGetOtherSide(pos2),
DEREF_ALWAYS);
new_condition = EqnAlloc(new_lside, new_rside, bank, false);
new_literals = EqnListCopyOptExcept(pos1->clause->literals,
pos1->literal);
EqnListInsertFirst(&new_literals, new_condition);
EqnListRemoveResolved(&new_literals);
EqnListRemoveDuplicates(new_literals);
new_clause = ClauseAlloc(new_literals);
}
}
SubstDelete(subst);
return new_clause;
}
/*-----------------------------------------------------------------------
//
// Function: ClausePosFirstEqualityFactorSides()
//
// Given a clause and two uninitialized positions, set the positions
// to the first potential pair of sides for an equality factoring
// inference. Return the second literal, or NULL if no legal pair
// exists.
//
// Global Variables: -
//
// Side Effects : Changes pos1, pos2
//
/----------------------------------------------------------------------*/
Eqn_p ClausePosFirstEqualityFactorSides(Clause_p clause, ClausePos_p
pos1, ClausePos_p pos2)
{
Term_p side;
Eqn_p lit = NULL;
assert(clause);
assert(pos1);
assert(pos2);
pos1->clause = clause;
pos1->literal = clause->literals;
side = ClausePosFindFirstMaximalSide(pos1, true);
if(side)
{
lit = find_first_eq_factor_partner(pos1, pos2);
}
return lit;
}
/*-----------------------------------------------------------------------
//
// Function: ClausePosNextEqualityFactorSides()
//
// Given a pair of positions pos1, pos2, compute the next potential
// positions for an equality factoring inference.
//
// Global Variables: -
//
// Side Effects : Changes pos1, pos2
//
/----------------------------------------------------------------------*/
Eqn_p ClausePosNextEqualityFactorSides(ClausePos_p pos1, ClausePos_p
pos2)
{
Eqn_p lit;
Term_p side;
if(pos2->side == LeftSide)
{
pos2->side = RightSide;
return pos2->literal;
}
pos2->side = LeftSide;
pos2->literal = pos2->literal->next;
lit = find_next_potential_eq_factor_partner(pos1, pos2);
if(!lit)
{
side = ClausePosFindNextMaximalSide(pos1, true);
if(side)
{
lit = find_first_eq_factor_partner(pos1, pos2);
}
}
return lit;
}
/*---------------------------------------------------------------------*/
/* End of File */
/*---------------------------------------------------------------------*/
| /*-----------------------------------------------------------------------
File : ccl_factor.c
Author: Stephan Schulz
Contents
Functions for ordered and equality factorisation.
Copyright 1998, 1999 by the author.
This code is released under the GNU General Public Licence and
the GNU Lesser General Public License.
See the file COPYING in the main E directory for details..
Run "eprover -h" for contact information.
Changes
<1> Sun May 31 19:50:22 MET DST 1998
New
<2> Tue Oct 13 15:31:57 MET DST 1998
Added Equality Factoring
-----------------------------------------------------------------------*/ |
dune |
(library
(public_name hpack)
(libraries angstrom faraday))
(rule
(targets huffman_table.ml)
(deps ../util/huffman_table.txt)
(action
(with-stdout-to
%{targets}
(run ../util/gen_huffman.exe %{deps}))))
| |
roman.ml |
let roman_vals =
[(900, "CM"); (500, "D"); (400, "CD"); (100, "C"); (100, "C"); (100, "C");
(90, "XC"); (50, "L"); (40, "XL"); (10, "X"); (10, "X"); (10, "X");
(9, "IX"); (5, "V"); (4, "IV"); (1, "I"); (1, "I"); (1, "I")]
let rec roman_recurse b n = function
| [] -> ()
| (n', s) :: t ->
if n >= n' then
begin
Buffer.add_string b s;
roman_recurse b (n - n') t
end
else
roman_recurse b n t
let rec roman b n =
if n < 1 then ()
else if n > 999 then
begin
for _ = 1 to n / 1000 do Buffer.add_char b 'M' done;
roman b (n mod 1000)
end
else
roman_recurse b n roman_vals
let roman_string_of_int n =
let b = Buffer.create 32 in
roman b n;
Buffer.contents b
| |
parser_shims.mli |
module List : sig
include module type of List
val find_map : ('a -> 'b option) -> 'a list -> 'b option
(** @since ocaml-4.10 *)
end
module Int : sig
include module type of Int
val min : int -> int -> int
(** @since ocaml-4.13.0 *)
val max : int -> int -> int
(** @since ocaml-4.13.0 *)
end
module Misc : sig
include module type of Misc
module Color : sig
include module type of Color
val default_setting : setting
(** @since ocaml-4.09 *)
end
module Error_style : sig
include module type of Error_style
val default_setting : setting
(** @since ocaml-4.09 *)
end
end
module Clflags : sig
val include_dirs : string list ref
val debug : bool ref
val unsafe : bool ref
val open_modules : string list ref
val absname : bool ref
val use_threads : bool ref
val principal : bool ref
val recursive_types : bool ref
val applicative_functors : bool ref
val for_package : string option ref
val transparent_modules : bool ref
val locations : bool ref
val color : Misc.Color.setting option ref
val error_style : Misc.Error_style.setting option ref
val unboxed_types : bool ref
end
| |
raw_hashes.mli | val blake2b : bytes -> bytes
val sha256 : bytes -> bytes
val sha512 : bytes -> bytes
val keccak256 : bytes -> bytes
val sha3_256 : bytes -> bytes
val sha3_512 : bytes -> bytes
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
t-is_zero.c |
#include "fq_zech_vec.h"
#ifdef T
#undef T
#endif
#define T fq_zech
#define CAP_T FQ_ZECH
#include "fq_vec_templates/test/t-is_zero.c"
#undef CAP_T
#undef T
| /*
Copyright (C) 2013 Mike Hansen
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
Src_file.ml | (*
Keep a copy of the source input, suitable for returning source code
from ranges of locations.
*)
open Printf
type source = File of string | Stdin | String | Channel
(* The contents of the source document. *)
type t = { source : source; contents : string }
let source x = x.source
let show_source = function
| File s -> s
| Stdin -> "<stdin>"
| String -> "<string>"
| Channel -> "<channel>"
let source_string x = x |> source |> show_source
let contents x = x.contents
let length x = String.length x.contents
let replace_contents x f = { x with contents = f x.contents }
let of_string ?(source = String) contents = { source; contents }
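(* Read at most [max_len] bytes from [ic] and return them as a string. *)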
let partial_input max_len ic =
let buf = Bytes.create max_len in
let rec read pos remaining =
if remaining > 0 then
let n_read = input ic buf pos remaining in
if n_read > 0 then read (pos + n_read) (remaining - n_read) else pos
else pos
in
let len = read 0 max_len in
Bytes.sub_string buf 0 len
let get_channel_length ic =
try Some (in_channel_length ic) with
| _ -> None
let input_all_from_nonseekable_channel ic =
let buf = Buffer.create 10000 in
try
while true do
bprintf buf "%s\n" (input_line ic)
done;
assert false
with
| End_of_file -> Buffer.contents buf
let of_channel ?(source = Channel) ?max_len ic =
let contents =
match max_len with
| None -> (
match get_channel_length ic with
| None -> input_all_from_nonseekable_channel ic
| Some len -> really_input_string ic len)
| Some max_len -> partial_input max_len ic
in
{ source; contents }
let of_stdin ?(source = Stdin) () = of_channel ~source stdin
let of_file ?source ?max_len file =
let source =
match source with
| None -> File file
| Some x -> x
in
(* This needs to work on named pipes such as those created by bash with
so-called "process substitution" (for those, 'in_channel_length' returns
but then we can't read the file again).
It's convenient for testing using the spacegrep command line:
$ spacegrep hello <(echo 'hello')
*)
let contents = Common.read_file ?max_len file in
{ source; contents }
let to_lexbuf x = Lexing.from_string x.contents
(* Find the index (position from the beginning of the string)
right after the end of the current line. *)
let rec find_end_of_line s i =
if i >= String.length s then i
else
match s.[i] with
| '\n' -> i + 1
| _ -> find_end_of_line s (i + 1)
(* Remove the trailing newline character if there is one. *)
let remove_trailing_newline s =
match s with
| "" -> ""
| s ->
let len = String.length s in
if s.[len - 1] = '\n' then String.sub s 0 (len - 1) (* nosem *) else s
(* Add a trailing newline character if the last character isn't a newline
(or there is no last character). *)
let ensure_newline s =
match s with
| "" -> ""
| s -> if s.[String.length s - 1] <> '\n' then s ^ "\n" else s
let insert_line_prefix prefix s =
if prefix = "" then s
else if s = "" then s
else
let buf = Buffer.create (2 * String.length s) in
Buffer.add_string buf prefix;
let len = String.length s in
for i = 0 to len - 1 do
let c = s.[i] in
Buffer.add_char buf c;
if c = '\n' && i < len - 1 then Buffer.add_string buf prefix
done;
Buffer.contents buf
let insert_highlight highlight s start end_ =
let len = String.length s in
if start < 0 || end_ > len || start > end_ then s
else
let buf = Buffer.create (2 * len) in
for i = 0 to start - 1 do
Buffer.add_char buf s.[i]
done;
let pos = ref start in
for i = start to end_ - 1 do
match s.[i] with
| '\n' ->
Buffer.add_string buf (highlight (String.sub s !pos (i - !pos)));
Buffer.add_char buf '\n';
pos := i + 1
| _ -> ()
done;
Buffer.add_string buf (highlight (String.sub s !pos (end_ - !pos)));
for i = end_ to len - 1 do
Buffer.add_char buf s.[i]
done;
Buffer.contents buf
(*
Same as String.sub but shrink the requested range to a valid range
if needed.
*)
let safe_string_sub s orig_start orig_len =
let s_len = String.length s in
let orig_end = orig_start + orig_len in
let start = min s_len (max 0 orig_start) in
let end_ = min s_len (max 0 orig_end) in
let len = max 0 (end_ - start) in
String.sub s start len
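(* For instance (illustrative values, not from the original source):
   safe_string_sub "abc" 1 10 = "bc" and safe_string_sub "abc" (-5) 2 = "". *)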
let region_of_pos_range x start_pos end_pos =
let open Lexing in
safe_string_sub x.contents start_pos.pos_cnum
(end_pos.pos_cnum - start_pos.pos_cnum)
let region_of_loc_range x (start_pos, _) (_, end_pos) =
region_of_pos_range x start_pos end_pos
let lines_of_pos_range ?(force_trailing_newline = true) ?highlight
?(line_prefix = "") x start_pos end_pos =
let s = x.contents in
let open Lexing in
let start = start_pos.pos_bol in
let match_start = start_pos.pos_cnum in
assert (match_start >= start);
let end_ = find_end_of_line s end_pos.pos_bol in
let match_end = end_pos.pos_cnum in
assert (match_end <= end_);
let lines =
let s = safe_string_sub s start (end_ - start) in
if force_trailing_newline then ensure_newline s else s
in
let with_highlight =
match highlight with
| None -> lines
| Some highlight ->
insert_highlight highlight lines (match_start - start)
(match_end - start)
in
insert_line_prefix line_prefix with_highlight
let lines_of_loc_range ?force_trailing_newline ?highlight ?line_prefix x
(start_pos, _) (_, end_pos) =
lines_of_pos_range ?force_trailing_newline ?highlight ?line_prefix x start_pos
end_pos
let list_lines_of_pos_range ?highlight ?line_prefix x start_pos end_pos =
let s =
lines_of_pos_range ~force_trailing_newline:false ?highlight ?line_prefix x
start_pos end_pos
in
remove_trailing_newline s |> String.split_on_char '\n'
let list_lines_of_loc_range ?highlight ?line_prefix x (start_pos, _) (_, end_pos)
=
list_lines_of_pos_range ?highlight ?line_prefix x start_pos end_pos
| (*
Keep a copy of the source input, suitable for returning source code
from ranges of locations.
*) |
cst.mli | (** Concrete syntax tree of s-expressions *)
(** This module exposes a type that describes the full contents of a source file containing
s-expressions.
One can use this type to do low-level rewriting of s-expression files.
*)
open! Import
type t =
| Atom of
{ loc : Positions.range
; atom : string
(** Source syntax of atom. The parser only fills this for atoms that are quoted in
the source, but it makes sense for unquoted atoms too (to ensure they get
printed unquoted). *)
; unescaped : string option
}
| List of
{ loc : Positions.range
; elements : t_or_comment list
}
and t_or_comment =
| Sexp of t
| Comment of comment
and comment =
| Plain_comment of
{ loc : Positions.range
; comment : string
} (** Line or block comment *)
| Sexp_comment of
{ hash_semi_pos : Positions.pos
; comments : comment list
; sexp : t
}
[@@deriving_inline sexp_of]
include sig
[@@@ocaml.warning "-32"]
val sexp_of_t : t -> Sexplib0.Sexp.t
val sexp_of_t_or_comment : t_or_comment -> Sexplib0.Sexp.t
val sexp_of_comment : comment -> Sexplib0.Sexp.t
end
[@@ocaml.doc "@inline"]
[@@@end]
val compare : t -> t -> int
val compare_t_or_comment : t_or_comment -> t_or_comment -> int
val compare_comment : comment -> comment -> int
module Forget : sig
val t : t -> Ppx_sexp_conv_lib.Sexp.t
val t_or_comment : t_or_comment -> Ppx_sexp_conv_lib.Sexp.t option
val t_or_comments : t_or_comment list -> Ppx_sexp_conv_lib.Sexp.t list
end
| (** Concrete syntax tree of s-expressions *)
|
dune |
(executables (public_names bar baz)) | |
ObjectFile.h |
//===- ObjectFile.h - File format independent object file -------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file declares a file format independent ObjectFile class.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_OBJECT_OBJECTFILE_H
#define LLVM_OBJECT_OBJECTFILE_H
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/BinaryFormat/Magic.h"
#include "llvm/MC/SubtargetFeature.h"
#include "llvm/Object/Binary.h"
#include "llvm/Object/Error.h"
#include "llvm/Object/SymbolicFile.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MemoryBuffer.h"
#include <cassert>
#include <cstdint>
#include <memory>
#include <system_error>
namespace llvm {
class ARMAttributeParser;
namespace object {
class COFFObjectFile;
class MachOObjectFile;
class ObjectFile;
class SectionRef;
class SymbolRef;
class symbol_iterator;
class WasmObjectFile;
using section_iterator = content_iterator<SectionRef>;
/// This is a value type class that represents a single relocation in the list
/// of relocations in the object file.
class RelocationRef {
DataRefImpl RelocationPimpl;
const ObjectFile *OwningObject = nullptr;
public:
RelocationRef() = default;
RelocationRef(DataRefImpl RelocationP, const ObjectFile *Owner);
bool operator==(const RelocationRef &Other) const;
void moveNext();
uint64_t getOffset() const;
symbol_iterator getSymbol() const;
uint64_t getType() const;
/// Get a string that represents the type of this relocation.
///
/// This is for display purposes only.
void getTypeName(SmallVectorImpl<char> &Result) const;
DataRefImpl getRawDataRefImpl() const;
const ObjectFile *getObject() const;
};
using relocation_iterator = content_iterator<RelocationRef>;
/// This is a value type class that represents a single section in the list of
/// sections in the object file.
class SectionRef {
friend class SymbolRef;
DataRefImpl SectionPimpl;
const ObjectFile *OwningObject = nullptr;
public:
SectionRef() = default;
SectionRef(DataRefImpl SectionP, const ObjectFile *Owner);
bool operator==(const SectionRef &Other) const;
bool operator!=(const SectionRef &Other) const;
bool operator<(const SectionRef &Other) const;
void moveNext();
Expected<StringRef> getName() const;
uint64_t getAddress() const;
uint64_t getIndex() const;
uint64_t getSize() const;
Expected<StringRef> getContents() const;
/// Get the alignment of this section as the actual value (not log 2).
uint64_t getAlignment() const;
bool isCompressed() const;
/// Whether this section contains instructions.
bool isText() const;
/// Whether this section contains data, not instructions.
bool isData() const;
/// Whether this section contains BSS uninitialized data.
bool isBSS() const;
bool isVirtual() const;
bool isBitcode() const;
bool isStripped() const;
/// Whether this section will be placed in the text segment, according to the
/// Berkeley size format. This is true if the section is allocatable, and
/// contains either code or readonly data.
bool isBerkeleyText() const;
/// Whether this section will be placed in the data segment, according to the
/// Berkeley size format. This is true if the section is allocatable and
/// contains data (e.g. PROGBITS), but is not text.
bool isBerkeleyData() const;
bool containsSymbol(SymbolRef S) const;
relocation_iterator relocation_begin() const;
relocation_iterator relocation_end() const;
iterator_range<relocation_iterator> relocations() const {
return make_range(relocation_begin(), relocation_end());
}
Expected<section_iterator> getRelocatedSection() const;
DataRefImpl getRawDataRefImpl() const;
const ObjectFile *getObject() const;
};
struct SectionedAddress {
const static uint64_t UndefSection = UINT64_MAX;
uint64_t Address = 0;
uint64_t SectionIndex = UndefSection;
};
inline bool operator<(const SectionedAddress &LHS,
const SectionedAddress &RHS) {
return std::tie(LHS.SectionIndex, LHS.Address) <
std::tie(RHS.SectionIndex, RHS.Address);
}
inline bool operator==(const SectionedAddress &LHS,
const SectionedAddress &RHS) {
return std::tie(LHS.SectionIndex, LHS.Address) ==
std::tie(RHS.SectionIndex, RHS.Address);
}
/// This is a value type class that represents a single symbol in the list of
/// symbols in the object file.
class SymbolRef : public BasicSymbolRef {
friend class SectionRef;
public:
enum Type {
ST_Unknown, // Type not specified
ST_Data,
ST_Debug,
ST_File,
ST_Function,
ST_Other
};
SymbolRef() = default;
SymbolRef(DataRefImpl SymbolP, const ObjectFile *Owner);
SymbolRef(const BasicSymbolRef &B) : BasicSymbolRef(B) {
assert(isa<ObjectFile>(BasicSymbolRef::getObject()));
}
Expected<StringRef> getName() const;
/// Returns the symbol virtual address (i.e. address at which it will be
/// mapped).
Expected<uint64_t> getAddress() const;
/// Return the value of the symbol depending on the object this can be an
/// offset or a virtual address.
uint64_t getValue() const;
/// Get the alignment of this symbol as the actual value (not log 2).
uint32_t getAlignment() const;
uint64_t getCommonSize() const;
Expected<SymbolRef::Type> getType() const;
/// Get section this symbol is defined in reference to. Result is
/// end_sections() if it is undefined or is an absolute symbol.
Expected<section_iterator> getSection() const;
const ObjectFile *getObject() const;
};
class symbol_iterator : public basic_symbol_iterator {
public:
symbol_iterator(SymbolRef Sym) : basic_symbol_iterator(Sym) {}
symbol_iterator(const basic_symbol_iterator &B)
: basic_symbol_iterator(SymbolRef(B->getRawDataRefImpl(),
cast<ObjectFile>(B->getObject()))) {}
const SymbolRef *operator->() const {
const BasicSymbolRef &P = basic_symbol_iterator::operator *();
return static_cast<const SymbolRef*>(&P);
}
const SymbolRef &operator*() const {
const BasicSymbolRef &P = basic_symbol_iterator::operator *();
return static_cast<const SymbolRef&>(P);
}
};
/// This class is the base class for all object file types. Concrete instances
/// of this object are created by createObjectFile, which figures out which type
/// to create.
class ObjectFile : public SymbolicFile {
virtual void anchor();
protected:
ObjectFile(unsigned int Type, MemoryBufferRef Source);
const uint8_t *base() const {
return reinterpret_cast<const uint8_t *>(Data.getBufferStart());
}
// These functions are for SymbolRef to call internally. The main goal of
// this is to allow SymbolRef::SymbolPimpl to point directly to the symbol
// entry in the memory mapped object file. SymbolPimpl cannot contain any
// virtual functions because then it could not point into the memory mapped
// file.
//
// Implementations assume that the DataRefImpl is valid and has not been
// modified externally. It's UB otherwise.
friend class SymbolRef;
virtual Expected<StringRef> getSymbolName(DataRefImpl Symb) const = 0;
Error printSymbolName(raw_ostream &OS,
DataRefImpl Symb) const override;
virtual Expected<uint64_t> getSymbolAddress(DataRefImpl Symb) const = 0;
virtual uint64_t getSymbolValueImpl(DataRefImpl Symb) const = 0;
virtual uint32_t getSymbolAlignment(DataRefImpl Symb) const;
virtual uint64_t getCommonSymbolSizeImpl(DataRefImpl Symb) const = 0;
virtual Expected<SymbolRef::Type> getSymbolType(DataRefImpl Symb) const = 0;
virtual Expected<section_iterator>
getSymbolSection(DataRefImpl Symb) const = 0;
// Same as above for SectionRef.
friend class SectionRef;
virtual void moveSectionNext(DataRefImpl &Sec) const = 0;
virtual Expected<StringRef> getSectionName(DataRefImpl Sec) const = 0;
virtual uint64_t getSectionAddress(DataRefImpl Sec) const = 0;
virtual uint64_t getSectionIndex(DataRefImpl Sec) const = 0;
virtual uint64_t getSectionSize(DataRefImpl Sec) const = 0;
virtual Expected<ArrayRef<uint8_t>>
getSectionContents(DataRefImpl Sec) const = 0;
virtual uint64_t getSectionAlignment(DataRefImpl Sec) const = 0;
virtual bool isSectionCompressed(DataRefImpl Sec) const = 0;
virtual bool isSectionText(DataRefImpl Sec) const = 0;
virtual bool isSectionData(DataRefImpl Sec) const = 0;
virtual bool isSectionBSS(DataRefImpl Sec) const = 0;
// A section is 'virtual' if its contents aren't present in the object image.
virtual bool isSectionVirtual(DataRefImpl Sec) const = 0;
virtual bool isSectionBitcode(DataRefImpl Sec) const;
virtual bool isSectionStripped(DataRefImpl Sec) const;
virtual bool isBerkeleyText(DataRefImpl Sec) const;
virtual bool isBerkeleyData(DataRefImpl Sec) const;
virtual relocation_iterator section_rel_begin(DataRefImpl Sec) const = 0;
virtual relocation_iterator section_rel_end(DataRefImpl Sec) const = 0;
virtual Expected<section_iterator> getRelocatedSection(DataRefImpl Sec) const;
// Same as above for RelocationRef.
friend class RelocationRef;
virtual void moveRelocationNext(DataRefImpl &Rel) const = 0;
virtual uint64_t getRelocationOffset(DataRefImpl Rel) const = 0;
virtual symbol_iterator getRelocationSymbol(DataRefImpl Rel) const = 0;
virtual uint64_t getRelocationType(DataRefImpl Rel) const = 0;
virtual void getRelocationTypeName(DataRefImpl Rel,
SmallVectorImpl<char> &Result) const = 0;
uint64_t getSymbolValue(DataRefImpl Symb) const;
public:
ObjectFile() = delete;
ObjectFile(const ObjectFile &other) = delete;
uint64_t getCommonSymbolSize(DataRefImpl Symb) const {
assert(getSymbolFlags(Symb) & SymbolRef::SF_Common);
return getCommonSymbolSizeImpl(Symb);
}
virtual std::vector<SectionRef> dynamic_relocation_sections() const {
return std::vector<SectionRef>();
}
using symbol_iterator_range = iterator_range<symbol_iterator>;
symbol_iterator_range symbols() const {
return symbol_iterator_range(symbol_begin(), symbol_end());
}
virtual section_iterator section_begin() const = 0;
virtual section_iterator section_end() const = 0;
using section_iterator_range = iterator_range<section_iterator>;
section_iterator_range sections() const {
return section_iterator_range(section_begin(), section_end());
}
/// The number of bytes used to represent an address in this object
/// file format.
virtual uint8_t getBytesInAddress() const = 0;
virtual StringRef getFileFormatName() const = 0;
virtual Triple::ArchType getArch() const = 0;
virtual SubtargetFeatures getFeatures() const = 0;
virtual void setARMSubArch(Triple &TheTriple) const { }
virtual Expected<uint64_t> getStartAddress() const {
// XXX BINARYEN
llvm_unreachable("getStartAddress");
//return errorCodeToError(object_error::parse_failed);
};
/// Create a triple from the data in this object file.
Triple makeTriple() const;
/// Maps a debug section name to a standard DWARF section name.
virtual StringRef mapDebugSectionName(StringRef Name) const { return Name; }
/// True if this is a relocatable object (.o/.obj).
virtual bool isRelocatableObject() const = 0;
/// @returns Pointer to ObjectFile subclass to handle this type of object.
/// @param ObjectPath The path to the object file. ObjectPath.isObject must
/// return true.
/// Create ObjectFile from path.
static Expected<OwningBinary<ObjectFile>>
createObjectFile(StringRef ObjectPath);
static Expected<std::unique_ptr<ObjectFile>>
createObjectFile(MemoryBufferRef Object, llvm::file_magic Type);
static Expected<std::unique_ptr<ObjectFile>>
createObjectFile(MemoryBufferRef Object) {
return createObjectFile(Object, llvm::file_magic::unknown);
}
static bool classof(const Binary *v) {
return v->isObject();
}
static Expected<std::unique_ptr<COFFObjectFile>>
createCOFFObjectFile(MemoryBufferRef Object);
static Expected<std::unique_ptr<ObjectFile>>
createXCOFFObjectFile(MemoryBufferRef Object, unsigned FileType);
static Expected<std::unique_ptr<ObjectFile>>
createELFObjectFile(MemoryBufferRef Object);
static Expected<std::unique_ptr<MachOObjectFile>>
createMachOObjectFile(MemoryBufferRef Object,
uint32_t UniversalCputype = 0,
uint32_t UniversalIndex = 0);
static Expected<std::unique_ptr<WasmObjectFile>>
createWasmObjectFile(MemoryBufferRef Object);
};
// Inline function definitions.
inline SymbolRef::SymbolRef(DataRefImpl SymbolP, const ObjectFile *Owner)
: BasicSymbolRef(SymbolP, Owner) {}
inline Expected<StringRef> SymbolRef::getName() const {
return getObject()->getSymbolName(getRawDataRefImpl());
}
inline Expected<uint64_t> SymbolRef::getAddress() const {
return getObject()->getSymbolAddress(getRawDataRefImpl());
}
inline uint64_t SymbolRef::getValue() const {
return getObject()->getSymbolValue(getRawDataRefImpl());
}
inline uint32_t SymbolRef::getAlignment() const {
return getObject()->getSymbolAlignment(getRawDataRefImpl());
}
inline uint64_t SymbolRef::getCommonSize() const {
return getObject()->getCommonSymbolSize(getRawDataRefImpl());
}
inline Expected<section_iterator> SymbolRef::getSection() const {
return getObject()->getSymbolSection(getRawDataRefImpl());
}
inline Expected<SymbolRef::Type> SymbolRef::getType() const {
return getObject()->getSymbolType(getRawDataRefImpl());
}
inline const ObjectFile *SymbolRef::getObject() const {
const SymbolicFile *O = BasicSymbolRef::getObject();
return cast<ObjectFile>(O);
}
/// SectionRef
inline SectionRef::SectionRef(DataRefImpl SectionP,
const ObjectFile *Owner)
: SectionPimpl(SectionP)
, OwningObject(Owner) {}
inline bool SectionRef::operator==(const SectionRef &Other) const {
return OwningObject == Other.OwningObject &&
SectionPimpl == Other.SectionPimpl;
}
inline bool SectionRef::operator!=(const SectionRef &Other) const {
return !(*this == Other);
}
inline bool SectionRef::operator<(const SectionRef &Other) const {
assert(OwningObject == Other.OwningObject);
return SectionPimpl < Other.SectionPimpl;
}
inline void SectionRef::moveNext() {
return OwningObject->moveSectionNext(SectionPimpl);
}
inline Expected<StringRef> SectionRef::getName() const {
return OwningObject->getSectionName(SectionPimpl);
}
inline uint64_t SectionRef::getAddress() const {
return OwningObject->getSectionAddress(SectionPimpl);
}
inline uint64_t SectionRef::getIndex() const {
return OwningObject->getSectionIndex(SectionPimpl);
}
inline uint64_t SectionRef::getSize() const {
return OwningObject->getSectionSize(SectionPimpl);
}
inline Expected<StringRef> SectionRef::getContents() const {
Expected<ArrayRef<uint8_t>> Res =
OwningObject->getSectionContents(SectionPimpl);
if (!Res)
return Res.takeError();
return StringRef(reinterpret_cast<const char *>(Res->data()), Res->size());
}
inline uint64_t SectionRef::getAlignment() const {
return OwningObject->getSectionAlignment(SectionPimpl);
}
inline bool SectionRef::isCompressed() const {
return OwningObject->isSectionCompressed(SectionPimpl);
}
inline bool SectionRef::isText() const {
return OwningObject->isSectionText(SectionPimpl);
}
inline bool SectionRef::isData() const {
return OwningObject->isSectionData(SectionPimpl);
}
inline bool SectionRef::isBSS() const {
return OwningObject->isSectionBSS(SectionPimpl);
}
inline bool SectionRef::isVirtual() const {
return OwningObject->isSectionVirtual(SectionPimpl);
}
inline bool SectionRef::isBitcode() const {
return OwningObject->isSectionBitcode(SectionPimpl);
}
inline bool SectionRef::isStripped() const {
return OwningObject->isSectionStripped(SectionPimpl);
}
inline bool SectionRef::isBerkeleyText() const {
return OwningObject->isBerkeleyText(SectionPimpl);
}
inline bool SectionRef::isBerkeleyData() const {
return OwningObject->isBerkeleyData(SectionPimpl);
}
inline relocation_iterator SectionRef::relocation_begin() const {
return OwningObject->section_rel_begin(SectionPimpl);
}
inline relocation_iterator SectionRef::relocation_end() const {
return OwningObject->section_rel_end(SectionPimpl);
}
inline Expected<section_iterator> SectionRef::getRelocatedSection() const {
return OwningObject->getRelocatedSection(SectionPimpl);
}
inline DataRefImpl SectionRef::getRawDataRefImpl() const {
return SectionPimpl;
}
inline const ObjectFile *SectionRef::getObject() const {
return OwningObject;
}
/// RelocationRef
inline RelocationRef::RelocationRef(DataRefImpl RelocationP,
const ObjectFile *Owner)
: RelocationPimpl(RelocationP)
, OwningObject(Owner) {}
inline bool RelocationRef::operator==(const RelocationRef &Other) const {
return RelocationPimpl == Other.RelocationPimpl;
}
inline void RelocationRef::moveNext() {
return OwningObject->moveRelocationNext(RelocationPimpl);
}
inline uint64_t RelocationRef::getOffset() const {
return OwningObject->getRelocationOffset(RelocationPimpl);
}
inline symbol_iterator RelocationRef::getSymbol() const {
return OwningObject->getRelocationSymbol(RelocationPimpl);
}
inline uint64_t RelocationRef::getType() const {
return OwningObject->getRelocationType(RelocationPimpl);
}
inline void RelocationRef::getTypeName(SmallVectorImpl<char> &Result) const {
return OwningObject->getRelocationTypeName(RelocationPimpl, Result);
}
inline DataRefImpl RelocationRef::getRawDataRefImpl() const {
return RelocationPimpl;
}
inline const ObjectFile *RelocationRef::getObject() const {
return OwningObject;
}
} // end namespace object
template <> struct DenseMapInfo<object::SectionRef> {
static bool isEqual(const object::SectionRef &A,
const object::SectionRef &B) {
return A == B;
}
static object::SectionRef getEmptyKey() {
return object::SectionRef({}, nullptr);
}
static object::SectionRef getTombstoneKey() {
object::DataRefImpl TS;
TS.p = (uintptr_t)-1;
return object::SectionRef(TS, nullptr);
}
static unsigned getHashValue(const object::SectionRef &Sec) {
object::DataRefImpl Raw = Sec.getRawDataRefImpl();
return hash_combine(Raw.p, Raw.d.a, Raw.d.b);
}
};
} // end namespace llvm
#endif // LLVM_OBJECT_OBJECTFILE_H
| |
optimizations.mli |
open Ast
val optimize_program : 'm program -> Shared_ast.untyped program
(** Warning/todo: no effort was yet made to ensure correct propagation of type
annotations in the typed case *)
| (* This file is part of the Catala compiler, a specification language for tax
and social benefits computation rules. Copyright (C) 2020 Inria, contributor:
Denis Merigoux <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *) |
dune |
(library
(name carton_git)
(public_name carton-git)
(modules carton_git)
(libraries logs decompress.zl lwt decompress.de bigstringaf fmt carton))
(library
(name carton_git_unix)
(public_name carton-git.unix)
(modules carton_git_unix)
(libraries
astring
result
bigstringaf
fmt
lwt
fpath
carton
carton-lwt
carton-git
unix
lwt.unix))
| |
config.mli | (* System configuration *)
val version: string
(* The current version number of the system *)
val interface_suffix: string ref
(* Suffix for interface file names *)
val exec_magic_number: string
(* Magic number for bytecode executable files *)
val cmi_magic_number: string
(* Magic number for compiled interface files *)
val cmo_magic_number: string
(* Magic number for object bytecode files *)
val cma_magic_number: string
(* Magic number for archive files *)
val cmx_magic_number: string
(* Magic number for compilation unit descriptions *)
val cmxa_magic_number: string
(* Magic number for libraries of compilation unit descriptions *)
val ast_intf_magic_number: string
(* Magic number for file holding an interface syntax tree *)
val ast_impl_magic_number: string
(* Magic number for file holding an implementation syntax tree *)
val cmxs_magic_number: string
(* Magic number for dynamically-loadable plugins *)
val cmt_magic_number: string
(* Magic number for compiled interface files *)
val max_tag: int
(* Biggest tag that can be stored in the header of a regular block. *)
val safe_string: bool
val flat_float_array: bool
(**/**)
val merlin : bool
(**/**)
| (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
|
owl_dataset.ml |
(** Dataset: easy access to various datasets *)
open Owl_types
let remote_data_path () = "https://github.com/ryanrhymes/owl_dataset/raw/master/"
let local_data_path () : string =
let home = Sys.getenv "HOME" ^ "/.owl" in
let d = home ^ "/dataset/" in
Owl_log.info "create %s if not present" d;
(* Note: use of Sys.file_exists is racy *)
(try Unix.mkdir home 0o755 with
| Unix.Unix_error (EEXIST, _, _) -> ());
(try Unix.mkdir d 0o755 with
| Unix.Unix_error (EEXIST, _, _) -> ());
d
let download_data fname =
let fn0 = remote_data_path () ^ fname in
let fn1 = local_data_path () ^ fname in
let cmd0 = "wget " ^ fn0 ^ " -O " ^ fn1 in
let cmd1 = "gunzip " ^ fn1 in
ignore (Sys.command cmd0);
ignore (Sys.command cmd1)
let download_all () =
let l =
[ "stopwords.txt.gz"
; "enron.test.gz"
; "enron.train.gz"
; "nips.test.gz"
; "nips.train.gz"
; "mnist-test-images.gz"
; "mnist-test-labels.gz"
; "mnist-test-lblvec.gz"
; "mnist-train-images.gz"
; "mnist-train-labels.gz"
; "mnist-train-lblvec.gz"
; "cifar10_test_data.gz"
; "cifar10_test_labels.gz"
; "cifar10_test_filenames.gz"
; "cifar10_test_lblvec.gz"
; "cifar10_train1_data.gz"
; "cifar10_train1_labels.gz"
; "cifar10_train1_filenames.gz"
; "cifar10_train1_lblvec.gz"
; "cifar10_train2_data.gz"
; "cifar10_train2_labels.gz"
; "cifar10_train2_filenames.gz"
; "cifar10_train2_lblvec.gz"
; "cifar10_train3_data.gz"
; "cifar10_train3_labels.gz"
; "cifar10_train3_filenames.gz"
; "cifar10_train3_lblvec.gz"
; "cifar10_train4_data.gz"
; "cifar10_train4_labels.gz"
; "cifar10_train4_filenames.gz"
; "cifar10_train4_lblvec.gz"
; "cifar10_train5_data.gz"
; "cifar10_train5_labels.gz"
; "cifar10_train5_filenames.gz"
; "cifar10_train5_lblvec.gz"
]
in
List.iter (fun fname -> download_data fname) l
let draw_samples x y n =
let x', y', _ = Owl_dense_matrix_generic.draw_rows2 ~replacement:false x y n in
x', y'
(* load mnist train data; the return value is a triplet. The first is a 60000 x 784
matrix where each row represents a 28 x 28 image. The second is the label and the
third is the corresponding unravelled row vector of the label. *)
let load_mnist_train_data () =
let p = local_data_path () in
( Owl_dense_matrix.S.load (p ^ "mnist-train-images")
, Owl_dense_matrix.S.load (p ^ "mnist-train-labels")
, Owl_dense_matrix.S.load (p ^ "mnist-train-lblvec") )
let load_mnist_test_data () =
let p = local_data_path () in
( Owl_dense_matrix.S.load (p ^ "mnist-test-images")
, Owl_dense_matrix.S.load (p ^ "mnist-test-labels")
, Owl_dense_matrix.S.load (p ^ "mnist-test-lblvec") )
let print_mnist_image x =
Owl_dense_matrix_generic.reshape x [| 28; 28 |]
|> Owl_dense_matrix_generic.iter_rows (fun v ->
Owl_dense_matrix_generic.iter
(function
| 0. -> Printf.printf " "
| _ -> Printf.printf "■")
v;
print_endline "")
(* similar to load_mnist_train_data but returns [x] as [*,28,28,1] ndarray *)
let load_mnist_train_data_arr () =
let x, label, y = load_mnist_train_data () in
let m = Owl_dense_matrix.S.row_num x in
let x = Owl_dense_ndarray.S.reshape x [| m; 28; 28; 1 |] in
x, label, y
let load_mnist_test_data_arr () =
let x, label, y = load_mnist_test_data () in
let m = Owl_dense_matrix.S.row_num x in
let x = Owl_dense_ndarray.S.reshape x [| m; 28; 28; 1 |] in
x, label, y
(* load cifar train data, there are five batches in total. The loaded data is a
10000 * 3072 matrix. Each row represents a 32 x 32 image of three colour
channels, unravelled into a row vector. The labels are also returned. *)
let load_cifar_train_data batch =
let p = local_data_path () in
( Owl_dense_ndarray.S.load (p ^ "cifar10_train" ^ string_of_int batch ^ "_data")
, Owl_dense_matrix.S.load (p ^ "cifar10_train" ^ string_of_int batch ^ "_labels")
, Owl_dense_matrix.S.load (p ^ "cifar10_train" ^ string_of_int batch ^ "_lblvec") )
let load_cifar_test_data () =
let p = local_data_path () in
( Owl_dense_ndarray.S.load (p ^ "cifar10_test_data")
, Owl_dense_matrix.S.load (p ^ "cifar10_test_labels")
, Owl_dense_matrix.S.load (p ^ "cifar10_test_lblvec") )
let draw_samples_cifar x y n =
let col_num = (Owl_dense_ndarray_generic.shape x).(0) in
let a = Array.init col_num (fun i -> i) in
let a = Owl_stats.choose a n |> Array.to_list in
( Owl_dense_ndarray.S.get_fancy [ L a; R []; R []; R [] ] x
, Owl_dense_matrix.S.get_fancy [ L a; R [] ] y )
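(* Editor's illustration (a minimal sketch, not part of the original module;
   it assumes the CIFAR-10 files have already been fetched with download_all ()):
   load the first training batch and draw a small random subset from it.
     let () =
       let x, _labels, y = load_cifar_train_data 1 in
       let x', y' = draw_samples_cifar x y 8 in
       ignore (x', y')
*)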
(* load text data and stopwords *)
let load_stopwords () =
let p = local_data_path () in
Owl_nlp_utils.load_stopwords (p ^ "stopwords.txt")
let load_nips_train_data stopwords =
let p = local_data_path () in
Owl_nlp_utils.load_from_file ~stopwords (p ^ "nips.train")
| (*
* OWL - OCaml Scientific and Engineering Computing
* Copyright (c) 2016-2020 Liang Wang <[email protected]>
*) |
dune |
(library
(name hexstring)
(public_name hexstring)
(libraries)
(inline_tests)
(preprocess
(pps ppx_inline_test)))
| |
float_block_1.ml | (* TEST *)
(* Effects are not named to allow different evaluation orders (flambda
and clambda differ on this point).
*)
let test =
let rec x = print_endline "effect"; [| 1; 2; 3 |]
and y = print_endline "effect"; [| 1.; 2.; 3. |]
in
assert (x = [| 1; 2; 3 |]);
assert (y = [| 1.; 2.; 3. |]);
()
| (* TEST *)
|
all.c |
#include "bool_mat.h"
int
bool_mat_all(const bool_mat_t mat)
{
slong i, j;
if (bool_mat_is_empty(mat))
return 1;
for (i = 0; i < bool_mat_nrows(mat); i++)
for (j = 0; j < bool_mat_ncols(mat); j++)
if (!bool_mat_get_entry(mat, i, j))
return 0;
return 1;
}
| /*
Copyright (C) 2016 Arb authors
This file is part of Arb.
Arb is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <http://www.gnu.org/licenses/>.
*/ |
raw_context_intf.ml | (** All context manipulation functions. This signature is included
as-is for direct context accesses, and used in {!Storage_functors}
to provide restricted views to the context. *)
(** The tree depth of a fold. See the [fold] function for more information. *)
type depth = [`Eq of int | `Le of int | `Lt of int | `Ge of int | `Gt of int]
(** The type for context configuration. If two trees or stores have the
same configuration, they will generate the same context hash. *)
type config = Context.config
module type VIEW = sig
(* Same as [Environment_context.VIEW] but with extra getter and
setter functions. *)
(** The type for context views. *)
type t
(** The type for context keys. *)
type key = string list
(** The type for context values. *)
type value = bytes
(** The type for context trees. *)
type tree
(** {2 Getters} *)
(** [mem t k] is an Lwt promise that resolves to [true] iff [k] is bound
to a value in [t]. *)
val mem : t -> key -> bool Lwt.t
(** [mem_tree t k] is like {!mem} but for trees. *)
val mem_tree : t -> key -> bool Lwt.t
(** [get t k] is an Lwt promise that resolves to [Ok v] if [k] is
bound to the value [v] in [t] and {!Storage_error Missing_key}
otherwise. *)
val get : t -> key -> value tzresult Lwt.t
(** [get_tree] is like {!get} but for trees. *)
val get_tree : t -> key -> tree tzresult Lwt.t
(** [find t k] is an Lwt promise that resolves to [Some v] if [k] is
bound to the value [v] in [t] and [None] otherwise. *)
val find : t -> key -> value option Lwt.t
(** [find_tree t k] is like {!find} but for trees. *)
val find_tree : t -> key -> tree option Lwt.t
(** [list t key] is the list of files and sub-nodes stored under [k] in [t].
The result order is not specified but is stable.
[offset] and [length] are used for pagination. *)
val list :
t -> ?offset:int -> ?length:int -> key -> (string * tree) list Lwt.t
(** {2 Setters} *)
(** [init t k v] is an Lwt promise that resolves to [Ok c] if:
- [k] is unbound in [t];
- [k] is bound to [v] in [c];
- and [c] is similar to [t] otherwise.
It is {!Storage_error Existing_key} if [k] is already bound in [t]. *)
val init : t -> key -> value -> t tzresult Lwt.t
(** [init_tree] is like {!init} but for trees. *)
val init_tree : t -> key -> tree -> t tzresult Lwt.t
(** [update t k v] is an Lwt promise that resolves to [Ok c] if:
- [k] is bound in [t];
- [k] is bound to [v] in [c];
- and [c] is similar to [t] otherwise.
It is {!Storage_error Missing_key} if [k] is not already bound in [t]. *)
val update : t -> key -> value -> t tzresult Lwt.t
(** [update_tree] is like {!update} but for trees. *)
val update_tree : t -> key -> tree -> t tzresult Lwt.t
(** [add t k v] is an Lwt promise that resolves to [c] such that:
- [k] is bound to [v] in [c];
- and [c] is similar to [t] otherwise.
If [k] was already bound in [t] to a value that is physically equal
to [v], the result of the function is a promise that resolves to
[t]. Otherwise, the previous binding of [k] in [t] disappears. *)
val add : t -> key -> value -> t Lwt.t
(** [add_tree] is like {!add} but for trees. *)
val add_tree : t -> key -> tree -> t Lwt.t
(** [remove t k] is an Lwt promise that resolves to [c] such that:
- [k] is unbound in [c];
- and [c] is similar to [t] otherwise. *)
val remove : t -> key -> t Lwt.t
(** [remove_existing t k] is an Lwt promise that resolves to [Ok c] if:
- [k] is bound in [t] to a value;
- [k] is unbound in [c];
- and [c] is similar to [t] otherwise.*)
val remove_existing : t -> key -> t tzresult Lwt.t
(** [remove_existing_tree t k] is an Lwt promise that resolves to [Ok c] if:
- [k] is bound in [t] to a tree;
- [k] is unbound in [c];
- and [c] is similar to [t] otherwise.*)
val remove_existing_tree : t -> key -> t tzresult Lwt.t
(** [add_or_remove t k v] is:
- [add t k x] if [v] is [Some x];
- [remove t k] otherwise. *)
val add_or_remove : t -> key -> value option -> t Lwt.t
(** [add_or_remove_tree t k v] is:
- [add_tree t k x] if [v] is [Some x];
- [remove t k] otherwise. *)
val add_or_remove_tree : t -> key -> tree option -> t Lwt.t
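(** Editor's illustration (a minimal sketch, not part of the original
    interface; [C] stands for any implementation of this signature and the
    ["counter"] key is hypothetical): [init] only succeeds on an unbound key
    and [update] only on a bound one, so the two calls below compose.
    {[
      let bump (ctxt : C.t) : C.t tzresult Lwt.t =
        Lwt.bind (C.init ctxt ["counter"] (Bytes.of_string "0")) (function
            | Error _ as e -> Lwt.return e
            | Ok ctxt -> C.update ctxt ["counter"] (Bytes.of_string "1"))
    ]} *)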
(** {2 Folds} *)
(** [fold ?depth t root ~order ~init ~f] recursively folds over the trees
and values of [t]. The [f] callbacks are called with a key relative
to [root]. [f] is never called with an empty key for values; i.e.,
folding over a value is a no-op.
The depth is 0-indexed. If [depth] is set (by default it is not), then [f]
is only called when the condition described by the parameter holds:
- [Eq d] folds over nodes and values of depth exactly [d].
- [Lt d] folds over nodes and values of depth strictly less than [d].
- [Le d] folds over nodes and values of depth less than or equal to [d].
- [Gt d] folds over nodes and values of depth strictly more than [d].
- [Ge d] folds over nodes and values of depth more than or equal to [d].
If [order] is [`Sorted] (the default), the elements are traversed in
lexicographic order of their keys. For large nodes, this is memory-consuming;
use [`Undefined] for a more memory-efficient [fold]. *)
val fold :
?depth:depth ->
t ->
key ->
order:[`Sorted | `Undefined] ->
init:'a ->
f:(key -> tree -> 'a -> 'a Lwt.t) ->
'a Lwt.t
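(** Editor's illustration (a minimal sketch, not part of the original
    interface; [C] stands for any implementation of this signature and
    ["contracts"] is a hypothetical key): collecting the immediate children
    of a directory with a depth-limited fold.
    {[
      let immediate_children (t : C.t) : C.key list Lwt.t =
        C.fold ~depth:(`Eq 1) t ["contracts"] ~order:`Sorted ~init:[]
          ~f:(fun key _tree acc -> Lwt.return (key :: acc))
    ]} *)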
(** {2 Hash configurations} *)
(** [config t] is [t]'s hash configuration. *)
val config : t -> config
(** [length t key] is an Lwt promise that resolves to the number of files and
sub-nodes stored under [k] in [t].
It is equivalent to [list t k >|= List.length] but has a constant-time
complexity.
Most of the time, this function does not perform any I/O as the length is
cached in the tree. It may perform one read to load the root node of the
tree in case it has not been loaded already. The initial constant is the
same between [list] and [length]. They both perform the same kind of I/O
reads. While [list] usually performs a linear number of reads, [length]
does at most one. *)
val length : t -> key -> int Lwt.t
end
module Kind = struct
type t = [`Value | `Tree]
end
module type TREE = sig
(** [Tree] provides immutable, in-memory partial mirror of the
context, with lazy reads and delayed writes. The trees are Merkle
trees that carry the same hash as the part of the context they
mirror.
Trees are immutable and non-persistent (they disappear if the
host crash), held in memory for efficiency, where reads are done
lazily and writes are done only when needed, e.g. on
[Context.commit]. If a key is modified twice, only the last
value will be written to disk on commit. *)
(** The type for context views. *)
type t
(** The type for context trees. *)
type tree
include VIEW with type t := tree and type tree := tree
(** [empty _] is the empty tree. *)
val empty : t -> tree
(** [is_empty t] is true iff [t] is [empty _]. *)
val is_empty : tree -> bool
(** [kind t] is [t]'s kind. It's either a tree node or a leaf
value. *)
val kind : tree -> Kind.t
(** [to_value t] is an Lwt promise that resolves to [Some v] if [t]
is a leaf tree and [None] otherwise. It is equivalent to [find t
[]]. *)
val to_value : tree -> value option Lwt.t
(** [hash t] is [t]'s Merkle hash. *)
val hash : tree -> Context_hash.t
(** [equal x y] is true iff [x] and [y] have the same Merkle hash. *)
val equal : tree -> tree -> bool
(** {2 Caches} *)
(** [clear ?depth t] clears all caches in the tree [t] for subtrees with a
depth higher than [depth]. If [depth] is not set, all of the subtrees are
cleared. *)
val clear : ?depth:int -> tree -> unit
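(** Editor's illustration (a minimal sketch, not part of the original
    interface; [C] is a context module whose [Tree] submodule implements this
    signature, and the key and value are hypothetical): building a small tree
    and taking its Merkle hash, entirely in memory as described above.
    {[
      let tiny_tree_hash (ctxt : C.t) : Context_hash.t Lwt.t =
        let open Lwt.Syntax in
        let t = C.Tree.empty ctxt in
        let* t = C.Tree.add t ["a"] (Bytes.of_string "1") in
        Lwt.return (C.Tree.hash t)
    ]} *)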
end
module type PROOF = sig
(** Proofs are compact representations of trees which can be shared
between peers.
This is expected to be used as follows:
- A first peer runs a function [f] over a tree [t]. While performing
this computation, it records: the hash of [t] (called [before]
below), the hash of [f t] (called [after] below) and a subset of [t]
which is needed to replay [f] without any access to the first peer's
storage. Once done, all this information is packed into a proof of
type [t] that is sent to the second peer.
- The second peer generates an initial tree [t'] from [p] and computes
[f t']. Once done, it compares [t']'s hash and [f t']'s hash to [before]
and [after]. If they match, it knows that the result state [f t'] is a
valid context state, without having to have access to the full storage
of the first peer. *)
(** The type for file and directory names. *)
type step = string
(** The type for values. *)
type value = bytes
(** The type of indices for inodes' children. *)
type index = int
(** The type for hashes. *)
type hash = Context_hash.t
(** The type for (internal) inode proofs.
These proofs encode large directories into a tree-like structure. This
reflects irmin-pack's way of representing nodes and computing
hashes (tree-like representations for nodes scale better than flat
representations).
[length] is the total number of entries in the children of the inode.
It's the size of the "flattened" version of that inode. [length] can be
used to prove the correctness of operations such [Tree.length] and
[Tree.list ~offset ~length] in an efficient way.
In proofs with [version.is_binary = false], an inode at depth 0 has a
[length] of at least [257]. Below that threshold a [Node] tag is used in
[tree]. That threshold is [3] when [version.is_binary = true].
[proofs] contains the children proofs. It is a sparse list of ['a] values.
These values are associated to their index in the list, and the list is
kept sorted in increasing order of indices. ['a] can be a concrete proof
or a hash of that proof.
In proofs with [version.is_binary = true], inodes have at most 2 proofs
(indexed 0 or 1).
In proofs with [version.is_binary = false], inodes have at most 32 proofs
(indexed from 0 to 31). *)
type 'a inode = {length : int; proofs : (index * 'a) list}
(** The type for inode extenders.
An extender is a compact representation of a sequence of [inode] which
contain only one child. As for inodes, The ['a] parameter can be a
concrete proof or a hash of that proof.
If an inode proof contains singleton children [i_0, ..., i_n] such as:
[{length=l; proofs = [ (i_0, {proofs = ... { proofs = [ (i_n, p) ] }})]}],
then it is compressed into the inode extender
[{length=l; segment = [i_0;..;i_n]; proof=p}] sharing the same length [l]
and final proof [p]. *)
type 'a inode_extender = {length : int; segment : index list; proof : 'a}
(** The type for compressed and partial Merkle tree proofs.
Tree proofs do not provide any guarantee with the ordering of
computations. For instance, if two effects commute, they won't be
distinguishable by this kind of proofs.
[Value v] proves that a value [v] exists in the store.
[Blinded_value h] proves a value with hash [h] exists in the store.
[Node ls] proves that a "flat" node containing the list of files [ls]
exists in the store.
In proofs with [version.is_binary = true], the length of [ls] is at most
2.
In proofs with [version.is_binary = false], the length of [ls] is at most
256.
[Blinded_node h] proves that a node with hash [h] exists in the store.
[Inode i] proves that an inode [i] exists in the store.
[Extender e] proves that an inode extender [e] exists in the store. *)
type tree =
| Value of value
| Blinded_value of hash
| Node of (step * tree) list
| Blinded_node of hash
| Inode of inode_tree inode
| Extender of inode_tree inode_extender
(** The type for inode trees. It is a subset of [tree], limited to nodes.
[Blinded_inode h] proves that an inode with hash [h] exists in the store.
[Inode_values ls] is similar to trees' [Node].
[Inode_tree i] is similar to tree's [Inode].
[Inode_extender e] is similar to trees' [Extender]. *)
and inode_tree =
| Blinded_inode of hash
| Inode_values of (step * tree) list
| Inode_tree of inode_tree inode
| Inode_extender of inode_tree inode_extender
(** The type for kinded hashes. *)
type kinded_hash = [`Value of hash | `Node of hash]
module Stream : sig
(** Stream proofs represent an explicit traversal of a Merkle tree proof.
Every element (a node, a value, or a shallow pointer) met is first
"compressed" by shallowing its children and then recorded in the proof.
As stream proofs directly encode the recursive construction of the
Merkle root hash, they are slightly simpler to implement: the verifier
simply needs to hash the compressed elements lazily, without any memory
or choice.
Moreover, the minimality of stream proofs is trivial to check.
Once the computation has consumed the compressed elements required,
it is sufficient to check that no more compressed elements remain
in the proof.
However, as the compressed elements contain all the hashes of their
shallow children, the size of stream proofs is larger
(at least double in size in practice) than tree proofs, which only
contain the hashes of intermediate shallow pointers. *)
(** The type for elements of stream proofs.
[Value v] is a proof that the next element read in the store is the
value [v].
[Node n] is a proof that the next element read in the store is the
node [n].
[Inode i] is a proof that the next element read in the store is the
inode [i].
[Inode_extender e] is a proof that the next element read in the store
is the node extender [e]. *)
type elt =
| Value of value
| Node of (step * kinded_hash) list
| Inode of hash inode
| Inode_extender of hash inode_extender
(** The type for stream proofs.
The sequence [e_1 ... e_n] proves that the [e_1], ..., [e_n] are
read in the store in sequence. *)
type t = elt Seq.t
end
type stream = Stream.t
(** The type for proofs of kind ['a].
A proof [p] proves that the state advanced from [before p] to
[after p]. [state p]'s hash is [before p], and [state p] contains
the minimal information for the computation to reach [after p].
[version p] is the proof version; it packs several pieces of information.
[is_stream] discriminates between the stream proofs and the tree proofs.
[is_binary] discriminates between proofs emitted from
[Tezos_context(_memory).Context_binary] and
[Tezos_context(_memory).Context].
It will also help discriminate between the data encoding techniques used.
The version is meant to be decoded and encoded using the
{!Tezos_context_helpers.Context.decode_proof_version} and
{!Tezos_context_helpers.Context.encode_proof_version}. *)
type 'a t = {
version : int;
before : kinded_hash;
after : kinded_hash;
state : 'a;
}
end
module type T = sig
(** The type for root contexts. *)
type root
include VIEW
module Tree :
TREE
with type t := t
and type key := key
and type value := value
and type tree := tree
module Proof : PROOF
(** [verify p f] runs [f] in checking mode. [f] is a function that takes a
tree as input and returns a new version of the tree and a result. [p] is a
proof, that is a minimal representation of the tree that contains what [f]
should be expecting.
Therefore, contrary to trees found in a storage, the contents of the trees
passed to [f] may not be available. For this reason, looking up a value at
some [path] can now produce three distinct outcomes:
- A value [v] is present in the proof [p] and returned : [find tree path]
is a promise returning [Some v];
- [path] is known to have no value in [tree] : [find tree path] is a
promise returning [None]; and
- [path] is known to have a value in [tree] but [p] does not provide it
because [f] should not need it: [verify] returns an error classifying
[path] as an invalid path (see below).
The same semantics apply to all operations on the tree [t] passed to [f]
and on all operations on the trees built from [f].
The generated tree is the tree after [f] has completed. That tree is
disconnected from any storage (i.e. [index]). It is possible to run
operations on it as long as they don't require loading shallowed subtrees.
The result is [Error (`Msg _)] if the proof is rejected:
- For tree proofs: when [p.before] is different from the hash of
[p.state];
- For tree and stream proofs: when [p.after] is different from the hash
of [f p.state];
- For tree proofs: when [f p.state] tries to access invalid paths in
[p.state];
- For stream proofs: when the proof is not consumed in the exact same
order it was produced;
- For stream proofs: when the proof is too short or not empty once [f] is
done.
@raise Failure if the proof version is invalid or incompatible with the
verifier. *)
type ('proof, 'result) verifier :=
'proof ->
(tree -> (tree * 'result) Lwt.t) ->
( tree * 'result,
[ `Proof_mismatch of string
| `Stream_too_long of string
| `Stream_too_short of string ] )
result
Lwt.t
(** The type for tree proofs.
Guarantee that the given computation performs exactly the same state
operations as the generating computation, *in some order*. *)
type tree_proof := Proof.tree Proof.t
(** [verify_tree_proof] is the verifier of tree proofs. *)
val verify_tree_proof : (tree_proof, 'a) verifier
(** The type for stream proofs.
Guarantee that the given computation performs exactly the same state
operations as the generating computation, in the exact same order. *)
type stream_proof := Proof.stream Proof.t
(** [verify_stream] is the verifier of stream proofs. *)
val verify_stream_proof : (stream_proof, 'a) verifier
(** The equality function for context configurations. If two context have the
same configuration, they will generate the same context hashes. *)
val equal_config : config -> config -> bool
(** Internally used in {!Storage_functors} to escape from a view. *)
val project : t -> root
(** Internally used in {!Storage_functors} to retrieve a full key
from a partial key relative to a view. *)
val absolute_key : t -> key -> key
(** Raised if block gas quota is exhausted during gas
consumption. *)
type error += Block_quota_exceeded
(** Raised if operation gas quota is exhausted during gas
consumption. *)
type error += Operation_quota_exceeded
(** Internally used in {!Storage_functors} to consume gas from
within a view. May raise {!Block_quota_exceeded} or
{!Operation_quota_exceeded}. *)
val consume_gas : t -> Gas_limit_repr.cost -> t tzresult
(** Check if consume_gas will fail *)
val check_enough_gas : t -> Gas_limit_repr.cost -> unit tzresult
val description : t Storage_description.t
(** The type for local context accesses, as opposed to accesses from the root.
In order for the carbonated storage functions to consume gas, this carries
gas information. *)
type local_context
(**
[with_local_context ctxt key f] runs the function [f] over the local
context at path [key] of the global [ctxt]. Using the local context, [f]
can perform faster context accesses under [key].
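A hedged usage sketch (the ["counter"] key and the body of the callback are
illustrative only):
{[
with_local_context ctxt ["counter"] (fun local ->
    (* read and write under ["counter"] via [Local_context] here *)
    Lwt.return (Ok (local, ())))
]}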
*)
val with_local_context :
t ->
key ->
(local_context -> (local_context * 'a) tzresult Lwt.t) ->
(t * 'a) tzresult Lwt.t
(** [Local_context] provides functions for local access from a specific
directory. *)
module Local_context : sig
include
VIEW
with type t = local_context
and type tree := tree
and type key := key
and type value := value
(** Internally used in {!Storage_functors} to consume gas from
within a view. May raise {!Block_quota_exceeded} or
{!Operation_quota_exceeded}. *)
val consume_gas :
local_context -> Gas_limit_repr.cost -> local_context tzresult
(** Internally used in {!Storage_functors} to retrieve the full key of a
partial key relative to the [local_context]. *)
val absolute_key : local_context -> key -> key
end
end
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* Copyright (c) 2018-2021 Tarides <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
hello.ml | (** {0 Bogue tutorial — Hello world.} *)
(**
In this tutorial we will learn how to open a graphical window displaying a
short text, like "Hello world". We then take advantage of this to familiarise
with basic Bogue concepts.
{1 Hello world}
Let's start right ahead with the "minimal code" mentioned in Bogue's
{{:http://sanette.github.io/bogue/Principles.html}documentation}:
*)
(* +CODE:begin *)
open Bogue
let () =
Widget.label "Hello world"
|> Layout.resident
|> Bogue.of_layout
|> Bogue.run
(* +CODE:end *)
(** We can copy this code in an OCaml toplevel and execute it; see
{{!page-index}here} for general instructions.
A small window should pop up like this:
+IMAGE:"hello.png"
So, how does this work? Let's go through this again line by line.
First, instead of using the convenient [|>] operator, let's give names to
the various steps; we have the following equivalent code:
*)
(* +CODE:begin *)
let () =
let widget = Widget.label "Hello world" in
let layout = Layout.resident widget in
let board = Bogue.of_layout layout in
Bogue.run board
(* +CODE:end *)
(**
Bogue uses the "housing" metaphor: a GUI is a big house with inhabitants living
in various rooms (and potentially communicating with each other).
The inhabitants are the "widgets". The rooms are the "layouts". There are several kinds of widgets; here we create only one widget, of [label] type:
{[
let widget = Widget.label "Hello world" in
]}
and we install it in a layout, as a single resident:
{[
let layout = Layout.resident widget in
]}
Finally, this layout is the only "room" in our house, so we use it to create our "board" (which is our complete GUI):
{[
let board = Bogue.of_layout layout in
]}
This board can be seen as our application; we run it using:
{[
Bogue.run board
]}
Simple, isn't it?
{1 More space}
Well, of course there is more to it. For instance, you may find that the text
label is a bit tight and needs more space around it. (In other words, the
resident needs a larger room ;) )
So let's have a look at the documentation for the function
{{:https://sanette.github.io/bogue/Bogue.Layout.html#VALresident}Layout.resident}:
{[
val resident :
?name:string -> ?x:int -> ?y:int -> ?w:int -> ?h:int ->
?background:background ->
?draggable:bool ->
?canvas:Draw.canvas ->
?keyboard_focus:bool -> Widget.t -> t
]}
We spot the optional parameters [?w] and [?h] which should set the desired
width and height of our layout. Let's try:
*)
(* +CODE:begin *)
let () =
Widget.label "Hello world"
|> Layout.resident ~w:300 ~h:150
|> Bogue.of_layout
|> Bogue.run
(* +CODE:end *)
(**
+IMAGE:"hello-wide.png"
{1 Several widgets in a layout}
Great, but the text feels alone... Suppose we want to display an image below
our label.
Can we fit several residents in a room? Well, not really. Strictly speaking,
a room can contain only one resident (widget). But, the trick is that a
layout can in fact contain several rooms. Thus, an element of type [Layout.t]
can either be:
- a true "{b room}" (containing a single resident), or
- a "{b house}" containing several rooms.
+SIDE:begin
{b Side-note:} we have here the usual construction for a {e tree} data
structure: each node is either terminal (and called a leaf, which for us are
widgets), or a vertex (for us, a layout), pointing to a list of sub-nodes.
To summarize, in Bogue, the complete GUI is simply a tree of layouts, and
the leaves contain a widget.
The trunk of the tree (our main house, if you wish), will correspond to
the layout associated with the window of the GUI. In Bogue we often call this
special layout the "top layout", or "top house". (Yes, this may sound weird:
our tree grows top-down...)
+SIDE:end
So, we want to display an image below the label. Our label is a widget:
{[
let hello = Widget.label "Hello world"
]}
An image is also a widget:
{[
let image = Widget.image "bogue-icon.png"
]}
Now, to put one on top of the other, we use the function [Layout.tower_of_w]
(short for "tower of widget") which constructs a "tower":
*)
(* +CODE:begin *)
let () =
let hello = Widget.label "Hello world" in
let image = Widget.image "bogue-icon.png" in
let layout = Layout.tower_of_w [hello; image] in
let board = Bogue.of_layout layout in
Bogue.run board
(* +CODE:end *)
(**
This opens a window like this:
+IMAGE:"hello-image.png"
What exactly does the function [Layout.tower_of_w] do? It takes a list of
widgets, and for each one, installs it in a room, as a resident. Then it
constructs a new layout by piling up the rooms vertically.
The doc for
{{:http://sanette.github.io/bogue/Bogue.Layout.html#VALtower_of_w}[tower_of_w]}
shows interesting options. For instance, to center everything horizontally,
use [~align:Draw.Center]:
+IMAGE:"hello-image-center.png"
{1 Exercise: vertical text}
What about applying what we've just learned to write "Hello world" {e vertically}?
{b Solution:} Let's use [Layout.tower_of_w] to build a "tower" of letters.
*)
(* +CODE:begin *)
let vertical text =
Array.init (String.length text) (String.get text)
|> Array.to_list
|> List.map (String.make 1)
|> List.map Widget.label
|> Layout.tower_of_w
let () =
vertical "Hello world"
|> Bogue.of_layout
|> Bogue.run
(* +CODE:end *)
(**
+IMAGE:"hello-vertical.png"
Et voila !
We now know enough Bogue to play with layouts full of text and image. But, of
course, a crucial part of a GUI is missing: user interaction. This will be
the goal of the "counter" tutorial.
*)
| (** {0 Bogue tutorial — Hello world.} *)
|
SuiteSparseQRSupport.h |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2012 Desire Nuentsa <[email protected]>
// Copyright (C) 2014 Gael Guennebaud <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_SUITESPARSEQRSUPPORT_H
#define EIGEN_SUITESPARSEQRSUPPORT_H
namespace Eigen {
template<typename MatrixType> class SPQR;
template<typename SPQRType> struct SPQRMatrixQReturnType;
template<typename SPQRType> struct SPQRMatrixQTransposeReturnType;
template <typename SPQRType, typename Derived> struct SPQR_QProduct;
namespace internal {
template <typename SPQRType> struct traits<SPQRMatrixQReturnType<SPQRType> >
{
typedef typename SPQRType::MatrixType ReturnType;
};
template <typename SPQRType> struct traits<SPQRMatrixQTransposeReturnType<SPQRType> >
{
typedef typename SPQRType::MatrixType ReturnType;
};
template <typename SPQRType, typename Derived> struct traits<SPQR_QProduct<SPQRType, Derived> >
{
typedef typename Derived::PlainObject ReturnType;
};
} // End namespace internal
/**
* \ingroup SPQRSupport_Module
* \class SPQR
* \brief Sparse QR factorization based on SuiteSparseQR library
*
* This class is used to perform a multithreaded and multifrontal rank-revealing QR decomposition
* of sparse matrices. The result is then used to solve linear least-squares systems.
* Clearly, a QR factorization is returned such that A*P = Q*R where:
*
* P is the column permutation. Use colsPermutation() to get it.
*
* Q is the orthogonal matrix represented as Householder reflectors.
* Use matrixQ() to get an expression and matrixQ().transpose() to get the transpose.
* You can then apply it to a vector.
*
* R is the sparse triangular factor. Use matrixR() to get it as SparseMatrix.
* NOTE: The Index type of R is always SuiteSparse_long. You can get it with SPQR::Index
*
* \tparam _MatrixType The type of the sparse matrix A, must be a column-major SparseMatrix<>
*
* \implsparsesolverconcept
*
*
*/
template<typename _MatrixType>
class SPQR : public SparseSolverBase<SPQR<_MatrixType> >
{
protected:
typedef SparseSolverBase<SPQR<_MatrixType> > Base;
using Base::m_isInitialized;
public:
typedef typename _MatrixType::Scalar Scalar;
typedef typename _MatrixType::RealScalar RealScalar;
typedef SuiteSparse_long StorageIndex ;
typedef SparseMatrix<Scalar, ColMajor, StorageIndex> MatrixType;
typedef Map<PermutationMatrix<Dynamic, Dynamic, StorageIndex> > PermutationType;
enum {
ColsAtCompileTime = Dynamic,
MaxColsAtCompileTime = Dynamic
};
public:
SPQR()
: m_ordering(SPQR_ORDERING_DEFAULT), m_allow_tol(SPQR_DEFAULT_TOL), m_tolerance (NumTraits<Scalar>::epsilon()), m_useDefaultThreshold(true)
{
cholmod_l_start(&m_cc);
}
explicit SPQR(const _MatrixType& matrix)
: m_ordering(SPQR_ORDERING_DEFAULT), m_allow_tol(SPQR_DEFAULT_TOL), m_tolerance (NumTraits<Scalar>::epsilon()), m_useDefaultThreshold(true)
{
cholmod_l_start(&m_cc);
compute(matrix);
}
~SPQR()
{
SPQR_free();
cholmod_l_finish(&m_cc);
}
void SPQR_free()
{
cholmod_l_free_sparse(&m_H, &m_cc);
cholmod_l_free_sparse(&m_cR, &m_cc);
cholmod_l_free_dense(&m_HTau, &m_cc);
std::free(m_E);
std::free(m_HPinv);
}
void compute(const _MatrixType& matrix)
{
if(m_isInitialized) SPQR_free();
MatrixType mat(matrix);
/* Compute the default threshold as in MatLab, see:
* Tim Davis, "Algorithm 915, SuiteSparseQR: Multifrontal Multithreaded Rank-Revealing
* Sparse QR Factorization, ACM Trans. on Math. Soft. 38(1), 2011, Page 8:3
*/
RealScalar pivotThreshold = m_tolerance;
if(m_useDefaultThreshold)
{
RealScalar max2Norm = 0.0;
for (int j = 0; j < mat.cols(); j++) max2Norm = numext::maxi(max2Norm, mat.col(j).norm());
if(max2Norm==RealScalar(0))
max2Norm = RealScalar(1);
pivotThreshold = 20 * (mat.rows() + mat.cols()) * max2Norm * NumTraits<RealScalar>::epsilon();
}
cholmod_sparse A;
A = viewAsCholmod(mat);
m_rows = matrix.rows();
Index col = matrix.cols();
m_rank = SuiteSparseQR<Scalar>(m_ordering, pivotThreshold, col, &A,
&m_cR, &m_E, &m_H, &m_HPinv, &m_HTau, &m_cc);
if (!m_cR)
{
m_info = NumericalIssue;
m_isInitialized = false;
return;
}
m_info = Success;
m_isInitialized = true;
m_isRUpToDate = false;
}
/**
* Get the number of rows of the input matrix and the Q matrix
*/
inline Index rows() const {return m_rows; }
/**
* Get the number of columns of the input matrix.
*/
inline Index cols() const { return m_cR->ncol; }
template<typename Rhs, typename Dest>
void _solve_impl(const MatrixBase<Rhs> &b, MatrixBase<Dest> &dest) const
{
eigen_assert(m_isInitialized && " The QR factorization should be computed first, call compute()");
eigen_assert(b.cols()==1 && "This method is for vectors only");
//Compute Q^T * b
typename Dest::PlainObject y, y2;
y = matrixQ().transpose() * b;
// Solves with the triangular matrix R
Index rk = this->rank();
y2 = y;
y.resize((std::max)(cols(),Index(y.rows())),y.cols());
y.topRows(rk) = this->matrixR().topLeftCorner(rk, rk).template triangularView<Upper>().solve(y2.topRows(rk));
// Apply the column permutation
// colsPermutation() performs a copy of the permutation,
// so let's apply it manually:
for(Index i = 0; i < rk; ++i) dest.row(m_E[i]) = y.row(i);
for(Index i = rk; i < cols(); ++i) dest.row(m_E[i]).setZero();
// y.bottomRows(y.rows()-rk).setZero();
// dest = colsPermutation() * y.topRows(cols());
m_info = Success;
}
/** \returns the sparse triangular factor R. It is a sparse matrix
*/
const MatrixType matrixR() const
{
eigen_assert(m_isInitialized && " The QR factorization should be computed first, call compute()");
if(!m_isRUpToDate) {
m_R = viewAsEigen<Scalar,ColMajor, typename MatrixType::StorageIndex>(*m_cR);
m_isRUpToDate = true;
}
return m_R;
}
/// Get an expression of the matrix Q
SPQRMatrixQReturnType<SPQR> matrixQ() const
{
return SPQRMatrixQReturnType<SPQR>(*this);
}
/// Get the permutation that was applied to columns of A
PermutationType colsPermutation() const
{
eigen_assert(m_isInitialized && "Decomposition is not initialized.");
return PermutationType(m_E, m_cR->ncol);
}
/**
* Gets the rank of the matrix.
* It should be equal to matrixR().cols if the matrix is full-rank
*/
Index rank() const
{
eigen_assert(m_isInitialized && "Decomposition is not initialized.");
return m_cc.SPQR_istat[4];
}
/// Set the fill-reducing ordering method to be used
void setSPQROrdering(int ord) { m_ordering = ord;}
/// Set the tolerance tol to treat columns with 2-norm <= tol as zero
void setPivotThreshold(const RealScalar& tol)
{
m_useDefaultThreshold = false;
m_tolerance = tol;
}
/** \returns a pointer to the SPQR workspace */
cholmod_common *cholmodCommon() const { return &m_cc; }
/** \brief Reports whether previous computation was successful.
*
* \returns \c Success if computation was successful,
* \c NumericalIssue if the sparse QR can not be computed
*/
ComputationInfo info() const
{
eigen_assert(m_isInitialized && "Decomposition is not initialized.");
return m_info;
}
protected:
bool m_analysisIsOk;
bool m_factorizationIsOk;
mutable bool m_isRUpToDate;
mutable ComputationInfo m_info;
int m_ordering; // Ordering method to use, see SPQR's manual
int m_allow_tol; // Allow to use some tolerance during numerical factorization.
RealScalar m_tolerance; // treat columns with 2-norm below this tolerance as zero
mutable cholmod_sparse *m_cR; // The sparse R factor in cholmod format
mutable MatrixType m_R; // The sparse matrix R in Eigen format
mutable StorageIndex *m_E; // The permutation applied to columns
mutable cholmod_sparse *m_H; //The householder vectors
mutable StorageIndex *m_HPinv; // The row permutation of H
mutable cholmod_dense *m_HTau; // The Householder coefficients
mutable Index m_rank; // The rank of the matrix
mutable cholmod_common m_cc; // Workspace and parameters
bool m_useDefaultThreshold; // Use default threshold
Index m_rows;
template<typename ,typename > friend struct SPQR_QProduct;
};
template <typename SPQRType, typename Derived>
struct SPQR_QProduct : ReturnByValue<SPQR_QProduct<SPQRType,Derived> >
{
typedef typename SPQRType::Scalar Scalar;
typedef typename SPQRType::StorageIndex StorageIndex;
//Define the constructor to get reference to argument types
SPQR_QProduct(const SPQRType& spqr, const Derived& other, bool transpose) : m_spqr(spqr),m_other(other),m_transpose(transpose) {}
inline Index rows() const { return m_transpose ? m_spqr.rows() : m_spqr.cols(); }
inline Index cols() const { return m_other.cols(); }
// Assign to a vector
template<typename ResType>
void evalTo(ResType& res) const
{
cholmod_dense y_cd;
cholmod_dense *x_cd;
int method = m_transpose ? SPQR_QTX : SPQR_QX;
cholmod_common *cc = m_spqr.cholmodCommon();
y_cd = viewAsCholmod(m_other.const_cast_derived());
x_cd = SuiteSparseQR_qmult<Scalar>(method, m_spqr.m_H, m_spqr.m_HTau, m_spqr.m_HPinv, &y_cd, cc);
res = Matrix<Scalar,ResType::RowsAtCompileTime,ResType::ColsAtCompileTime>::Map(reinterpret_cast<Scalar*>(x_cd->x), x_cd->nrow, x_cd->ncol);
cholmod_l_free_dense(&x_cd, cc);
}
const SPQRType& m_spqr;
const Derived& m_other;
bool m_transpose;
};
template<typename SPQRType>
struct SPQRMatrixQReturnType{
SPQRMatrixQReturnType(const SPQRType& spqr) : m_spqr(spqr) {}
template<typename Derived>
SPQR_QProduct<SPQRType, Derived> operator*(const MatrixBase<Derived>& other)
{
return SPQR_QProduct<SPQRType,Derived>(m_spqr,other.derived(),false);
}
SPQRMatrixQTransposeReturnType<SPQRType> adjoint() const
{
return SPQRMatrixQTransposeReturnType<SPQRType>(m_spqr);
}
// To use for operations with the transpose of Q
SPQRMatrixQTransposeReturnType<SPQRType> transpose() const
{
return SPQRMatrixQTransposeReturnType<SPQRType>(m_spqr);
}
const SPQRType& m_spqr;
};
template<typename SPQRType>
struct SPQRMatrixQTransposeReturnType{
SPQRMatrixQTransposeReturnType(const SPQRType& spqr) : m_spqr(spqr) {}
template<typename Derived>
SPQR_QProduct<SPQRType,Derived> operator*(const MatrixBase<Derived>& other)
{
return SPQR_QProduct<SPQRType,Derived>(m_spqr,other.derived(), true);
}
const SPQRType& m_spqr;
};
}// End namespace Eigen
#endif
| |
compose.c |
#include "fq_nmod_poly.h"
#ifdef T
#undef T
#endif
#define T fq_nmod
#define CAP_T FQ_NMOD
#include "fq_poly_templates/compose.c"
#undef CAP_T
#undef T
| /*
Copyright (C) 2013 Mike Hansen
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
dune |
(cram
(deps
(glob_files bin/*.exe)))
| |
test_file_path_unix.mli | (*_ Make sure everything gets tested. *)
include File_path_unix.S
| (*_ Make sure everything gets tested. *)
include File_path_unix.S |
dune |
; OCaml implementation of the 'semgrep ci' subcommand.
;
(library
(name osemgrep_cli_ci)
(wrapped false)
(libraries
cmdliner
semgrep.utils
osemgrep_core
osemgrep_networking
osemgrep_cli_scan ; reusing the same flags and most of the code
)
(preprocess
(pps
ppx_profiling
ppx_deriving.show
ppx_deriving.eq
ppx_hash
)
)
)
| |
client_request.mli |
open! Import
open Types
open Extension
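(** Requests sent from the client to the server. The type parameter is the
    type of the expected response for each request. *)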
type _ t =
| Shutdown : unit t
| Initialize : InitializeParams.t -> InitializeResult.t t
| TextDocumentHover : HoverParams.t -> Hover.t option t
| TextDocumentDefinition : DefinitionParams.t -> Locations.t option t
| TextDocumentDeclaration :
TextDocumentPositionParams.t
-> Locations.t option t
| TextDocumentTypeDefinition : TypeDefinitionParams.t -> Locations.t option t
| TextDocumentImplementation : ImplementationParams.t -> Locations.t option t
| TextDocumentCompletion :
CompletionParams.t
-> [ `CompletionList of CompletionList.t
| `List of CompletionItem.t list
]
option
t
| TextDocumentCodeLens : CodeLensParams.t -> CodeLens.t list t
| TextDocumentCodeLensResolve : CodeLens.t -> CodeLens.t t
| TextDocumentPrepareCallHierarchy :
CallHierarchyPrepareParams.t
-> CallHierarchyItem.t list option t
| TextDocumentPrepareRename : PrepareRenameParams.t -> Range.t option t
| TextDocumentRangeFormatting :
DocumentRangeFormattingParams.t
-> TextEdit.t list option t
| TextDocumentRename : RenameParams.t -> WorkspaceEdit.t t
| TextDocumentLink : DocumentLinkParams.t -> DocumentLink.t list option t
| TextDocumentLinkResolve : DocumentLink.t -> DocumentLink.t t
| TextDocumentMoniker : MonikerParams.t -> Moniker.t list option t
| DocumentSymbol :
DocumentSymbolParams.t
-> [ `DocumentSymbol of DocumentSymbol.t list
| `SymbolInformation of SymbolInformation.t list
]
option
t
| WorkspaceSymbol :
WorkspaceSymbolParams.t
-> SymbolInformation.t list option t
| DebugEcho : DebugEcho.Params.t -> DebugEcho.Result.t t
| DebugTextDocumentGet :
DebugTextDocumentGet.Params.t
-> DebugTextDocumentGet.Result.t t
| TextDocumentReferences : ReferenceParams.t -> Location.t list option t
| TextDocumentHighlight :
DocumentHighlightParams.t
-> DocumentHighlight.t list option t
| TextDocumentFoldingRange :
FoldingRangeParams.t
-> FoldingRange.t list option t
| SignatureHelp : SignatureHelpParams.t -> SignatureHelp.t t
| CodeAction : CodeActionParams.t -> CodeActionResult.t t
| CodeActionResolve : CodeAction.t -> CodeAction.t t
| CompletionItemResolve : CompletionItem.t -> CompletionItem.t t
| WillSaveWaitUntilTextDocument :
WillSaveTextDocumentParams.t
-> TextEdit.t list option t
| TextDocumentFormatting :
DocumentFormattingParams.t
-> TextEdit.t list option t
| TextDocumentOnTypeFormatting :
DocumentOnTypeFormattingParams.t
-> TextEdit.t list option t
| TextDocumentColorPresentation :
ColorPresentationParams.t
-> ColorPresentation.t list t
| TextDocumentColor : DocumentColorParams.t -> ColorInformation.t list t
| SelectionRange : SelectionRangeParams.t -> SelectionRange.t list t
| ExecuteCommand : ExecuteCommandParams.t -> Json.t t
| SemanticTokensFull : SemanticTokensParams.t -> SemanticTokens.t option t
| SemanticTokensDelta :
SemanticTokensDeltaParams.t
-> [ `SemanticTokens of SemanticTokens.t
| `SemanticTokensDelta of SemanticTokensDelta.t
]
option
t
| SemanticTokensRange :
SemanticTokensRangeParams.t
-> SemanticTokens.t option t
| LinkedEditingRange :
LinkedEditingRangeParams.t
-> LinkedEditingRanges.t option t
| CallHierarchyIncomingCalls :
CallHierarchyIncomingCallsParams.t
-> CallHierarchyIncomingCall.t list option t
| CallHierarchyOutgoingCalls :
CallHierarchyOutgoingCallsParams.t
-> CallHierarchyOutgoingCall.t list option t
| WillCreateFiles : CreateFilesParams.t -> WorkspaceEdit.t option t
| WillDeleteFiles : DeleteFilesParams.t -> WorkspaceEdit.t option t
| WillRenameFiles : RenameFilesParams.t -> WorkspaceEdit.t option t
| UnknownRequest :
{ meth : string
; params : Jsonrpc.Structured.t option
}
-> Json.t t
val yojson_of_result : 'a t -> 'a -> Json.t
type packed = E : 'r t -> packed
val of_jsonrpc : Jsonrpc.Request.t -> (packed, string) Result.t
val to_jsonrpc_request : _ t -> id:Jsonrpc.Id.t -> Jsonrpc.Request.t
val response_of_json : 'a t -> Json.t -> 'a
val text_document :
_ t
-> ( meth:string
-> params:Jsonrpc.Structured.t option
-> TextDocumentIdentifier.t option)
-> TextDocumentIdentifier.t option
| |
dune |
(executable
(name main)
(public_name box-bin)
(libraries box)
(flags
(:standard -open Box)))
(include_subdirs unqualified)
| |
fuzzy_match.ml |
open! Core
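(* [is_match ~char_equal ~pattern text] returns [true] iff the characters of
   [pattern] occur in [text] in order, i.e. [pattern] is a (not necessarily
   contiguous) subsequence of [text] under [char_equal]. *)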
let is_match ~char_equal ~pattern text =
let pattern_length = String.length pattern in
let text_length = String.length text in
let rec helper pattern_index text_index =
if pattern_index = pattern_length
then true
else if text_index = text_length
then false
else (
let pattern_char = String.unsafe_get pattern pattern_index in
let text_char = String.unsafe_get text text_index in
if char_equal pattern_char text_char
then helper (pattern_index + 1) (text_index + 1)
else helper pattern_index (text_index + 1))
in
helper 0 0
;;
| |
main_blocking.ml |
open Caqti_common_priv
open Testlib
open Testlib_blocking
module Test_error_cause = Test_error_cause.Make (Testlib_blocking)
module Test_param = Test_param.Make (Testlib_blocking)
module Test_sql = Test_sql.Make (Testlib_blocking)
module Test_failure = Test_failure.Make (Testlib_blocking)
let mk_test (name, pool) =
let pass_conn pool (name, speed, f) =
let f' () =
Caqti_blocking.Pool.use (fun c -> Ok (f c)) pool |> function
| Ok () -> ()
| Error err -> Alcotest.failf "%a" Caqti_error.pp err
in
(name, speed, f')
in
let pass_pool pool (name, speed, f) = (name, speed, (fun () -> f pool)) in
let test_cases =
List.map (pass_conn pool) Test_sql.connection_test_cases @
List.map (pass_conn pool) Test_error_cause.test_cases @
List.map (pass_conn pool) Test_param.test_cases @
List.map (pass_conn pool) Test_failure.test_cases @
List.map (pass_pool pool) Test_sql.pool_test_cases
in
(name, test_cases)
let post_connect conn =
List_result_future.iter_s (fun f -> f conn) [
Test_sql.post_connect;
]
let env =
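  (* [f & g] tries the lookup [f] first and falls back to [g] when [f] raises
     [Not_found]. *)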
let (&) f g di var = try f di var with Not_found -> g di var in
Test_sql.env & Test_error_cause.env
let mk_tests {uris; tweaks_version} =
let connect_pool uri =
(match Caqti_blocking.connect_pool uri
~max_size:1 ~post_connect ?tweaks_version ~env with
| Ok pool -> (test_name_of_uri uri, pool)
| Error err -> raise (Caqti_error.Exn err))
in
let pools = List.map connect_pool uris in
List.map mk_test pools
let () =
Alcotest_cli.run_with_args_dependency "test_sql_blocking"
Testlib.common_args mk_tests
| (* Copyright (C) 2018--2021 Petter A. Urkedal <[email protected]>
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the LGPL-3.0 Linking Exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library. If not, see
* <http://www.gnu.org/licenses/> and <https://spdx.org>, respectively.
*) |
zeluc.ml | (* the main *)
open Zmisc
open Initial
open Compiler
let compile file =
Modules.clear();
if !no_stdlib then set_no_stdlib();
if Filename.check_suffix file ".zls" || Filename.check_suffix file ".zlus"
then
let filename = Filename.chop_extension file in
let modname = String.capitalize_ascii (Filename.basename filename) in
compile modname filename
else if Filename.check_suffix file ".zli"
then
let filename = Filename.chop_suffix file ".zli" in
let modname = String.capitalize_ascii (Filename.basename filename) in
interface modname filename
else if Filename.check_suffix file ".mli"
then
let filename = Filename.chop_suffix file ".mli" in
let modname = String.capitalize_ascii (Filename.basename filename) in
scalar_interface modname filename
else
raise (Arg.Bad ("don't know what to do with " ^ file))
module SS = Zdepend.StringSet
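(* Recursively compile [file] and its dependencies, in dependency order,
   compiling each module at most once. *)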
let build file =
Deps_tools.add_to_load_path Filename.current_dir_name;
let rec _build acc file =
let deps =
match (Filename.extension file) with
| ".zls" -> Deps_tools.zls_dependencies file
| ".zli" -> Deps_tools.zli_dependencies file
| _ -> raise (Arg.Bad ("don't know what to do with " ^ file))
in
let acc = List.fold_left _build acc deps in
let basename = Filename.chop_extension file in
if not (SS.mem basename acc) then begin
compile file;
SS.add basename acc
end else
acc
in
ignore (_build (SS.empty) file)
let doc_verbose = "\t Set verbose mode"
let doc_vverbose = "\t Set even more verbose mode"
and doc_version = "\t The version of the compiler"
and doc_outname = "<name> \t Simulation file name <name>"
and doc_print_types = "\t Print types"
and doc_print_causality_types = "\t Print causality types"
and doc_print_initialization_types = "\t Print initialization types"
and doc_include = "<dir> \t Add <dir> to the list of include directories"
and doc_stdlib = "<dir> \t Directory for the standard library"
and doc_locate_stdlib = "\t Locate standard library"
and doc_no_stdlib = "\t Do not load the stdlib module"
and doc_no_zlstdlib = "\t Do not load the zlstdlib module"
and doc_typeonly = "\t Stop after typing"
and doc_hybrid = "\t Select hybrid translation"
and doc_simulation =
"<node> \t Simulates the node <node> and generates a file <out>.ml\n\
\t\t where <out> is equal to the argument of -o if the flag\n\
\t\t has been set, or <node> otherwise"
and doc_sampling = "<p> \t Sets the sampling period to p (float <= 1.0)"
and doc_check = "<n> \t Check that the simulated node returns true for n steps"
and doc_use_gtk =
"\t Use lablgtk2 interface."
and doc_inlining_level = "<n> \t Level of inlining"
and doc_inline_all = "\t Inline all function calls"
and doc_dzero = "\t Turn on discrete zero-crossing detection"
and doc_nocausality = "\t (undocumented)"
and doc_no_opt = "\t (undocumented)"
and doc_no_deadcode = "\t (undocumented)"
and doc_noinitialisation = "\t (undocumented)"
and doc_nosimplify = "\t (undocumented)"
and doc_noreduce = "\t (undocumented)"
and doc_lmm = "<n>\t Translate the node into Lustre--"
and doc_red_name = "\t Static reduction for"
and doc_zsign = "\t Use the sign function for the zero-crossing argument"
and doc_with_copy = "\t Add of a copy method for the state"
and doc_rif = "\t Use RIF format over stdin and stdout to communicate I/O to the node being simulated"
and doc_deps = "\t Recursively compile dependencies"
let errmsg = "Options are:"
let set_verbose () =
verbose := true;
Printexc.record_backtrace true
let set_vverbose () =
vverbose := true;
set_verbose ()
let add_include d =
Deps_tools.add_to_load_path d;
load_path := d :: !load_path
let set_gtk () =
use_gtk := true;
match !load_path with
| [stdlib] -> add_include (stdlib ^ "-gtk")
| _ -> ()
let main () =
try
Arg.parse
(Arg.align [
"-v", Arg.Unit set_verbose, doc_verbose;
"-vv", Arg.Unit set_vverbose, doc_vverbose;
"-version", Arg.Unit show_version, doc_version;
"-o", Arg.String set_outname, doc_outname;
"-I", Arg.String add_include, doc_include;
"-i", Arg.Set print_types, doc_print_types;
"-ic", Arg.Set print_causality_types, doc_print_causality_types;
"-ii", Arg.Set print_initialization_types, doc_print_initialization_types;
"-where", Arg.Unit locate_stdlib, doc_locate_stdlib;
"-stdlib", Arg.String set_stdlib, doc_stdlib;
"-nostdlib", Arg.Set no_stdlib, doc_no_stdlib;
"-typeonly", Arg.Set typeonly, doc_typeonly;
"-s", Arg.String set_simulation_node, doc_simulation;
"-sampling", Arg.Float set_sampling_period, doc_sampling;
"-check", Arg.Int set_check, doc_check;
"-gtk2", Arg.Unit set_gtk, doc_use_gtk;
"-dzero", Arg.Set dzero, doc_dzero;
"-nocausality", Arg.Set no_causality, doc_nocausality;
"-nopt", Arg.Set no_opt, doc_no_opt;
"-nodeadcode", Arg.Set no_deadcode, doc_no_deadcode;
"-noinit", Arg.Set no_initialisation, doc_noinitialisation;
"-inline", Arg.Int set_inlining_level, doc_inlining_level;
"-inlineall", Arg.Set inline_all, doc_inline_all;
"-nosimplify", Arg.Set no_simplify_causality_type, doc_nosimplify;
"-noreduce", Arg.Set no_reduce, doc_noreduce;
"-zsign", Arg.Set zsign, doc_zsign;
"-copy", Arg.Set with_copy, doc_with_copy;
"-lmm", Arg.String set_lmm_nodes, doc_lmm;
"-rif", Arg.Set use_rif, doc_rif;
"-deps", Arg.Set build_deps, doc_deps;
])
(fun filename -> if !build_deps then build filename else compile filename)
errmsg;
begin
match !simulation_node with
| Some(name) ->
Simulator.main !outname name !sampling_period !number_of_checks !use_gtk
| _ -> ()
end
with
| Zmisc.Error -> exit 2;;
main ();;
exit 0;;
| (***********************************************************************)
(* *)
(* *)
(* Zelus, a synchronous language for hybrid systems *)
(* *)
(* (c) 2020 Inria Paris (see the AUTHORS file) *)
(* *)
(* Copyright Institut National de Recherche en Informatique et en *)
(* Automatique. All rights reserved. This file is distributed under *)
(* the terms of the INRIA Non-Commercial License Agreement (see the *)
(* LICENSE file). *)
(* *)
(* *********************************************************************)
|
typing.ml | (* type checking *)
(* H |-{k} e : t and H, W |-{k} D *)
(* H : typing environment *)
(* D : set of variables written by D *)
(* k : either any, discrete, continuous *)
(* e : expression with type t *)
(* input: H, e, k - output: t, W *)
open Zident
open Global
open Modules
open Zelus
open Deftypes
open Ztypes
open Typerrors
(* accesses in symbol tables for global identifiers *)
let find_value loc f =
try find_value f
with Not_found -> error loc (Eglobal_undefined(Value, f))
let find_type loc f =
try find_type f
with Not_found -> error loc (Eglobal_undefined(Type, f))
let find_constr loc c =
try find_constr c
with Not_found -> error loc (Eglobal_undefined(Constr, c))
let find_label loc l =
try find_label l
with Not_found -> error loc (Eglobal_undefined(Label, l))
(** The main unification functions *)
let unify loc expected_ty actual_ty =
try
Ztypes.unify expected_ty actual_ty
with
| Ztypes.Unify -> error loc (Etype_clash(actual_ty, expected_ty))
let equal_sizes loc expected_size actual_size =
try
Ztypes.equal_sizes expected_size actual_size
with
| Ztypes.Unify -> error loc (Esize_clash(actual_size, expected_size))
let unify_expr expr expected_ty actual_ty =
try
Ztypes.unify expected_ty actual_ty
with
| Ztypes.Unify -> error expr.e_loc (Etype_clash(actual_ty, expected_ty))
let unify_pat pat expected_ty actual_ty =
try
Ztypes.unify expected_ty actual_ty
with
| Ztypes.Unify -> error pat.p_loc (Etype_clash(actual_ty, expected_ty))
let less_than loc actual_k expected_k =
try
Ztypes.less_than actual_k expected_k
with
| Ztypes.Unify -> error loc (Ekind_clash(actual_k, expected_k))
let type_is_in_kind loc expected_k ty =
try
Ztypes.kind expected_k ty
with
| Ztypes.Unify -> error loc (Etype_kind_clash(expected_k, ty))
let lift loc left_k right_k =
try
Ztypes.lift left_k right_k
with
| Ztypes.Unify -> error loc (Ekind_clash(right_k, left_k))
let sort_less_than loc sort expected_k =
match expected_k, sort with
| Tstatic _, Sstatic -> ()
| Tstatic _, _ -> error loc (Ekind_clash(Deftypes.Tany, expected_k))
| _ -> ()
let check_is_vec loc actual_ty =
try
let ty_arg, size = Ztypes.filter_vec actual_ty in ty_arg, size
with
| Ztypes.Unify -> error loc Esize_of_vec_is_undetermined
(* An expression is expansive if it is an application *)
let rec expansive { e_desc = desc } =
match desc with
| Elocal _ | Eglobal _ | Econst _ | Econstr0 _ -> false
| Etuple(e_list) -> List.exists expansive e_list
| Erecord(l_e_list) -> List.exists (fun (_, e) -> expansive e) l_e_list
| Erecord_access(e, _) | Etypeconstraint(e, _) -> expansive e
| Erecord_with(e, l_e_list) ->
expansive e || List.exists (fun (_, e) -> expansive e) l_e_list
| _ -> true
let check_statefull loc expected_k =
if not (Ztypes.is_statefull_kind expected_k)
then error loc Ekind_not_combinatorial
(** The type of states in automata *)
(** We emit a warning when a state is entered both by reset and history *)
type state = { mutable s_reset: bool option; s_parameters: typ list }
let check_target_state loc expected_reset actual_reset =
match expected_reset with
| None -> Some(actual_reset)
| Some(expected_reset) ->
if expected_reset <> actual_reset then
warning loc (Wreset_target_state(actual_reset, expected_reset));
Some(expected_reset)
(* Every shared variable defined in the initial state of an automaton *)
(* left weakly is considered to be an initialized state variable. *)
let turn_vars_into_memories h { dv = dv } =
let add n acc =
let ({ t_sort = sort; t_typ = typ } as tentry) = Env.find n h in
match sort with
| Smem({ m_init = Noinit } as m) ->
Env.add n { tentry with t_sort = Smem { m with m_init = InitEq } } acc
| Sstatic | Sval | Svar _ | Smem _ -> acc in
let first_h = S.fold add dv Env.empty in
first_h, Env.append first_h h
(** Typing immediate values *)
let immediate = function
| Ebool _ -> Initial.typ_bool
| Eint(i) -> Initial.typ_int
| Efloat(i) -> Initial.typ_float
| Echar(c) -> Initial.typ_char
| Estring(c) -> Initial.typ_string
| Evoid -> Initial.typ_unit
(* once all branches of the automaton have been typed *)
(* incorporate the information computed about variables from *)
(* the initial environment into the global one *)
let incorporate_into_env first_h h =
let mark n { t_sort = sort } =
let tentry = Env.find n h in
match sort with
| Smem({ m_init = InitEq } as m) ->
tentry.t_sort <- Smem { m with m_init = Noinit }
| _ -> () in
Env.iter mark first_h
(** Variables in a pattern *)
let vars pat = Vars.fv_pat S.empty S.empty pat
(** Types for local identifiers *)
let var loc h n =
try Env.find n h
with Not_found -> error loc (Evar_undefined(n))
let typ_of_var loc h n = let { t_typ = typ } = var loc h n in typ
(* Typing [last n] *)
let last loc h n =
let { t_sort = sort; t_typ = typ } as entry = var loc h n in
(* [last n] is allowed only if [n] is a state variable *)
begin match sort with
| Sstatic | Sval | Svar _ | Smem { m_next = Some(true) } ->
error loc (Elast_forbidden(n))
| Smem (m) ->
entry.t_sort <- Smem { m with m_previous = true }
end; typ
(* Typing [der n = ...] *)
let derivative loc h n =
let { t_typ = typ; t_sort = sort } as entry = var loc h n in
(* [der n] is allowed only if [n] is a state variable *)
match sort with
| Sstatic | Sval | Svar _ ->
error loc (Eder_forbidden(n))
| Smem(m) -> entry.t_sort <- Smem { m with m_kind = Some(Cont) }; typ
(* Typing [n += ...] *)
let pluseq loc h n =
(* check that a name [n] is declared with a combination function *)
let ({ t_typ = typ; t_sort = sort } as entry) = var loc h n in
match sort with
| Svar { v_combine = Some _ } -> typ
| Sstatic | Sval | Svar { v_combine = None } | Smem { m_combine = None } ->
error loc (Ecombination_function(n))
| Smem ({ m_next = n_opt } as m) ->
match n_opt with
| None -> entry.t_sort <- Smem { m with m_next = Some(false) }; typ
| Some(false) -> typ
| Some(true) -> error loc (Ealready_with_different_kinds(Next, Multi, n))
(* Typing [init n = ...] *)
let init loc h n =
(* set that [n] is initialized if it is not already at the definition point *)
let { t_typ = typ; t_sort = sort } as entry = var loc h n in
match sort with
| Sstatic | Sval | Svar _ -> assert false
| Smem ({ m_init = i } as m) ->
match i with
| Noinit -> entry.t_sort <- Smem { m with m_init = InitEq }; typ
| InitEq -> typ
| InitDecl _ -> error loc (Ealready(Initial, n))
(* Typing [next n = ...] *)
let next loc h n =
let { t_typ = typ; t_sort = sort } as entry = var loc h n in
match sort with
| Sstatic | Sval | Svar _ -> assert false
| Smem { m_previous = true } -> error loc (Enext_forbidden(n))
| Smem ({ m_next = n_opt } as m) ->
match n_opt with
| None -> entry.t_sort <- Smem { m with m_next = Some(true) }; typ
| Some(true) -> typ
| Some(false) ->
error loc (Ealready_with_different_kinds(Current, Next, n))
(* Typing [n = ...] *)
let def loc h n =
let { t_sort = sort } as entry = var loc h n in
match sort with
| Sstatic | Sval | Svar _ -> ()
| Smem ({ m_next = n_opt } as m) ->
match n_opt with
| None -> entry.t_sort <- Smem { m with m_next = Some(false) }
| Some(false) -> ()
| Some(true) ->
error loc (Ealready_with_different_kinds(Next, Current, n))
(** Types for global identifiers *)
let global loc expected_k lname =
let { qualid = qualid;
info = { value_static = is_static;
value_typ = tys } } = find_value loc lname in
less_than loc (if is_static then Tstatic true else expected_k) expected_k;
qualid, Ztypes.instance_of_type tys
let global_with_instance loc expected_k lname =
let { qualid = qualid;
info = { value_static = is_static;
value_typ = tys } } = find_value loc lname in
less_than loc (if is_static then Tstatic true else expected_k) expected_k;
let typ_instance, typ_body = Ztypes.instance_and_vars_of_type tys in
qualid, typ_instance, typ_body
let label loc l =
let { qualid = qualid; info = tys_label } = find_label loc l in
qualid, Ztypes.label_instance tys_label
let constr loc c =
let { qualid = qualid; info = tys_c } = find_constr loc c in
qualid, Ztypes.constr_instance tys_c
let rec get_all_labels loc ty =
match ty.t_desc with
| Tconstr(qual, _, _) ->
let { info = { type_desc = ty_c } } =
find_type loc (Lident.Modname(qual)) in
begin match ty_c with
Record_type(l) -> l
| _ -> assert false
end
| Tlink(link) -> get_all_labels loc link
| _ -> assert false
(** Check that every declared name is associated to a *)
(** defining equation and that an initialized state variable is *)
(** not initialized again in the body *)
(** Returns a new [defined_names] where names from [n_list] *)
(** have been removed *)
let check_definitions_for_every_name defined_names n_list =
List.fold_left
(fun { dv = dv; di = di; der = der; nv = nv; mv = mv }
{ vardec_name = n; vardec_default = d_opt; vardec_loc = loc } ->
let in_dv = S.mem n dv in
let in_di = S.mem n di in
let in_der = S.mem n der in
let in_nv = S.mem n nv in
let in_mv = S.mem n mv in
(* check that n is defined by an equation *)
if not (in_dv || in_di || in_der || in_nv || in_mv)
then error loc (Eequation_is_missing(n));
(* remove local names *)
{ dv = if in_dv then S.remove n dv else dv;
di = if in_di then S.remove n di else di;
der = if in_der then S.remove n der else der;
nv = if in_nv then S.remove n nv else nv;
mv = if in_mv then S.remove n mv else mv })
defined_names n_list
(** Typing a declaration *)
(* type checking of the combination function *)
let combine loc expected_ty lname =
let { qualid = qualid; info = { value_typ = tys } } =
find_value loc lname in
let ty = Ztypes.instance_of_type tys in
(* Its signature must be [expected_ty * expected_ty -A-> expected_ty] *)
let ty_combine = Ztypes.type_of_combine () in
unify loc ty_combine ty
(* type checking of the declared default/init value *)
let constant loc expected_k expected_ty = function
| Cimmediate(i) ->
let actual_ty = immediate(i) in
unify loc expected_ty actual_ty
| Cglobal(lname) ->
let qualid, actual_ty = global loc expected_k lname in
unify loc expected_ty actual_ty
(* Typing the declaration of variables. The result is a typing environment *)
(* [inames] is the set of initialized variables, that is, variables *)
(* which appear in an [init x = e] equation *)
let vardec_list expected_k n_list inames =
let default loc expected_ty c_opt = function
| Init(v) ->
(* the initialization must appear in a statefull function *)
if not (Ztypes.is_statefull_kind expected_k)
then error loc Ekind_not_combinatorial;
constant loc expected_k expected_ty v;
Deftypes.Smem
(Deftypes.cmem c_opt { empty_mem with m_init = InitDecl(v) })
| Default(v) ->
constant loc expected_k expected_ty v;
Deftypes.default (Some(v)) c_opt in
(* typing every declaration *)
let vardec h0
{ vardec_name = n; vardec_default = d_opt; vardec_combine = c_opt;
vardec_loc = loc } =
let expected_ty = Ztypes.new_var () in
Zmisc.optional_unit (combine loc) expected_ty c_opt;
let sort =
match d_opt with
| Some(d) -> default loc expected_ty c_opt d
| None ->
match expected_k with
| Tstatic _ -> Deftypes.static
| Tany | Tdiscrete false -> Deftypes.default None c_opt
| Tdiscrete true
| Tcont
| Tproba ->
Deftypes.Smem (Deftypes.cmem c_opt
(if S.mem n inames then Deftypes.imem
else Deftypes.empty_mem)) in
Env.add n { t_typ = expected_ty; t_sort = sort } h0 in
List.fold_left vardec Env.empty n_list
(** Computes the set of names defined in a list of definitions *)
let rec build (names, inames) { eq_desc = desc } =
(* block *)
let block_with_bounded (names, inames)
{ b_vars = b_vars; b_body = eq_list } =
let vardec acc { vardec_name = n } = S.add n acc in
let bounded = List.fold_left vardec S.empty b_vars in
let (local_names, local_inames) = build_list (S.empty, S.empty) eq_list in
bounded, (S.union names (S.diff local_names bounded),
S.union inames (S.diff local_inames bounded)) in
let block (names, inames) b = snd (block_with_bounded (names, inames) b) in
match desc with
| EQeq(p, _) -> Vars.fv_pat S.empty names p, inames
| EQder(n, _, _, _)
| EQpluseq(n, _) | EQnext(n, _, _)
| EQemit(n, _) -> S.add n names, inames
| EQinit(n, _) -> S.add n names, S.add n inames
| EQreset(eq_list, _)
| EQand(eq_list)
| EQbefore(eq_list) -> build_list (names, inames) eq_list
| EQblock(b) -> block (names, inames) b
| EQpresent(ph_list, b_opt) ->
(* present handler *)
let handler (names, inames) { p_body = b } = block (names, inames) b in
let names, inames =
List.fold_left handler (names, inames) ph_list in
Zmisc.optional block (names, inames) b_opt
| EQmatch(_, _, mh_list) ->
(* match handler *)
let handler (names, inames) { m_body = b } = block (names, inames) b in
List.fold_left handler (names, inames) mh_list
| EQautomaton(is_weak, sh_list, _) ->
(* escape handler *)
let escape (names, inames) { e_block = b_opt } =
Zmisc.optional block (names, inames) b_opt in
(* automaton handler *)
let handler (names, inames) { s_body = b; s_trans = esc_list } =
let bounded, (names, inames) =
block_with_bounded (names, inames) b in
let esc_names, esc_inames =
List.fold_left escape (names, inames) esc_list in
S.union names (if is_weak then S.diff esc_names bounded else esc_names),
S.union inames
(if is_weak then S.diff esc_inames bounded else esc_inames)
in
List.fold_left handler (names, inames) sh_list
| EQforall { for_index = in_list; for_init = init_list } ->
let index (names, inames) { desc = desc } =
match desc with
| Einput _ | Eindex _ -> names, inames
| Eoutput(_, n) -> S.add n names, inames in
let init (names, inames) { desc = desc } =
match desc with
| Einit_last(n, _) -> S.add n names, inames in
let names, inames = List.fold_left index (names, inames) in_list in
List.fold_left init (names, inames) init_list
and build_list (names, inames) eq_list =
List.fold_left build (names, inames) eq_list
let env_of_eq_list expected_k eq_list =
let names, inames = build_list (S.empty, S.empty) eq_list in
S.fold
(fun n acc ->
let sort =
match expected_k with
| Deftypes.Tstatic _ -> Deftypes.static
| Deftypes.Tany | Deftypes.Tdiscrete false -> Deftypes.variable
| Deftypes. Tcont
| Deftypes.Tdiscrete true
| Deftypes.Tproba ->
if S.mem n inames then Deftypes.imemory
else Deftypes.Smem (Deftypes.empty_mem) in
Env.add n { t_typ = Ztypes.new_var (); t_sort = sort } acc) names Env.empty
(* introduce a variable with the proper kind *)
(* [last x] is only possible when [expected_k] is statefull *)
let intro_sort_of_var expected_k =
match expected_k with
| Deftypes.Tstatic _ -> Deftypes.static
| Deftypes.Tany | Deftypes.Tdiscrete false -> Deftypes.Sval
| Deftypes. Tcont
| Deftypes.Tdiscrete true
| Deftypes.Tproba -> Deftypes.Smem (Deftypes.empty_mem)
let env_of_scondpat expected_k scpat =
let rec env_of acc { desc = desc } =
match desc with
| Econdand(sc1, sc2) -> env_of (env_of acc sc1) sc2
| Econdor(sc, _) | Econdon(sc, _) -> env_of acc sc
| Econdexp _ -> acc
| Econdpat(_, pat) -> Vars.fv_pat S.empty acc pat in
let acc = env_of S.empty scpat in
S.fold
(fun n acc ->
Env.add n
{ t_typ = Ztypes.new_var (); t_sort = intro_sort_of_var expected_k } acc)
acc Env.empty
let env_of_statepat expected_k spat =
let rec env_of acc { desc = desc } =
match desc with
| Estate0pat _ -> acc
| Estate1pat(_, l) -> List.fold_left (fun acc n -> S.add n acc) acc l in
let acc = env_of S.empty spat in
S.fold
(fun n acc ->
Env.add n
{ t_typ = Ztypes.new_var (); t_sort = intro_sort_of_var expected_k } acc)
acc Env.empty
let env_of_pattern expected_k h0 pat =
let acc = Vars.fv_pat S.empty S.empty pat in
S.fold
(fun n acc ->
Env.add n
{ t_typ = Ztypes.new_var (); t_sort = intro_sort_of_var expected_k } acc)
acc h0
(* the first [n-1] arguments are static. If [expected_k] is static *)
(* the last one too *)
let env_of_pattern_list expected_k env p_list =
let p_list, p = Zmisc.firsts p_list in
let env = List.fold_left (env_of_pattern (Deftypes.Tstatic true)) env p_list in
env_of_pattern expected_k env p
let env_of_pattern expected_k pat = env_of_pattern expected_k Env.empty pat
(** Typing patterns *)
(* the kind of variables in [p] must be equal to [expected_k] *)
let rec pattern h ({ p_desc = desc; p_loc = loc } as pat) ty =
match desc with
| Ewildpat ->
(* type annotation *)
pat.p_typ <- ty
| Econstpat(im) ->
unify_pat pat ty (immediate im);
(* type annotation *)
pat.p_typ <- ty
| Econstr0pat(c0) ->
let qualid, { constr_res = ty_res; constr_arity = n } = constr loc c0 in
(* check the arity *)
if n <> 0 then error loc (Econstr_arity(c0, n, 0));
unify_pat pat ty ty_res;
pat.p_desc <- Econstr0pat(Lident.Modname(qualid));
(* type annotation *)
pat.p_typ <- ty
| Econstr1pat(c1, pat_list) ->
let qualid,
{ constr_arg = ty_list; constr_res = ty_res; constr_arity = n } =
constr loc c1 in
(* check the arity *)
let actual_n = List.length pat_list in
if n <> actual_n then error loc (Econstr_arity(c1, n, actual_n));
unify_pat pat ty ty_res;
pat.p_desc <- Econstr1pat(Lident.Modname(qualid), pat_list);
(* type annotation *)
pat.p_typ <- ty;
List.iter2 (pattern h) pat_list ty_list
| Evarpat(x) ->
unify_pat pat ty (typ_of_var loc h x);
(* type annotation *)
pat.p_typ <- ty
| Etuplepat(pat_list) ->
let ty_list = List.map (fun _ -> new_var ()) pat_list in
unify_pat pat ty (product ty_list);
(* type annotation *)
pat.p_typ <- ty;
List.iter2 (pattern h) pat_list ty_list
| Etypeconstraintpat(p, typ_expr) ->
let expected_typ =
Ztypes.instance_of_type(Interface.scheme_of_type typ_expr) in
unify_pat pat expected_typ ty;
(* type annotation *)
pat.p_typ <- ty;
pattern h p ty
| Erecordpat(label_pat_list) ->
(* type annotation *)
pat.p_typ <- ty;
let label_pat_list =
List.map
(fun (lab, pat_label) ->
let qualid, { label_arg = ty_arg; label_res = ty_res } =
label pat.p_loc lab in
unify_pat pat_label ty ty_arg;
pattern h pat_label ty_res;
Lident.Modname(qualid), pat_label) label_pat_list in
pat.p_desc <- Erecordpat(label_pat_list)
| Ealiaspat(p, x) ->
unify_pat pat ty (typ_of_var loc h x);
(* type annotation *)
pat.p_typ <- ty;
pattern h p ty
| Eorpat(p1, p2) ->
(* type annotation *)
pat.p_typ <- ty;
pattern h p1 ty;
pattern h p2 ty
(* typing a list of patterns. The first [n-1] patterns define static *)
(* values; the [n]-th one has no constraint *)
let pattern_list h pat_list ty_list = List.iter2 (pattern h) pat_list ty_list
(* check that a pattern is total *)
let check_total_pattern p =
let is_exhaustive = Patternsig.check_activate p.p_loc p in
if not is_exhaustive then error p.p_loc Epattern_not_total
let check_total_pattern_list p_list = List.iter check_total_pattern p_list
(** Typing a pattern matching. Returns defined names *)
let match_handlers body loc expected_k h total m_handlers pat_ty ty =
let handler ({ m_pat = pat; m_body = b } as mh) =
let h0 = env_of_pattern expected_k pat in
pattern h0 pat pat_ty;
mh.m_env <- h0;
let h = Env.append h0 h in
body expected_k h b ty in
let defined_names_list = List.map handler m_handlers in
(* check partiality/redundancy of the pattern matching *)
let is_exhaustive =
!total || (Patternsig.check_match_handlers loc m_handlers) in
let defined_names_list =
if is_exhaustive then defined_names_list
else Deftypes.empty :: defined_names_list in
(* set total to the right value *)
total := is_exhaustive;
(* identify variables which are defined partially *)
Total.merge loc h defined_names_list
(** Typing a present handler. Returns defined names *)
(** for every branch the expected kind is discrete. For the default case *)
(** it is the kind of the context. *)
let present_handlers scondpat body loc expected_k h p_h_list b_opt expected_ty =
let handler ({ p_cond = scpat; p_body = b } as ph) =
(* local variables from [scpat] cannot be accessed through a last *)
let h0 = env_of_scondpat expected_k scpat in
let h = Env.append h0 h in
let is_zero = Ztypes.is_continuous_kind expected_k in
scondpat expected_k is_zero h scpat;
(* sets [zero = true] if [expected_k = Tcont] *)
ph.p_zero <- is_zero;
ph.p_env <- h0;
body (Ztypes.lift_to_discrete expected_k) h b expected_ty in
let defined_names_list = List.map handler p_h_list in
(* treat the optional default case *)
let defined_names_list =
match b_opt with
| None -> Deftypes.empty :: defined_names_list
| Some(b) -> let defined_names = body expected_k h b expected_ty in
defined_names :: defined_names_list in
(* identify variables which are defined partially *)
Total.merge loc h defined_names_list
(* [expression expected_k h e] returns the type for [e] *)
let rec expression expected_k h ({ e_desc = desc; e_loc = loc } as e) =
let ty = match desc with
| Econst(i) -> immediate i
| Elocal(x) ->
let { t_typ = typ; t_sort = sort } = var loc h x in
sort_less_than loc sort expected_k;
typ
| Eglobal { lname = lname } ->
let qualid, typ_instance, ty =
global_with_instance loc expected_k lname in
e.e_desc <- Eglobal { lname = Lident.Modname(qualid);
typ_instance = typ_instance }; ty
| Elast(x) -> last loc h x
| Etuple(e_list) ->
product (List.map (expression expected_k h) e_list)
| Eop(Eaccess, [e1; e2]) ->
(* Special typing for [e1.(e2)]. [e1] must be of type [ty[size]] *)
(* with [size] a known expression at that point *)
let ty = expression expected_k h e1 in
let ty_arg, _ = check_is_vec e1.e_loc ty in
expect expected_k h e2 Initial.typ_int; ty_arg
| Eop(Eupdate, [e1; i; e2]) ->
(* Special typing for [{ e1 with (i) = e2 }]. *)
(* [e1] must be of type [ty[size]] *)
(* with [size] a known expression at that point *)
let ty = expression expected_k h e1 in
let ty_arg,_ = check_is_vec e1.e_loc ty in
expect expected_k h i Initial.typ_int;
expect expected_k h e2 ty_arg; ty
| Eop(Eslice(s1, s2), [e]) ->
(* Special typing for [e{ e1 .. e2}] *)
(* [e1] and [e2] must be size expressions *)
let s1 = size h s1 in
let s2 = size h s2 in
let ty = expression expected_k h e in
let ty_arg, _ = check_is_vec e.e_loc ty in
Ztypes.vec ty_arg (Ztypes.plus (Ztypes.minus s2 s1) (Ztypes.const 1))
| Eop(Econcat, [e1; e2]) ->
let ty1 = expression expected_k h e1 in
let ty_arg1, s1 = check_is_vec e1.e_loc ty1 in
let ty2 = expression expected_k h e2 in
let ty_arg2, s2 = check_is_vec e2.e_loc ty2 in
unify_expr e2 ty_arg1 ty_arg2;
Ztypes.vec ty_arg1 (Ztypes.plus s1 s2)
| Eop(op, e_list) ->
operator expected_k h loc op e_list
| Eapp({ app_statefull = is_statefull }, e, e_list) ->
apply loc is_statefull expected_k h e e_list
| Econstr0(c0) ->
let qualid, { constr_res = ty_res; constr_arity = n } =
constr loc c0 in
if n <> 0 then error loc (Econstr_arity(c0, n, 0));
e.e_desc <- Econstr0(Lident.Modname(qualid)); ty_res
| Econstr1(c1, e_list) ->
let qualid,
{ constr_arg = ty_list; constr_res = ty_res; constr_arity = n } =
constr loc c1 in
let actual_arity = List.length e_list in
if n <> actual_arity then
error loc (Econstr_arity(c1, n, actual_arity));
List.iter2 (expect expected_k h) e_list ty_list;
e.e_desc <- Econstr1(Lident.Modname(qualid), e_list); ty_res
| Erecord_access(e1, lab) ->
let qualid, { label_arg = ty_arg; label_res = ty_res } =
label loc lab in
expect expected_k h e1 ty_arg;
e.e_desc <- Erecord_access(e1, Lident.Modname(qualid)); ty_res
| Erecord(label_e_list) ->
let ty = new_var () in
let label_e_list =
List.map
(fun (lab, e_label) ->
let qualid, { label_arg = ty_arg; label_res = ty_res } =
label loc lab in
unify_expr e ty ty_arg;
expect expected_k h e_label ty_res;
(Lident.Modname(qualid), e_label)) label_e_list in
e.e_desc <- Erecord(label_e_list);
(* check that no field is missing *)
let label_desc_list = get_all_labels loc ty in
if List.length label_e_list <> List.length label_desc_list
then error loc Esome_labels_are_missing;
ty
| Erecord_with(e1, label_e_list) ->
let ty = new_var () in
let label_e_list =
List.map
(fun (lab, e_label) ->
let qualid, { label_arg = ty_arg; label_res = ty_res } =
label loc lab in
unify_expr e ty ty_arg;
expect expected_k h e_label ty_res;
(Lident.Modname(qualid), e_label)) label_e_list in
expect expected_k h e1 ty;
e.e_desc <- Erecord_with(e1, label_e_list);
ty
| Etypeconstraint(exp, typ_expr) ->
let expected_typ =
Ztypes.instance_of_type (Interface.scheme_of_type typ_expr) in
expect expected_k h exp expected_typ;
expected_typ
| Elet(l, e) ->
let h = local expected_k h l in
expression expected_k h e
| Eblock(b, e) ->
let h, _ = block_eq_list expected_k h b in
expression expected_k h e
| Eseq(e1, e2) ->
ignore (expression expected_k h e1);
expression expected_k h e2
| Eperiod(p) ->
(* periods are only valid in a continuous context *)
less_than loc Tcont expected_k;
(* a period must be a static expression *)
period (Tstatic(true)) h p;
Ztypes.zero_type expected_k
| Ematch(total, e, m_h_list) ->
let expected_pat_ty = expression expected_k h e in
let expected_ty = new_var () in
ignore
(match_handler_exp_list
loc expected_k h total m_h_list expected_pat_ty expected_ty);
expected_ty
| Epresent(p_h_list, e_opt) ->
let expected_ty = new_var () in
ignore
(present_handler_exp_list loc expected_k h p_h_list e_opt expected_ty);
expected_ty in
(* check that ty belongs to kind expected_k *)
type_is_in_kind loc expected_k ty;
(* type annotation *)
e.e_typ <- ty;
ty
(** Typing a size expression *)
and size h { desc = desc; loc = loc } =
match desc with
| Sconst(i) -> Ztypes.const i
| Sglobal(ln) ->
let qualid, _, typ_body = global_with_instance loc (Tstatic(true)) ln in
unify loc Initial.typ_int typ_body;
Ztypes.global(qualid)
| Sname(x) ->
let { t_typ = typ; t_sort = sort } = var loc h x in
sort_less_than loc sort (Tstatic(true));
unify loc Initial.typ_int typ;
Ztypes.name x
| Sop(Splus, s1, s2) ->
let s1 = size h s1 in
let s2 = size h s2 in
Ztypes.plus s1 s2
| Sop(Sminus, s1, s2) ->
let s1 = size h s1 in
let s2 = size h s2 in
Ztypes.minus s1 s2
(** Convert an expression into a size expression *)
and size_of_exp { e_desc = desc; e_loc = loc } =
match desc with
| Econst(Eint(i)) -> Tconst(i)
| Elocal(n) -> Tname(n)
| Eglobal { lname = Lident.Modname(qualid) } -> Tglobal(qualid)
| Eapp(_, { e_desc = Eglobal { lname = Lident.Modname(qualid) } }, [e1; e2])
when qualid = Initial.stdlib_name "+" ->
Top(Tplus, size_of_exp e1, size_of_exp e2)
| Eapp(_, { e_desc = Eglobal { lname = Lident.Modname(qualid) } }, [e1; e2])
when qualid = Initial.stdlib_name "-" ->
Top(Tminus, size_of_exp e1, size_of_exp e2)
| _ -> error loc Enot_a_size_expression
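  (* For instance, a size written [n + 1], with [n] a local name, is
     converted to [Top(Tplus, Tname(n), Tconst(1))]; any other shape of
     expression is rejected with [Enot_a_size_expression]. *)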
(** The type of primitives and imported functions *)
and operator expected_k h loc op e_list =
let actual_k, ty_args, ty_res =
match op with
| Eifthenelse ->
let ty = new_var () in
Tany, [Initial.typ_bool; ty; ty], ty
| Eunarypre ->
let ty = new_var () in
Tdiscrete(true), [ty], ty
| (Eminusgreater | Efby) ->
let ty = new_var () in
Tdiscrete(true), [ty; ty], ty
| (Eup | Ehorizon) ->
Tcont, [Initial.typ_float], Initial.typ_zero
| Etest ->
let ty = new_var () in
Tany, [Initial.typ_signal ty], Initial.typ_bool
| Edisc ->
let ty = new_var () in
Tcont, [ty], Initial.typ_zero
| Einitial ->
Tcont, [], Initial.typ_zero
| Eatomic ->
let ty = new_var () in
expected_k, [ty], ty
| Eaccess | Eupdate | Eslice _ | Econcat -> assert false in
less_than loc actual_k expected_k;
List.iter2 (expect expected_k h) e_list ty_args;
ty_res
and period expected_k h { p_phase = p1_opt; p_period = p2 } =
expect expected_k h p2 Initial.typ_float;
match p1_opt with None -> () | Some(p1) -> expect expected_k h p1 Initial.typ_float
(** Typing an expression with expected type [expected_type] *)
and expect expected_k h e expected_ty =
let actual_ty = expression expected_k h e in
unify_expr e expected_ty actual_ty
and apply loc is_statefull expected_k h e arg_list =
(* the function [e] must be static *)
let ty_fct = expression (Tstatic(true)) h e in
(* [run f e] forces [f] to be of type [t1 -expected_k-> t2] *)
(* and [k] to be either [D] or [C] *)
if is_statefull then
begin
check_statefull loc expected_k;
unify_expr e (Ztypes.run_type expected_k) ty_fct
end;
let intro_k = Ztypes.intro expected_k in
(* typing the list of arguments *)
(* the [n-1] arguments must be static; the [nth] is of kind [expected_k] *)
let rec args ty_fct = function
| [] -> ty_fct
| arg :: arg_list ->
let actual_k, n_opt, ty1, ty2 =
try Ztypes.filter_arrow intro_k ty_fct
with Unify -> error loc (Eapplication_of_non_function) in
let expected_k = lift loc expected_k actual_k in
expect expected_k h arg ty1;
let ty2 =
match n_opt with
| None -> ty2
| Some(n) -> subst_in_type (Env.singleton n (size_of_exp arg)) ty2 in
args ty2 arg_list in
args ty_fct arg_list
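  (* Example (sketch): for a dependent arrow such as
     [f : (n:int) -> float[n] -> float], applying [f] to the arguments
     [3; v] substitutes [n] by the size expression of the first actual
     argument, so [v] is then checked against the type [float[3]]. *)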
(** Typing an equation. Returns the set of defined names *)
and equation expected_k h ({ eq_desc = desc; eq_loc = loc } as eq) =
let defnames = match desc with
| EQeq(p, e) ->
let ty_e = expression expected_k h e in
pattern h p ty_e;
(* check that the pattern is total *)
check_total_pattern p;
let dv = vars p in
S.iter (def loc h) dv;
{ Deftypes.empty with dv = dv }
| EQpluseq(n, e) ->
let actual_ty = expression expected_k h e in
let expected_ty = pluseq loc h n in
unify loc expected_ty actual_ty;
{ Deftypes.empty with mv = S.singleton n }
| EQinit(n, e0) ->
(* an initialization is valid only in a continuous or discrete context *)
check_statefull loc expected_k;
let actual_ty = init loc h n in
expect (Ztypes.lift_to_discrete expected_k) h e0 actual_ty;
      (* records that every variable from [di] is initialized *)
{ Deftypes.empty with di = S.singleton n }
| EQnext(n, e, e0_opt) ->
(* a next is valid only in a discrete context *)
less_than loc (Tdiscrete(true)) expected_k;
let actual_ty = next loc h n in
expect expected_k h e actual_ty;
let di =
match e0_opt with
| None -> S.empty
| Some(e) ->
expect expected_k h e actual_ty; ignore (init loc h n);
S.singleton n
in
{ Deftypes.empty with nv = S.singleton n; di = di }
| EQder(n, e, e0_opt, p_h_e_list) ->
(* integration is only valid in a continuous context *)
less_than loc Tcont expected_k;
let actual_ty = derivative loc h n in
unify loc Initial.typ_float actual_ty;
expect expected_k h e actual_ty;
let di =
match e0_opt with
| None -> S.empty
| Some(e) ->
expect (Ztypes.lift_to_discrete expected_k) h e Initial.typ_float;
ignore (init loc h n); S.singleton n in
ignore (present_handler_exp_list
loc expected_k h p_h_e_list None Initial.typ_float);
{ Deftypes.empty with di = di; der = S.singleton n }
| EQautomaton(is_weak, s_h_list, se_opt) ->
(* automata are only valid in continuous or discrete context *)
check_statefull loc expected_k;
automaton_handlers is_weak loc expected_k h s_h_list se_opt
| EQmatch(total, e, m_h_list) ->
let expected_pat_ty = expression expected_k h e in
match_handler_block_eq_list
loc expected_k h total m_h_list expected_pat_ty
| EQpresent(p_h_list, b_opt) ->
present_handler_block_eq_list loc expected_k h p_h_list b_opt
| EQreset(eq_list, e) ->
expect expected_k h e (Ztypes.zero_type expected_k);
equation_list expected_k h eq_list
| EQand(eq_list)
| EQbefore(eq_list) -> equation_list expected_k h eq_list
| EQemit(n, e_opt) ->
less_than loc expected_k (Ztypes.lift_to_discrete expected_k);
let ty_e = new_var () in
let ty_name = typ_of_var loc h n in
begin match e_opt with
| None -> unify loc (Initial.typ_signal Initial.typ_unit) ty_name
| Some(e) ->
unify loc (Initial.typ_signal ty_e) ty_name;
expect expected_k h e ty_e
end;
{ Deftypes.empty with dv = S.singleton n }
| EQblock(b_eq_list) ->
snd (block_eq_list expected_k h b_eq_list)
| EQforall
({ for_index = i_list; for_init = init_list; for_body = b_eq_list }
as body) ->
       (* all output variables [xi] such that [xi out x] *)
(* must have a declaration in the body *)
(* A non local variable [xi] defined in the body of the loop must be *)
(* either declared in the initialization part [initialize ...] *)
(* or used to define an output array [xi out x] *)
(* returns a new set [{ dv; di; der; nv; mv }] *)
(* where [xi] is replaced by [x] *)
let merge ({ dv = dv; di = di; der = der; nv = nv; mv = mv } as defnames)
h init_h out_h xi_out_x =
(* check that all names in [out_h] are defined in defnames *)
let out_set = Env.fold (fun x _ acc -> S.add x acc) out_h S.empty in
let out_not_defined =
S.diff out_set (Deftypes.names S.empty defnames) in
if not (S.is_empty out_not_defined)
then error loc (Eequation_is_missing(S.choose out_not_defined));
(* rename [xi] into [x] if [xi out x] appears in [xi_out_x] *)
let x_of_xi xi =
try Env.find xi xi_out_x with Not_found -> xi in
let out xi acc =
try S.add (Env.find xi xi_out_x) acc with Not_found -> acc in
(* all variables in [dv], [der] must appear either *)
(* in [init_h] or [out_h] or as combined variables in [h] *)
(* all variables in [di] must appear in [out_h] and not in [init_h] *)
let belong_to_init_out xi =
if not ((Env.mem xi init_h) || (Env.mem xi out_h))
then error loc (Ealready_in_forall(xi)) in
let belong_to_out_not_init xi =
if not (Env.mem xi out_h) || (Env.mem xi init_h)
then error loc (Ealready_in_forall(xi)) in
S.iter belong_to_init_out dv;
S.iter belong_to_init_out nv;
S.iter belong_to_init_out der;
S.iter belong_to_out_not_init di;
(* change the sort of [x] so that it is equal to that of [xi] *)
S.iter (def loc h) (S.fold out dv S.empty);
S.iter (fun n -> ignore (init loc h n)) (S.fold out di S.empty);
S.iter
(fun n -> ignore (derivative loc h n)) (S.fold out der S.empty);
         (* every name [xi] from [defnames] such that [xi out x] *)
(* is replaced by [x] in the new [defnames] *)
{ dv = S.map x_of_xi dv; di = S.map x_of_xi di;
der = S.map x_of_xi der; nv = S.map x_of_xi nv;
mv = S.map x_of_xi mv } in
(* outputs are either shared or state variables *)
let sort = if Ztypes.is_statefull_kind expected_k
then Deftypes.Smem Deftypes.empty_mem
else Deftypes.variable in
(* bounds for loops must be static *)
(* computes the set of array names returned by the loop *)
      (* declarations are read from left to right. For [i in e0..e1], *)
      (* compute the size [(e1 - e0) + 1] for the arrays *)
let index (in_h, out_h, xi_out_x, size_opt)
{ desc = desc; loc = loc } =
let size_of loc size_opt =
match size_opt with
| None -> error loc Esize_of_vec_is_undetermined
| Some(actual_size) -> actual_size in
match desc with
| Einput(xi, e) ->
let ty = Ztypes.new_var () in
let si = size_of loc size_opt in
expect Tany h e (Ztypes.vec ty si);
Env.add xi { t_typ = ty; t_sort = Sval } in_h,
out_h, xi_out_x, size_opt
| Eoutput(xi, x) ->
let ty_xi = Ztypes.new_var () in
let ty_x = typ_of_var loc h x in
let si = size_of loc size_opt in
unify loc (Ztypes.vec ty_xi si) ty_x;
in_h, Env.add xi { t_typ = ty_xi; t_sort = sort } out_h,
Env.add xi x xi_out_x, size_opt
| Eindex(i, e0, e1) ->
expect (Tstatic(true)) h e0 Initial.typ_int;
expect (Tstatic(true)) h e1 Initial.typ_int;
           (* check that the size [(e1 - e0) + 1] is the same for *)
(* all indices *)
let e0 = size_of_exp e0 in
let e1 = size_of_exp e1 in
let actual_size =
Ztypes.plus (Ztypes.minus e1 e0) (Ztypes.const 1) in
let size_opt =
match size_opt with
| None -> Some(actual_size)
| Some(expected_size) ->
equal_sizes loc expected_size actual_size; size_opt in
Env.add i { t_typ = Initial.typ_int; t_sort = Sval } in_h,
out_h, xi_out_x, size_opt in
(* returns the set of names defined by the loop body *)
let init init_h { desc = desc; loc = loc } =
match desc with
| Einit_last(i, e) ->
let ty = typ_of_var loc h i in
expect expected_k h e ty;
Env.add i { t_typ = ty; t_sort = Deftypes.memory } init_h in
let init_h = List.fold_left init Env.empty init_list in
let in_h, out_h, xi_out_x, _ =
List.fold_left index (Env.empty, Env.empty, Env.empty, None) i_list in
body.for_in_env <- in_h;
body.for_out_env <- out_h;
(* the environment [h] is extended with [in_h], [out_h] and [init_h] *)
let h_eq_list =
Env.append in_h (Env.append out_h (Env.append init_h h)) in
let _, defnames =
block_eq_list expected_k h_eq_list b_eq_list in
(* check that every name in defnames is either declared *)
(* in the initialize branch, an output or a multi-emitted value *)
merge defnames h init_h out_h xi_out_x in
(* set the names defined in the current equation *)
eq.eq_write <- defnames;
(* every equation must define at least a name *)
(* if S.is_empty (Deftypes.names S.empty defnames) *)
(* then warning loc Wequation_does_not_define_a_name; *)
defnames
and equation_list expected_k h eq_list =
List.fold_left
(fun defined_names eq ->
Total.join eq.eq_loc (equation expected_k h eq) defined_names)
Deftypes.empty eq_list
(** Type a present handler when the body is an expression *)
and present_handler_exp_list loc expected_k h p_h_list e0_opt expected_ty =
present_handlers scondpat
(fun expected_k h e expected_ty ->
expect expected_k h e expected_ty; Deftypes.empty)
loc expected_k h p_h_list e0_opt expected_ty
and present_handler_block_eq_list loc expected_k h p_h_list b_opt =
present_handlers scondpat
(fun expected_k h b _ -> snd (block_eq_list expected_k h b))
loc expected_k h p_h_list b_opt Initial.typ_unit
and match_handler_block_eq_list loc expected_k h total m_h_list pat_ty =
match_handlers
(fun expected_k h b _ -> snd (block_eq_list expected_k h b))
loc expected_k h total m_h_list pat_ty Initial.typ_unit
and match_handler_exp_list loc expected_k h total m_h_list pat_ty ty =
match_handlers
(fun expected_k h e expected_ty ->
expect expected_k h e expected_ty; Deftypes.empty)
loc expected_k h total m_h_list pat_ty ty
and block_eq_list expected_k h
({ b_vars = n_list; b_locals = l_list;
b_body = eq_list } as b) =
(* initialize the local environment *)
let _, inames = build_list (S.empty, S.empty) eq_list in
let h0 = vardec_list expected_k n_list inames in
let h = Env.append h0 h in
let new_h = List.fold_left (local expected_k) h l_list in
let defined_names = equation_list expected_k new_h eq_list in
(* check that every local variable from [l_list] appears in *)
(* [defined_variable] and that initialized state variables are not *)
(* re-initialized in the body *)
let defined_names =
check_definitions_for_every_name defined_names n_list in
(* annotate the block with the set of written variables and environment *)
b.b_write <- defined_names;
b.b_env <- h0;
new_h, defined_names
and local expected_k h ({ l_eq = eq_list } as l) =
(* decide whether [last x] is allowed or not on every [x] from [h0] *)
let h0 = env_of_eq_list expected_k eq_list in
l.l_env <- h0;
let new_h = Env.append h0 h in
ignore (equation_list expected_k new_h eq_list);
Env.append h0 h
(** Typing a signal condition *)
(* when [is_zero_type = true], [scpat] must be either of type *)
(* [zero] or [t signal]. [h] is the typing environment *)
(* Under a kind [k = Any], [sc on e] is correct if [e] is of kind [AD] *)
(* The reason is that the possible discontinuity of [e] only takes effect *)
(* when [sc] is true *)
and scondpat expected_k is_zero_type h scpat =
let rec typrec expected_k is_zero_type scpat =
match scpat.desc with
| Econdand(sc1, sc2) ->
typrec expected_k is_zero_type sc1;
typrec expected_k is_zero_type sc2
| Econdor(sc1, sc2) ->
typrec expected_k is_zero_type sc1;
typrec expected_k is_zero_type sc2
| Econdexp(e) ->
let expected_ty =
if is_zero_type then Initial.typ_zero else Initial.typ_bool in
ignore (expect expected_k h e expected_ty)
| Econdpat(e_cond, pat) ->
(* check that e is a signal *)
let ty = new_var () in
ignore (expect expected_k h e_cond (Initial.typ_signal ty));
pattern h pat ty
| Econdon(sc1, e) ->
typrec expected_k is_zero_type sc1;
ignore
(expect (Ztypes.on_type expected_k) h e Initial.typ_bool)
in
typrec expected_k is_zero_type scpat
(* typing state expressions. [state] must be a stateless expression *)
(* [actual_reset = true] if [state] is entered by reset *)
and typing_state h def_states actual_reset state =
match state.desc with
| Estate0(s) ->
begin try
let ({ s_reset = expected_reset; s_parameters = args } as r) =
Env.find s def_states in
if args <> []
then error state.loc (Estate_arity_clash(s, 0, List.length args));
r.s_reset <-
check_target_state state.loc expected_reset actual_reset
with
| Not_found -> error state.loc (Estate_unbound s)
end
| Estate1(s, l) ->
let ({ s_reset = expected_reset; s_parameters = args } as r) =
try
Env.find s def_states
with
| Not_found -> error state.loc (Estate_unbound s) in
begin try
List.iter2
(fun e expected_ty -> ignore (expect Tany h e expected_ty))
l args;
r.s_reset <-
check_target_state state.loc expected_reset actual_reset
with
| Invalid_argument _ ->
error state.loc
(Estate_arity_clash(s, List.length l, List.length args))
end
(* Once the body of an automaton has been typed, indicate for every *)
(* handler if it is always entered by reset or not *)
and mark_reset_state def_states state_handlers =
let mark ({ s_state = statepat } as handler) =
let { s_reset = r } =
Env.find (Total.Automaton.statepatname statepat) def_states in
let v = match r with | None | Some(false) -> false | Some(true) -> true in
handler.Zelus.s_reset <- v in
List.iter mark state_handlers
(** Typing an automaton. Returns defined names *)
and automaton_handlers is_weak loc expected_k h state_handlers se_opt =
(* check that all declared states are accessible *)
Total.Automaton.check_all_states_are_accessible loc state_handlers;
(* global table which associate the set of defined_names for every state *)
let t = Total.Automaton.table state_handlers in
(* build the environment of states. *)
let addname acc { s_state = statepat } =
match statepat.desc with
| Estate0pat(s) -> Env.add s { s_reset = None; s_parameters = [] } acc
| Estate1pat(s, l) ->
Env.add s { s_reset = None;
s_parameters = (List.map (fun _ -> new_var ()) l)} acc in
let def_states = List.fold_left addname Env.empty state_handlers in
(* in case [se_opt = None], checks that the initial state is a non *)
(* parameterised state. *)
let { s_state = statepat } = List.hd state_handlers in
begin match se_opt with
| None ->
begin match statepat.desc with
| Estate1pat _ -> error statepat.loc Estate_initial
| Estate0pat _ -> ()
end
| Some(se) -> typing_state h def_states true se
end;
(* the type for conditions on transitions *)
let is_zero_type = Ztypes.is_continuous_kind expected_k in
(* typing the body of the automaton *)
let typing_handler h
({ s_state = statepat; s_body = b; s_trans = trans } as s) =
let escape source_state h expected_k
({ e_cond = scpat; e_reset = r; e_block = b_opt;
e_next_state = state } as esc) =
(* type one escape condition *)
let h0 = env_of_scondpat expected_k scpat in
let h = Env.append h0 h in
scondpat expected_k is_zero_type h scpat;
(* sets flag [zero = true] when [is_zero_type = true] *)
esc.e_zero <- is_zero_type;
esc.e_env <- h0;
let h, defined_names =
match b_opt with
| None -> h, Deftypes.empty
| Some(b) -> block_eq_list (Tdiscrete(true)) h b in
      (* checks that [state] belongs to the current set of [states] *)
typing_state h def_states r state;
(* checks that names are not defined twice in a state *)
let statename =
if is_weak then source_state else Total.Automaton.statename state in
Total.Automaton.add_transition is_weak h statename defined_names t in
(* typing the state pattern *)
let h0 = env_of_statepat expected_k statepat in
s.s_env <- h0;
begin match statepat.desc with
| Estate0pat _ -> ()
| Estate1pat(s, n_list) ->
let { s_parameters = ty_list } = Env.find s def_states in
List.iter2
(fun n ty ->
unify statepat.loc
(typ_of_var statepat.loc h0 n) ty) n_list ty_list;
end;
let h = Env.append h0 h in
(* typing the body *)
let new_h, defined_names = block_eq_list expected_k h b in
(* add the list of defined_names to the current state *)
let source_state = Total.Automaton.statepatname statepat in
Total.Automaton.add_state source_state defined_names t;
List.iter (escape source_state new_h expected_k) trans;
defined_names in
let first_handler = List.hd state_handlers in
let remaining_handlers = List.tl state_handlers in
(* first type the initial branch *)
let defined_names = typing_handler h first_handler in
  (* if the initial state has only weak transitions then all *)
(* variables from [defined_names] do have a last value *)
let first_h, new_h = if is_weak then turn_vars_into_memories h defined_names
else Env.empty, h in
let defined_names_list =
List.map (typing_handler new_h) remaining_handlers in
(* identify variables which are partially defined in some states *)
(* and/or transitions *)
let defined_names = Total.Automaton.check loc new_h t in
(* write defined_names in every handler *)
List.iter2
(fun { s_body = { b_write = _ } as b } defined_names ->
b.b_write <- defined_names)
state_handlers (defined_names :: defined_names_list);
(* incorporate all the information computed concerning variables *)
(* from the initial handler into the global one *)
incorporate_into_env first_h h;
(* finally, indicate for every state handler if it is entered *)
(* by reset or not *)
mark_reset_state def_states state_handlers;
defined_names
(** Check that size variables are all bounded *)
let no_unbounded_name loc free_in_ty ty =
if not (S.is_empty free_in_ty)
then let n = S.choose free_in_ty in
error loc (Esize_parameter_cannot_be_generalized(n, ty))
else ty
(* make a function type from a function definition. *)
(* remove useless dependences:
* - (n:ty_arg) -k-> ty_res => ty_arg -k-> ty_res if n not in fv_size(ty_res)
 * - if some name stays unbounded, raise an error *)
let funtype loc expected_k pat_list ty_list ty_res =
let rec arg pat_list ty_list fv_in_ty_res =
match pat_list, ty_list with
| [], [] -> [], fv_in_ty_res
| pat :: pat_list, ty_arg :: ty_list ->
let ty_res_list, fv_in_ty_res =
arg pat_list ty_list fv_in_ty_res in
let fv_pat = Vars.fv_pat S.empty S.empty pat in
let opt_name, fv_in_ty_res =
let fv_inter = S.inter fv_pat fv_in_ty_res in
if S.is_empty fv_inter then None, fv_in_ty_res
else match pat.p_desc with
| Evarpat(n) -> Some(n), S.remove n fv_in_ty_res
| _ -> error pat.p_loc Esize_parameter_must_be_a_name in
(opt_name, ty_arg) :: ty_res_list, fv fv_in_ty_res ty_arg
| _ -> assert false in
let ty_arg_list, fv_in_ty_res = arg pat_list ty_list (fv S.empty ty_res) in
let ty_res = funtype_list expected_k ty_arg_list ty_res in
no_unbounded_name loc fv_in_ty_res ty_res
(* The main entry functions *)
let constdecl f is_static e =
let expected_k = if is_static then Tstatic(true) else Tdiscrete(false) in
Zmisc.push_binding_level ();
let ty = expression expected_k Env.empty e in
Zmisc.pop_binding_level ();
let tys = Ztypes.gen (not (expansive e)) ty in
tys
let fundecl loc f ({ f_kind = k; f_atomic = is_atomic;
f_args = pat_list; f_body = e } as body) =
Zmisc.push_binding_level ();
let expected_k = Interface.kindtype k in
(* sets the kind of variables according to [k] *)
(* vars in [pat_list] are values, i.e., *)
(* they cannot be accessed with a last *)
let h0 = env_of_pattern_list expected_k Env.empty pat_list in
body.f_env <- h0;
(* first type the body *)
let ty_p_list = List.map (fun _ -> new_var ()) pat_list in
pattern_list h0 pat_list ty_p_list;
(* check that the pattern is total *)
check_total_pattern_list pat_list;
let ty_res = expression expected_k h0 e in
Zmisc.pop_binding_level ();
let ty_res = funtype loc expected_k pat_list ty_p_list ty_res in
let tys = Ztypes.gen true ty_res in
tys
let implementation ff is_first impl =
try
match impl.desc with
| Econstdecl(f, is_static, e) ->
let tys = constdecl f is_static e in
if is_first then Interface.add_type_of_value ff impl.loc f is_static tys
else Interface.update_type_of_value ff impl.loc f is_static tys
| Efundecl(f, body) ->
let tys = fundecl impl.loc f body in
if is_first then Interface.add_type_of_value ff impl.loc f true tys
else Interface.update_type_of_value ff impl.loc f true tys
| Eopen(modname) ->
if is_first then Modules.open_module modname
| Etypedecl(f, params, ty) ->
if is_first then Interface.typedecl ff impl.loc f params ty
with
| Typerrors.Error(loc, err) ->
if is_first then Typerrors.message loc err
else
begin
Format.eprintf
"@[Internal error: type error during the second step \n\
after static reduction and inlining\n\
           Be careful, the localisation of errors is misleading@.@.@]";
Typerrors.message loc err
end
(* the main entry function *)
let implementation_list ff is_first impl_list =
Zmisc.no_warning := not is_first;
List.iter (implementation ff is_first) impl_list;
Zmisc.no_warning := not is_first;
impl_list
| (***********************************************************************)
(* *)
(* *)
(* Zelus, a synchronous language for hybrid systems *)
(* *)
(* (c) 2020 Inria Paris (see the AUTHORS file) *)
(* *)
(* Copyright Institut National de Recherche en Informatique et en *)
(* Automatique. All rights reserved. This file is distributed under *)
(* the terms of the INRIA Non-Commercial License Agreement (see the *)
(* LICENSE file). *)
(* *)
(* *********************************************************************)
|
tests.ml |
open ExtLib
open OUnit
open Dose_common
module Version = Dose_versioning.Debian
let testdir = "./tests"
let returns_result ?(printer = fun _ -> "(PRINTER NOT SPECIFIED)")
function_to_test expected_result args () =
assert_equal ~printer (function_to_test args) expected_result
and raises_failure function_to_test failure_text args () =
assert_raises (Failure failure_text) (fun () -> function_to_test args)
let parse_depends =
let function_to_test options par =
let f =
Dose_pef.Packages.parse_s
~default:[]
Dose_pef.Packages.parse_builddepsformula
in
Dose_opam2.Packages.vpkgformula_filter options (f "depends" par)
in
let returns options = returns_result (function_to_test options) in
[ ( "depends simple",
[("depends", (Dose_extra.Format822.dummy_loc, "a"))],
returns ("system", [], []) [[(("a", None), None)]] );
( "depends filter arch",
[("depends", (Dose_extra.Format822.dummy_loc, "a [arch]"))],
returns ("arch", [], []) [[(("a", None), None)]] );
( "depends filter arch neg",
[("depends", (Dose_extra.Format822.dummy_loc, "a [arch]"))],
returns ("system", [], []) [] ) ]
let version_lag =
let (request, packagelist) =
Dose_opam2.Packages.input_raw
(Filename.concat testdir "opam/version_lag.opam")
in
let options =
{ Dose_opam2.Opamcudf.default_options with
Dose_opam2.Opamcudf.switch = request.Dose_opam2.Packages.switch
}
in
let universe =
Dose_opam2.Opamcudf.load_universe ~options Version.compare packagelist
in
let pkgA1 = Cudf.lookup_package universe ("A%3asystem", 1) in
let pkgA2 = Cudf.lookup_package universe ("A%3asystem", 2) in
let pkgA3 = Cudf.lookup_package universe ("A%3asystem", 3) in
let pkgA4 = Cudf.lookup_package universe ("A%3asystem", 4) in
assert_equal "3" (Cudf.lookup_package_property pkgA1 "version-lag") ;
assert_equal "2" (Cudf.lookup_package_property pkgA2 "version-lag") ;
assert_equal "1" (Cudf.lookup_package_property pkgA3 "version-lag") ;
assert_equal "0" (Cudf.lookup_package_property pkgA4 "version-lag")
module NV = struct
type t = Cudf_types.pkgname * Cudf_types.version
let compare = compare
let pp_printer fmt (n, v) = Format.fprintf fmt "(\"%s\",%d)" n v
let pp_print_sep fmt () = Format.fprintf fmt ";"
end
module ListNV = OUnitDiff.ListSimpleMake (NV)
let returns_result_list function_to_test expected_result args () =
ListNV.assert_equal (function_to_test args) expected_result
let opamcudf_filter =
let (_, packagelist) =
Dose_opam2.Packages.input_raw
(Filename.concat testdir "opam/filter_universe.opam")
in
let function_to_test (switch, switches) =
let options =
{ Dose_opam2.Opamcudf.default_options with
Dose_opam2.Opamcudf.switch;
switches
}
in
let l =
Dose_opam2.Opamcudf.load_list ~options Version.compare packagelist
in
List.map (fun p -> (CudfAdd.decode p.Cudf.package, p.Cudf.version)) l
in
let returns = returns_result_list function_to_test in
[ ( "load switch:sw1, switches:[]",
("sw1", []),
returns
[("f:sw1", 1); ("e:sw1", 1); ("d:sw1", 1); ("c:sw1", 1); ("a:sw1", 1)]
);
( "load switch:sw2, switches:[]",
("sw2", []),
returns
[("f:sw2", 1); ("e:sw2", 1); ("d:sw2", 1); ("c:sw2", 1); ("b:sw2", 1)]
);
( "load switch:sw1, switches:[sw2]",
("sw1", ["sw2"]),
returns
[ ("f:sw2", 1);
("f:sw1", 1);
("e:sw2", 1);
("e:sw1", 1);
("d:sw2", 1);
("d:sw1", 1);
("c:sw2", 1);
("c:sw1", 1);
("b:sw2", 1);
("a:sw1", 1) ] );
( "load switch:sw2, switches:[sw1]",
("sw2", ["sw1"]),
returns
[ ("f:sw1", 1);
("f:sw2", 1);
("e:sw1", 1);
("e:sw2", 1);
("d:sw1", 1);
("d:sw2", 1);
("c:sw1", 1);
("c:sw2", 1);
("b:sw2", 1);
("a:sw1", 1) ] ) ]
let opamcudf_installed =
let (_, packagelist) =
Dose_opam2.Packages.input_raw
(Filename.concat testdir "opam/filter_universe.opam")
in
let function_to_test (switch, switches) =
let options =
{ Dose_opam2.Opamcudf.default_options with
Dose_opam2.Opamcudf.switch;
switches
}
in
let l =
Dose_opam2.Opamcudf.load_list ~options Version.compare packagelist
in
List.filter_map
(fun p ->
if p.Cudf.installed then
Some (CudfAdd.decode p.Cudf.package, p.Cudf.version)
else None)
l
in
let returns = returns_result_list function_to_test in
[ ("installed switch:sw1", ("sw1", []), returns [("d:sw1", 1); ("c:sw1", 1)]);
("installed switch:sw2, switches:[]", ("sw2", []), returns [("c:sw2", 1)]);
( "installed switch:sw1, switches:[sw2]",
("sw1", ["sw2"]),
returns [("d:sw1", 1); ("c:sw2", 1); ("c:sw1", 1)] ) ]
module NVK = struct
type t = Cudf_types.pkgname * Cudf_types.version * Cudf_types.enum_keep
let compare = compare
let pp_printer fmt (n, v, k) =
Format.fprintf fmt "(\"%s\",%d,%s)" n v (Cudf_types_pp.string_of_keep k)
let pp_print_sep fmt () = Format.fprintf fmt ";"
end
module ListNVK = OUnitDiff.ListSimpleMake (NVK)
let returns_result_nvk function_to_test expected_result args () =
ListNVK.assert_equal (function_to_test args) expected_result
let opamcudf_pinned =
let (_, packagelist) =
Dose_opam2.Packages.input_raw
(Filename.concat testdir "opam/filter_universe.opam")
in
let function_to_test (switch, switches) =
let options =
{ Dose_opam2.Opamcudf.default_options with
Dose_opam2.Opamcudf.switch;
switches
}
in
let l =
Dose_opam2.Opamcudf.load_list ~options Version.compare packagelist
in
List.filter_map
(fun p ->
if p.Cudf.keep = `Keep_version || p.Cudf.keep = `Keep_package then
Some (CudfAdd.decode p.Cudf.package, p.Cudf.version, p.Cudf.keep)
else None)
l
in
let returns = returns_result_nvk function_to_test in
[ ("pinned switch:sw1", ("sw1", []), returns [("e:sw1", 1, `Keep_version)]);
( "pinned switch:sw2, switches:[]",
("sw2", []),
returns [("f:sw2", 1, `Keep_version); ("e:sw2", 1, `Keep_package)] );
( "pinned switch:sw1, switches:[sw2]",
("sw1", ["sw2"]),
returns
[ ("f:sw2", 1, `Keep_version);
("e:sw2", 1, `Keep_package);
("e:sw1", 1, `Keep_version) ] ) ]
let make_test_cases triplets =
List.map
(fun (test_name, input, assert_function) ->
test_name >:: assert_function input)
triplets
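(* Each case above is a triplet [(name, input, assert_fun)];
   [make_test_cases] turns it into an OUnit test whose body runs
   [assert_fun input ()], i.e. one of the thunks built with
   [returns_result], [returns_result_list] or [returns_result_nvk]. *)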
let all =
"all tests"
>::: [ "test depends" >::: make_test_cases parse_depends;
"test filters" >::: make_test_cases opamcudf_filter;
"test installed" >::: make_test_cases opamcudf_installed;
"test pinned" >::: make_test_cases opamcudf_pinned ]
let main () = OUnit.run_test_tt_main all;;
main ()
| |
lwt_sys.ml | (* This file is part of Lwt, released under the MIT license. See LICENSE.md for
details, or visit https://github.com/ocsigen/lwt/blob/master/LICENSE.md. *)
exception Not_available of string
let () = Callback.register_exception "lwt:not-available" (Not_available "")
let windows = Sys.win32
type feature =
[ `wait4
| `get_cpu
| `get_affinity
| `set_affinity
| `recv_msg
| `send_msg
| `fd_passing
| `get_credentials
| `mincore
| `madvise
| `fdatasync
| `libev ]
let have = function
| `wait4
| `recv_msg
| `send_msg
| `madvise -> not Sys.win32
| `mincore -> not (Sys.win32 || Sys.cygwin)
| `get_cpu -> Lwt_config._HAVE_GETCPU
| `get_affinity
| `set_affinity -> Lwt_config._HAVE_AFFINITY
| `fd_passing -> Lwt_config._HAVE_FD_PASSING
| `get_credentials -> Lwt_config._HAVE_GET_CREDENTIALS
| `fdatasync -> Lwt_config._HAVE_FDATASYNC
| `libev -> Lwt_config._HAVE_LIBEV
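(* Usage sketch: callers test for a capability before relying on it,
   e.g. [if have `fd_passing then ... else ...]; under Windows,
   [`wait4], [`recv_msg], [`send_msg] and [`madvise] all report
   [false]. *)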
type byte_order = Little_endian | Big_endian
external get_byte_order : unit -> byte_order = "lwt_unix_system_byte_order"
let byte_order = get_byte_order ()
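(* [byte_order] is computed once, at module load time, by the C stub; on
   x86 and x86-64 it is [Little_endian]. *)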
| (* This file is part of Lwt, released under the MIT license. See LICENSE.md for
details, or visit https://github.com/ocsigen/lwt/blob/master/LICENSE.md. *) |
dune |
(coq.theory
(name C)
(package C)
(theories B)) | |
decode.mli |
include Decoders.Xml.Decode with type value = Ezxmlm.node
val tag_ns : Xmlm.name -> unit decoder
val any_tag_ns : Xmlm.name decoder
val attrs_ns : Xmlm.attribute list decoder
val attr_opt_ns : Xmlm.name -> string option decoder
val attr_ns : Xmlm.name -> string decoder
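(* Example (sketch; the namespace URI is hypothetical): [Xmlm.name] is a
   [(namespace_uri, local_name)] pair, so a namespaced attribute can be
   read with [attr_ns ("urn:example:ns", "id")] and a namespaced element
   matched with [tag_ns ("urn:example:ns", "item")]. *)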
| |
should_keep_whitespace.ml |
open! Core_kernel
open! Import
let looks_like_python_filename = String.is_suffix ~suffix:".py"
let looks_like_python_first_line first_line =
String.is_prefix first_line ~prefix:"#!"
&& String.is_substring first_line ~substring:"python"
;;
let looks_like_python input ~get_name ~get_first_line =
looks_like_python_filename (get_name input)
|| looks_like_python_first_line (get_first_line input)
;;
let for_diff_internal ~prev ~next ~get_name ~get_first_line =
looks_like_python prev ~get_name ~get_first_line
|| looks_like_python next ~get_name ~get_first_line
;;
let for_diff =
for_diff_internal
~get_name:Diff_input.name
~get_first_line:(fun (input : Diff_input.t) ->
match String.lsplit2 input.text ~on:'\n' with
| Some (first_line, _) -> first_line
| None -> input.text)
;;
let for_diff_array =
for_diff_internal ~get_name:fst ~get_first_line:(fun (_, lines) ->
if Array.is_empty lines then "" else lines.(0))
;;
| |
test_git.ml |
open Irmin.Export_for_backends
let test_db = Filename.concat "_build" "test-db-git"
let config =
let head = Git.Reference.v "refs/heads/test" in
Irmin_git.config ~head ~bare:true test_db
module type S = sig
include Irmin_test.S
val init : config:Irmin.config -> unit Lwt.t
end
module type G = sig
include S
module Git : Irmin_git.G
end
module X = struct
type t = X of (int * int) | Y of string list [@@deriving irmin]
let merge = Irmin.Merge.idempotent [%typ: t option]
end
module type X =
Irmin.S
with type Schema.Path.step = string
and type Schema.Path.t = string list
and type Schema.Contents.t = X.t
and type Schema.Branch.t = string
module Mem (C : Irmin.Contents.S) = struct
module G = Irmin_git.Mem
module M = Irmin_git.KV (G) (Git_unix.Sync (G))
module S = M.Make (C)
include S
let init ~config =
let test_db =
Irmin.Backend.Conf.find_root config |> Option.value ~default:test_db
in
Git.v (Fpath.v test_db) >>= function
| Ok t -> S.Git.reset t >|= fun _ -> ()
| _ -> Lwt.return_unit
end
module Generic (C : Irmin.Contents.S) = struct
module CA = Irmin.Content_addressable.Make (Irmin_mem.Append_only)
module M = Irmin_git.Generic_KV (CA) (Irmin_mem.Atomic_write)
include M.Make (C)
let init ~config =
let* repo = Repo.v config in
Repo.branches repo >>= Lwt_list.iter_p (Branch.remove repo)
let clean ~config =
let* repo = Repo.v config in
Repo.branches repo >>= Lwt_list.iter_p (Branch.remove repo) >>= fun () ->
Repo.close repo
end
let suite =
let module S = Mem (Irmin.Contents.String) in
let store = (module S : Irmin_test.S) in
let init ~config = S.init ~config in
let clean ~config = S.init ~config in
Irmin_test.Suite.create ~name:"GIT" ~init ~store ~config ~clean ()
let suite_generic =
let module S = Generic (Irmin.Contents.String) in
let store = (module S : Irmin_test.S) in
let clean ~config = S.clean ~config in
let init ~config = S.init ~config in
Irmin_test.Suite.create ~name:"GIT.generic" ~init ~store ~config ~clean ()
let get = function Some x -> x | None -> Alcotest.fail "get"
let test_sort_order (module S : S) =
let config = Irmin_git.config test_db in
S.init ~config >>= fun () ->
let* repo = S.Repo.v config in
let commit_t = S.Backend.Repo.commit_t repo in
let node_t = S.Backend.Repo.node_t repo in
let head_tree_id branch =
let* head = S.Head.get branch in
let+ commit = S.Backend.Commit.find commit_t (S.Commit.hash head) in
S.Backend.Commit.Val.node (get commit)
in
let ls branch =
let* tree_id = head_tree_id branch in
let+ tree = S.Backend.Node.find node_t tree_id in
S.Backend.Node.Val.list (get tree) |> List.map fst
in
let info = S.Info.none in
let* main = S.main repo in
S.remove_exn main ~info [] >>= fun () ->
S.set_exn main ~info [ "foo.c" ] "foo.c" >>= fun () ->
S.set_exn main ~info [ "foo1" ] "foo1" >>= fun () ->
S.set_exn main ~info [ "foo"; "foo.o" ] "foo.o" >>= fun () ->
let* items = ls main in
Alcotest.(check (list string)) "Sort order" [ "foo.c"; "foo"; "foo1" ] items;
let* tree_id = head_tree_id main in
Alcotest.(check string)
"Sort hash" "00c5f5e40e37fde61911f71373813c0b6cad1477"
(Irmin.Type.to_string S.Backend.Node.Key.t tree_id);
(* Convert dir to file; changes order in listing *)
S.set_exn main ~info [ "foo" ] "foo" >>= fun () ->
let* items = ls main in
Alcotest.(check (list string)) "Sort order" [ "foo"; "foo.c"; "foo1" ] items;
Lwt.return_unit
module Ref (S : Irmin_git.G) = struct
module M = Irmin_git.Ref (S) (Git_unix.Sync (S))
include M.Make (Irmin.Contents.String)
end
let pp_reference ppf = function
| `Branch s -> Fmt.pf ppf "branch: %s" s
| `Remote s -> Fmt.pf ppf "remote: %s" s
| `Tag s -> Fmt.pf ppf "tag: %s" s
| `Other s -> Fmt.pf ppf "other: %s" s
let reference = Alcotest.testable pp_reference ( = )
let test_list_refs (module S : G) =
let module R = Ref (S.Git) in
let config = Irmin_git.config test_db in
S.init ~config >>= fun () ->
let* repo = R.Repo.v config in
let* main = R.main repo in
R.set_exn main ~info:R.Info.none [ "test" ] "toto" >>= fun () ->
let* head = R.Head.get main in
R.Branch.set repo (`Remote "datakit/main") head >>= fun () ->
R.Branch.set repo (`Other "foo/bar/toto") head >>= fun () ->
R.Branch.set repo (`Branch "foo") head >>= fun () ->
let* bs = R.Repo.branches repo in
Alcotest.(check (slist reference compare))
"raw branches"
[
`Branch "foo";
`Branch "main";
`Other "foo/bar/toto";
`Remote "datakit/main";
]
bs;
let* repo = S.Repo.v (Irmin_git.config test_db) in
let* bs = S.Repo.branches repo in
Alcotest.(check (slist string String.compare))
"filtered branches" [ "main"; "foo" ] bs;
(* XXX: re-add
if S.Git.kind = `Disk then
let i = Fmt.kstr Sys.command "cd %s && git gc" test_db in
if i <> 0 then Alcotest.fail "git gc failed";
S.Repo.branches repo >|= fun bs ->
Alcotest.(check (slist string String.compare)) "filtered branches"
["main";"foo"] bs
else *)
Lwt.return_unit
let bin_string = Alcotest.testable (Fmt.fmt "%S") ( = )
let pre_hash t v =
let buf = Buffer.create 13 in
let pre_hash = Irmin.Type.(unstage (pre_hash t)) in
pre_hash v (Buffer.add_string buf);
Buffer.contents buf
let test_blobs (module S : S) =
let str = pre_hash S.Contents.t "foo" in
Alcotest.(check bin_string) "blob foo" "blob 3\000foo" str;
let str = pre_hash S.Contents.t "" in
Alcotest.(check bin_string) "blob ''" "blob 0\000" str;
let module X = Mem (X) in
let str = pre_hash X.Contents.t (Y [ "foo"; "bar" ]) in
Alcotest.(check bin_string)
"blob foo" "blob 19\000{\"Y\":[\"foo\",\"bar\"]}" str;
let str = pre_hash X.Contents.t (X (1, 2)) in
Alcotest.(check bin_string) "blob ''" "blob 11\000{\"X\":[1,2]}" str;
let t = X.Tree.singleton [ "foo" ] (X (1, 2)) in
let k1 = X.Tree.hash t in
let* repo = X.Repo.v (Irmin_git.config test_db) in
let* k2 =
X.Backend.Repo.batch repo (fun x y _ -> X.save_tree ~clear:false repo x y t)
>|= function
| `Node k -> k
| `Contents k -> k
in
let hash = Irmin_test.testable X.Hash.t in
Alcotest.(check hash) "blob" k1 k2;
Lwt.return_unit
let test_import_export (module S : S) =
let module Generic = Generic (Irmin.Contents.String) in
let module Sync = Irmin.Sync.Make (Generic) in
let config = Irmin_git.config test_db in
S.init ~config >>= fun () ->
let* _ = Generic.init ~config in
let* repo = S.Repo.v config in
let* t = S.main repo in
S.set_exn t ~info:S.Info.none [ "test" ] "toto" >>= fun () ->
let remote = Irmin.remote_store (module S) t in
let* repo = Generic.Repo.v (Irmin_mem.config ()) in
let* t = Generic.main repo in
let* _ = Sync.pull_exn t remote `Set in
let+ toto = Generic.get t [ "test" ] in
Alcotest.(check string) "import" toto "toto"
let misc (module S : G) =
let s = (module S : S) in
let g = (module S : G) in
let generic = (module Generic (Irmin.Contents.String) : S) in
[
("Testing sort order", `Quick, fun () -> test_sort_order s);
("Testing sort order (generic)", `Quick, fun () -> test_sort_order generic);
("Testing listing refs", `Quick, fun () -> test_list_refs g);
("git -> mem", `Quick, fun () -> test_import_export s);
("git blobs", `Quick, fun () -> test_blobs s);
("git blobs of generic", `Quick, fun () -> test_blobs s);
]
let mem = (module Mem (Irmin.Contents.String) : G)
| (*
* Copyright (c) 2013-2022 Thomas Gazagnaire <[email protected]>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*) |
dune |
(library
(kind ppx_deriver)
(name ppx_foo_deriver)
(libraries ppxlib))
| |
web_flash.ml |
let log_src = Logs.Src.create "sihl.middleware.flash"
module Logs = (val Logs.src_log log_src : Logs.LOG)
module Flash = struct
open Sexplib.Conv
type t =
{ alert : string option
; notice : string option
; custom : (string * string) list
}
[@@deriving yojson, sexp]
let equals f1 f2 =
Option.equal String.equal f1.alert f2.alert
&& Option.equal String.equal f1.notice f2.notice
&& CCList.equal (CCPair.equal String.equal String.equal) f1.custom f2.custom
;;
let of_json (json : string) : t option =
try Some (of_yojson (Yojson.Safe.from_string json) |> Result.get_ok) with
| _ -> None
;;
let to_json (flash : t) : string = flash |> to_yojson |> Yojson.Safe.to_string
end
module Env = struct
let key : Flash.t Opium.Context.key =
Opium.Context.Key.create ("flash", Flash.sexp_of_t)
;;
end
let find' req = Opium.Context.find Env.key req.Opium.Request.env
let find_alert req = Option.bind (find' req) (fun flash -> flash.alert)
let find_notice req = Option.bind (find' req) (fun flash -> flash.notice)
let find key req =
Option.bind (find' req) (fun flash ->
flash.custom
|> List.find_opt (fun (k, _) -> String.equal key k)
|> Option.map snd)
;;
let set_alert alert resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = Some alert; notice = None; custom = [] }
| Some flash -> Flash.{ flash with alert = Some alert }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
let set_notice notice resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = None; notice = Some notice; custom = [] }
| Some flash -> Flash.{ flash with notice = Some notice }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
let set values resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
let flash =
match flash with
| None -> Flash.{ alert = None; notice = None; custom = values }
| Some flash -> Flash.{ flash with custom = values }
in
let env = resp.Opium.Response.env in
let env = Opium.Context.add Env.key flash env in
{ resp with env }
;;
type decode_status =
| No_cookie_found
| Parse_error
| Found of Flash.t
let decode_flash cookie_key req =
match Opium.Request.cookie cookie_key req with
| None -> No_cookie_found
| Some cookie_value ->
(match Flash.of_json cookie_value with
| None ->
Logs.err (fun m ->
m
"Failed to parse value found in flash cookie '%s': '%s'"
cookie_key
cookie_value);
Logs.info (fun m ->
m
"Maybe the cookie key '%s' collides with a cookie issued by someone \
else. Try to change the cookie key."
cookie_key);
Parse_error
| Some flash -> Found flash)
;;
let persist_flash ?old_flash ?(delete_if_not_set = false) cookie_key resp =
let flash = Opium.Context.find Env.key resp.Opium.Response.env in
match flash with
(* No flash was set in handler *)
| None ->
if delete_if_not_set
then
(* Remove flash cookie *)
Opium.Response.add_cookie_or_replace
~expires:(`Max_age Int64.zero)
~scope:(Uri.of_string "/")
(cookie_key, "")
resp
else resp
(* Flash was set in handler *)
| Some flash ->
(match old_flash with
| Some old_flash ->
if Flash.equals old_flash flash
then (* Same flash value, don't set cookie *)
resp
else (
(* Flash was changed and is different than old flash, set cookie *)
let cookie_value = Flash.to_json flash in
let cookie = cookie_key, cookie_value in
let resp =
Opium.Response.add_cookie_or_replace
~scope:(Uri.of_string "/")
cookie
resp
in
resp)
| None ->
(* Flash was changed and old flash is empty, set cookie *)
let cookie_value = Flash.to_json flash in
let cookie = cookie_key, cookie_value in
let resp =
Opium.Response.add_cookie_or_replace
~scope:(Uri.of_string "/")
cookie
resp
in
resp)
;;
let middleware ?(cookie_key = "_flash") () =
let filter handler req =
match decode_flash cookie_key req with
| No_cookie_found ->
let%lwt resp = handler req in
Lwt.return @@ persist_flash cookie_key resp
| Parse_error ->
let%lwt resp = handler req in
Lwt.return @@ persist_flash ~delete_if_not_set:true cookie_key resp
| Found flash ->
let env = req.Opium.Request.env in
let env = Opium.Context.add Env.key flash env in
let req = { req with env } in
let%lwt resp = handler req in
Lwt.return
@@ persist_flash ~delete_if_not_set:true ~old_flash:flash cookie_key resp
in
Rock.Middleware.create ~name:"flash" ~filter
;;
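(* Usage sketch, assuming the application is wired with [middleware ()]:
   a handler attaches a one-request message to its response with
   [set_notice "Saved" resp], and the handler serving the following
   request reads it back with [find_notice req]; when a handler sets no
   new flash, the cookie is deleted so the message is shown only once. *)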
| |
storage_sigs.ml | (** {1 Entity Accessor Signatures} ****************************************)
(** The generic signature of a single data accessor (a single value
bound to a specific key in the hierarchical (key x value)
database). *)
module type Single_data_storage = sig
type t
type context = t
(** The type of the value *)
type value
(** Tells if the data is already defined *)
val mem: context -> bool Lwt.t
(** Retrieve the value from the storage bucket ; returns a
{!Storage_error} if the key is not set or if the deserialisation
fails *)
val get: context -> value tzresult Lwt.t
(** Retrieves the value from the storage bucket ; returns [None] if
the data is not initialized, or {!Storage_helpers.Storage_error}
if the deserialisation fails *)
val get_option: context -> value option tzresult Lwt.t
(** Allocates the storage bucket and initializes it ; returns a
{!Storage_error Missing_key} if the bucket exists *)
val init: context -> value -> Raw_context.t tzresult Lwt.t
(** Updates the content of the bucket ; returns a {!Storage_Error
      Existing_key} if the value does not exist *)
val set: context -> value -> Raw_context.t tzresult Lwt.t
(** Allocates the data and initializes it with a value ; just
updates it if the bucket exists *)
val init_set: context -> value -> Raw_context.t Lwt.t
(** When the value is [Some v], allocates the data and initializes
it with [v] ; just updates it if the bucket exists. When the
      value is [None], deletes the storage bucket ; does
      nothing if the bucket does not exist. *)
val set_option: context -> value option -> Raw_context.t Lwt.t
(** Delete the storage bucket ; returns a {!Storage_error
      Missing_key} if the bucket does not exist *)
val delete: context -> Raw_context.t tzresult Lwt.t
(** Removes the storage bucket and its contents ; does nothing if
      the bucket does not exist *)
val remove: context -> Raw_context.t Lwt.t
end
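(* Illustrative use of the signature above, for some module
   [S : Single_data_storage with type value = int]: [S.get ctxt] reads
   the bucket, [S.set ctxt 42] updates an existing one, [S.init ctxt 0]
   fails when the bucket already exists, and [S.init_set ctxt 0]
   allocates or overwrites it silently. *)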
(** Variant of {!Single_data_storage} with gas accounting. *)
module type Single_carbonated_data_storage = sig
type t
type context = t
(** The type of the value *)
type value
(** Tells if the data is already defined.
Consumes [Gas_repr.read_bytes_cost Z.zero]. *)
val mem: context -> (Raw_context.t * bool) tzresult Lwt.t
(** Retrieve the value from the storage bucket ; returns a
{!Storage_error} if the key is not set or if the deserialisation
fails.
Consumes [Gas_repr.read_bytes_cost <size of the value>]. *)
val get: context -> (Raw_context.t * value) tzresult Lwt.t
(** Retrieves the value from the storage bucket ; returns [None] if
the data is not initialized, or {!Storage_helpers.Storage_error}
if the deserialisation fails.
Consumes [Gas_repr.read_bytes_cost <size of the value>] if present
or [Gas_repr.read_bytes_cost Z.zero]. *)
val get_option: context -> (Raw_context.t * value option) tzresult Lwt.t
(** Allocates the storage bucket and initializes it ; returns a
{!Storage_error Missing_key} if the bucket exists.
Consumes [Gas_repr.write_bytes_cost <size of the value>].
Returns the size. *)
val init: context -> value -> (Raw_context.t * int) tzresult Lwt.t
(** Updates the content of the bucket ; returns a {!Storage_Error
      Existing_key} if the value does not exist.
Consumes [Gas_repr.write_bytes_cost <size of the new value>].
Returns the difference from the old to the new size. *)
val set: context -> value -> (Raw_context.t * int) tzresult Lwt.t
(** Allocates the data and initializes it with a value ; just
updates it if the bucket exists.
Consumes [Gas_repr.write_bytes_cost <size of the new value>].
Returns the difference from the old (maybe 0) to the new size. *)
val init_set: context -> value -> (Raw_context.t * int) tzresult Lwt.t
(** When the value is [Some v], allocates the data and initializes
it with [v] ; just updates it if the bucket exists. When the
      value is [None], deletes the storage bucket ; does
      nothing if the bucket does not exist.
Consumes the same gas cost as either {!remove} or {!init_set}.
Returns the difference from the old (maybe 0) to the new size. *)
val set_option: context -> value option -> (Raw_context.t * int) tzresult Lwt.t
(** Delete the storage bucket ; returns a {!Storage_error
      Missing_key} if the bucket does not exist.
Consumes [Gas_repr.write_bytes_cost Z.zero].
Returns the freed size. *)
val delete: context -> (Raw_context.t * int) tzresult Lwt.t
(** Removes the storage bucket and its contents ; does nothing if
      the bucket does not exist.
Consumes [Gas_repr.write_bytes_cost Z.zero].
Returns the freed size. *)
val remove: context -> (Raw_context.t * int) tzresult Lwt.t
end
(** Restricted version of {!Indexed_data_storage} w/o iterators. *)
module type Non_iterable_indexed_data_storage = sig
type t
type context = t
(** An abstract type for keys *)
type key
(** The type of values *)
type value
(** Tells if a given key is already bound to a storage bucket *)
val mem: context -> key -> bool Lwt.t
(** Retrieve a value from the storage bucket at a given key ;
returns {!Storage_error Missing_key} if the key is not set ;
returns {!Storage_error Corrupted_data} if the deserialisation
fails. *)
val get: context -> key -> value tzresult Lwt.t
(** Retrieve a value from the storage bucket at a given key ;
returns [None] if the value is not set ; returns {!Storage_error
Corrupted_data} if the deserialisation fails. *)
val get_option: context -> key -> value option tzresult Lwt.t
  (** Updates the content of a bucket ; returns a {!Storage_Error
      Missing_key} if the value does not exist. *)
val set: context -> key -> value -> Raw_context.t tzresult Lwt.t
(** Allocates a storage bucket at the given key and initializes it ;
returns a {!Storage_error Existing_key} if the bucket exists. *)
val init: context -> key -> value -> Raw_context.t tzresult Lwt.t
(** Allocates a storage bucket at the given key and initializes it
with a value ; just updates it if the bucket exists. *)
val init_set: context -> key -> value -> Raw_context.t Lwt.t
(** When the value is [Some v], allocates the data and initializes
it with [v] ; just updates it if the bucket exists. When the
      value is [None], deletes the storage bucket ; does
      nothing if the bucket does not exist. *)
val set_option: context -> key -> value option -> Raw_context.t Lwt.t
(** Delete a storage bucket and its contents ; returns a
      {!Storage_error Missing_key} if the bucket does not exist. *)
val delete: context -> key -> Raw_context.t tzresult Lwt.t
(** Removes a storage bucket and its contents ; does nothing if the
      bucket does not exist. *)
val remove: context -> key -> Raw_context.t Lwt.t
end
(** Variant of {!Non_iterable_indexed_data_storage} with gas accounting. *)
module type Non_iterable_indexed_carbonated_data_storage = sig
type t
type context = t
(** An abstract type for keys *)
type key
(** The type of values *)
type value
(** Tells if a given key is already bound to a storage bucket.
Consumes [Gas_repr.read_bytes_cost Z.zero]. *)
val mem: context -> key -> (Raw_context.t * bool) tzresult Lwt.t
(** Retrieve a value from the storage bucket at a given key ;
returns {!Storage_error Missing_key} if the key is not set ;
returns {!Storage_error Corrupted_data} if the deserialisation
fails.
Consumes [Gas_repr.read_bytes_cost <size of the value>]. *)
val get: context -> key -> (Raw_context.t * value) tzresult Lwt.t
(** Retrieve a value from the storage bucket at a given key ;
returns [None] if the value is not set ; returns {!Storage_error
Corrupted_data} if the deserialisation fails.
Consumes [Gas_repr.read_bytes_cost <size of the value>] if present
or [Gas_repr.read_bytes_cost Z.zero]. *)
val get_option: context -> key -> (Raw_context.t * value option) tzresult Lwt.t
  (** Updates the content of a bucket ; returns a {!Storage_Error
      Missing_key} if the value does not exist.
Consumes serialization cost.
Consumes [Gas_repr.write_bytes_cost <size of the new value>].
Returns the difference from the old to the new size. *)
val set: context -> key -> value -> (Raw_context.t * int) tzresult Lwt.t
(** Allocates a storage bucket at the given key and initializes it ;
returns a {!Storage_error Existing_key} if the bucket exists.
Consumes serialization cost.
Consumes [Gas_repr.write_bytes_cost <size of the value>].
Returns the size. *)
val init: context -> key -> value -> (Raw_context.t * int) tzresult Lwt.t
(** Allocates a storage bucket at the given key and initializes it
with a value ; just updates it if the bucket exists.
Consumes serialization cost.
Consumes [Gas_repr.write_bytes_cost <size of the new value>].
Returns the difference from the old (maybe 0) to the new size. *)
val init_set: context -> key -> value -> (Raw_context.t * int) tzresult Lwt.t
(** When the value is [Some v], allocates the data and initializes
it with [v] ; just updates it if the bucket exists. When the
      value is [None], deletes the storage bucket ; does
      nothing if the bucket does not exist.
Consumes serialization cost.
Consumes the same gas cost as either {!remove} or {!init_set}.
Returns the difference from the old (maybe 0) to the new size. *)
val set_option: context -> key -> value option -> (Raw_context.t * int) tzresult Lwt.t
(** Delete a storage bucket and its contents ; returns a
      {!Storage_error Missing_key} if the bucket does not exist.
Consumes [Gas_repr.write_bytes_cost Z.zero].
Returns the freed size. *)
val delete: context -> key -> (Raw_context.t * int) tzresult Lwt.t
(** Removes a storage bucket and its contents ; does nothing if the
      bucket does not exist.
Consumes [Gas_repr.write_bytes_cost Z.zero].
Returns the freed size. *)
val remove: context -> key -> (Raw_context.t * int) tzresult Lwt.t
end
(** The generic signature of indexed data accessors (a set of values
of the same type indexed by keys of the same form in the
hierarchical (key x value) database). *)
module type Indexed_data_storage = sig
include Non_iterable_indexed_data_storage
(** Empties all the keys and associated data. *)
val clear: context -> Raw_context.t Lwt.t
(** Lists all the keys. *)
val keys: context -> key list Lwt.t
(** Lists all the keys and associated data. *)
val bindings: context -> (key * value) list Lwt.t
(** Iterates over all the keys and associated data. *)
val fold:
context -> init:'a -> f:(key -> value -> 'a -> 'a Lwt.t) -> 'a Lwt.t
(** Iterate over all the keys. *)
val fold_keys:
context -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t
end
module type Indexed_data_snapshotable_storage = sig
type snapshot
type key
include Indexed_data_storage with type key := key
module Snapshot : Indexed_data_storage
with type key = (snapshot * key)
and type value = value
and type t = t
val snapshot_exists : context -> snapshot -> bool Lwt.t
val snapshot : context -> snapshot -> Raw_context.t tzresult Lwt.t
val delete_snapshot : context -> snapshot -> Raw_context.t Lwt.t
end
(** The generic signature of a data set accessor (a set of values
bound to a specific key prefix in the hierarchical (key x value)
database). *)
module type Data_set_storage = sig
type t
type context = t
(** The type of elements. *)
type elt
(** Tells whether an element is a member of the set. *)
val mem: context -> elt -> bool Lwt.t
(** Adds an element to the set. *)
val add: context -> elt -> Raw_context.t Lwt.t
(** Removes an element from the set ; does nothing if it is not a member. *)
val del: context -> elt -> Raw_context.t Lwt.t
(** Adds or removes an element from the set, depending on the boolean. *)
val set: context -> elt -> bool -> Raw_context.t Lwt.t
(** Returns the elements of the set, deserialized in a list in no
particular order. *)
val elements: context -> elt list Lwt.t
(** Iterates over the elements of the set. *)
val fold: context -> init:'a -> f:(elt -> 'a -> 'a Lwt.t) -> 'a Lwt.t
(** Removes all elements in the set *)
val clear: context -> Raw_context.t Lwt.t
end
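(* Usage sketch (not part of the signature): toggling membership in a
   [Data_set_storage] instance [S] (an assumed module name), using Lwt's
   [>>=] bind (assuming [Lwt.Infix] is opened).
   {[
     let toggle (ctxt : S.context) (x : S.elt) : Raw_context.t Lwt.t =
       S.mem ctxt x >>= fun present ->
       S.set ctxt x (not present)
   ]} *)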
module type NAME = sig
val name: Raw_context.key
end
module type VALUE = sig
type t
val encoding: t Data_encoding.t
end
module type Indexed_raw_context = sig
type t
type context = t
type key
type 'a ipath
val clear: context -> Raw_context.t Lwt.t
val fold_keys:
context -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t
val keys: context -> key list Lwt.t
val resolve: context -> string list -> key list Lwt.t
module Make_set (N : NAME)
: Data_set_storage with type t = t
and type elt = key
module Make_map (N : NAME) (V : VALUE)
: Indexed_data_storage with type t = t
and type key = key
and type value = V.t
module Make_carbonated_map (N : NAME) (V : VALUE)
: Non_iterable_indexed_carbonated_data_storage with type t = t
and type key = key
and type value = V.t
module Raw_context : Raw_context.T with type t = t ipath
end
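(* Illustrative sketch: how such an [Indexed_raw_context] is typically
   instantiated. [Contract], the literal key names and [Tez_repr] are
   assumptions made for the example, not definitions from this file, and
   the sketch assumes [Raw_context.key = string list].
   {[
     module Delegated =
       Contract.Make_set (struct let name = ["delegated"] end)
     module Balance =
       Contract.Make_map
         (struct let name = ["balance"] end)
         (struct type t = Tez_repr.t let encoding = Tez_repr.encoding end)
   ]} *)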
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
document_text_command.mli |
open Import
val command_name : string
val command_run :
_ Server.t -> Document_store.t -> Json.t list option -> unit Fiber.t
| |
hello.ml |
let hello s = print_endline (Printf.sprintf "Hello from %s!" s)
| |
dns_trie.mli | (* (c) 2017, 2018 Hannes Mehnert, all rights reserved *)
(** Prefix tree data structure for domain names
The key is a {!Dns_name}, whereas the value may be any resource record. The
representation is a tree, where the edges are domain name labels, and the
nodes carry a {{!Dns_map.t}resource map}.
Some special treatment is applied for zones, which must have a start of
authority entry and a set of name servers. End of authority, also known as
delegation, is supported. Aliases (canonical names, CNAME records) are also
supported.
The data structure tries to preserve invariants recommended by the domain
name system: for any name there may either be an alias or any other
record, an authoritative zone must have a SOA record and one or more NS
records, a resource type must only have entries of the given type (no NS
record under the A type), and the TTL of all resource records in an RRset
is the same.
*)
open Dns
(** {2 Abstract trie type} *)
type t
(** The type of the trie. *)
val pp : t Fmt.t
(** [pp ppf t] pretty prints [t] to [ppf]. *)
val empty : t
(** [empty] is the empty trie. *)
val equal : t -> t -> bool
(** [equal a b] compares [a] with [b]. *)
(** {2 Operations to modify the trie} *)
val insert_map : Rr_map.t Domain_name.Map.t -> t -> t
(** [insert_map m t] inserts all elements of the domain name map [m] into
[t]; entries that already exist are unioned with {!Rr_map.unionee}. *)
val replace_map : Rr_map.t Domain_name.Map.t -> t -> t
(** [replace_map m t] replaces in the trie [t] all existing bindings of the
domain name map [m] with the provided map. *)
val remove_map : Rr_map.t Domain_name.Map.t -> t -> t
(** [remove_map m t] removes all elements of the domain name map [m] from
[t]. *)
val insert : 'a Domain_name.t -> 'b Rr_map.key -> 'b -> t -> t
(** [insert n k v t] inserts [k, v] under [n] in [t]. Existing entries are
unioned with {!Rr_map.union_rr}. *)
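(* Illustrative sketch (all names and addresses below are made up, and
   the usual [Dns], [Domain_name] and [Ipaddr] APIs are assumed): a
   small trie with a SOA and an A record, built with [insert].
   {[
     let t =
       let name = Domain_name.of_string_exn "example.com" in
       let soa = Soa.create ~serial:1l (Domain_name.prepend_label_exn name "ns") in
       empty
       |> insert name Rr_map.Soa soa
       |> insert name Rr_map.A
            (3600l, Ipaddr.V4.Set.singleton (Ipaddr.V4.of_string_exn "192.0.2.1"))
   ]} *)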
val replace : 'a Domain_name.t -> 'b Rr_map.key -> 'b -> t -> t
(** [replace n k v t] inserts [k, v] under [n] in [t]. Existing entries are
replaced. *)
val remove : 'a Domain_name.t -> 'b Rr_map.key -> 'b -> t -> t
(** [remove k ty v t] removes [ty, v] from [t] at [k]. Beware, this may lead
to a [t] where the initially mentioned invariants are violated. *)
val remove_ty : 'a Domain_name.t -> 'b Rr_map.key -> t -> t
(** [remove_ty k ty t] removes [ty] from [t] at [k]. Beware, this may lead to a
[t] where the initially mentioned invariants are violated. *)
val remove_all : 'a Domain_name.t -> t -> t
(** [remove_all k t] removes all entries of [k] in [t]. Beware, this may lead to
a [t] where the initially mentioned invariants are violated. *)
val remove_zone : 'a Domain_name.t -> t -> t
(** [remove_zone name t] removes the zone [name] from [t], retaining subzones
(entries with [Soa] records). This also removes any delegations. *)
(** {2 Checking invariants} *)
type zone_check = [ `Missing_soa of [ `raw ] Domain_name.t
| `Cname_other of [ `raw ] Domain_name.t
| `Bad_ttl of [ `raw ] Domain_name.t * Rr_map.b
| `Empty of [ `raw ] Domain_name.t * Rr_map.k
| `Missing_address of [ `host ] Domain_name.t
| `Soa_not_a_host of [ `raw ] Domain_name.t * string ]
val pp_zone_check : zone_check Fmt.t
(** [pp_zone_check ppf err] pretty-prints the error [err]. *)
val check : t -> (unit, zone_check) result
(** [check t] checks all invariants. *)
(** {2 Lookup} *)
type e = [ `Delegation of [ `raw ] Domain_name.t * (int32 * Domain_name.Host_set.t)
| `EmptyNonTerminal of [ `raw ] Domain_name.t * Soa.t
| `NotAuthoritative
| `NotFound of [ `raw ] Domain_name.t * Soa.t ]
(** The type of lookup errors. *)
val pp_e : e Fmt.t
(** [pp_e ppf e] pretty-prints [e] on [ppf]. *)
val zone : 'a Domain_name.t -> t ->
([ `raw ] Domain_name.t * Soa.t, e) result
(** [zone k t] returns either the zone and soa for [k] in [t], or an error. *)
val lookup_with_cname : 'a Domain_name.t -> 'b Rr_map.key -> t ->
(Rr_map.b * ([ `raw ] Domain_name.t * int32 * Domain_name.Host_set.t), e) result
(** [lookup_with_cname k ty t] finds [k, ty] in [t]. It either returns the found
resource record set and authority information, a cname alias and authority
information, or an error. *)
val lookup : 'a Domain_name.t -> 'b Rr_map.key -> t -> ('b, e) result
(** [lookup k ty t] finds [k, ty] in [t], which may lead to an error. *)
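(* Illustrative sketch (the host name is made up): resolving the A
   records of a host in a trie [t] and printing the addresses.
   {[
     match lookup (Domain_name.of_string_exn "www.example.com") Rr_map.A t with
     | Ok (_ttl, ips) ->
       Ipaddr.V4.Set.iter (fun ip -> Fmt.pr "%a\n" Ipaddr.V4.pp ip) ips
     | Error e -> Fmt.epr "lookup failed: %a\n" pp_e e
   ]} *)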
val lookup_any : 'a Domain_name.t -> t ->
(Rr_map.t * ([ `raw ] Domain_name.t * int32 * Domain_name.Host_set.t), e) result
(** [lookup_any k t] looks up all resource records of [k] in [t], and returns
that and the authority information. *)
val lookup_glue : 'a Domain_name.t -> t ->
(int32 * Ipaddr.V4.Set.t) option * (int32 * Ipaddr.V6.Set.t) option
(** [lookup_glue k t] finds glue records (A, AAAA) for [k] in [t]. It ignores
potential DNS invariants, e.g. that there is no surrounding zone. *)
val entries : 'a Domain_name.t -> t ->
(Dns.Soa.t * Rr_map.t Domain_name.Map.t, e) result
(** [entries name t] returns either the SOA and all entries for the requested
[name], or an error. *)
val fold : 'a Rr_map.key -> t -> ([ `raw ] Domain_name.t -> 'a -> 'b -> 'b) -> 'b -> 'b
(** [fold key t f acc] calls [f dname value acc] for each element of type [key] in [t]. *)
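(* Illustrative sketch: collecting every name in a trie [t] that carries
   an A record.
   {[
     let names_with_a t = fold Rr_map.A t (fun name _ips acc -> name :: acc) []
   ]} *)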
val diff : 'a Domain_name.t -> Soa.t -> old:t -> t ->
(Soa.t * [ `Empty | `Full of Name_rr_map.t | `Difference of Soa.t * Name_rr_map.t * Name_rr_map.t ],
[> `Msg of string ]) result
(** [diff zone soa ~old trie] computes the difference of [zone] in [old] and
[trie], and returns either [`Empty] if [soa] is equal or newer than the one
in [trie], [`Full] (the same as [entries]) if [zone] is not present in [old],
or [`Difference (old_soa, deleted, added)]. Best used with IXFR. An error
occurs if [zone] is not present in [trie]. *)
| (* (c) 2017, 2018 Hannes Mehnert, all rights reserved *)
(** Prefix tree data structure for domain names |
infoAlign.mli | val parse : string -> (string * int) list
| (****************************************************************************)
(* the diy toolsuite *)
(* *)
(* Jade Alglave, University College London, UK. *)
(* Luc Maranget, INRIA Paris-Rocquencourt, France. *)
(* *)
(* Copyright 2020-present Institut National de Recherche en Informatique et *)
(* en Automatique and the authors. All rights reserved. *)
(* *)
(* This software is governed by the CeCILL-B license under French law and *)
(* abiding by the rules of distribution of free software. You can use, *)
(* modify and/ or redistribute the software under the terms of the CeCILL-B *)
(* license as circulated by CEA, CNRS and INRIA at the following URL *)
(* "http://www.cecill.info". We also give a copy in LICENSE.txt. *)
(****************************************************************************)
|
meta-change.c |
int main() {
int x = 0;
x = 1 + 1;
x = 2 + 2;
}
| |
stm_tests_thread_ref.ml |
open Stm_tests_spec_ref
module RT_int = STM_thread.Make(RConf_int) [@alert "-experimental"]
module RT_int64 = STM_thread.Make(RConf_int64) [@alert "-experimental"]
;;
if Sys.backend_type = Sys.Bytecode
then
Printf.printf "STM ref tests with Thread disabled under bytecode\n\n%!"
else
QCheck_runner.run_tests_main
[RT_int.agree_test_conc ~count:250 ~name:"STM int ref test with Thread";
RT_int64.neg_agree_test_conc ~count:1000 ~name:"STM int64 ref test with Thread";
]
|