filename
stringlengths 3
67
| data
stringlengths 0
58.3M
| license
stringlengths 0
19.5k
|
---|---|---|
g2.ml | include Bls12_381_gen.G2.Make (Fr) (Bls12_381_js_gen.G2.MakeStubs (Stubs))
| (*****************************************************************************)
(* *)
(* Copyright (c) 2020-2021 Danny Willems <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
nbdtr.c | /* nbdtr.c
*
* Negative binomial distribution
*
*
*
* SYNOPSIS:
*
* int k, n;
* double p, y, nbdtr();
*
* y = nbdtr( k, n, p );
*
* DESCRIPTION:
*
* Returns the sum of the terms 0 through k of the negative
* binomial distribution:
*
* k
* -- ( n+j-1 ) n j
* > ( ) p (1-p)
* -- ( j )
* j=0
*
* In a sequence of Bernoulli trials, this is the probability
* that k or fewer failures precede the nth success.
*
* The terms are not computed individually; instead the incomplete
* beta integral is employed, according to the formula
*
* y = nbdtr( k, n, p ) = incbet( n, k+1, p ).
*
* The arguments must be positive, with p ranging from 0 to 1.
*
* ACCURACY:
*
* Tested at random points (a,b,p), with p between 0 and 1.
*
* a,b Relative error:
* arithmetic domain # trials peak rms
* IEEE 0,100 100000 1.7e-13 8.8e-15
* See also incbet.c.
*
*/
/* nbdtrc.c
*
* Complemented negative binomial distribution
*
*
*
* SYNOPSIS:
*
* int k, n;
* double p, y, nbdtrc();
*
* y = nbdtrc( k, n, p );
*
* DESCRIPTION:
*
* Returns the sum of the terms k+1 to infinity of the negative
* binomial distribution:
*
* inf
* -- ( n+j-1 ) n j
* > ( ) p (1-p)
* -- ( j )
* j=k+1
*
* The terms are not computed individually; instead the incomplete
* beta integral is employed, according to the formula
*
* y = nbdtrc( k, n, p ) = incbet( k+1, n, 1-p ).
*
* The arguments must be positive, with p ranging from 0 to 1.
*
* ACCURACY:
*
* Tested at random points (a,b,p), with p between 0 and 1.
*
* a,b Relative error:
* arithmetic domain # trials peak rms
* IEEE 0,100 100000 1.7e-13 8.8e-15
* See also incbet.c.
*/
/* nbdtrc
*
* Complemented negative binomial distribution
*
*
*
* SYNOPSIS:
*
* int k, n;
* double p, y, nbdtrc();
*
* y = nbdtrc( k, n, p );
*
* DESCRIPTION:
*
* Returns the sum of the terms k+1 to infinity of the negative
* binomial distribution:
*
* inf
* -- ( n+j-1 ) n j
* > ( ) p (1-p)
* -- ( j )
* j=k+1
*
* The terms are not computed individually; instead the incomplete
* beta integral is employed, according to the formula
*
* y = nbdtrc( k, n, p ) = incbet( k+1, n, 1-p ).
*
* The arguments must be positive, with p ranging from 0 to 1.
*
* ACCURACY:
*
* See incbet.c.
*/
/* nbdtri
*
* Functional inverse of negative binomial distribution
*
*
*
* SYNOPSIS:
*
* int k, n;
* double p, y, nbdtri();
*
* p = nbdtri( k, n, y );
*
* DESCRIPTION:
*
* Finds the argument p such that nbdtr(k,n,p) is equal to y.
*
* ACCURACY:
*
* Tested at random points (a,b,y), with y between 0 and 1.
*
* a,b Relative error:
* arithmetic domain # trials peak rms
* IEEE 0,100 100000 1.5e-14 8.5e-16
* See also incbi.c.
*/
/*
* Cephes Math Library Release 2.3: March, 1995
* Copyright 1984, 1987, 1995 by Stephen L. Moshier
*/
#include "mconf.h"
/* Complemented negative binomial distribution.
 *
 * Returns the sum of the terms k+1 to infinity of the negative
 * binomial distribution, computed via the incomplete beta integral
 * as incbet( k+1, n, 1-p ).
 *
 * Domain: k >= 0 and 0 <= p <= 1.  On a domain error, reports via
 * mtherr() and returns OWL_NAN. */
double nbdtrc(k, n, p)
int k, n;
double p;
{
    double dk, dn;

    /* Both invalid-argument paths funnel into the same error label. */
    if ((p < 0.0) || (p > 1.0))
        goto domerr;
    if (k < 0) {
      domerr:
        /* Report under this function's own name (was "nbdtr"). */
        mtherr("nbdtrc", DOMAIN);
        return (OWL_NAN);
    }
    dk = k + 1;
    dn = n;
    return (incbet(dk, dn, 1.0 - p));
}
/* Negative binomial distribution: returns the sum of the terms 0
 * through k, evaluated via the incomplete beta integral as
 * incbet( n, k+1, p ).
 *
 * Domain: k >= 0 and 0 <= p <= 1; otherwise a DOMAIN error is
 * reported through mtherr() and OWL_NAN is returned. */
double nbdtr(k, n, p)
int k, n;
double p;
{
    double a, b;

    /* Single combined domain check (same error path as before). */
    if (k < 0 || p < 0.0 || p > 1.0) {
        mtherr("nbdtr", DOMAIN);
        return (OWL_NAN);
    }
    a = n;
    b = k + 1;
    return (incbet(a, b, p));
}
/* Functional inverse of the negative binomial distribution: finds the
 * probability p such that nbdtr(k, n, p) == y, using the inverse
 * incomplete beta integral incbi( n, k+1, y ).
 *
 * Domain: k >= 0 and 0 <= y <= 1; otherwise a DOMAIN error is
 * reported through mtherr() and OWL_NAN is returned. */
double nbdtri(k, n, p)
int k, n;
double p;
{
    if (k < 0 || p < 0.0 || p > 1.0) {
        mtherr("nbdtri", DOMAIN);
        return (OWL_NAN);
    }
    return (incbi((double) n, (double) (k + 1), p));
}
| /* nbdtr.c
*
* Negative binomial distribution
*
*
*
* SYNOPSIS:
*
* int k, n;
* double p, y, nbdtr();
*
* y = nbdtr( k, n, p );
*
* DESCRIPTION:
*
* Returns the sum of the terms 0 through k of the negative
* binomial distribution:
*
* k
* -- ( n+j-1 ) n j
* > ( ) p (1-p)
* -- ( j )
* j=0
*
* In a sequence of Bernoulli trials, this is the probability
* that k or fewer failures precede the nth success.
*
* The terms are not computed individually; instead the incomplete
* beta integral is employed, according to the formula
*
* y = nbdtr( k, n, p ) = incbet( n, k+1, p ).
*
* The arguments must be positive, with p ranging from 0 to 1.
*
* ACCURACY:
*
* Tested at random points (a,b,p), with p between 0 and 1.
*
* a,b Relative error:
* arithmetic domain # trials peak rms
* IEEE 0,100 100000 1.7e-13 8.8e-15
* See also incbet.c.
*
*/ |
reason_lexer.mli |
open Reason_parser

(** Abstract lexer state. *)
type t

(** A lexed item together with its start and end positions. *)
type 'a positioned = 'a * Lexing.position * Lexing.position

(** [init ?insert_completion_ident lexbuf] creates a lexer state reading
    from [lexbuf].  [insert_completion_ident] presumably marks a position
    at which a completion identifier is injected into the token stream --
    TODO confirm against the implementation. *)
val init : ?insert_completion_ident:Lexing.position -> Lexing.lexbuf -> t

(** Produce the next token with its positions. *)
val token : t -> token positioned

(** The underlying lexing buffer of a lexer state. *)
val lexbuf : t -> Lexing.lexbuf

(** A comment's text and its location. *)
type comment = string * Location.t

(* Some docstrings are not accepted by the parser
   and turned into comments. *)
type invalid_docstrings

val empty_invalid_docstrings : invalid_docstrings

(** Record a rejected docstring (text plus start/end positions). *)
val add_invalid_docstring :
  string -> Lexing.position -> Lexing.position ->
  invalid_docstrings -> invalid_docstrings

(** Comments seen by this lexer state, presumably merged with the given
    rejected docstrings -- confirm ordering in the implementation. *)
val get_comments : t -> invalid_docstrings -> comment list
| |
pipelined_tree_reduce.mli | (** Pipelined tree reduce operation, with propogation delay equivalent to
[ceil(log(|args|))] *)
open Base
open Hardcaml
val ceil_log : base:int -> int -> int
val create
: f:(Signal.t -> Signal.t -> Signal.t)
-> enable:Signal.t
-> arity:int
-> Signal.register
-> Signal.t list
-> Signal.t With_valid.t
| (** Pipelined tree reduce operation, with propogation delay equivalent to
[ceil(log(|args|))] *) |
p-iterated_frobenius.c |
#include "fq_poly.h"
#ifdef T
#undef T
#endif
#define T fq
#define CAP_T FQ
#include "fq_poly_templates/profile/p-iterated_frobenius.c"
#undef CAP_T
#undef T
| /*
Copyright (C) 2013 Mike Hansen
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
randtest.c |
#include <stdio.h>
#include <stdlib.h>
#include <gmp.h>
#include "flint.h"
#include "fmpz.h"
#include "fmpz_mod_poly.h"
/* Set f to a random polynomial of length at most len over Z/nZ: each of
 * the len coefficients is drawn uniformly below the context modulus, so
 * the result may have smaller degree (or be zero) after normalisation. */
void fmpz_mod_poly_randtest(fmpz_mod_poly_t f, flint_rand_t state, slong len,
                            const fmpz_mod_ctx_t ctx)
{
    slong i;

    fmpz_mod_poly_fit_length(f, len, ctx);
    for (i = 0; i < len; i++)
        fmpz_randm(f->coeffs + i, state, fmpz_mod_ctx_modulus(ctx));
    _fmpz_mod_poly_set_length(f, len);
    _fmpz_mod_poly_normalise(f); /* strip trailing zero coefficients */
}
/* Set f to a random monic polynomial of length exactly len (degree
 * len - 1): the len - 1 lower coefficients are uniform below the
 * modulus, the leading coefficient is 1.  Requires len > 0 (asserted).
 * No normalisation call is needed since the leading term is set last. */
void fmpz_mod_poly_randtest_monic(fmpz_mod_poly_t f, flint_rand_t state,
                                  slong len, const fmpz_mod_ctx_t ctx)
{
    slong i;

    FLINT_ASSERT(len > 0);
    fmpz_mod_poly_fit_length(f, len, ctx);
    for (i = 0; i < len - 1; i++)
        fmpz_randm(f->coeffs + i, state, fmpz_mod_ctx_modulus(ctx));
    fmpz_one(f->coeffs + len - 1);
    _fmpz_mod_poly_set_length(f, len);
}
/* Set poly to a random monic polynomial of length len with at most
 * `nonzero` nonzero terms: a random constant term, nonzero - 1 random
 * coefficients at random interior positions in [1, len - 2] (positions
 * may collide, so the actual term count can be smaller), and a leading
 * coefficient of 1.  Assumes len >= 2 -- TODO confirm all callers
 * guarantee this (the sibling irreducible-sparse driver does). */
static void
fmpz_mod_poly_randtest_monic_sparse(fmpz_mod_poly_t poly, flint_rand_t state,
                                    slong len, slong nonzero,
                                    const fmpz_mod_ctx_t ctx)
{
    slong i;

    fmpz_mod_poly_fit_length(poly, len, ctx);
    _fmpz_vec_zero(poly->coeffs, len);
    fmpz_randm(poly->coeffs + 0, state, fmpz_mod_ctx_modulus(ctx));
    for (i = 1; i < nonzero; i++)
        fmpz_randm(poly->coeffs + n_randint(state, len - 1) + 1,
                   state, fmpz_mod_ctx_modulus(ctx));
    /* Use fmpz_one for the leading coefficient, matching
       fmpz_mod_poly_randtest_monic (was fmpz_set_ui(..., 1)). */
    fmpz_one(poly->coeffs + len - 1);
    _fmpz_mod_poly_set_length(poly, len);
}
/* Set f to a random irreducible polynomial of length at most len, by
 * rejection sampling: draw random polynomials until one is nonzero and
 * irreducible.  Aborts if len == 0, since no candidate then exists. */
void fmpz_mod_poly_randtest_irreducible(fmpz_mod_poly_t f, flint_rand_t state,
                                        slong len, const fmpz_mod_ctx_t ctx)
{
    if (len == 0)
    {
        flint_printf("Exception (fmpz_mod_poly_randtest_irreducible). len == 0.\n");
        flint_abort();
    }
    do {
        fmpz_mod_poly_randtest(f, state, len, ctx);
    } while (fmpz_mod_poly_is_zero(f, ctx) ||
             !fmpz_mod_poly_is_irreducible(f, ctx));
}
/* Monic variant of the rejection sampler above: draw random monic
 * polynomials of length len until one is nonzero and irreducible.
 * Aborts if len == 0. */
void fmpz_mod_poly_randtest_monic_irreducible(fmpz_mod_poly_t f,
                                              flint_rand_t state, slong len,
                                              const fmpz_mod_ctx_t ctx)
{
    if (len == 0)
    {
        flint_printf("Exception (fmpz_mod_poly_randtest_monic_irreducible). len == 0.\n");
        flint_abort();
    }
    do {
        fmpz_mod_poly_randtest_monic(f, state, len, ctx);
    } while (fmpz_mod_poly_is_zero(f, ctx) ||
             !fmpz_mod_poly_is_irreducible(f, ctx));
}
/* Set f to a random nonzero polynomial of length at most len, by
 * redrawing until the result is nonzero.  Aborts if len == 0. */
void fmpz_mod_poly_randtest_not_zero(fmpz_mod_poly_t f,
                                     flint_rand_t state, slong len,
                                     const fmpz_mod_ctx_t ctx)
{
    if (len == 0)
    {
        flint_printf("Exception (fmpz_mod_poly_randtest_not_zero). len == 0.\n");
        flint_abort();
    }
    do {
        fmpz_mod_poly_randtest(f, state, len, ctx);
    } while (fmpz_mod_poly_is_zero(f, ctx));
}
/* Random monic irreducible sparse polynomial by rejection sampling.
 * Starts with 3 terms and raises the target term count by one every 4
 * failed attempts, wrapping back to 3 when it would reach len, so
 * denser candidates get tried when very sparse ones keep failing. */
static void
fmpz_mod_poly_randtest_monic_irreducible_sparse(fmpz_mod_poly_t poly,
                                                flint_rand_t state, slong len,
                                                const fmpz_mod_ctx_t ctx)
{
    slong i = 0;
    slong terms = 3;

    do {
        i++;
        terms += ((i % 4) == 0);
        if (terms >= len)
            terms = 3;
        fmpz_mod_poly_randtest_monic_sparse(poly, state, len, terms, ctx);
    } while (fmpz_mod_poly_is_zero(poly, ctx) ||
             !fmpz_mod_poly_is_irreducible(poly, ctx));
}
/* Set poly to a random monic "trinomial" of length len: random constant
 * term, one random coefficient at a random interior position
 * k in [1, len - 2], and a leading 1.  Assumes len >= 3 (len - 2 is
 * used as a modulus) -- the sparse-irreducible caller guards this;
 * TODO confirm other call sites. */
void fmpz_mod_poly_randtest_trinomial(fmpz_mod_poly_t poly,
                                      flint_rand_t state, slong len,
                                      const fmpz_mod_ctx_t ctx)
{
    ulong k;

    fmpz_mod_poly_fit_length(poly, len, ctx);
    _fmpz_vec_zero(poly->coeffs, len);
    fmpz_randm(poly->coeffs, state, fmpz_mod_ctx_modulus(ctx));
    k = (n_randtest(state) % (len - 2)) + 1;
    fmpz_randm(poly->coeffs + k, state, fmpz_mod_ctx_modulus(ctx));
    fmpz_one(poly->coeffs + len - 1);
    _fmpz_mod_poly_set_length(poly, len);
}
/* Set poly to a random monic "pentomial" of length len: random
 * coefficients at positions 0..3 and a leading 1 at position len - 1.
 * Assumes len >= 5, otherwise the leading write would overlap the
 * random low-order terms -- verify at call sites (the sparse-irreducible
 * caller guards len >= 5). */
void fmpz_mod_poly_randtest_pentomial(fmpz_mod_poly_t poly,
                                      flint_rand_t state, slong len,
                                      const fmpz_mod_ctx_t ctx)
{
    fmpz_mod_poly_fit_length(poly, len, ctx);
    _fmpz_vec_zero(poly->coeffs, len);
    fmpz_randm(poly->coeffs, state, fmpz_mod_ctx_modulus(ctx));
    fmpz_randm(poly->coeffs + 1, state, fmpz_mod_ctx_modulus(ctx));
    fmpz_randm(poly->coeffs + 2, state, fmpz_mod_ctx_modulus(ctx));
    fmpz_randm(poly->coeffs + 3, state, fmpz_mod_ctx_modulus(ctx));
    fmpz_one(poly->coeffs + len - 1);
    _fmpz_mod_poly_set_length(poly, len);
}
/* Try up to max_attempts random trinomials (unlimited if max_attempts
 * is 0) until a nonzero irreducible one is found.  Returns 1 on
 * success, 0 when the attempt budget is exhausted (poly then holds the
 * last candidate tried). */
int fmpz_mod_poly_randtest_trinomial_irreducible(fmpz_mod_poly_t poly,
                                                 flint_rand_t state, slong len,
                                                 slong max_attempts,
                                                 const fmpz_mod_ctx_t ctx)
{
    slong i = 0;

    while (max_attempts == 0 || i < max_attempts)
    {
        fmpz_mod_poly_randtest_trinomial(poly, state, len, ctx);
        if (!fmpz_mod_poly_is_zero(poly, ctx) &&
            fmpz_mod_poly_is_irreducible(poly, ctx))
        {
            return 1;
        }
        i++;
    }
    return 0;
}
/* Try up to max_attempts random pentomials (unlimited if max_attempts
 * is 0) until a nonzero irreducible one is found.  Returns 1 on
 * success, 0 when the attempt budget is exhausted. */
int fmpz_mod_poly_randtest_pentomial_irreducible(fmpz_mod_poly_t poly,
                                                 flint_rand_t state, slong len,
                                                 slong max_attempts,
                                                 const fmpz_mod_ctx_t ctx)
{
    slong i = 0;

    while (max_attempts == 0 || i < max_attempts)
    {
        fmpz_mod_poly_randtest_pentomial(poly, state, len, ctx);
        if (!fmpz_mod_poly_is_zero(poly, ctx) &&
            fmpz_mod_poly_is_irreducible(poly, ctx))
        {
            return 1;
        }
        i++;
    }
    return 0;
}
/* Set poly to a random sparse irreducible monic polynomial.  Strategy,
 * cheapest first: for len < 3 fall back to the dense monic search;
 * otherwise try 2*len random trinomials, then (when len >= 5) 2*len
 * random pentomials, and finally an unbounded search over increasingly
 * dense monic sparse polynomials. */
void fmpz_mod_poly_randtest_sparse_irreducible(fmpz_mod_poly_t poly,
                                               flint_rand_t state, slong len,
                                               const fmpz_mod_ctx_t ctx)
{
    if (len < 3)
    {
        fmpz_mod_poly_randtest_monic_irreducible(poly, state, len, ctx);
        return;
    }
    /* Try trinomials */
    if (fmpz_mod_poly_randtest_trinomial_irreducible(poly, state, len, 2*len, ctx))
        return;
    if (len < 5)
    {
        fmpz_mod_poly_randtest_monic_irreducible(poly, state, len, ctx);
        return;
    }
    /* Try pentomials */
    if (fmpz_mod_poly_randtest_pentomial_irreducible(poly, state, len, 2*len, ctx))
        return;
    /* Random monic sparse */
    fmpz_mod_poly_randtest_monic_irreducible_sparse(poly, state, len, ctx);
}
| /*
Copyright (C) 2011 Sebastian Pancratz
Copyright (C) 2009 William Hart
Copyright (C) 2013 Mike Hansen
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
ftruncate.c | #define CAML_INTERNALS
#include <sys/types.h>
#include <caml/fail.h>
#include <caml/mlvalues.h>
#include <caml/io.h>
#include <caml/signals.h>
#include "unixsupport.h"
#ifdef HAS_UNISTD
#include <unistd.h>
#endif
#ifdef HAS_TRUNCATE
/* Caml primitive: truncate the file open on [fd] to [len] bytes.
 * Raises Unix_error via uerror() on failure.  The OCaml arguments are
 * unboxed before the runtime lock is released, mirroring
 * unix_ftruncate_64 below (both are immediate ints, so this is a
 * consistency/discipline change, not a behavior change). */
CAMLprim value unix_ftruncate(value fd, value len)
{
  int result;
  int c_fd = Int_val(fd);
  long c_len = Long_val(len);
  caml_enter_blocking_section();
  result = ftruncate(c_fd, c_len);
  caml_leave_blocking_section();
  if (result == -1) uerror("ftruncate", Nothing);
  return Val_unit;
}
/* 64-bit variant: [len] arrives as a boxed file offset and is decoded
 * with File_offset_val before the runtime lock is released.  Raises
 * Unix_error via uerror() on failure. */
CAMLprim value unix_ftruncate_64(value fd, value len)
{
  int result;
  file_offset ofs = File_offset_val(len);
  caml_enter_blocking_section();
  result = ftruncate(Int_val(fd), ofs);
  caml_leave_blocking_section();
  if (result == -1) uerror("ftruncate", Nothing);
  return Val_unit;
}
#else
CAMLprim value unix_ftruncate(value fd, value len)
{ caml_invalid_argument("ftruncate not implemented"); }
CAMLprim value unix_ftruncate_64(value fd, value len)
{ caml_invalid_argument("ftruncate not implemented"); }
#endif
| /**************************************************************************/
/* */
/* OCaml */
/* */
/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */
/* */
/* Copyright 1996 Institut National de Recherche en Informatique et */
/* en Automatique. */
/* */
/* All rights reserved. This file is distributed under the terms of */
/* the GNU Lesser General Public License version 2.1, with the */
/* special exception on linking described in the file LICENSE. */
/* */
/**************************************************************************/
|
pkg.ml |
#use "topfind"
#require "topkg-jbuilder.auto"
| |
cache_memory_helpers.ml | module type SNodes = sig
type t = private int
val zero : t
val one : t [@@ocaml.warning "-32"]
val succ : t -> t
val add : t -> t -> t
val to_int : t -> int
end
(** The [Nodes] module is used to count the number of computation steps
performed when evaluating the size of the in-memory graph corresponding
to an OCaml value.
In first approximation, the value of type [Nodes.t] threaded through
{!expr_size} below and through the module {!Script_typed_ir_size}
is meant to match the number of recursive calls in the [traverse]
functions of {!Script_typed_ir} and in that of {!node_size}.
The assumption is that there's a bounded amount of work performed between
two such recursive calls, hence that the total work is bounded above
by something proportional to the [Nodes.t] accumulator.
Computations on values of type [Nodes.t] do not overflow, as they
are bounded above by the number of nodes traversed when computing
an OCaml value.
*)
(** Plain integer implementation of {!SNodes}. *)
module Nodes : SNodes = struct
  type t = int

  let zero = 0

  let one = 1

  let succ x = x + 1

  let add x y = x + y

  let to_int x = x
end
(** {2 Helpers to deal with computing the in-memory size of values} *)
type sint = Saturation_repr.may_saturate Saturation_repr.t

(* A pair: number of traversal steps performed, and accumulated size in
   bytes (saturating). *)
type nodes_and_size = Nodes.t * sint

(* Shorthands over saturating arithmetic. *)
let ( !! ) = Saturation_repr.safe_int

let ( +! ) = Saturation_repr.add

let ( +? ) s x = Saturation_repr.add s !!x

let ( *? ) s x = Saturation_repr.mul s !!x

let ( /? ) s x = Saturation_repr.ediv s !!x

(* Componentwise addition of two accumulators. *)
let ( ++ ) (n1, s1) (n2, s2) = (Nodes.add n1 n2, s1 +! s2)

let zero = (Nodes.zero, !!0)

(* Byte sizes below assume 8-byte words -- a 64-bit runtime. *)
let word_size = !!8

let header_size = word_size

let int32_size = header_size +! word_size

let int64_size = header_size +! (word_size *? 2)

(* hNw = one heap block: a header plus N words of payload. *)
let h1w = header_size +! word_size

let h2w = header_size +! (word_size *? 2)

let h3w = header_size +! (word_size *? 3)

let h4w = header_size +! (word_size *? 4)

let h5w = header_size +! (word_size *? 5)

(* hhNw = two headers plus N words (nested two-block layouts). *)
let hh3w = (word_size *? 3) +! (header_size *? 2)

let hh6w = (word_size *? 6) +! (header_size *? 2)

let hh8w = (word_size *? 8) +! (header_size *? 2)

(* Size of a [Z.t]: counted as free when it fits in 62 bits (immediate),
   otherwise its limbs plus a 32-byte allowance (see comment below). *)
let z_size z =
  let numbits = Z.numbits z in
  (*
     Z does not seem to have a canonical representation of numbers.
     Hence, even though we observed that 24 works in many cases we
     sometimes meet numbers with a larger size, hence we use 32 instead
     of 24 in the following formula.
  *)
  if Compare.Int.(numbits <= 62) then !!0 else (word_size *? Z.size z) +? 32

(* Header plus [len] bytes rounded up past the next word boundary; the
   formula always reserves at least one extra byte of padding. *)
let string_size_gen len = header_size +? (len + (8 - (len mod 8)))

let bytes_size b = string_size_gen (Bytes.length b)

let string_size s = string_size_gen (String.length s)

(* Add [added] bytes to an accumulator, optionally counting a step. *)
let ret_adding (nodes, size) added = (nodes, size +! added)

let ret_succ_adding (nodes, size) added = (Nodes.succ nodes, size +! added)

let ret_succ (nodes, size) = (Nodes.succ nodes, size)

(* Size of an option: a one-word block wrapping the payload when [Some]. *)
let option_size some x =
  let some x = h1w +! some x in
  Option.fold ~none:!!0 ~some x

let option_size_vec some x =
  let some x = ret_adding (some x) h1w in
  Option.fold ~none:zero ~some x

(* A (::) cell: header + head word + tail word + element size. *)
let list_cell_size elt_size = header_size +! word_size +! word_size +! elt_size
[@@ocaml.inline always]

(* Fold over a list, charging one cell (h2w) plus the element size per
   item, and one step per item. *)
let list_fold_size elt_size list =
  List.fold_left
    (fun accu elt -> ret_succ_adding (accu ++ elt_size elt) h2w)
    zero
    list

(* A boxed pair: header + two words + the sizes of both components. *)
let boxed_tup2 x y = header_size +! word_size +! word_size +! x +! y
[@@ocaml.inline always]
(* In-memory footprint of a Micheline node (bytes plus step count).
   Traversal over children is delegated to {!Script_repr.fold};
   [internal_node_size] only accounts for each constructor's own block. *)
let node_size =
  let open Micheline in
  (* An OCaml list item occupies 3 words of memory: one for the (::)
     constructor, one for the item itself (head) and one for the
     remainder of the list (tail). *)
  let list_size sns = word_size *? (List.length sns * 3) in
  (* One step and one 2-word cell plus string payload per annotation. *)
  let annotation_size a =
    List.fold_left
      (fun accu s -> ret_succ_adding accu (h2w +! string_size s))
      zero
      a
  in
  let internal_node_size = function
    | Int (_, z) -> (Nodes.one, h2w +! z_size z)
    | String (_, s) -> (Nodes.one, h2w +! string_size s)
    | Bytes (_, s) -> (Nodes.one, h2w +! bytes_size s)
    | Prim (_, _, args, a) ->
        ret_succ_adding (annotation_size a) (list_size args +! h4w)
    | Seq (_, terms) -> (Nodes.one, list_size terms +! h2w)
  in
  fun node ->
    Script_repr.fold node zero @@ fun accu node ->
    accu ++ internal_node_size node

(* Size of a whole expression, starting from its root node. *)
let expr_size expr = node_size (Micheline.root expr)
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2021 Nomadic Labs, <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
regression-jenkins.ml | open Unix
open Sys
let out_file = "tests.xml"
let _ =
let fd = open_out out_file in
output_string fd "<testsuite tests=\"3\">\n";
output_string fd " <testcase classname=\"foo1\" name=\"ASuccessfulTest\"/>\n";
output_string fd " <testcase classname=\"foo2\" name=\"AnotherSuccessfulTest\"/>\n";
output_string fd " <testcase classname=\"foo3\" name=\"AFailingTest\">\n";
output_string fd " <failure type=\"NotEnoughFoo\"> details about failure </failure>\n";
output_string fd " </testcase>\n";
output_string fd "</testsuite>\n";
close_out fd;
| (**************************************************************************)
(* Ott *)
(* *)
(* Peter Sewell, Computer Laboratory, University of Cambridge *)
(* Francesco Zappa Nardelli, Moscova project, INRIA Rocquencourt *)
(* *)
(* Copyright 2005-2017 *)
(* *)
(* Redistribution and use in source and binary forms, with or without *)
(* modification, are permitted provided that the following conditions *)
(* are met: *)
(* 1. Redistributions of source code must retain the above copyright *)
(* notice, this list of conditions and the following disclaimer. *)
(* 2. Redistributions in binary form must reproduce the above copyright *)
(* notice, this list of conditions and the following disclaimer in the *)
(* documentation and/or other materials provided with the distribution. *)
(* 3. The names of the authors may not be used to endorse or promote *)
(* products derived from this software without specific prior written *)
(* permission. *)
(* *)
(* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS *)
(* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *)
(* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE *)
(* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY *)
(* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL *)
(* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE *)
(* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS *)
(* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER *)
(* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR *)
(* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN *)
(* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *)
(**************************************************************************)
|
environment_on_open.ml |
module M = struct
(* should jump above this comment,
but jumps below it pre 4.08 *)
module M = struct end
end
open M
| |
Fnptr.ml | (* Fnptr.ml *)
(* Module to allow invoking C function pointers. *)
(*
The existing OCaml module "Callback" is for registering OCaml
functions to be called from C. OCaml "external" declarations allow
named C functions to called from OCaml, but not arbitrary function
pointers.
This module allows function pointers to be packaged with their 'extra'
data (on the C side, not visible here) so they can be used from OCaml
to approximate closures.
It is then possible in OCaml to wrap a call to 'call_fnptr' in order
to make a true closure that can be passed around and used like any
other closure.
*)
(* This type encapsulates a pointer to a function defined in C and
* passed to the OCaml code as a callback. That function accepts an
* 'a and returns a 'b.
*
* Although the C code could raise an exception by calling
* 'caml_raise_XXX', the expectation is that users of this interface
* will encode unusual values in 'b, since conveying exceptions across
* language boundaries is somewhat fraught. *)
type ('a, 'b) t
(* Given an fnptr and its argument, invoke it to get the result.
*
* Be aware that an fnptr can be invalidated from the C side, in which
* case this function will throw an Invalid_argument exception. An
* interface that deals with fnptrs should document their valid
* lifetime. *)
external call : ('a, 'b) t -> 'a -> 'b = "call_fnptr"
(* EOF *)
| (* Fnptr.ml *)
(* Module to allow invoking C function pointers. *)
|
owl_base_linalg_intf.ml |
module type Common = sig
type elt
type mat
type complex_mat
type int32_mat
(** {5 Basic functions} *)
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
(** {5 Factorisation} *)
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
(** {5 Linear system of equations} *)
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
end
module type Real = sig
type elt
type mat
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
end
| (*
* OWL - OCaml Scientific Computing
* Copyright (c) 2016-2022 Liang Wang <[email protected]>
*) |
flame_graph_panel.mli |
open! Core
open Bonsai_web
open Memtrace_viewer_common
module Selection : sig
type t =
| Flame of { fragment : Data.Fragment.t }
| Icicle of { fragment : Data.Fragment.t }
| Focus of
{ callers_fragment : Data.Fragment.t
; callees_fragment : Data.Fragment.t
}
end
module Default_selection : sig
type t =
| First_caller
| First_callee
| No_selection
[@@deriving sexp, equal]
end
type t =
{ view : Vdom.Node.t
; key_handler : Vdom_keyboard.Keyboard_event_handler.t
; selection : Selection.t option
; reset_selection : Data.Fragment.t -> Default_selection.t -> unit Vdom.Effect.t
}
val component
: trie:Data.Fragment_trie.t Bonsai.Value.t
-> focus:Data.Fragment.t Bonsai.Value.t
-> activate:(Selection.t -> unit Vdom.Effect.t) Bonsai.Value.t
-> t Bonsai.Computation.t
| |
rebinding.ml |
module X = struct
module Y = struct
module Z = struct
let foo () = ()
end
end
end
open X.Y
let () = Z.foo ()
module X = X.Y
let () = Z.foo ()
| |
dune |
(rule
(alias runtest)
(deps
(source_tree .)
(package js_of_ocaml-ocamlbuild))
(action
(progn
(run
ocamlbuild
-use-ocamlfind
-plugin-tag
"package(js_of_ocaml-ocamlbuild)"
test.js)
(run node _build/test.js))))
| |
sandbox_mode.mli | (** How to sandbox actions *)
(** This module describes the method used to sandbox actions. Choices include:
- not sandboxing
- sandboxing by symlinking dependencies
- sandboxing by copying dependencies
- sandboxing by hardlinking dependencies
- sandboxing by copying dependencies, detecting changes and patching back
the source tree
In the last mode, Dune applies all the changes that happened in the sandbox
to the source tree. This includes:
- applying changes to source files that were dependencies
- deleting source files that were dependencies and were deleted in the
sandbox
- promoting all targets
- promoting all files that were created and not declared as dependencies or
targets
This is a dirty setting, but it is necessary to port projects to Dune that
don't use a separate directory and have rules that go and create/modify
random files. *)
open Import
type some =
| Symlink
| Copy
| Hardlink
| Patch_back_source_tree
type t = some option
val compare : t -> t -> Ordering.t
val equal : t -> t -> bool
module Dict : sig
type key = t
type 'a t =
{ none : 'a
; symlink : 'a
; copy : 'a
; hardlink : 'a
; patch_back_source_tree : 'a
}
val compare : ('a -> 'a -> Ordering.t) -> 'a t -> 'a t -> Ordering.t
val of_func : (key -> 'a) -> 'a t
val get : 'a t -> key -> 'a
end
module Set : sig
type key = t
type t = bool Dict.t
val singleton : key -> t
(** For rules with (mode patch-back-source-tree). *)
val patch_back_source_tree_only : t
val is_patch_back_source_tree_only : t -> bool
val equal : t -> t -> bool
val compare : t -> t -> Ordering.t
val of_func : (key -> bool) -> t
val mem : t -> key -> bool
val inter : t -> t -> t
val to_dyn : t -> Dyn.t
end
(** We exclude [Some Patch_back_source_tree] because selecting this mode
globally via the command line or the config file seems like a terrible
choice. Also, we want to get rid of this mode eventually. *)
val all_except_patch_back_source_tree : t list
val all : t list
val none : t
val symlink : t
val copy : t
val hardlink : t
val decode : t Dune_lang.Decoder.t
val to_string : t -> string
val to_dyn : t -> Dyn.t
| (** How to sandbox actions *)
|
ident.ml | open Local_store
let lowest_scope = 0
let highest_scope = 100000000
type t =
| Local of { name: string; stamp: int }
| Scoped of { name: string; stamp: int; scope: int }
| Global of string
| Predef of { name: string; stamp: int }
(* the stamp is here only for fast comparison, but the name of
predefined identifiers is always unique. *)
(* A stamp of 0 denotes a persistent identifier *)
let currentstamp = s_ref 0
let predefstamp = s_ref 0
let create_scoped ~scope s =
incr currentstamp;
Scoped { name = s; stamp = !currentstamp; scope }
let create_local s =
incr currentstamp;
Local { name = s; stamp = !currentstamp }
let create_predef s =
incr predefstamp;
Predef { name = s; stamp = !predefstamp }
let create_persistent s =
Global s
(* The source-level name, regardless of flavour. *)
let name = function
  | Local { name; _ }
  | Scoped { name; _ }
  | Global name
  | Predef { name; _ } -> name

(* Fresh copy of a local/scoped identifier with a new stamp; the scope,
   if any, is dropped (the result is always Local).  Globals and predefs
   cannot be renamed. *)
let rename = function
  | Local { name; stamp = _ }
  | Scoped { name; stamp = _; scope = _ } ->
      incr currentstamp;
      Local { name; stamp = !currentstamp }
  | id ->
      Misc.fatal_errorf "Ident.rename %s" (name id)

(* Name made globally unique by appending the stamp. *)
let unique_name = function
  | Local { name; stamp }
  | Scoped { name; stamp } -> name ^ "_" ^ Int.to_string stamp
  | Global name ->
      (* we're adding a fake stamp, because someone could have named his unit
         [Foo_123] and since we're using unique_name to produce symbol names,
         we might clash with an ident [Local { "Foo"; 123 }]. *)
      name ^ "_0"
  | Predef { name; _ } ->
      (* we know that none of the predef names (currently) finishes in
         "_<some number>", and that their name is unique. *)
      name

(* Like [unique_name], but with '/' as separator and no fake stamp for
   globals. *)
let unique_toplevel_name = function
  | Local { name; stamp }
  | Scoped { name; stamp } -> name ^ "/" ^ Int.to_string stamp
  | Global name
  | Predef { name; _ } -> name

(* Only Global identifiers are persistent. *)
let persistent = function
  | Global _ -> true
  | _ -> false

(* Equality of *names* only: two distinct stamps with the same name are
   [equal].  Contrast with [same] below, which is identity. *)
let equal i1 i2 =
  match i1, i2 with
  | Local { name = name1; _ }, Local { name = name2; _ }
  | Scoped { name = name1; _ }, Scoped { name = name2; _ }
  | Global name1, Global name2 ->
      name1 = name2
  | Predef { stamp = s1; _ }, Predef { stamp = s2 } ->
      (* if they don't have the same stamp, they don't have the same name *)
      s1 = s2
  | _ ->
      false

(* Identity: compare stamps for stamped identifiers, names for globals;
   identifiers of different flavours are never the same. *)
let same i1 i2 =
  match i1, i2 with
  | Local { stamp = s1; _ }, Local { stamp = s2; _ }
  | Scoped { stamp = s1; _ }, Scoped { stamp = s2; _ }
  | Predef { stamp = s1; _ }, Predef { stamp = s2 } ->
      s1 = s2
  | Global name1, Global name2 ->
      name1 = name2
  | _ ->
      false

(* Stamp of a local/scoped identifier; 0 for globals and predefs (the
   "persistent" stamp, see above). *)
let stamp = function
  | Local { stamp; _ }
  | Scoped { stamp; _ } -> stamp
  | _ -> 0

let scope = function
  | Scoped { scope; _ } -> scope
  | Local _ -> highest_scope
  | Global _ | Predef _ -> lowest_scope

(* First call records the current stamp; subsequent calls roll the
   counter back to that recorded point. *)
let reinit_level = ref (-1)

let reinit () =
  if !reinit_level < 0
  then reinit_level := !currentstamp
  else currentstamp := !reinit_level

let global = function
  | Local _
  | Scoped _ -> false
  | Global _
  | Predef _ -> true

let is_predef = function
  | Predef _ -> true
  | _ -> false
(* Debug printer: globals as "name!", predefs as "name/stamp!", locals
   as "name/stamp", scoped identifiers optionally with "[scope]". *)
let print ~with_scope ppf =
  let open Format in
  function
  | Global name -> fprintf ppf "%s!" name
  | Predef { name; stamp = n } ->
      fprintf ppf "%s/%i!" name n
  | Local { name; stamp = n } ->
      fprintf ppf "%s/%i" name n
  | Scoped { name; stamp = n; scope } ->
      fprintf ppf "%s/%i%s" name n
        (if with_scope then sprintf "[%i]" scope else "")

let print_with_scope ppf id = print ~with_scope:true ppf id

(* Shadow [print] so the scope-free form is the default export. *)
let print ppf id = print ~with_scope:false ppf id
(* Association tables keyed on identifier *names*, implemented as
   height-balanced binary search trees.  Several bindings may share a
   name: they are chained, newest first, through [previous]. *)
type 'a tbl =
    Empty
  | Node of 'a tbl * 'a data * 'a tbl * int

and 'a data =
  { ident: t;
    data: 'a;
    previous: 'a data option }

let empty = Empty

(* Inline expansion of height for better speed
 * let height = function
 *     Empty -> 0
 *   | Node(_,_,_,h) -> h
 *)

(* Build a node whose cached height is recomputed from its children. *)
let mknode l d r =
  let hl = match l with Empty -> 0 | Node(_,_,_,h) -> h
  and hr = match r with Empty -> 0 | Node(_,_,_,h) -> h in
  Node(l, d, r, (if hl >= hr then hl + 1 else hr + 1))

(* Rebalance a node whose children's heights may differ by two, using
   the usual single and double rotations; otherwise just rebuild. *)
let balance l d r =
  let hl = match l with Empty -> 0 | Node(_,_,_,h) -> h
  and hr = match r with Empty -> 0 | Node(_,_,_,h) -> h in
  if hl > hr + 1 then
    match l with
    | Node (ll, ld, lr, _)
      when (match ll with Empty -> 0 | Node(_,_,_,h) -> h) >=
           (match lr with Empty -> 0 | Node(_,_,_,h) -> h) ->
        mknode ll ld (mknode lr d r)
    | Node (ll, ld, Node(lrl, lrd, lrr, _), _) ->
        mknode (mknode ll ld lrl) lrd (mknode lrr d r)
    | _ -> assert false
  else if hr > hl + 1 then
    match r with
    | Node (rl, rd, rr, _)
      when (match rr with Empty -> 0 | Node(_,_,_,h) -> h) >=
           (match rl with Empty -> 0 | Node(_,_,_,h) -> h) ->
        mknode (mknode l d rl) rd rr
    | Node (Node (rll, rld, rlr, _), rd, rr, _) ->
        mknode (mknode l d rll) rld (mknode rlr rd rr)
    | _ -> assert false
  else
    mknode l d r
(* Insert a binding; an existing binding for the same name is not
   discarded but kept in the new node's [previous] chain. *)
let rec add id data = function
  | Empty ->
      Node (Empty, { ident = id; data; previous = None }, Empty, 1)
  | Node (l, k, r, h) ->
      let c = String.compare (name id) (name k.ident) in
      if c < 0 then balance (add id data l) k r
      else if c > 0 then balance l k (add id data r)
      else Node (l, { ident = id; data; previous = Some k }, r, h)
(* Leftmost (name-wise smallest) binding; raises [Not_found] on an
   empty table. *)
let rec min_binding tree =
  match tree with
  | Empty -> raise Not_found
  | Node (Empty, d, _, _) -> d
  | Node (l, _, _, _) -> min_binding l
(* Drop the leftmost binding, rebalancing on the way back up. *)
let rec remove_min_binding tree =
  match tree with
  | Empty -> invalid_arg "Map.remove_min_elt"
  | Node (Empty, _, r, _) -> r
  | Node (l, d, r, _) -> balance (remove_min_binding l) d r
(* Merge two trees where every key of [t1] precedes every key of [t2];
   the minimum of [t2] becomes the new root datum. *)
let merge t1 t2 =
  match t1, t2 with
  | Empty, t | t, Empty -> t
  | _, _ ->
      let d = min_binding t2 in
      balance t1 d (remove_min_binding t2)
(* Remove the most recent binding for [id]'s name, resurrecting the
   previous binding for that name if any.  Untouched subtrees are
   shared: when a recursive call returns the same physical subtree,
   the current node is returned unchanged. *)
let rec remove id = function
  | Empty -> Empty
  | (Node (l, k, r, h) as tree) ->
      let c = String.compare (name id) (name k.ident) in
      if c = 0 then begin
        match k.previous with
        | Some prev -> Node (l, prev, r, h)
        | None -> merge l r
      end
      else if c < 0 then begin
        let l' = remove id l in
        if l' == l then tree else balance l' k r
      end
      else begin
        let r' = remove id r in
        if r' == r then tree else balance l k r'
      end
(* Look up [id] in a [previous] chain, matching by stamp identity. *)
let rec find_previous id chain =
  match chain with
  | None -> raise Not_found
  | Some k ->
      if same id k.ident then k.data
      else find_previous id k.previous
(* Find the binding for exactly [id] (stamp identity), searching the
   name's [previous] chain when the current binding is a different
   ident with the same name. *)
let rec find_same id tree =
  match tree with
  | Empty -> raise Not_found
  | Node (l, k, r, _) ->
      let c = String.compare (name id) (name k.ident) in
      if c < 0 then find_same id l
      else if c > 0 then find_same id r
      else if same id k.ident then k.data
      else find_previous id k.previous
(* Most recent binding for name [n]; raises [Not_found] if absent. *)
let rec find_name n tree =
  match tree with
  | Empty -> raise Not_found
  | Node (l, k, r, _) ->
      let c = String.compare n (name k.ident) in
      if c < 0 then find_name n l
      else if c > 0 then find_name n r
      else (k.ident, k.data)
(* Flatten a [previous] chain into (ident, data) pairs, newest first. *)
let rec get_all chain =
  match chain with
  | None -> []
  | Some k -> (k.ident, k.data) :: get_all k.previous
(* All bindings for name [n], newest first; [] if the name is absent. *)
let rec find_all n tree =
  match tree with
  | Empty -> []
  | Node (l, k, r, _) ->
      let c = String.compare n (name k.ident) in
      if c < 0 then find_all n l
      else if c > 0 then find_all n r
      else get_all (Some k)
(* Fold [f] over every node using an explicit stack of pending left
   subtrees: at each [Node] the node's data is folded, the left child
   is pushed, and traversal continues rightwards; reaching [Empty]
   pops the next pending subtree.  The visit order is therefore not a
   plain in-order traversal. *)
let rec fold_aux f stack accu = function
    Empty ->
      begin match stack with
        [] -> accu
      | a :: l -> fold_aux f l accu a
      end
  | Node(l, k, r, _) ->
      fold_aux f (l :: stack) (f k accu) r

(* Fold over the most recent binding of each distinct name. *)
let fold_name f tbl accu = fold_aux (fun k -> f k.ident k.data) [] accu tbl
(* Fold [f] over a [previous] chain; the recursion folds the oldest
   binding into [accu] first. *)
let rec fold_data f d accu =
  match d with
    None -> accu
  | Some k -> f k.ident k.data (fold_data f k.previous accu)

(* Fold over every binding in the table, including bindings shadowed
   by a more recent ident with the same name. *)
let fold_all f tbl accu =
  fold_aux (fun k -> fold_data f (Some k)) [] accu tbl
(* let keys tbl = fold_name (fun k _ accu -> k::accu) tbl [] *)
(* In-order iteration over the most recent binding of each name. *)
let rec iter f tree =
  match tree with
  | Empty -> ()
  | Node (left, k, right, _) ->
      iter f left ;
      f k.ident k.data ;
      iter f right
(* Idents for sharing keys *)
(* They should be 'totally fresh' -> neg numbers *)
(* Key idents use the empty name and stamps counting downwards from 1,
   so they cannot collide with ordinary (increasing-stamp) idents. *)
let key_name = ""

let make_key_generator () =
  let counter = ref 1 in
  function
  | Local _ | Scoped _ ->
      let stamp = !counter in
      decr counter ;
      Local { name = key_name; stamp }
  | global_id ->
      Misc.fatal_errorf "Ident.make_key_generator () %s" (name global_id)
(* Total order on idents, by kind first (Local > Scoped > Global >
   Predef), then by stamp and name within a kind.  This binding is not
   recursive, so the inner [compare] calls below are Stdlib.compare
   applied to ints and strings. *)
let compare x y =
  match x, y with
  | Local x, Local y ->
      let c = x.stamp - y.stamp in
      if c <> 0 then c
      else compare x.name y.name
  | Local _, _ -> 1
  | _, Local _ -> (-1)
  | Scoped x, Scoped y ->
      let c = x.stamp - y.stamp in
      if c <> 0 then c
      else compare x.name y.name
  | Scoped _, _ -> 1
  | _, Scoped _ -> (-1)
  | Global x, Global y -> compare x y
  | Global _, _ -> 1
  | _, Global _ -> (-1)
  | Predef { stamp = s1; _ }, Predef { stamp = s2; _ } -> compare s1 s2
(* Write the unique name of [id] to a plain output channel. *)
let output oc id = output_string oc (unique_name id)
(* Cheap hash mixing the first character of the name with the stamp.
   NOTE(review): assumes the name is non-empty — an ident built with
   [key_name] ("") would raise here; confirm such idents are never
   hashed. *)
let hash i = (Char.code (name i).[0]) lxor (stamp i)
(* Save the name-based [equal] before the include below shadows it. *)
let original_equal = equal
(* Derive collections (sets, maps, ...) from the core operations.
   Inside the functor, [same] deliberately serves as equality so that
   the generated structures identify idents by stamp, not by name. *)
include Identifiable.Make (struct
  type nonrec t = t
  let compare = compare
  let output = output
  let print = print
  let hash = hash
  let equal = same
end)
(* Restore the name-based [equal] shadowed by the include. *)
let equal = original_equal
(* Give a local or scoped ident a fresh stamp (the result is always
   [Local]); other idents are returned unchanged. *)
let rename_no_exn id =
  match id with
  | Local { name; _ } | Scoped { name; _ } ->
      incr currentstamp ;
      Local { name; stamp = !currentstamp }
  | other -> other
| (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
|
Parsing_stat.mli | (*
Type holding parsing stats and optionally AST stats.
*)
(* Node counts gathered from a produced AST; [untranslated_node_count]
   out of [total_node_count] nodes. *)
type ast_stat = { total_node_count : int; untranslated_node_count : int }

type t = {
  filename : Common.filename;
  total_line_count : int;
  mutable error_line_count : int;
  mutable have_timeout : bool;
  (* used only for cpp for now, to help diagnose problematic macros,
   * see print_recurring_problematic_tokens below.
   *)
  mutable commentized : int;
  mutable problematic_lines : (string list * int) list;
  (* AST stats obtained by inspecting the resulting AST, if any. *)
  ast_stat : ast_stat option;
}
(* Constructors for a stat record on [filename].  NOTE(review): the
   precise counts each constructor fills in (e.g. whether [bad_stat]
   marks every line as an error line) are defined in the
   implementation — confirm there. *)
val default_stat : Common.filename -> t
val bad_stat : Common.filename -> t
val correct_stat : Common.filename -> t

(*
   Print file name and number of lines and error lines in compact format
   suitable for logging.
*)
val summary_of_stat : t -> string

val print_parsing_stat_list : ?verbose:bool -> t list -> unit
val print_recurring_problematic_tokens : t list -> unit

(* Sum a list of stats. *)
val aggregate_stats : t list -> int * int (* total * bad *)

val print_regression_information :
  ext:string -> Common2.path list -> Common2.score -> unit
| (*
Type holding parsing stats and optionally AST stats.
*) |
cluster.ml | let next_name = ref 1
let fresh_name () =
let index = !next_name in
incr next_name ;
"cluster" ^ string_of_int index
(* [create ?path ?name count arguments] creates [count] nodes named
   "<name>.1" ... "<name>.<count>" (a generated "clusterN" name is used
   when [name] is omitted); every node receives the same [arguments].
   NOTE(review): [range] is not defined in this file — presumably a
   helper producing [1; ...; count]; confirm at its definition site. *)
let create ?path ?name count arguments =
  let name = match name with None -> fresh_name () | Some name -> name in
  List.map
    (fun i -> Node.create ?path ~name:(name ^ "." ^ string_of_int i) arguments)
    (range 1 count)
(* Register each node in the other's peer list (bidirectional link). *)
let symmetric_add_peer a b =
  Node.add_peer a b ;
  Node.add_peer b a
let meta_connect connect a b = List.iter (fun a -> List.iter (connect a) b) a
(* Apply [connect] to every unordered pair of distinct nodes. *)
let rec meta_clique connect = function
  | [] -> ()
  | node :: rest ->
      List.iter (connect node) rest ;
      meta_clique connect rest
(* Apply [connect] to consecutive nodes and close the cycle by
   connecting the last node back to the first.  A singleton list is
   connected to itself; the empty list yields no connection. *)
let meta_ring connect nodes =
  match nodes with
  | [] -> ()
  | first :: _ ->
      let rec connect_successors = function
        | [] ->
            (* unreachable: [nodes] was checked non-empty above *)
            assert false
        | [last] -> connect last first
        | a :: (b :: _ as rest) ->
            connect a b ;
            connect_successors rest
      in
      connect_successors nodes
(* Apply [connect] from [center] to each of the other nodes. *)
let meta_star connect center other_nodes =
  List.iter (fun node -> connect center node) other_nodes
(* Topology builders specialised to bidirectional peering. *)
let connect = meta_connect symmetric_add_peer
let clique = meta_clique symmetric_add_peer
let ring = meta_ring symmetric_add_peer
let star = meta_star symmetric_add_peer
(* [wait_for_connections node n] resolves after [node] has emitted [n]
   "connection" events on its "node_chain_validator.v0" event stream.
   A "disconnection" event only logs a warning — the counter is never
   decremented, so the promise can resolve even if peers have since
   dropped.  The promise is woken exactly once: equality with
   [connections] holds only when the counter first reaches it. *)
let wait_for_connections node connections =
  let counter = ref 0 in
  let (waiter, resolver) = Lwt.task () in
  Node.on_event node (fun {name; value} ->
      if name = "node_chain_validator.v0" then
        match JSON.(value |=> 1 |-> "event" |-> "kind" |> as_string_opt) with
        | None -> ()
        | Some "connection" ->
            incr counter ;
            if !counter = connections then Lwt.wakeup resolver ()
        | Some "disconnection" ->
            Log.warn "The topology of the test has changed"
        | Some _ -> ()) ;
  (* Wait for readiness first; the event handler is already installed. *)
  let* () = Node.wait_for_ready node in
  waiter
(* Boot every node of the cluster in parallel: generate an identity,
   initialise the configuration, run the node (in private mode unless
   [public] is true), then wait for readiness — or, with
   [~wait_connections:true], until the node has as many connections as
   it had declared peers before starting.
   NOTE(review): [wait_for_connections] installs its event handler
   only after [Node.run] returns; confirm that connection events
   cannot be emitted before that point, otherwise early events would
   be missed. *)
let start ?(public = false) ?event_level ?event_sections_levels
    ?(wait_connections = false) nodes =
  let start_node node =
    let* () = Node.identity_generate node in
    (* Number of declared peers, captured before the node runs. *)
    let n = Node.get_peers node |> List.length in
    let* () = Node.config_init node [] in
    let* () =
      Node.run
        ?event_level
        ?event_sections_levels
        node
        (if public then [] else [Private_mode])
    in
    let waiter =
      if wait_connections then wait_for_connections node n
      else Node.wait_for_ready node
    in
    waiter
  in
  Lwt_list.iter_p start_node nodes
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2020 Nomadic Labs <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
path_encoding.mli | module type S = sig
type t
(** [to_path t postfix] returns the context path name for [t]
postfixed with [postfix] *)
val to_path : t -> string list -> string list
(** [of_path path] parses [path] as a context path name for [t] *)
val of_path : string list -> t option
(** Directory levels of the path encoding of [t] *)
val path_length : int
end
module type ENCODING = sig
type t
val to_bytes : t -> bytes
val of_bytes_opt : bytes -> t option
end
(** Path encoding in hex: [/[0-9a-f]{2}+/] *)
module Make_hex (H : ENCODING) : S with type t := H.t
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* Copyright (c) 2021 DaiLambda, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
baking_files.ml | type _ location = string
let resolve_location ~chain_id (kind : 'a) : 'a location =
let basename =
match kind with
| `Highwatermarks -> "highwatermark"
| `State -> "baker_state"
| `Nonce -> "nonce"
in
Format.asprintf "%a_%s" Chain_id.pp_short chain_id basename
let filename x = x
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
expat.h |
#ifndef Expat_INCLUDED
#define Expat_INCLUDED 1
#include <stdlib.h>
#include "expat_external.h"
#ifdef __cplusplus
extern "C" {
#endif
struct XML_ParserStruct;
typedef struct XML_ParserStruct *XML_Parser;
typedef unsigned char XML_Bool;
#define XML_TRUE ((XML_Bool)1)
#define XML_FALSE ((XML_Bool)0)
/* The XML_Status enum gives the possible return values for several
API functions. The preprocessor #defines are included so this
stanza can be added to code that still needs to support older
versions of Expat 1.95.x:
#ifndef XML_STATUS_OK
#define XML_STATUS_OK 1
#define XML_STATUS_ERROR 0
#endif
Otherwise, the #define hackery is quite ugly and would have been
dropped.
*/
enum XML_Status {
XML_STATUS_ERROR = 0,
#define XML_STATUS_ERROR XML_STATUS_ERROR
XML_STATUS_OK = 1,
#define XML_STATUS_OK XML_STATUS_OK
XML_STATUS_SUSPENDED = 2
#define XML_STATUS_SUSPENDED XML_STATUS_SUSPENDED
};
enum XML_Error {
XML_ERROR_NONE,
XML_ERROR_NO_MEMORY,
XML_ERROR_SYNTAX,
XML_ERROR_NO_ELEMENTS,
XML_ERROR_INVALID_TOKEN,
XML_ERROR_UNCLOSED_TOKEN,
XML_ERROR_PARTIAL_CHAR,
XML_ERROR_TAG_MISMATCH,
XML_ERROR_DUPLICATE_ATTRIBUTE,
XML_ERROR_JUNK_AFTER_DOC_ELEMENT,
XML_ERROR_PARAM_ENTITY_REF,
XML_ERROR_UNDEFINED_ENTITY,
XML_ERROR_RECURSIVE_ENTITY_REF,
XML_ERROR_ASYNC_ENTITY,
XML_ERROR_BAD_CHAR_REF,
XML_ERROR_BINARY_ENTITY_REF,
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF,
XML_ERROR_MISPLACED_XML_PI,
XML_ERROR_UNKNOWN_ENCODING,
XML_ERROR_INCORRECT_ENCODING,
XML_ERROR_UNCLOSED_CDATA_SECTION,
XML_ERROR_EXTERNAL_ENTITY_HANDLING,
XML_ERROR_NOT_STANDALONE,
XML_ERROR_UNEXPECTED_STATE,
XML_ERROR_ENTITY_DECLARED_IN_PE,
XML_ERROR_FEATURE_REQUIRES_XML_DTD,
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING,
/* Added in 1.95.7. */
XML_ERROR_UNBOUND_PREFIX,
/* Added in 1.95.8. */
XML_ERROR_UNDECLARING_PREFIX,
XML_ERROR_INCOMPLETE_PE,
XML_ERROR_XML_DECL,
XML_ERROR_TEXT_DECL,
XML_ERROR_PUBLICID,
XML_ERROR_SUSPENDED,
XML_ERROR_NOT_SUSPENDED,
XML_ERROR_ABORTED,
XML_ERROR_FINISHED,
XML_ERROR_SUSPEND_PE,
/* Added in 2.0. */
XML_ERROR_RESERVED_PREFIX_XML,
XML_ERROR_RESERVED_PREFIX_XMLNS,
XML_ERROR_RESERVED_NAMESPACE_URI,
/* Added in 2.2.1. */
XML_ERROR_INVALID_ARGUMENT,
/* Added in 2.3.0. */
XML_ERROR_NO_BUFFER,
/* Added in 2.4.0. */
XML_ERROR_AMPLIFICATION_LIMIT_BREACH
};
/* Node kinds of a content model in an element declaration; see the
   description above struct XML_cp below. */
enum XML_Content_Type {
  XML_CTYPE_EMPTY = 1,
  XML_CTYPE_ANY,
  XML_CTYPE_MIXED,
  XML_CTYPE_NAME,
  XML_CTYPE_CHOICE,
  XML_CTYPE_SEQ
};

/* DTD quantifier attached to a content-model node: none, '?', '*', '+'. */
enum XML_Content_Quant {
  XML_CQUANT_NONE,
  XML_CQUANT_OPT,
  XML_CQUANT_REP,
  XML_CQUANT_PLUS
};
/* If type == XML_CTYPE_EMPTY or XML_CTYPE_ANY, then quant will be
XML_CQUANT_NONE, and the other fields will be zero or NULL.
If type == XML_CTYPE_MIXED, then quant will be NONE or REP and
numchildren will contain number of elements that may be mixed in
and children point to an array of XML_Content cells that will be
all of XML_CTYPE_NAME type with no quantification.
If type == XML_CTYPE_NAME, then the name points to the name, and
the numchildren field will be zero and children will be NULL. The
quant fields indicates any quantifiers placed on the name.
CHOICE and SEQ will have name NULL, the number of children in
numchildren and children will point, recursively, to an array
of XML_Content cells.
The EMPTY, ANY, and MIXED types will only occur at top level.
*/
/* One node of a content model; field semantics per type/quant are
   spelled out in the comment above. */
typedef struct XML_cp XML_Content;

struct XML_cp {
  enum XML_Content_Type type;
  enum XML_Content_Quant quant; /* quantifier applied to this node */
  XML_Char *name;               /* non-NULL only for XML_CTYPE_NAME */
  unsigned int numchildren;
  XML_Content *children;        /* array of numchildren cells */
};
/* This is called for an element declaration. See above for
description of the model argument. It's the user code's responsibility
to free model when finished with it. See XML_FreeContentModel.
There is no need to free the model from the handler, it can be kept
around and freed at a later stage.
*/
typedef void(XMLCALL *XML_ElementDeclHandler)(void *userData,
const XML_Char *name,
XML_Content *model);
XMLPARSEAPI(void)
XML_SetElementDeclHandler(XML_Parser parser, XML_ElementDeclHandler eldecl);
/* The Attlist declaration handler is called for *each* attribute. So
a single Attlist declaration with multiple attributes declared will
generate multiple calls to this handler. The "default" parameter
may be NULL in the case of the "#IMPLIED" or "#REQUIRED"
keyword. The "isrequired" parameter will be true and the default
value will be NULL in the case of "#REQUIRED". If "isrequired" is
true and default is non-NULL, then this is a "#FIXED" default.
*/
typedef void(XMLCALL *XML_AttlistDeclHandler)(
void *userData, const XML_Char *elname, const XML_Char *attname,
const XML_Char *att_type, const XML_Char *dflt, int isrequired);
XMLPARSEAPI(void)
XML_SetAttlistDeclHandler(XML_Parser parser, XML_AttlistDeclHandler attdecl);
/* The XML declaration handler is called for *both* XML declarations
and text declarations. The way to distinguish is that the version
parameter will be NULL for text declarations. The encoding
parameter may be NULL for XML declarations. The standalone
parameter will be -1, 0, or 1 indicating respectively that there
was no standalone parameter in the declaration, that it was given
as no, or that it was given as yes.
*/
typedef void(XMLCALL *XML_XmlDeclHandler)(void *userData,
const XML_Char *version,
const XML_Char *encoding,
int standalone);
XMLPARSEAPI(void)
XML_SetXmlDeclHandler(XML_Parser parser, XML_XmlDeclHandler xmldecl);
/* Custom allocator hooks mirroring malloc/realloc/free; consumed by
   XML_ParserCreate_MM below. */
typedef struct {
  void *(*malloc_fcn)(size_t size);
  void *(*realloc_fcn)(void *ptr, size_t size);
  void (*free_fcn)(void *ptr);
} XML_Memory_Handling_Suite;
/* Constructs a new parser; encoding is the encoding specified by the
external protocol or NULL if there is none specified.
*/
XMLPARSEAPI(XML_Parser)
XML_ParserCreate(const XML_Char *encoding);
/* Constructs a new parser and namespace processor. Element type
names and attribute names that belong to a namespace will be
expanded; unprefixed attribute names are never expanded; unprefixed
element type names are expanded only if there is a default
namespace. The expanded name is the concatenation of the namespace
URI, the namespace separator character, and the local part of the
name. If the namespace separator is '\0' then the namespace URI
and the local part will be concatenated without any separator.
It is a programming error to use the separator '\0' with namespace
triplets (see XML_SetReturnNSTriplet).
If a namespace separator is chosen that can be part of a URI or
part of an XML name, splitting an expanded name back into its
1, 2 or 3 original parts on application level in the element handler
may end up vulnerable, so these are advised against; sane choices for
a namespace separator are e.g. '\n' (line feed) and '|' (pipe).
Note that Expat does not validate namespace URIs (beyond encoding)
against RFC 3986 today (and is not required to do so with regard to
the XML 1.0 namespaces specification) but it may start doing that
in future releases. Before that, an application using Expat must
be ready to receive namespace URIs containing non-URI characters.
*/
XMLPARSEAPI(XML_Parser)
XML_ParserCreateNS(const XML_Char *encoding, XML_Char namespaceSeparator);
/* Constructs a new parser using the memory management suite referred to
by memsuite. If memsuite is NULL, then use the standard library memory
suite. If namespaceSeparator is non-NULL it creates a parser with
namespace processing as described above. The character pointed at
will serve as the namespace separator.
All further memory operations used for the created parser will come from
the given suite.
*/
XMLPARSEAPI(XML_Parser)
XML_ParserCreate_MM(const XML_Char *encoding,
const XML_Memory_Handling_Suite *memsuite,
const XML_Char *namespaceSeparator);
/* Prepare a parser object to be re-used. This is particularly
valuable when memory allocation overhead is disproportionately high,
   such as when a large number of small documents need to be parsed.
All handlers are cleared from the parser, except for the
unknownEncodingHandler. The parser's external state is re-initialized
except for the values of ns and ns_triplets.
Added in Expat 1.95.3.
*/
XMLPARSEAPI(XML_Bool)
XML_ParserReset(XML_Parser parser, const XML_Char *encoding);
/* atts is array of name/value pairs, terminated by 0;
names and values are 0 terminated.
*/
typedef void(XMLCALL *XML_StartElementHandler)(void *userData,
const XML_Char *name,
const XML_Char **atts);
typedef void(XMLCALL *XML_EndElementHandler)(void *userData,
const XML_Char *name);
/* s is not 0 terminated. */
typedef void(XMLCALL *XML_CharacterDataHandler)(void *userData,
const XML_Char *s, int len);
/* target and data are 0 terminated */
typedef void(XMLCALL *XML_ProcessingInstructionHandler)(void *userData,
const XML_Char *target,
const XML_Char *data);
/* data is 0 terminated */
typedef void(XMLCALL *XML_CommentHandler)(void *userData, const XML_Char *data);
typedef void(XMLCALL *XML_StartCdataSectionHandler)(void *userData);
typedef void(XMLCALL *XML_EndCdataSectionHandler)(void *userData);
/* This is called for any characters in the XML document for which
there is no applicable handler. This includes both characters that
are part of markup which is of a kind that is not reported
(comments, markup declarations), or characters that are part of a
construct which could be reported but for which no handler has been
supplied. The characters are passed exactly as they were in the XML
document except that they will be encoded in UTF-8 or UTF-16.
Line boundaries are not normalized. Note that a byte order mark
character is not passed to the default handler. There are no
guarantees about how characters are divided between calls to the
default handler: for example, a comment might be split between
multiple calls.
*/
typedef void(XMLCALL *XML_DefaultHandler)(void *userData, const XML_Char *s,
int len);
/* This is called for the start of the DOCTYPE declaration, before
any DTD or internal subset is parsed.
*/
typedef void(XMLCALL *XML_StartDoctypeDeclHandler)(void *userData,
const XML_Char *doctypeName,
const XML_Char *sysid,
const XML_Char *pubid,
int has_internal_subset);
/* This is called for the end of the DOCTYPE declaration when the
closing > is encountered, but after processing any external
subset.
*/
typedef void(XMLCALL *XML_EndDoctypeDeclHandler)(void *userData);
/* This is called for entity declarations. The is_parameter_entity
argument will be non-zero if the entity is a parameter entity, zero
otherwise.
For internal entities (<!ENTITY foo "bar">), value will
be non-NULL and systemId, publicID, and notationName will be NULL.
The value string is NOT null-terminated; the length is provided in
the value_length argument. Since it is legal to have zero-length
values, do not use this argument to test for internal entities.
For external entities, value will be NULL and systemId will be
non-NULL. The publicId argument will be NULL unless a public
identifier was provided. The notationName argument will have a
non-NULL value only for unparsed entity declarations.
Note that is_parameter_entity can't be changed to XML_Bool, since
that would break binary compatibility.
*/
typedef void(XMLCALL *XML_EntityDeclHandler)(
void *userData, const XML_Char *entityName, int is_parameter_entity,
const XML_Char *value, int value_length, const XML_Char *base,
const XML_Char *systemId, const XML_Char *publicId,
const XML_Char *notationName);
XMLPARSEAPI(void)
XML_SetEntityDeclHandler(XML_Parser parser, XML_EntityDeclHandler handler);
/* OBSOLETE -- OBSOLETE -- OBSOLETE
This handler has been superseded by the EntityDeclHandler above.
It is provided here for backward compatibility.
This is called for a declaration of an unparsed (NDATA) entity.
The base argument is whatever was set by XML_SetBase. The
entityName, systemId and notationName arguments will never be
NULL. The other arguments may be.
*/
typedef void(XMLCALL *XML_UnparsedEntityDeclHandler)(
void *userData, const XML_Char *entityName, const XML_Char *base,
const XML_Char *systemId, const XML_Char *publicId,
const XML_Char *notationName);
/* This is called for a declaration of notation. The base argument is
whatever was set by XML_SetBase. The notationName will never be
NULL. The other arguments can be.
*/
typedef void(XMLCALL *XML_NotationDeclHandler)(void *userData,
const XML_Char *notationName,
const XML_Char *base,
const XML_Char *systemId,
const XML_Char *publicId);
/* When namespace processing is enabled, these are called once for
each namespace declaration. The call to the start and end element
handlers occur between the calls to the start and end namespace
declaration handlers. For an xmlns attribute, prefix will be
NULL. For an xmlns="" attribute, uri will be NULL.
*/
typedef void(XMLCALL *XML_StartNamespaceDeclHandler)(void *userData,
const XML_Char *prefix,
const XML_Char *uri);
typedef void(XMLCALL *XML_EndNamespaceDeclHandler)(void *userData,
const XML_Char *prefix);
/* This is called if the document is not standalone, that is, it has an
external subset or a reference to a parameter entity, but does not
have standalone="yes". If this handler returns XML_STATUS_ERROR,
then processing will not continue, and the parser will return a
XML_ERROR_NOT_STANDALONE error.
If parameter entity parsing is enabled, then in addition to the
conditions above this handler will only be called if the referenced
entity was actually read.
*/
typedef int(XMLCALL *XML_NotStandaloneHandler)(void *userData);
/* This is called for a reference to an external parsed general
entity. The referenced entity is not automatically parsed. The
application can parse it immediately or later using
XML_ExternalEntityParserCreate.
The parser argument is the parser parsing the entity containing the
reference; it can be passed as the parser argument to
XML_ExternalEntityParserCreate. The systemId argument is the
system identifier as specified in the entity declaration; it will
not be NULL.
The base argument is the system identifier that should be used as
the base for resolving systemId if systemId was relative; this is
set by XML_SetBase; it may be NULL.
The publicId argument is the public identifier as specified in the
entity declaration, or NULL if none was specified; the whitespace
in the public identifier will have been normalized as required by
the XML spec.
The context argument specifies the parsing context in the format
expected by the context argument to XML_ExternalEntityParserCreate;
context is valid only until the handler returns, so if the
referenced entity is to be parsed later, it must be copied.
context is NULL only when the entity is a parameter entity.
The handler should return XML_STATUS_ERROR if processing should not
continue because of a fatal error in the handling of the external
entity. In this case the calling parser will return an
XML_ERROR_EXTERNAL_ENTITY_HANDLING error.
Note that unlike other handlers the first argument is the parser,
not userData.
*/
typedef int(XMLCALL *XML_ExternalEntityRefHandler)(XML_Parser parser,
const XML_Char *context,
const XML_Char *base,
const XML_Char *systemId,
const XML_Char *publicId);
/* This is called in two situations:
1) An entity reference is encountered for which no declaration
has been read *and* this is not an error.
2) An internal entity reference is read, but not expanded, because
XML_SetDefaultHandler has been called.
Note: skipped parameter entities in declarations and skipped general
entities in attribute values cannot be reported, because
the event would be out of sync with the reporting of the
declarations or attribute values
*/
typedef void(XMLCALL *XML_SkippedEntityHandler)(void *userData,
const XML_Char *entityName,
int is_parameter_entity);
/* This structure is filled in by the XML_UnknownEncodingHandler to
provide information to the parser about encodings that are unknown
to the parser.
The map[b] member gives information about byte sequences whose
first byte is b.
If map[b] is c where c is >= 0, then b by itself encodes the
Unicode scalar value c.
If map[b] is -1, then the byte sequence is malformed.
If map[b] is -n, where n >= 2, then b is the first byte of an
n-byte sequence that encodes a single Unicode scalar value.
The data member will be passed as the first argument to the convert
function.
The convert function is used to convert multibyte sequences; s will
point to a n-byte sequence where map[(unsigned char)*s] == -n. The
convert function must return the Unicode scalar value represented
by this byte sequence or -1 if the byte sequence is malformed.
The convert function may be NULL if the encoding is a single-byte
encoding, that is if map[b] >= -1 for all bytes b.
When the parser is finished with the encoding, then if release is
not NULL, it will call release passing it the data member; once
release has been called, the convert function will not be called
again.
Expat places certain restrictions on the encodings that are supported
using this mechanism.
1. Every ASCII character that can appear in a well-formed XML document,
other than the characters
$@\^`{}~
must be represented by a single byte, and that byte must be the
same byte that represents that character in ASCII.
2. No character may require more than 4 bytes to encode.
3. All characters encoded must have Unicode scalar values <=
0xFFFF, (i.e., characters that would be encoded by surrogates in
UTF-16 are not allowed). Note that this restriction doesn't
apply to the built-in support for UTF-8 and UTF-16.
4. No Unicode character may be encoded by more than one distinct
sequence of bytes.
*/
/* Description of a user-supplied encoding; see the long comment above
   for the exact meaning of each field. */
typedef struct {
  int map[256]; /* per-lead-byte classification (scalar value or -n) */
  void *data;   /* passed as first argument to convert and release */
  int(XMLCALL *convert)(void *data, const char *s);
  void(XMLCALL *release)(void *data);
} XML_Encoding;
/* This is called for an encoding that is unknown to the parser.
The encodingHandlerData argument is that which was passed as the
second argument to XML_SetUnknownEncodingHandler.
The name argument gives the name of the encoding as specified in
the encoding declaration.
If the callback can provide information about the encoding, it must
fill in the XML_Encoding structure, and return XML_STATUS_OK.
Otherwise it must return XML_STATUS_ERROR.
If info does not describe a suitable encoding, then the parser will
return an XML_ERROR_UNKNOWN_ENCODING error.
*/
typedef int(XMLCALL *XML_UnknownEncodingHandler)(void *encodingHandlerData,
const XML_Char *name,
XML_Encoding *info);
XMLPARSEAPI(void)
XML_SetElementHandler(XML_Parser parser, XML_StartElementHandler start,
XML_EndElementHandler end);
XMLPARSEAPI(void)
XML_SetStartElementHandler(XML_Parser parser, XML_StartElementHandler handler);
XMLPARSEAPI(void)
XML_SetEndElementHandler(XML_Parser parser, XML_EndElementHandler handler);
XMLPARSEAPI(void)
XML_SetCharacterDataHandler(XML_Parser parser,
XML_CharacterDataHandler handler);
XMLPARSEAPI(void)
XML_SetProcessingInstructionHandler(XML_Parser parser,
XML_ProcessingInstructionHandler handler);
XMLPARSEAPI(void)
XML_SetCommentHandler(XML_Parser parser, XML_CommentHandler handler);
XMLPARSEAPI(void)
XML_SetCdataSectionHandler(XML_Parser parser,
XML_StartCdataSectionHandler start,
XML_EndCdataSectionHandler end);
XMLPARSEAPI(void)
XML_SetStartCdataSectionHandler(XML_Parser parser,
XML_StartCdataSectionHandler start);
XMLPARSEAPI(void)
XML_SetEndCdataSectionHandler(XML_Parser parser,
XML_EndCdataSectionHandler end);
/* This sets the default handler and also inhibits expansion of
internal entities. These entity references will be passed to the
default handler, or to the skipped entity handler, if one is set.
*/
XMLPARSEAPI(void)
XML_SetDefaultHandler(XML_Parser parser, XML_DefaultHandler handler);
/* This sets the default handler but does not inhibit expansion of
internal entities. The entity reference will not be passed to the
default handler.
*/
XMLPARSEAPI(void)
XML_SetDefaultHandlerExpand(XML_Parser parser, XML_DefaultHandler handler);
XMLPARSEAPI(void)
XML_SetDoctypeDeclHandler(XML_Parser parser, XML_StartDoctypeDeclHandler start,
XML_EndDoctypeDeclHandler end);
XMLPARSEAPI(void)
XML_SetStartDoctypeDeclHandler(XML_Parser parser,
XML_StartDoctypeDeclHandler start);
XMLPARSEAPI(void)
XML_SetEndDoctypeDeclHandler(XML_Parser parser, XML_EndDoctypeDeclHandler end);
XMLPARSEAPI(void)
XML_SetUnparsedEntityDeclHandler(XML_Parser parser,
XML_UnparsedEntityDeclHandler handler);
XMLPARSEAPI(void)
XML_SetNotationDeclHandler(XML_Parser parser, XML_NotationDeclHandler handler);
XMLPARSEAPI(void)
XML_SetNamespaceDeclHandler(XML_Parser parser,
XML_StartNamespaceDeclHandler start,
XML_EndNamespaceDeclHandler end);
XMLPARSEAPI(void)
XML_SetStartNamespaceDeclHandler(XML_Parser parser,
XML_StartNamespaceDeclHandler start);
XMLPARSEAPI(void)
XML_SetEndNamespaceDeclHandler(XML_Parser parser,
XML_EndNamespaceDeclHandler end);
XMLPARSEAPI(void)
XML_SetNotStandaloneHandler(XML_Parser parser,
XML_NotStandaloneHandler handler);
XMLPARSEAPI(void)
XML_SetExternalEntityRefHandler(XML_Parser parser,
XML_ExternalEntityRefHandler handler);
/* If a non-NULL value for arg is specified here, then it will be
passed as the first argument to the external entity ref handler
instead of the parser object.
*/
XMLPARSEAPI(void)
XML_SetExternalEntityRefHandlerArg(XML_Parser parser, void *arg);
XMLPARSEAPI(void)
XML_SetSkippedEntityHandler(XML_Parser parser,
XML_SkippedEntityHandler handler);
XMLPARSEAPI(void)
XML_SetUnknownEncodingHandler(XML_Parser parser,
XML_UnknownEncodingHandler handler,
void *encodingHandlerData);
/* This can be called within a handler for a start element, end
element, processing instruction or character data. It causes the
corresponding markup to be passed to the default handler.
*/
XMLPARSEAPI(void)
XML_DefaultCurrent(XML_Parser parser);
/* If do_nst is non-zero, and namespace processing is in effect, and
a name has a prefix (i.e. an explicit namespace qualifier) then
that name is returned as a triplet in a single string separated by
the separator character specified when the parser was created: URI
+ sep + local_name + sep + prefix.
If do_nst is zero, then namespace information is returned in the
default manner (URI + sep + local_name) whether or not the name
has a prefix.
Note: Calling XML_SetReturnNSTriplet after XML_Parse or
XML_ParseBuffer has no effect.
*/
XMLPARSEAPI(void)
XML_SetReturnNSTriplet(XML_Parser parser, int do_nst);
/* This value is passed as the userData argument to callbacks. */
XMLPARSEAPI(void)
XML_SetUserData(XML_Parser parser, void *userData);
/* Returns the last value set by XML_SetUserData or NULL. */
#define XML_GetUserData(parser) (*(void **)(parser))
/* This is equivalent to supplying an encoding argument to
XML_ParserCreate. On success XML_SetEncoding returns non-zero,
zero otherwise.
Note: Calling XML_SetEncoding after XML_Parse or XML_ParseBuffer
has no effect and returns XML_STATUS_ERROR.
*/
XMLPARSEAPI(enum XML_Status)
XML_SetEncoding(XML_Parser parser, const XML_Char *encoding);
/* If this function is called, then the parser will be passed as the
first argument to callbacks instead of userData. The userData will
still be accessible using XML_GetUserData.
*/
XMLPARSEAPI(void)
XML_UseParserAsHandlerArg(XML_Parser parser);
/* If useDTD == XML_TRUE is passed to this function, then the parser
will assume that there is an external subset, even if none is
specified in the document. In such a case the parser will call the
externalEntityRefHandler with a value of NULL for the systemId
argument (the publicId and context arguments will be NULL as well).
Note: For the purpose of checking WFC: Entity Declared, passing
useDTD == XML_TRUE will make the parser behave as if the document
had a DTD with an external subset.
Note: If this function is called, then this must be done before
the first call to XML_Parse or XML_ParseBuffer, since it will
have no effect after that. Returns
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING.
Note: If the document does not have a DOCTYPE declaration at all,
then startDoctypeDeclHandler and endDoctypeDeclHandler will not
be called, despite an external subset being parsed.
Note: If XML_DTD is not defined when Expat is compiled, returns
XML_ERROR_FEATURE_REQUIRES_XML_DTD.
Note: If parser == NULL, returns XML_ERROR_INVALID_ARGUMENT.
*/
XMLPARSEAPI(enum XML_Error)
XML_UseForeignDTD(XML_Parser parser, XML_Bool useDTD);
/* Sets the base to be used for resolving relative URIs in system
identifiers in declarations. Resolving relative identifiers is
left to the application: this value will be passed through as the
base argument to the XML_ExternalEntityRefHandler,
XML_NotationDeclHandler and XML_UnparsedEntityDeclHandler. The base
argument will be copied. Returns XML_STATUS_ERROR if out of memory,
XML_STATUS_OK otherwise.
*/
XMLPARSEAPI(enum XML_Status)
XML_SetBase(XML_Parser parser, const XML_Char *base);
XMLPARSEAPI(const XML_Char *)
XML_GetBase(XML_Parser parser);
/* Returns the number of the attribute/value pairs passed in last call
to the XML_StartElementHandler that were specified in the start-tag
rather than defaulted. Each attribute/value pair counts as 2; thus
this corresponds to an index into the atts array passed to the
XML_StartElementHandler. Returns -1 if parser == NULL.
*/
XMLPARSEAPI(int)
XML_GetSpecifiedAttributeCount(XML_Parser parser);
/* Returns the index of the ID attribute passed in the last call to
XML_StartElementHandler, or -1 if there is no ID attribute or
parser == NULL. Each attribute/value pair counts as 2; thus this
corresponds to an index into the atts array passed to the
XML_StartElementHandler.
*/
XMLPARSEAPI(int)
XML_GetIdAttributeIndex(XML_Parser parser);
#ifdef XML_ATTR_INFO
/* Source file byte offsets for the start and end of attribute names and values.
The value indices are exclusive of surrounding quotes; thus in a UTF-8 source
file an attribute value of "blah" will yield:
info->valueEnd - info->valueStart = 4 bytes.
*/
typedef struct {
XML_Index nameStart; /* Offset to beginning of the attribute name. */
XML_Index nameEnd; /* Offset after the attribute name's last byte. */
XML_Index valueStart; /* Offset to beginning of the attribute value. */
XML_Index valueEnd; /* Offset after the attribute value's last byte. */
} XML_AttrInfo;
/* Returns an array of XML_AttrInfo structures for the attribute/value pairs
passed in last call to the XML_StartElementHandler that were specified
in the start-tag rather than defaulted. Each attribute/value pair counts
as 1; thus the number of entries in the array is
XML_GetSpecifiedAttributeCount(parser) / 2.
*/
XMLPARSEAPI(const XML_AttrInfo *)
XML_GetAttributeInfo(XML_Parser parser);
#endif
/* Parses some input. Returns XML_STATUS_ERROR if a fatal error is
detected. The last call to XML_Parse must have isFinal true; len
may be zero for this call (or any other).
   Though the return values for these functions have always been
   described as Boolean values, the implementation, at least for the
   1.95.x series, has always returned exactly one of the XML_Status
   values.
*/
XMLPARSEAPI(enum XML_Status)
XML_Parse(XML_Parser parser, const char *s, int len, int isFinal);
XMLPARSEAPI(void *)
XML_GetBuffer(XML_Parser parser, int len);
XMLPARSEAPI(enum XML_Status)
XML_ParseBuffer(XML_Parser parser, int len, int isFinal);
/* Stops parsing, causing XML_Parse() or XML_ParseBuffer() to return.
Must be called from within a call-back handler, except when aborting
(resumable = 0) an already suspended parser. Some call-backs may
still follow because they would otherwise get lost. Examples:
- endElementHandler() for empty elements when stopped in
startElementHandler(),
- endNameSpaceDeclHandler() when stopped in endElementHandler(),
and possibly others.
Can be called from most handlers, including DTD related call-backs,
except when parsing an external parameter entity and resumable != 0.
Returns XML_STATUS_OK when successful, XML_STATUS_ERROR otherwise.
Possible error codes:
- XML_ERROR_SUSPENDED: when suspending an already suspended parser.
- XML_ERROR_FINISHED: when the parser has already finished.
- XML_ERROR_SUSPEND_PE: when suspending while parsing an external PE.
When resumable != 0 (true) then parsing is suspended, that is,
XML_Parse() and XML_ParseBuffer() return XML_STATUS_SUSPENDED.
Otherwise, parsing is aborted, that is, XML_Parse() and XML_ParseBuffer()
return XML_STATUS_ERROR with error code XML_ERROR_ABORTED.
*Note*:
This will be applied to the current parser instance only, that is, if
there is a parent parser then it will continue parsing when the
externalEntityRefHandler() returns. It is up to the implementation of
the externalEntityRefHandler() to call XML_StopParser() on the parent
parser (recursively), if one wants to stop parsing altogether.
When suspended, parsing can be resumed by calling XML_ResumeParser().
*/
XMLPARSEAPI(enum XML_Status)
XML_StopParser(XML_Parser parser, XML_Bool resumable);
/* Resumes parsing after it has been suspended with XML_StopParser().
Must not be called from within a handler call-back. Returns same
status codes as XML_Parse() or XML_ParseBuffer().
Additional error code XML_ERROR_NOT_SUSPENDED possible.
*Note*:
This must be called on the most deeply nested child parser instance
first, and on its parent parser only after the child parser has finished,
to be applied recursively until the document entity's parser is restarted.
That is, the parent parser will not resume by itself and it is up to the
application to call XML_ResumeParser() on it at the appropriate moment.
*/
XMLPARSEAPI(enum XML_Status)
XML_ResumeParser(XML_Parser parser);
/* Parser states reported via XML_GetParsingStatus(). */
enum XML_Parsing { XML_INITIALIZED, XML_PARSING, XML_FINISHED, XML_SUSPENDED };
typedef struct {
  enum XML_Parsing parsing; /* current state of the parser */
  XML_Bool finalBuffer;     /* whether the final buffer is being processed */
} XML_ParsingStatus;
/* Returns status of parser with respect to being initialized, parsing,
finished, or suspended and processing the final buffer.
XXX XML_Parse() and XML_ParseBuffer() should return XML_ParsingStatus,
XXX with XML_FINISHED_OK or XML_FINISHED_ERROR replacing XML_FINISHED
*/
XMLPARSEAPI(void)
XML_GetParsingStatus(XML_Parser parser, XML_ParsingStatus *status);
/* Creates an XML_Parser object that can parse an external general
entity; context is a '\0'-terminated string specifying the parse
context; encoding is a '\0'-terminated string giving the name of
the externally specified encoding, or NULL if there is no
externally specified encoding. The context string consists of a
sequence of tokens separated by formfeeds (\f); a token consisting
of a name specifies that the general entity of the name is open; a
token of the form prefix=uri specifies the namespace for a
particular prefix; a token of the form =uri specifies the default
   namespace. This can be called at any point after the first call to
   an ExternalEntityRefHandler so long as the parser has not yet
   been freed. The new parser is completely independent and may
safely be used in a separate thread. The handlers and userData are
initialized from the parser argument. Returns NULL if out of memory.
Otherwise returns a new XML_Parser object.
*/
XMLPARSEAPI(XML_Parser)
XML_ExternalEntityParserCreate(XML_Parser parser, const XML_Char *context,
const XML_Char *encoding);
enum XML_ParamEntityParsing {
XML_PARAM_ENTITY_PARSING_NEVER,
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE,
XML_PARAM_ENTITY_PARSING_ALWAYS
};
/* Controls parsing of parameter entities (including the external DTD
subset). If parsing of parameter entities is enabled, then
references to external parameter entities (including the external
DTD subset) will be passed to the handler set with
XML_SetExternalEntityRefHandler. The context passed will be 0.
Unlike external general entities, external parameter entities can
only be parsed synchronously. If the external parameter entity is
to be parsed, it must be parsed during the call to the external
entity ref handler: the complete sequence of
XML_ExternalEntityParserCreate, XML_Parse/XML_ParseBuffer and
XML_ParserFree calls must be made during this call. After
XML_ExternalEntityParserCreate has been called to create the parser
for the external parameter entity (context must be 0 for this
call), it is illegal to make any calls on the old parser until
XML_ParserFree has been called on the newly created parser.
If the library has been compiled without support for parameter
entity parsing (ie without XML_DTD being defined), then
XML_SetParamEntityParsing will return 0 if parsing of parameter
entities is requested; otherwise it will return non-zero.
Note: If XML_SetParamEntityParsing is called after XML_Parse or
XML_ParseBuffer, then it has no effect and will always return 0.
Note: If parser == NULL, the function will do nothing and return 0.
*/
XMLPARSEAPI(int)
XML_SetParamEntityParsing(XML_Parser parser,
enum XML_ParamEntityParsing parsing);
/* Sets the hash salt to use for internal hash calculations.
Helps in preventing DoS attacks based on predicting hash
function behavior. This must be called before parsing is started.
Returns 1 if successful, 0 when called after parsing has started.
Note: If parser == NULL, the function will do nothing and return 0.
*/
XMLPARSEAPI(int)
XML_SetHashSalt(XML_Parser parser, unsigned long hash_salt);
/* If XML_Parse or XML_ParseBuffer have returned XML_STATUS_ERROR, then
XML_GetErrorCode returns information about the error.
*/
XMLPARSEAPI(enum XML_Error)
XML_GetErrorCode(XML_Parser parser);
/* These functions return information about the current parse
location. They may be called from any callback called to report
some parse event; in this case the location is the location of the
first of the sequence of characters that generated the event. When
called from callbacks generated by declarations in the document
prologue, the location identified isn't as neatly defined, but will
be within the relevant markup. When called outside of the callback
functions, the position indicated will be just past the last parse
event (regardless of whether there was an associated callback).
They may also be called after returning from a call to XML_Parse
or XML_ParseBuffer. If the return value is XML_STATUS_ERROR then
the location is the location of the character at which the error
was detected; otherwise the location is the location of the last
parse event, as described above.
Note: XML_GetCurrentLineNumber and XML_GetCurrentColumnNumber
return 0 to indicate an error.
Note: XML_GetCurrentByteIndex returns -1 to indicate an error.
*/
XMLPARSEAPI(XML_Size) XML_GetCurrentLineNumber(XML_Parser parser);
XMLPARSEAPI(XML_Size) XML_GetCurrentColumnNumber(XML_Parser parser);
XMLPARSEAPI(XML_Index) XML_GetCurrentByteIndex(XML_Parser parser);
/* Return the number of bytes in the current event.
Returns 0 if the event is in an internal entity.
*/
XMLPARSEAPI(int)
XML_GetCurrentByteCount(XML_Parser parser);
/* If XML_CONTEXT_BYTES is defined, returns the input buffer, sets
the integer pointed to by offset to the offset within this buffer
of the current parse position, and sets the integer pointed to by size
to the size of this buffer (the number of input bytes). Otherwise
returns a NULL pointer. Also returns a NULL pointer if a parse isn't
active.
NOTE: The character pointer returned should not be used outside
the handler that makes the call.
*/
XMLPARSEAPI(const char *)
XML_GetInputContext(XML_Parser parser, int *offset, int *size);
/* For backwards compatibility with previous versions. */
#define XML_GetErrorLineNumber XML_GetCurrentLineNumber
#define XML_GetErrorColumnNumber XML_GetCurrentColumnNumber
#define XML_GetErrorByteIndex XML_GetCurrentByteIndex
/* Frees the content model passed to the element declaration handler */
XMLPARSEAPI(void)
XML_FreeContentModel(XML_Parser parser, XML_Content *model);
/* Exposing the memory handling functions used in Expat */
XMLPARSEAPI(void *)
XML_ATTR_MALLOC
XML_ATTR_ALLOC_SIZE(2)
XML_MemMalloc(XML_Parser parser, size_t size);
XMLPARSEAPI(void *)
XML_ATTR_ALLOC_SIZE(3)
XML_MemRealloc(XML_Parser parser, void *ptr, size_t size);
XMLPARSEAPI(void)
XML_MemFree(XML_Parser parser, void *ptr);
/* Frees memory used by the parser. */
XMLPARSEAPI(void)
XML_ParserFree(XML_Parser parser);
/* Returns a string describing the error. */
XMLPARSEAPI(const XML_LChar *)
XML_ErrorString(enum XML_Error code);
/* Return a string containing the version number of this expat */
XMLPARSEAPI(const XML_LChar *)
XML_ExpatVersion(void);
/* Numeric library version, as returned by XML_ExpatVersionInfo(). */
typedef struct {
  int major; /* XML_MAJOR_VERSION at build time */
  int minor; /* XML_MINOR_VERSION at build time */
  int micro; /* XML_MICRO_VERSION at build time */
} XML_Expat_Version;
/* Return an XML_Expat_Version structure containing numeric version
number information for this version of expat.
*/
XMLPARSEAPI(XML_Expat_Version)
XML_ExpatVersionInfo(void);
/* Added in Expat 1.95.5. */
/* Identifiers for compile-time features, reported by XML_GetFeatureList().
   XML_FEATURE_END terminates the returned array. */
enum XML_FeatureEnum {
  XML_FEATURE_END = 0,
  XML_FEATURE_UNICODE,
  XML_FEATURE_UNICODE_WCHAR_T,
  XML_FEATURE_DTD,
  XML_FEATURE_CONTEXT_BYTES,
  XML_FEATURE_MIN_SIZE,
  XML_FEATURE_SIZEOF_XML_CHAR,
  XML_FEATURE_SIZEOF_XML_LCHAR,
  XML_FEATURE_NS,
  XML_FEATURE_LARGE_SIZE,
  XML_FEATURE_ATTR_INFO,
  /* Added in Expat 2.4.0. */
  XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT,
  XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT
  /* Additional features must be added to the end of this enum. */
};
/* One entry of the feature list: identifier, display name, and an
   optional numeric value (e.g. a size or a default limit). */
typedef struct {
  enum XML_FeatureEnum feature;
  const XML_LChar *name;
  long int value;
} XML_Feature;
XMLPARSEAPI(const XML_Feature *)
XML_GetFeatureList(void);
#ifdef XML_DTD
/* Added in Expat 2.4.0. */
XMLPARSEAPI(XML_Bool)
XML_SetBillionLaughsAttackProtectionMaximumAmplification(
XML_Parser parser, float maximumAmplificationFactor);
/* Added in Expat 2.4.0. */
XMLPARSEAPI(XML_Bool)
XML_SetBillionLaughsAttackProtectionActivationThreshold(
XML_Parser parser, unsigned long long activationThresholdBytes);
#endif
/* Expat follows the semantic versioning convention.
See http://semver.org.
*/
#define XML_MAJOR_VERSION 2
#define XML_MINOR_VERSION 4
#define XML_MICRO_VERSION 7
#ifdef __cplusplus
}
#endif
#endif /* not Expat_INCLUDED */
| /*
__ __ _
___\ \/ /_ __ __ _| |_
/ _ \\ /| '_ \ / _` | __|
| __// \| |_) | (_| | |_
\___/_/\_\ .__/ \__,_|\__|
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
Copyright (c) 2000 Clark Cooper <[email protected]>
Copyright (c) 2000-2005 Fred L. Drake, Jr. <[email protected]>
Copyright (c) 2001-2002 Greg Stein <[email protected]>
Copyright (c) 2002-2016 Karl Waclawek <[email protected]>
Copyright (c) 2016-2022 Sebastian Pipping <[email protected]>
Copyright (c) 2016 Cristian Rodríguez <[email protected]>
Copyright (c) 2016 Thomas Beutlich <[email protected]>
Copyright (c) 2017 Rhodri James <[email protected]>
Copyright (c) 2022 Thijs Schreijer <[email protected]>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
*/ |
dune |
; NOTE(review): libraries a, b and c form a dependency cycle
; (a -> b -> c -> a); given the executable is named `cycle', this
; appears to be a deliberate cycle-detection test fixture.
(library
 (name a)
 (libraries b)
 (modules))
(library
 (name b)
 (libraries c)
 (modules))
(library
 (name c)
 (libraries a)
 (modules))
; Entry point depending on library a; cycle.ml is generated empty below.
(executable
 (name cycle)
 (libraries a)
 (modules cycle))
(rule (with-stdout-to cycle.ml (echo "")))
| |
typecore.ml | (* Typechecking for the core language *)
open Misc
open Asttypes
open Parsetree
open Types
open Typedtree
open Btype
open Ctype
(* Syntactic contexts in which the surrounding construct forces the type
   of a subexpression (e.g. an [if] condition must be [bool]).  Carried
   inside errors such as [Expr_type_clash] below to improve messages. *)
type type_forcing_context =
  | If_conditional
  | If_no_else_branch
  | While_loop_conditional
  | While_loop_body
  | For_loop_start_index
  | For_loop_stop_index
  | For_loop_body
  | Assert_condition
  | Sequence_left_hand_side
  | When_guard

(* A type to check an expression against, together with the optional
   context explaining where that expectation comes from. *)
type type_expected = {
  ty: type_expr;
  explanation: type_forcing_context option;
}

(* Name, location and uid of a module binding to be unpacked
   (presumably from a first-class-module pattern — TODO confirm
   against the uses further down the file). *)
type to_unpack = {
  tu_name: string Location.loc;
  tu_loc: Location.t;
  tu_uid: Uid.t
}
(* Whether a data type is a record or a (sum) variant, plus the
   vocabulary used when talking about it in error messages. *)
module Datatype_kind = struct
  type t = Record | Variant

  (* Noun for the datatype itself: "record" or "variant". *)
  let type_name kind =
    match kind with
    | Record -> "record"
    | Variant -> "variant"

  (* Noun for the datatype's members: "field" or "constructor". *)
  let label_name kind =
    match kind with
    | Record -> "field"
    | Variant -> "constructor"
end
(* Payload of the [Wrong_name] error: the type looked up, its kind,
   the offending member name, and the names that would have been valid. *)
type wrong_name = {
  type_path: Path.t;
  kind: Datatype_kind.t;
  name: string loc;
  valid_names: string list;
}

(* Whether a wrong-kind error arose in a pattern or in an expression
   (with its optional forcing context). *)
type wrong_kind_context =
  | Pattern
  | Expression of type_forcing_context option

(* Classification of what a constructor use looked like, for the
   [Wrong_expected_kind] error. *)
type wrong_kind_sort =
  | Constructor
  | Record
  | Boolean
  | List
  | Unit

(* Map the special built-in constructor names to their sort:
   true/false -> Boolean, []/:: -> List, () -> Unit; anything else is
   an ordinary constructor. *)
let wrong_kind_sort_of_constructor (lid : Longident.t) =
  match lid with
  | Lident "true" | Lident "false" | Ldot(_, "true") | Ldot(_, "false") ->
      Boolean
  | Lident "[]" | Lident "::" | Ldot(_, "[]") | Ldot(_, "::") -> List
  | Lident "()" | Ldot(_, "()") -> Unit
  | _ -> Constructor
(* Positions in which existential type variables (from GADT matches)
   may not escape; used as the payload of [Unexpected_existential]. *)
type existential_restriction =
  | At_toplevel (** no existential types at the toplevel *)
  | In_group (** nor with let ... and ... *)
  | In_rec (** or recursive definition *)
  | With_attributes (** or let[@any_attribute] = ... *)
  | In_class_args (** or in class arguments *)
  | In_class_def (** or in [class c = let ... in ...] *)
  | In_self_pattern (** or in self pattern *)

(* All type-checking errors this module can report; raised through the
   [Error] exception below. *)
type error =
  | Constructor_arity_mismatch of Longident.t * int * int
  | Label_mismatch of Longident.t * Errortrace.unification_error
  | Pattern_type_clash :
      Errortrace.unification_error * _ pattern_desc option -> error
  | Or_pattern_type_clash of Ident.t * Errortrace.unification_error
  | Multiply_bound_variable of string
  | Orpat_vars of Ident.t * Ident.t list
  | Expr_type_clash of
      Errortrace.unification_error * type_forcing_context option
      * expression_desc option
  | Apply_non_function of type_expr
  | Apply_wrong_label of arg_label * type_expr * bool
  | Label_multiply_defined of string
  | Label_missing of Ident.t list
  | Label_not_mutable of Longident.t
  | Wrong_name of string * type_expected * wrong_name
  | Name_type_mismatch of
      Datatype_kind.t * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list
  | Invalid_format of string
  | Not_an_object of type_expr * type_forcing_context option
  | Undefined_method of type_expr * string * string list option
  | Undefined_self_method of string * string list
  | Virtual_class of Longident.t
  | Private_type of type_expr
  | Private_label of Longident.t * type_expr
  | Private_constructor of constructor_description * type_expr
  | Unbound_instance_variable of string * string list
  | Instance_variable_not_mutable of string
  | Not_subtype of Errortrace.Subtype.error
  | Outside_class
  | Value_multiply_overridden of string
  | Coercion_failure of
      Errortrace.expanded_type * Errortrace.unification_error * bool
  | Not_a_function of type_expr * type_forcing_context option
  | Too_many_arguments of type_expr * type_forcing_context option
  | Abstract_wrong_label of
      { got : arg_label
      ; expected : arg_label
      ; expected_type : type_expr
      ; explanation : type_forcing_context option
      }
  | Scoping_let_module of string * type_expr
  | Not_a_polymorphic_variant_type of Longident.t
  | Incoherent_label_order
  | Less_general of string * Errortrace.unification_error
  | Modules_not_allowed
  | Cannot_infer_signature
  | Not_a_packed_module of type_expr
  | Unexpected_existential of existential_restriction * string * string list
  | Invalid_interval
  | Invalid_for_loop_index
  | No_value_clauses
  | Exception_pattern_disallowed
  | Mixed_value_and_exception_patterns_under_guard
  | Inlined_record_escape
  | Inlined_record_expected
  | Unrefuted_pattern of pattern
  | Invalid_extension_constructor_payload
  | Not_an_extension_constructor
  | Literal_overflow of string
  | Unknown_literal of string * char
  | Illegal_letrec_pat
  | Illegal_letrec_expr
  | Illegal_class_expr
  | Letop_type_clash of string * Errortrace.unification_error
  | Andop_type_clash of string * Errortrace.unification_error
  | Bindings_type_clash of Errortrace.unification_error
  | Unbound_existential of Ident.t list * type_expr
  | Missing_type_constraint
  | Wrong_expected_kind of wrong_kind_sort * wrong_kind_context * type_expr
  | Expr_not_a_record_type of type_expr

(* Located type-checking error, with the environment for printing. *)
exception Error of Location.t * Env.t * error
(* Wraps an already-rendered [Location.error] produced elsewhere. *)
exception Error_forward of Location.error
(* Forward declaration, to be filled in by Typemod.type_module *)
let type_module =
  ref ((fun _env _md -> assert false) :
         Env.t -> Parsetree.module_expr -> Typedtree.module_expr * Shape.t)

(* Forward declaration, to be filled in by Typemod.type_open *)
let type_open :
  (?used_slot:bool ref -> override_flag -> Env.t -> Location.t ->
   Longident.t loc -> Path.t * Env.t)
    ref =
  ref (fun ?used_slot:_ _ -> assert false)

(* Forward declaration, to be filled in by Typemod (open declarations) *)
let type_open_decl :
  (?used_slot:bool ref -> Env.t -> Parsetree.open_declaration
   -> open_declaration * Types.signature * Env.t)
    ref =
  ref (fun ?used_slot:_ _ -> assert false)

(* Forward declaration, to be filled in by Typemod.type_package *)
let type_package =
  ref (fun _ -> assert false)

(* Forward declaration, to be filled in by Typeclass.class_structure *)
let type_object =
  ref (fun _env _s -> assert false :
         Env.t -> Location.t -> Parsetree.class_structure ->
         Typedtree.class_structure * string list)
(*
Saving and outputting type information.
We keep these function names short, because they have to be
called each time we create a record of type [Typedtree.expression]
or [Typedtree.pattern] that will end up in the typed AST.
*)
(* Record an expression node for .cmt output, then return it unchanged. *)
let re node =
  Cmt_format.add_saved_type (Cmt_format.Partial_expression node);
  node

(* Record a value pattern node for .cmt output, then return it unchanged. *)
let rp node =
  Cmt_format.add_saved_type (Cmt_format.Partial_pattern (Value, node));
  node

(* Record a computation pattern node for .cmt output, then return it
   unchanged. *)
let rcp node =
  Cmt_format.add_saved_type (Cmt_format.Partial_pattern (Computation, node));
  node

(* Context for inline record arguments; see [type_ident] *)
type recarg =
  | Allowed
  | Required
  | Rejected

(* Build a [type_expected] from a type and an optional forcing context. *)
let mk_expected ?explanation ty = { ty; explanation; }

(* Build an unguarded match case [lhs -> rhs]. *)
let case lhs rhs =
  {c_lhs = lhs; c_guard = None; c_rhs = rhs}
(* Typing of constants *)
(* The (instantiated) predefined type of each constant literal. *)
let type_constant = function
    Const_int _ -> instance Predef.type_int
  | Const_char _ -> instance Predef.type_char
  | Const_string _ -> instance Predef.type_string
  | Const_float _ -> instance Predef.type_float
  | Const_int32 _ -> instance Predef.type_int32
  | Const_int64 _ -> instance Predef.type_int64
  | Const_nativeint _ -> instance Predef.type_nativeint
(* Translate a parsed literal into a typed-tree constant.  Integer
   literals that overflow their target type yield [Error
   (Literal_overflow _)]; unrecognized suffix characters yield
   [Error (Unknown_literal _)]. *)
let constant : Parsetree.constant -> (Asttypes.constant, error) result =
  function
  | Pconst_integer (i, None) ->
      (match Misc.Int_literal_converter.int i with
       | n -> Ok (Const_int n)
       | exception Failure _ -> Error (Literal_overflow "int"))
  | Pconst_integer (i, Some 'l') ->
      (match Misc.Int_literal_converter.int32 i with
       | n -> Ok (Const_int32 n)
       | exception Failure _ -> Error (Literal_overflow "int32"))
  | Pconst_integer (i, Some 'L') ->
      (match Misc.Int_literal_converter.int64 i with
       | n -> Ok (Const_int64 n)
       | exception Failure _ -> Error (Literal_overflow "int64"))
  | Pconst_integer (i, Some 'n') ->
      (match Misc.Int_literal_converter.nativeint i with
       | n -> Ok (Const_nativeint n)
       | exception Failure _ -> Error (Literal_overflow "nativeint"))
  | Pconst_integer (i, Some c) -> Error (Unknown_literal (i, c))
  | Pconst_char c -> Ok (Const_char c)
  | Pconst_string (s, loc, d) -> Ok (Const_string (s, loc, d))
  | Pconst_float (f, None) -> Ok (Const_float f)
  | Pconst_float (f, Some c) -> Error (Unknown_literal (f, c))
(* Like [constant], but raises a located [Error] instead of returning
   a [result]. *)
let constant_or_raise env loc cst =
  match constant cst with
  | Ok c -> c
  | Error err -> raise (Error (loc, env, err))

(* Specific version of type_option, using newty rather than newgenty *)
let type_option ty =
  newty (Tconstr(Predef.path_option,[ty], ref Mnil))

(* Build an expression node with no extras and no attributes. *)
let mkexp exp_desc exp_type exp_loc exp_env =
  { exp_desc; exp_type; exp_loc; exp_env; exp_extra = []; exp_attributes = [] }

(* A synthetic [None] expression of type [ty]. *)
let option_none env ty loc =
  let lid = Longident.Lident "None" in
  let cnone = Env.find_ident_constructor Predef.ident_none env in
  mkexp (Texp_construct(mknoloc lid, cnone, [])) ty loc env

(* Wrap [texp] into a synthetic [Some texp] expression. *)
let option_some env texp =
  let lid = Longident.Lident "Some" in
  let csome = Env.find_ident_constructor Predef.ident_some env in
  mkexp ( Texp_construct(mknoloc lid , csome, [texp]) )
    (type_option texp.exp_type) texp.exp_loc texp.exp_env

(* The argument type of an option type; [ty] must expand to [_ option]. *)
let extract_option_type env ty =
  match get_desc (expand_head env ty) with
    Tconstr(path, [ty], _) when Path.same path Predef.path_option -> ty
  | _ -> assert false

(* Take a generic instance before expanding when the environment has
   local (GADT) constraints, so expansion cannot mutate [ty]. *)
let protect_expansion env ty =
  if Env.has_local_constraints env then generic_instance ty else ty
(* Result of trying to view a type as a concrete record type. *)
type record_extraction_result =
  | Record_type of Path.t * Path.t * Types.label_declaration list
  | Not_a_record_type
  | Maybe_a_record_type

(* [extract_concrete_typedecl] on a copy protected against expansion
   side effects under local constraints (see [protect_expansion]). *)
let extract_concrete_typedecl_protected env ty =
  extract_concrete_typedecl env (protect_expansion env ty)

(* View [ty] as a record type, returning its paths and fields if so. *)
let extract_concrete_record env ty =
  match extract_concrete_typedecl_protected env ty with
  | Typedecl(p0, p, {type_kind=Type_record (fields, _)}) ->
    Record_type (p0, p, fields)
  | Has_no_typedecl | Typedecl(_, _, _) -> Not_a_record_type
  | May_have_typedecl -> Maybe_a_record_type

(* Result of trying to view a type as a concrete variant type. *)
type variant_extraction_result =
  | Variant_type of Path.t * Path.t * Types.constructor_declaration list
  | Not_a_variant_type
  | Maybe_a_variant_type

(* View [ty] as a variant type; an open (extensible) type counts as a
   variant with no declared constructors. *)
let extract_concrete_variant env ty =
  match extract_concrete_typedecl_protected env ty with
  | Typedecl(p0, p, {type_kind=Type_variant (cstrs, _)}) ->
    Variant_type (p0, p, cstrs)
  | Typedecl(p0, p, {type_kind=Type_open}) ->
    Variant_type (p0, p, [])
  | Has_no_typedecl | Typedecl(_, _, _) -> Not_a_variant_type
  | May_have_typedecl -> Maybe_a_variant_type

(* Field identifiers of [ty], which must be a concrete record type. *)
let extract_label_names env ty =
  match extract_concrete_record env ty with
  | Record_type (_, _,fields) -> List.map (fun l -> l.Types.ld_id) fields
  | Not_a_record_type | Maybe_a_record_type -> assert false

(* A type is "principal" if principality checking is off, or the type
   is fully generalized. *)
let is_principal ty =
  not !Clflags.principal || get_level ty = generic_level
(* Typing of patterns *)

(* unification inside type_exp and type_expect: unify [ty] with
   [expected_ty], turning unification failures into located
   [Expr_type_clash] errors. *)
let unify_exp_types loc env ty expected_ty =
  (* Format.eprintf "@[%a@ %a@]@." Printtyp.raw_type_expr exp.exp_type
    Printtyp.raw_type_expr expected_ty; *)
  try
    unify env ty expected_ty
  with
    Unify err ->
      raise(Error(loc, env, Expr_type_clash(err, None, None)))
  | Tags(l1,l2) ->
      raise(Typetexp.Error(loc, env, Typetexp.Variant_tags (l1, l2)))

(* level at which to create the local type declarations *)
let gadt_equations_level = ref None

(* Current GADT-equations level; must only be called while a level is
   set (otherwise the typechecker is in an inconsistent state). *)
let get_gadt_equations_level () =
  match !gadt_equations_level with
    Some y -> y
  | None -> assert false

(* Shared empty set of equated type pairs (returned when no GADT
   refinement took place). *)
let nothing_equated = TypePairs.create 0
(* unification inside type_pat*)
(* Unify [ty] and [ty'] inside a pattern.  When [refine] is [Some _],
   use GADT unification (which may add local equations to [env]) and
   return the set of type pairs it equated; otherwise use plain
   unification and return [nothing_equated].  Failures are rewrapped as
   located pattern-type-clash errors. *)
let unify_pat_types_return_equated_pairs ?(refine = None) loc env ty ty' =
  try
    match refine with
    | Some allow_recursive ->
        unify_gadt ~equations_level:(get_gadt_equations_level ())
          ~allow_recursive env ty ty'
    | None ->
        unify !env ty ty';
        nothing_equated
  with
  | Unify err ->
      raise(Error(loc, !env, Pattern_type_clash(err, None)))
  | Tags(l1,l2) ->
      raise(Typetexp.Error(loc, !env, Typetexp.Variant_tags (l1, l2)))
(* Same as [unify_pat_types_return_equated_pairs], discarding the set of
   equated pairs. *)
let unify_pat_types ?refine loc env ty ty' =
  let _equated = unify_pat_types_return_equated_pairs ?refine loc env ty ty' in
  ()
(* Unify a pattern's type with [expected_ty], enriching any resulting
   type-clash error with the pattern itself for better messages. *)
let unify_pat ?refine env pat expected_ty =
  try unify_pat_types ?refine pat.pat_loc env pat.pat_type expected_ty
  with Error (loc, env, Pattern_type_clash(err, None)) ->
    raise(Error(loc, env, Pattern_type_clash(err, Some pat.pat_desc)))
(* unification of a type with a Tconstr with freshly created arguments *)
(* unification of a type with a Tconstr with freshly created arguments:
   check that [ty] has the head type constructor of [constr]'s result,
   without constraining the type arguments. *)
let unify_head_only ~refine loc env ty constr =
  let path = cstr_type_path constr in
  let decl = Env.find_type path !env in
  let ty' = Ctype.newconstr path (Ctype.instance_list decl.type_params) in
  unify_pat_types ~refine loc env ty' ty
(* Creating new conjunctive types is not allowed when typing patterns *)
(* make all Reither present in open variants *)
(* After typing, fix up the row of a variant pattern [`tag opat]:
   in open rows, turn [Reither] fields into [Rpresent]; record the final
   row in [r] for later use by the pattern-matching compiler. *)
let finalize_variant pat tag opat r =
  let row =
    match get_desc (expand_head pat.pat_env pat.pat_type) with
      Tvariant row -> r := row; row
    | _ -> assert false
  in
  let f = get_row_field tag row in
  begin match row_field_repr f with
  | Rabsent -> () (* assert false *)
  | Reither (true, [], _) when not (row_closed row) ->
      (* constant constructor in an open row: make it present *)
      link_row_field_ext ~inside:f (rf_present None)
  | Reither (false, ty::tl, _) when not (row_closed row) ->
      (* non-constant constructor: make it present and unify the
         argument pattern with every candidate argument type *)
      link_row_field_ext ~inside:f (rf_present (Some ty));
      begin match opat with None -> assert false
      | Some pat ->
          let env = ref pat.pat_env in List.iter (unify_pat env pat) (ty::tl)
      end
  | Reither (c, _l, true) when not (has_fixed_explanation row) ->
      (* drop the [matched] flag once the pattern has been checked *)
      link_row_field_ext ~inside:f (rf_either [] ~no_arg:c ~matched:false)
  | _ -> ()
  end
  (* Force check of well-formedness   WHY? *)
  (* unify_pat pat.pat_env pat
    (newty(Tvariant{row_fields=[]; row_more=newvar(); row_closed=false;
                    row_bound=(); row_fixed=false; row_name=None})); *)
(* Whether pattern [p] contains a polymorphic-variant sub-pattern
   anywhere (value or computation pattern alike). *)
let has_variants p =
  exists_general_pattern
    { f = fun (type k) (p : k general_pattern) -> match p.pat_desc with
     | (Tpat_variant _) -> true
     | _ -> false } p
(* Apply [finalize_variant] to every variant sub-pattern of [p]. *)
let finalize_variants p =
  iter_general_pattern
    { f = fun (type k) (p : k general_pattern) -> match p.pat_desc with
     | Tpat_variant(tag, opat, r) ->
         finalize_variant p tag opat r
     | _ -> () } p
(* pattern environment *)
(* A variable bound by the pattern currently being typed. *)
type pattern_variable =
  {
    pv_id: Ident.t;            (* the fresh identifier created for it *)
    pv_type: type_expr;
    pv_loc: Location.t;
    pv_as_var: bool;           (* bound by an "as" alias, not a plain var *)
    pv_attributes: attributes;
  }
(* A first-class-module variable ((module M) pattern): its name and the
   location of the binding. *)
type module_variable =
  string loc * Location.t
(* Mutable state accumulated while typing one pattern (reset by
   [reset_pattern] below). *)
let pattern_variables = ref ([] : pattern_variable list)
(* delayed checks to force after the whole pattern is typed *)
let pattern_force = ref ([] : (unit -> unit) list)
(* whether (module M) patterns are allowed in the current context *)
let allow_modules = ref false
let module_variables = ref ([] : module_variable list)
(* Clear the per-pattern mutable state and record whether (module M)
   patterns are allowed while typing the next pattern. *)
let reset_pattern allow =
  module_variables := [];
  pattern_force := [];
  pattern_variables := [];
  allow_modules := allow
(* For each pattern variable in [pv] whose name is not already bound in
   [env], enter it as an unbound "ghost recursive" value pointing at the
   let binding [loc_let]; used to improve errors for missing [rec]. *)
let maybe_add_pattern_variables_ghost loc_let env pv =
  List.fold_right
    (fun {pv_id; _} env ->
       let name = Ident.name pv_id in
       if Env.bound_value name env then env
       else begin
         Env.enter_unbound_value name
           (Val_unbound_ghost_recursive loc_let) env
       end
    ) pv env
(* Record a fresh variable bound by the current pattern, rejecting
   duplicate names within the same pattern.  When [is_module] is set,
   also record it as a module variable (failing if modules are not
   allowed in this context).  Returns the created identifier. *)
let enter_variable ?(is_module=false) ?(is_as_variable=false) loc name ty
    attrs =
  if List.exists (fun {pv_id; _} -> Ident.name pv_id = name.txt)
      !pattern_variables
  then raise(Error(loc, Env.empty, Multiply_bound_variable name.txt));
  let id = Ident.create_local name.txt in
  pattern_variables :=
    {pv_id = id;
     pv_type = ty;
     pv_loc = loc;
     pv_as_var = is_as_variable;
     pv_attributes = attrs} :: !pattern_variables;
  if is_module then begin
    (* Note: unpack patterns enter a variable of the same name *)
    if not !allow_modules then
      raise (Error (loc, Env.empty, Modules_not_allowed));
    module_variables := (name, loc) :: !module_variables
  end;
  id
(* Sort pattern variables alphabetically by name, so that the two sides
   of an or-pattern can be compared pairwise. *)
let sort_pattern_variables vs =
  let by_name {pv_id = a; _} {pv_id = b; _} =
    String.compare (Ident.name a) (Ident.name b)
  in
  List.sort by_name vs
(* Check that both sides of an or-pattern bind exactly the same
   variables with unifiable types; return the renaming (right-id,
   left-id) list.  Raises [Orpat_vars] when a variable is missing on one
   side, and [Or_pattern_type_clash] when the types disagree. *)
let enter_orpat_variables loc env p1_vs p2_vs =
  (* unify_vars operate on sorted lists *)
  let p1_vs = sort_pattern_variables p1_vs
  and p2_vs = sort_pattern_variables p2_vs in
  let rec unify_vars p1_vs p2_vs =
    let vars vs = List.map (fun {pv_id; _} -> pv_id) vs in
    match p1_vs, p2_vs with
      | {pv_id = x1; pv_type = t1; _}::rem1, {pv_id = x2; pv_type = t2; _}::rem2
        when Ident.equal x1 x2 ->
          if x1==x2 then
            unify_vars rem1 rem2
          else begin
            begin try
              unify_var env (newvar ()) t1;
              unify env t1 t2
            with
            | Unify err ->
                raise(Error(loc, env, Or_pattern_type_clash(x1, err)))
            end;
          (x2,x1)::unify_vars rem1 rem2
          end
      | [],[] -> []
      | {pv_id; _}::_, [] | [],{pv_id; _}::_ ->
          raise (Error (loc, env, Orpat_vars (pv_id, [])))
      | {pv_id = x; _}::_, {pv_id = y; _}::_ ->
          let err =
            if Ident.name x < Ident.name y
            then Orpat_vars (x, vars p2_vs)
            else Orpat_vars (y, vars p1_vs) in
          raise (Error (loc, env, err)) in
  unify_vars p1_vs p2_vs
(* Compute the type of an "as"-bound alias of pattern [p]: a type built
   from the sub-patterns, possibly more precise than [p.pat_type], then
   constrained by the explicit annotations recorded in [p.pat_extra]. *)
let rec build_as_type ~refine (env : Env.t ref) p =
  let as_ty = build_as_type_aux ~refine env p in
  (* Cf. #1655 *)
  List.fold_left (fun as_ty (extra, _loc, _attrs) ->
    match extra with
    | Tpat_type _ | Tpat_open _ | Tpat_unpack -> as_ty
    | Tpat_constraint cty ->
      (* [generic_instance] can only be used if the variables of the original
         type ([cty.ctyp_type] here) are not at [generic_level], which they are
         here.
         If we used [generic_instance] we would lose the sharing between
         [instance ty] and [ty].  *)
      begin_def ();
      let ty = instance cty.ctyp_type in
      end_def ();
      generalize_structure ty;
      (* This call to unify can't fail since the pattern is well typed. *)
      unify_pat_types ~refine p.pat_loc env (instance as_ty) (instance ty);
      ty
  ) as_ty p.pat_extra

(* One step of [build_as_type]: dispatch on the head of the pattern. *)
and build_as_type_aux ~refine (env : Env.t ref) p =
  let build_as_type = build_as_type ~refine in
  match p.pat_desc with
    Tpat_alias(p1,_, _) -> build_as_type env p1
  | Tpat_tuple pl ->
      let tyl = List.map (build_as_type env) pl in
      newty (Ttuple tyl)
  | Tpat_construct(_, cstr, pl, vto) ->
      let keep =
        cstr.cstr_private = Private || cstr.cstr_existentials <> [] ||
        vto <> None (* be lazy and keep the type for node constraints *) in
      if keep then p.pat_type else
      let tyl = List.map (build_as_type env) pl in
      let ty_args, ty_res, _ =
        instance_constructor Keep_existentials_flexible cstr
      in
      List.iter2 (fun (p,ty) -> unify_pat ~refine env {p with pat_type = ty})
        (List.combine pl tyl) ty_args;
      ty_res
  | Tpat_variant(l, p', _) ->
      let ty = Option.map (build_as_type env) p' in
      let fields = [l, rf_present ty] in
      newty (Tvariant (create_row ~fields ~more:(newvar())
                         ~name:None ~fixed:None ~closed:false))
  | Tpat_record (lpl,_) ->
      let lbl = snd3 (List.hd lpl) in
      if lbl.lbl_private = Private then p.pat_type else
      (* refine the types of the immutable, non-polymorphic fields that
         actually occur in the pattern; other fields keep their
         declared types *)
      let ty = newvar () in
      let ppl = List.map (fun (_, l, p) -> l.lbl_pos, p) lpl in
      let do_label lbl =
        let _, ty_arg, ty_res = instance_label false lbl in
        unify_pat ~refine env {p with pat_type = ty} ty_res;
        let refinable =
          lbl.lbl_mut = Immutable && List.mem_assoc lbl.lbl_pos ppl &&
          match get_desc lbl.lbl_arg with Tpoly _ -> false | _ -> true in
        if refinable then begin
          let arg = List.assoc lbl.lbl_pos ppl in
          unify_pat ~refine env
            {arg with pat_type = build_as_type env arg} ty_arg
        end else begin
          let _, ty_arg', ty_res' = instance_label false lbl in
          unify_pat_types ~refine p.pat_loc env ty_arg ty_arg';
          unify_pat ~refine env p ty_res'
        end in
      Array.iter do_label lbl.lbl_all;
      ty
  | Tpat_or(p1, p2, row) ->
      begin match row with
        None ->
          let ty1 = build_as_type env p1 and ty2 = build_as_type env p2 in
          unify_pat ~refine env {p2 with pat_type = ty2} ty1;
          ty1
      | Some row ->
          let Row {fields; fixed; name} = row_repr row in
          newty (Tvariant (create_row ~fields ~fixed ~name
                             ~closed:false ~more:(newvar())))
      end
  | Tpat_any | Tpat_var _ | Tpat_constant _
  | Tpat_array _ | Tpat_lazy _ -> p.pat_type
(* Constraint solving during typing of patterns *)
(* Solve a polymorphic type constraint (pat : 'a. ty) on a pattern:
   translate [sty], unify it with the expected type, and return the
   translated constraint, the polytype, and a monomorphic instance of
   its body. *)
let solve_Ppat_poly_constraint ~refine env loc sty expected_ty =
  let cty, ty, force = Typetexp.transl_simple_type_delayed !env sty in
  unify_pat_types ~refine loc env ty (instance expected_ty);
  pattern_force := force :: !pattern_force;
  match get_desc ty with
  | Tpoly (body, tyl) ->
      begin_def ();
      init_def generic_level;
      let _, ty' = instance_poly ~keep_names:true false tyl body in
      end_def ();
      (cty, ty, ty')
  | _ -> assert false
(* Type of the variable bound by (pat as x): the refined type of [pat],
   built at a raised level and then generalized. *)
let solve_Ppat_alias ~refine env pat =
  begin_def ();
  let ty_var = build_as_type ~refine env pat in
  end_def ();
  generalize ty_var;
  ty_var
(* Constrain the expected type to a tuple with one fresh variable per
   component of the tuple pattern; return those variables. *)
let solve_Ppat_tuple (type a) ~refine loc env (args : a list) expected_ty =
  let vars = List.map (fun _ -> newgenvar ()) args in
  let ty = newgenty (Ttuple vars) in
  let expected_ty = generic_instance expected_ty in
  unify_pat_types ~refine loc env ty expected_ty;
  vars
(* Handle an explicit existential annotation on a constructor pattern,
   [C (type a b) (pat : sty)]: enter the named local types, translate
   [sty], unify it with the constructor's argument type(s), and check
   that every named existential actually occurs in [ty_ex]. *)
let solve_constructor_annotation env name_list sty ty_args ty_ex =
  let expansion_scope = get_gadt_equations_level () in
  let ids =
    List.map
      (fun name ->
        let decl = new_local_type ~loc:name.loc () in
        let (id, new_env) =
          Env.enter_type ~scope:expansion_scope name.txt decl !env in
        env := new_env;
        {name with txt = id})
      name_list
  in
  begin_def ();
  let cty, ty, force = Typetexp.transl_simple_type_delayed !env sty in
  end_def ();
  generalize_structure ty;
  pattern_force := force :: !pattern_force;
  let ty_args =
    let ty1 = instance ty and ty2 = instance ty in
    match ty_args with
      [] -> assert false
    | [ty_arg] ->
        unify_pat_types cty.ctyp_loc env ty1 ty_arg;
        [ty2]
    | _ ->
        (* several arguments: the annotation must be a tuple type *)
        unify_pat_types cty.ctyp_loc env ty1 (newty (Ttuple ty_args));
        match get_desc (expand_head !env ty2) with
          Ttuple tyl -> tyl
        | _ -> assert false
  in
  if ids <> [] then ignore begin
    let ids = List.map (fun x -> x.txt) ids in
    (* every declared existential must be bound by the constructor *)
    let rem =
      List.fold_left
        (fun rem tv ->
          match get_desc tv with
            Tconstr(Path.Pident id, [], _) when List.mem id rem ->
              list_remove id rem
          | _ ->
              raise (Error (cty.ctyp_loc, !env,
                            Unbound_existential (ids, ty))))
        ids ty_ex
    in
    if rem <> [] then
      raise (Error (cty.ctyp_loc, !env,
                    Unbound_existential (ids, ty)))
  end;
  ty_args, Some (ids, cty)
(* Solve the typing constraints of a constructor pattern [C args]:
   instantiate [constr], unify its result type with the expected type
   (using GADT unification when appropriate), handle existential
   annotations, and warn when GADT equations rest on non-principal
   types.  Returns the argument types and the translated existential
   annotation, if any. *)
let solve_Ppat_construct ~refine env loc constr no_existentials
      existential_styp expected_ty =
  (* if constructor is gadt, we must verify that the expected type has the
     correct head *)
  if constr.cstr_generalized then
    unify_head_only ~refine loc env (instance expected_ty) constr;
  begin_def ();
  let expected_ty = instance expected_ty in
  (* PR#7214: do not use gadt unification for toplevel lets *)
  let unify_res ty_res =
    let refine =
      match refine, no_existentials with
      | None, None when constr.cstr_generalized -> Some false
      | _ -> refine
    in
    unify_pat_types_return_equated_pairs ~refine loc env ty_res expected_ty
  in
  let expansion_scope = get_gadt_equations_level () in
  let ty_args, ty_res, equated_types, existential_ctyp =
    match existential_styp with
      None ->
        let ty_args, ty_res, _ =
          instance_constructor
            (Make_existentials_abstract { env; scope = expansion_scope }) constr
        in
        ty_args, ty_res, unify_res ty_res, None
    | Some (name_list, sty) ->
        let existential_treatment =
          if name_list = [] then
            Make_existentials_abstract { env; scope = expansion_scope }
          else
            (* we will unify them (in solve_constructor_annotation) with the
               local types provided by the user *)
            Keep_existentials_flexible
        in
        let ty_args, ty_res, ty_ex =
          instance_constructor existential_treatment constr
        in
        let equated_types = unify_res ty_res in
        let ty_args, existential_ctyp =
          solve_constructor_annotation env name_list sty ty_args ty_ex in
        ty_args, ty_res, equated_types, existential_ctyp
  in
  if constr.cstr_existentials <> [] then
    lower_variables_only !env expansion_scope ty_res;
  end_def ();
  generalize_structure expected_ty;
  generalize_structure ty_res;
  List.iter generalize_structure ty_args;
  if !Clflags.principal && refine = None then begin
    (* Do not warn for counter-examples *)
    let exception Warn_only_once in
    try
      TypePairs.iter
        (fun (t1, t2) ->
          generalize_structure t1;
          generalize_structure t2;
          if not (fully_generic t1 && fully_generic t2) then
            let msg =
              Format.asprintf
                "typing this pattern requires considering@ %a@ and@ %a@ as \
                 equal.@,\
                 But the knowledge of these types"
                Printtyp.type_expr t1
                Printtyp.type_expr t2
            in
            Location.prerr_warning loc (Warnings.Not_principal msg);
            raise Warn_only_once)
        equated_types
    with Warn_only_once -> ()
  end;
  (ty_args, existential_ctyp)
(* Solve one field of a record pattern: instantiate [label], unify its
   record type with [record_ty] (reporting mismatches against the label
   name), and return the field's argument type. *)
let solve_Ppat_record_field ~refine loc env label label_lid record_ty =
  begin_def ();
  let (_, ty_arg, ty_res) = instance_label false label in
  begin try
    unify_pat_types ~refine loc env ty_res (instance record_ty)
  with Error(_loc, _env, Pattern_type_clash(err, _)) ->
    raise(Error(label_lid.loc, !env,
                Label_mismatch(label_lid.txt, err)))
  end;
  end_def ();
  generalize_structure ty_res;
  generalize_structure ty_arg;
  ty_arg
(* Constrain the expected type to an array type with a fresh element
   type; return that element type. *)
let solve_Ppat_array ~refine loc env expected_ty =
  let element_ty = newgenvar () in
  let expected = generic_instance expected_ty in
  unify_pat_types ~refine loc env (Predef.type_array element_ty) expected;
  element_ty
(* Constrain the expected type to [_ lazy_t] with a fresh contents
   type; return that contents type. *)
let solve_Ppat_lazy ~refine loc env expected_ty =
  let contents_ty = newgenvar () in
  let expected = generic_instance expected_ty in
  unify_pat_types ~refine loc env (Predef.type_lazy_t contents_ty) expected;
  contents_ty
(* Solve a simple type constraint (pat : sty): translate [sty], unify an
   instance of it with the expected type, and return the translated
   constraint, the instance, and the original (shared) type. *)
let solve_Ppat_constraint ~refine loc env sty expected_ty =
  begin_def();
  let cty, ty, force = Typetexp.transl_simple_type_delayed !env sty in
  end_def();
  pattern_force := force :: !pattern_force;
  generalize_structure ty;
  (* keep the sharing between the instance and the original type *)
  let ty, expected_ty' = instance ty, ty in
  unify_pat_types ~refine loc env ty (instance expected_ty);
  (cty, ty, expected_ty')
(* Solve a polymorphic-variant pattern [`tag arg?]: build an open row
   containing only [tag] and unify it with the expected type.  Returns
   the argument type list, a fresh copy of the row, and an instance of
   the expected type. *)
let solve_Ppat_variant ~refine loc env tag no_arg expected_ty =
  let arg_type = if no_arg then [] else [newgenvar()] in
  let fields = [tag, rf_either ~no_arg arg_type ~matched:true] in
  let make_row more =
    create_row ~fields ~closed:false ~more ~fixed:None ~name:None
  in
  let row = make_row (newgenvar ()) in
  let expected_ty = generic_instance expected_ty in
  (* PR#7404: allow some_private_tag blindly, as it would not unify with
     the abstract row variable *)
  if tag <> Parmatch.some_private_tag then
    unify_pat_types ~refine loc env (newgenty(Tvariant row)) expected_ty;
  (arg_type, make_row (newvar ()), instance expected_ty)
(* Building the or-pattern corresponding to a polymorphic variant type *)
(* Expand a #t pattern over the polymorphic variant type named [lid]
   into the or-pattern of all its constructors (`A | `B _ | ...).
   Fails unless [lid] denotes a static polymorphic variant type. *)
let build_or_pat env loc lid =
  let path, decl = Env.lookup_type ~loc:lid.loc lid.txt env in
  let tyl = List.map (fun _ -> newvar()) decl.type_params in
  let row0 =
    let ty = expand_head env (newty(Tconstr(path, tyl, ref Mnil))) in
    match get_desc ty with
      Tvariant row when static_row row -> row
    | _ -> raise(Error(lid.loc, env, Not_a_polymorphic_variant_type lid.txt))
  in
  (* one sub-pattern and one row field per present constructor *)
  let pats, fields =
    List.fold_left
      (fun (pats,fields) (l,f) ->
        match row_field_repr f with
          Rpresent None ->
            let f = rf_either [] ~no_arg:true ~matched:true in
            (l,None) :: pats,
            (l, f) :: fields
        | Rpresent (Some ty) ->
            let f = rf_either [ty] ~no_arg:false ~matched:true in
            (l, Some {pat_desc=Tpat_any; pat_loc=Location.none; pat_env=env;
                      pat_type=ty; pat_extra=[]; pat_attributes=[]})
            :: pats,
            (l, f) :: fields
        | _ -> pats, fields)
      ([],[]) (row_fields row0) in
  let fields = List.rev fields in
  let name = Some (path, tyl) in
  let make_row more =
    create_row ~fields ~more ~closed:false ~fixed:None ~name in
  let ty = newty (Tvariant (make_row (newvar()))) in
  let gloc = {loc with Location.loc_ghost=true} in
  let row' = ref (make_row (newvar())) in
  let pats =
    List.map
      (fun (l,p) ->
        {pat_desc=Tpat_variant(l,p,row'); pat_loc=gloc;
         pat_env=env; pat_type=ty; pat_extra=[]; pat_attributes=[]})
      pats
  in
  match pats with
    [] ->
      (* empty polymorphic variants: not possible with the concrete language
         but valid at the ast level *)
      raise(Error(lid.loc, env, Not_a_polymorphic_variant_type lid.txt))
  | pat :: pats ->
      let r =
        List.fold_left
          (fun pat pat0 ->
            {pat_desc=Tpat_or(pat0,pat,Some row0); pat_extra=[];
             pat_loc=gloc; pat_env=env; pat_type=ty; pat_attributes=[]})
          pat pats in
      (path, rp { r with pat_loc = loc })
(* Split each case into its value-pattern part and exception-pattern
   part (via [split_pattern]), producing two case lists.  A case whose
   pattern mixes both and carries a guard is rejected. *)
let split_cases env cases =
  let add_case lst case = function
    | None -> lst
    | Some c_lhs -> { case with c_lhs } :: lst
  in
  List.fold_right (fun ({ c_lhs; c_guard } as case) (vals, exns) ->
    match split_pattern c_lhs with
    | Some _, Some _ when c_guard <> None ->
      raise (Error (c_lhs.pat_loc, env,
                    Mixed_value_and_exception_patterns_under_guard))
    | vp, ep -> add_case vals case vp, add_case exns case ep
  ) cases ([], [])
(* Type paths *)
(* Follow manifest type abbreviations and path normalization until a
   stable path is reached; used to compare type paths up to aliases. *)
let rec expand_path env p =
  let decl =
    try Some (Env.find_type p env) with Not_found -> None
  in
  match decl with
    Some {type_manifest = Some ty} ->
      begin match get_desc ty with
        Tconstr(p,_,_) -> expand_path env p
      | _ -> assert false
      end
  | _ ->
      let p' = Env.normalize_type_path None env p in
      if Path.same p p' then p else expand_path env p'
(* Whether two type paths denote the same type, up to abbreviation
   expansion and path normalization. *)
let compare_type_path env tpath1 tpath2 =
  let p1 = expand_path env tpath1 in
  let p2 = expand_path env tpath2 in
  Path.same p1 p2
(* Records *)
(* Raised by name disambiguation when a label/constructor name does not
   belong to the expected type; caught by [wrap_disambiguate] below. *)
exception Wrong_name_disambiguation of Env.t * wrong_name
(* Head type-constructor path of [ty]; callers guarantee [ty] is a
   [Tconstr]. *)
let get_constr_type_path ty =
  match get_desc ty with
  | Tconstr(p, _, _) -> p
  | _ -> assert false
(* Generic type-directed name disambiguation, instantiated below for
   record labels ([Label]) and data constructors ([Constructor]).
   Given a name, the candidates in lexical scope, and (optionally) an
   expected type, select the description to use and emit the relevant
   principality/ambiguity/scope warnings. *)
module NameChoice(Name : sig
  type t
  type usage
  val kind: Datatype_kind.t
  val get_name: t -> string
  val get_type: t -> type_expr
  val lookup_all_from_type:
    Location.t -> usage -> Path.t -> Env.t -> (t * (unit -> unit)) list
  (** Some names (for example the fields of inline records) are not
      in the typing environment -- they behave as structural labels
      rather than nominal labels.*)
  val in_env: t -> bool
end) = struct
  open Name
  (* path of the type a description belongs to *)
  let get_type_path d = get_constr_type_path (get_type d)
  (* Look [lid] up among the names declared by the type at [type_path]
     (ignoring the lexical scope).  Only unqualified names can be found
     this way. *)
  let lookup_from_type env type_path usage lid =
    let descrs = lookup_all_from_type lid.loc usage type_path env in
    match lid.txt with
    | Longident.Lident name -> begin
        match
          List.find (fun (nd, _) -> get_name nd = name) descrs
        with
        | descr, use ->
            use ();
            descr
        | exception Not_found ->
            let valid_names = List.map (fun (nd, _) -> get_name nd) descrs in
            raise (Wrong_name_disambiguation (env, {
                type_path;
                name = { lid with txt = name };
                kind;
                valid_names;
              }))
      end
    | _ -> raise Not_found
  (* deduplicate, preserving the order of first occurrences *)
  let rec unique eq acc = function
      [] -> List.rev acc
    | x :: rem ->
        if List.exists (eq x) acc then unique eq acc rem
        else unique eq (x :: acc) rem
  (* printable list of the distinct types the candidates belong to;
     empty when they all belong to the same type *)
  let ambiguous_types env lbl others =
    let tpath = get_type_path lbl in
    let others =
      List.map (fun (lbl, _) -> get_type_path lbl) others in
    let tpaths = unique (compare_type_path env) [tpath] others in
    match tpaths with
      [_] -> []
    | _ -> let open Printtyp in
        wrap_printing_env ~error:true env (fun () ->
            reset(); strings_of_paths Type tpaths)
  (* first candidate in scope whose type is [tpath] *)
  let disambiguate_by_type env tpath lbls =
    match lbls with
    | (Error _ : _ result) -> raise Not_found
    | Ok lbls ->
        let check_type (lbl, _) =
          let lbl_tpath = get_type_path lbl in
          compare_type_path env tpath lbl_tpath
        in
        List.find check_type lbls
  (* warn if there are several distinct candidates in scope *)
  let warn_if_ambiguous warn lid env lbl rest =
    if Warnings.is_active (Ambiguous_name ([],[],false,"")) then begin
      Printtyp.Conflicts.reset ();
      let paths = ambiguous_types env lbl rest in
      let expansion =
        Format.asprintf "%t" Printtyp.Conflicts.print_explanations in
      if paths <> [] then
        warn lid.loc
          (Warnings.Ambiguous_name ([Longident.last lid.txt],
                                    paths, false, expansion))
    end
  (* a non-principal type was used for disambiguation *)
  let warn_non_principal warn lid =
    let name = Datatype_kind.label_name kind in
    warn lid.loc
      (Warnings.Not_principal
         ("this type-based " ^ name ^ " disambiguation"))
  (* we selected a name out of the lexical scope *)
  let warn_out_of_scope warn lid env tpath =
    if Warnings.is_active (Name_out_of_scope ("",[],false)) then begin
      let path_s =
        Printtyp.wrap_printing_env ~error:true env
          (fun () -> Printtyp.string_of_path tpath) in
      warn lid.loc
        (Warnings.Name_out_of_scope (path_s, [Longident.last lid.txt], false))
    end
  (* warn if the selected name is not the last introduced in scope
     -- in these cases the resolution is different from pre-disambiguation OCaml
     (this warning is not enabled by default, it is specifically for people
     wishing to write backward-compatible code).
   *)
  let warn_if_disambiguated_name warn lid lbl scope =
    match scope with
    | Ok ((lab1,_) :: _) when lab1 == lbl -> ()
    | _ ->
        warn lid.loc
          (Warnings.Disambiguated_name (get_name lbl))
  (* force the delayed lookup error carried by the scope *)
  let force_error : ('a, _) result -> 'a = function
    | Ok lbls -> lbls
    | Error (loc', env', err) ->
        Env.lookup_error loc' env' err
  type candidate = t * (unit -> unit)
  type nonempty_candidate_filter =
    candidate list -> (candidate list, candidate list) result
  (** This type is used for candidate filtering functions.
      Filtering typically proceeds in several passes, filtering
      candidates through increasingly precise conditions.
      We assume that the input list is non-empty, and the output is one of
      - [Ok result] for a non-empty list [result] of valid candidates
      - [Error candidates] with there are no valid candidates,
        and [candidates] is a non-empty subset of the input, typically
        the result of the last non-empty filtering step.
   *)
  (** [disambiguate] selects a concrete description for [lid] using
      some contextual information:
      - An optional [expected_type].
      - A list of candidates labels in the current lexical scope,
        [candidates_in_scope], that is actually at the type
        [(label_descr list, lookup_error) result] so that the
        lookup error is only raised when necessary.
      - A filtering criterion on candidates in scope [filter_candidates],
        representing extra contextual information that can help
        candidate selection (see [disambiguate_label_by_ids]).
   *)
  let disambiguate
        ?(warn=Location.prerr_warning)
        ?(filter : nonempty_candidate_filter = Result.ok)
        usage lid env
        expected_type
        candidates_in_scope =
    let lbl = match expected_type with
      | None ->
          (* no expected type => no disambiguation *)
          begin match filter (force_error candidates_in_scope) with
          | Ok [] | Error [] -> assert false
          | Error((lbl, _use) :: _rest) -> lbl (* will fail later *)
          | Ok((lbl, use) :: rest) ->
              use ();
              warn_if_ambiguous warn lid env lbl rest;
              lbl
          end
      | Some(tpath0, tpath, principal) ->
          (* If [expected_type] is available, the candidate selected
             will correspond to the type-based resolution.
             There are two reasons to still check the lexical scope:
             - for warning purposes
             - for extension types, the type environment does not contain
               a list of constructors, so using only type-based selection
               would fail.
           *)
          (* note that [disambiguate_by_type] does not
             force [candidates_in_scope]: we just skip this case if there
             are no candidates in scope *)
          begin match disambiguate_by_type env tpath candidates_in_scope with
          | lbl, use ->
              use ();
              if not principal then begin
                (* Check if non-principal type is affecting result *)
                match (candidates_in_scope : _ result) with
                | Error _ -> warn_non_principal warn lid
                | Ok lbls ->
                match filter lbls with
                | Error _ -> warn_non_principal warn lid
                | Ok [] -> assert false
                | Ok ((lbl', _use') :: rest) ->
                let lbl_tpath = get_type_path lbl' in
                (* no principality warning if the non-principal
                   type-based selection corresponds to the last
                   definition in scope *)
                if not (compare_type_path env tpath lbl_tpath)
                then warn_non_principal warn lid
                else warn_if_ambiguous warn lid env lbl rest;
              end;
              lbl
          | exception Not_found ->
          (* look outside the lexical scope *)
          match lookup_from_type env tpath usage lid with
          | lbl ->
              (* warn only on nominal labels;
                 structural labels cannot be qualified anyway *)
              if in_env lbl then warn_out_of_scope warn lid env tpath;
              if not principal then warn_non_principal warn lid;
              lbl
          | exception Not_found ->
          match filter (force_error candidates_in_scope) with
          | Ok lbls | Error lbls ->
              let tp = (tpath0, expand_path env tpath) in
              let tpl =
                List.map
                  (fun (lbl, _) ->
                     let tp0 = get_type_path lbl in
                     let tp = expand_path env tp0 in
                     (tp0, tp))
                  lbls
              in
              raise (Error (lid.loc, env,
                            Name_type_mismatch (kind, lid.txt, tp, tpl)));
          end
    in
    (* warn only on nominal labels *)
    if in_env lbl then
      warn_if_disambiguated_name warn lid lbl candidates_in_scope;
    lbl
end
(* Run a disambiguation action [f x], converting a
   [Wrong_name_disambiguation] escape into a located [Wrong_name]
   error enriched with the context message [msg] and type [ty]. *)
let wrap_disambiguate msg ty f x =
  match f x with
  | result -> result
  | exception Wrong_name_disambiguation (env, wrong_name) ->
      raise (Error (wrong_name.name.loc, env, Wrong_name (msg, ty, wrong_name)))
(* Name disambiguation for record labels. *)
module Label = NameChoice (struct
  type t = label_description
  type usage = Env.label_usage
  let kind = Datatype_kind.Record
  let get_name lbl = lbl.lbl_name
  let get_type lbl = lbl.lbl_res
  let lookup_all_from_type loc usage path env =
    Env.lookup_all_labels_from_type ~loc usage path env
  (* labels of inline/unboxed-inline/extension records are structural:
     they do not live in the environment *)
  let in_env lbl =
    match lbl.lbl_repres with
    | Record_regular | Record_float | Record_unboxed false -> true
    | Record_unboxed true | Record_inlined _ | Record_extension _ -> false
end)
(* In record-construction expressions and patterns, we have many labels
at once; find a candidate type in the intersection of the candidates
of each label. In the [closed] expression case, this candidate must
contain exactly all the labels.
If our successive refinements result in an empty list,
return [Error] with the last non-empty list of candidates
for use in error messages.
*)
(* Narrow the candidate record types for a record that mentions the
   labels [ids]: a candidate must declare every mentioned label, and in
   the [closed] (expression) case it must declare nothing else.  When a
   filtering step empties the list, return [Error] with the last
   non-empty candidate list for use in error messages. *)
let disambiguate_label_by_ids closed ids labels : (_, _) result =
  (* the candidate declares every label mentioned in the source *)
  let has_all_ids (lbl, _) =
    let names = Hashtbl.create 8 in
    Array.iter (fun l -> Hashtbl.add names l.lbl_name ()) lbl.lbl_all;
    List.for_all (Hashtbl.mem names) ids
  in
  (* in the closed case, the candidate declares no extra labels *)
  let is_exact (lbl, _) =
    not closed || List.length ids = Array.length lbl.lbl_all
  in
  match List.filter has_all_ids labels with
  | [] -> Error labels
  | with_ids ->
      match List.filter is_exact with_ids with
      | [] -> Error with_ids
      | exact -> Ok exact
(* Only issue warnings once per record constructor/pattern *)
(* Disambiguate every label of a record expression/pattern at once,
   buffering per-label warnings so that each kind of warning is emitted
   at most once for the whole record. *)
let disambiguate_lid_a_list loc closed env usage expected_type lid_a_list =
  let ids = List.map (fun (lid, _) -> Longident.last lid.txt) lid_a_list in
  (* buffered warnings: principality, ambiguity, out-of-scope names *)
  let w_pr = ref false and w_amb = ref []
  and w_scope = ref [] and w_scope_ty = ref "" in
  let warn loc msg =
    let open Warnings in
    match msg with
    | Not_principal _ -> w_pr := true
    | Ambiguous_name([s], l, _, ex) -> w_amb := (s, l, ex) :: !w_amb
    | Name_out_of_scope(ty, [s], _) ->
        w_scope := s :: !w_scope; w_scope_ty := ty
    | _ -> Location.prerr_warning loc msg
  in
  let process_label lid =
    let scope = Env.lookup_all_labels ~loc:lid.loc usage lid.txt env in
    let filter : Label.nonempty_candidate_filter =
      disambiguate_label_by_ids closed ids in
    Label.disambiguate ~warn ~filter usage lid env expected_type scope in
  let lbl_a_list =
    List.map (fun (lid,a) -> lid, process_label lid, a) lid_a_list in
  if !w_pr then
    Location.prerr_warning loc
      (Warnings.Not_principal "this type-based record disambiguation")
  else begin
    (* collapse per-label ambiguity warnings into a single one when all
       labels resolved to the same record type *)
    match List.rev !w_amb with
      (_,types,ex)::_ as amb ->
        let paths =
          List.map (fun (_,lbl,_) -> Label.get_type_path lbl) lbl_a_list in
        let path = List.hd paths in
        let fst3 (x,_,_) = x in
        if List.for_all (compare_type_path env path) (List.tl paths) then
          Location.prerr_warning loc
            (Warnings.Ambiguous_name (List.map fst3 amb, types, true, ex))
        else
          List.iter
            (fun (s,l,ex) -> Location.prerr_warning loc
                (Warnings.Ambiguous_name ([s],l,false, ex)))
            amb
    | _ -> ()
  end;
  if !w_scope <> [] then
    Location.prerr_warning loc
      (Warnings.Name_out_of_scope (!w_scope_ty, List.rev !w_scope, true));
  lbl_a_list
(* Module qualifier of the first qualified label of a record
   expression/pattern, if any. *)
let find_record_qual fields =
  List.find_map
    (fun (lid, _) ->
       match lid.txt with
       | Longident.Ldot (modname, _) -> Some modname
       | _ -> None)
    fields
(* CPS map: apply [f] to each element of [xs] from left to right,
   collecting the values passed to the per-element continuations, then
   pass the resulting list (in order) to the final continuation [k]. *)
let map_fold_cont f xs k =
  let rec go acc = function
    | [] -> k (List.rev acc)
    | x :: rest -> f x (fun y -> go (y :: acc) rest)
  in
  go [] xs
(* Type every label of a record expression/pattern: resolve each label
   (reusing [labels] for rebuilt syntax trees, propagating an explicit
   module qualifier to unqualified labels, then disambiguating), sort
   the fields by declaration position, and type each with [type_lbl_a]
   in CPS style. *)
let type_label_a_list
      ?labels loc closed env usage type_lbl_a expected_type lid_a_list k =
  let lbl_a_list =
    match lid_a_list, labels with
      ({txt=Longident.Lident s}, _)::_, Some labels when Hashtbl.mem labels s ->
        (* Special case for rebuilt syntax trees *)
        List.map
          (function lid, a -> match lid.txt with
            Longident.Lident s -> lid, Hashtbl.find labels s, a
          | _ -> assert false)
          lid_a_list
    | _ ->
        (* a single qualified label qualifies the whole record *)
        let lid_a_list =
          match find_record_qual lid_a_list with
            None -> lid_a_list
          | Some modname ->
              List.map
                (fun (lid, a as lid_a) ->
                  match lid.txt with Longident.Lident s ->
                    {lid with txt=Longident.Ldot (modname, s)}, a
                  | _ -> lid_a)
                lid_a_list
        in
        disambiguate_lid_a_list loc closed env usage expected_type lid_a_list
  in
  (* Invariant: records are sorted in the typed tree *)
  let lbl_a_list =
    List.sort
      (fun (_,lbl1,_) (_,lbl2,_) -> compare lbl1.lbl_pos lbl2.lbl_pos)
      lbl_a_list
  in
  map_fold_cont type_lbl_a lbl_a_list k
(* Checks over the labels mentioned in a record pattern:
no duplicate definitions (error); properly closed (warning) *)
(* Checks over the labels of a record pattern: error on a label defined
   twice; warn on missing fields when the pattern is closed (no final
   [; _]) and the corresponding warning is active. *)
let check_recordpat_labels loc lbl_pat_list closed =
  match lbl_pat_list with
  | [] -> ()                      (* should not happen *)
  | (_, label1, _) :: _ ->
      let all = label1.lbl_all in
      let defined = Array.make (Array.length all) false in
      let check_defined (_, label, _) =
        if defined.(label.lbl_pos)
        then raise(Error(loc, Env.empty, Label_multiply_defined label.lbl_name))
        else defined.(label.lbl_pos) <- true in
      List.iter check_defined lbl_pat_list;
      if closed = Closed
      && Warnings.is_active (Warnings.Missing_record_field_pattern "")
      then begin
        let undefined = ref [] in
        for i = 0 to Array.length all - 1 do
          if not defined.(i) then undefined := all.(i).lbl_name :: !undefined
        done;
        if !undefined <> [] then begin
          let u = String.concat ", " (List.rev !undefined) in
          Location.prerr_warning loc (Warnings.Missing_record_field_pattern u)
        end
      end
(* Constructors *)
(* Name disambiguation for data constructors. *)
module Constructor = NameChoice (struct
  type t = constructor_description
  type usage = Env.constructor_usage
  let kind = Datatype_kind.Variant
  let get_name cstr = cstr.cstr_name
  let get_type cstr = cstr.cstr_res
  let lookup_all_from_type loc usage path env =
    match Env.lookup_all_constructors_from_type ~loc usage path env with
    | _ :: _ as x -> x
    | [] ->
        match (Env.find_type path env).type_kind with
        | Type_open ->
            (* Extension constructors cannot be found by looking at the type
               declaration.
               We scan the whole environment to get an accurate spellchecking
               hint in the subsequent error message *)
            let filter lbl =
              compare_type_path env
                path (get_constr_type_path @@ get_type lbl) in
            let add_valid x acc = if filter x then (x,ignore)::acc else acc in
            Env.fold_constructors add_valid None env []
        | _ -> []
  let in_env _ = true
end)
(* Typing of patterns *)
(* "half typed" cases are produced in [type_cases] when we've just typechecked
the pattern but haven't type-checked the body yet.
At this point we might have added some type equalities to the environment,
but haven't yet added identifiers bound by the pattern. *)
(* A match case whose pattern has been typed but whose body has not. *)
type 'case_pattern half_typed_case =
  { typed_pat: 'case_pattern;
    pat_type_for_unif: type_expr;      (* pattern type used for unification *)
    untyped_case: Parsetree.case;      (* original source case *)
    branch_env: Env.t;                 (* env in which to type the body *)
    pat_vars: pattern_variable list;   (* variables bound by the pattern *)
    unpacks: module_variable list;     (* (module M) variables it binds *)
    contains_gadt: bool; }             (* whether a GADT constructor occurs *)
(* Whether a (source) pattern contains a constant or interval literal
   anywhere inside it. *)
let rec has_literal_pattern p = match p.ppat_desc with
  | Ppat_constant _
  | Ppat_interval _ ->
     true
  | Ppat_any
  | Ppat_variant (_, None)
  | Ppat_construct (_, None)
  | Ppat_type _
  | Ppat_var _
  | Ppat_unpack _
  | Ppat_extension _ ->
     false
  | Ppat_exception p
  | Ppat_variant (_, Some p)
  | Ppat_construct (_, Some (_, p))
  | Ppat_constraint (p, _)
  | Ppat_alias (p, _)
  | Ppat_lazy p
  | Ppat_open (_, p) ->
     has_literal_pattern p
  | Ppat_tuple ps
  | Ppat_array ps ->
     List.exists has_literal_pattern ps
  | Ppat_record (ps, _) ->
     List.exists (fun (_,p) -> has_literal_pattern p) ps
  | Ppat_or (p, q) ->
     has_literal_pattern p || has_literal_pattern q
(* Check that [ty] does not escape the scope [level], converting an
   [Escape] exception into a located pattern-type-clash error. *)
let check_scope_escape loc env level ty =
  try Ctype.check_scope_escape env level ty
  with Escape esc ->
    (* We don't expand the type here because if we do, we might expand to the
       type that escaped, leading to confusing error messages. *)
    let trace = Errortrace.[Escape (map_escape trivial_expansion esc)] in
    raise (Error(loc,
                 env,
                 Pattern_type_clash(Errortrace.unification_error ~trace, None)))
type pattern_checking_mode =
  | Normal
  (** We are checking user code. *)
  | Counter_example of counter_example_checking_info
  (** In [Counter_example] mode, we are checking a counter-example
      candidate produced by Parmatch. This is a syntactic pattern that
      represents a set of values by using or-patterns (p_1 | ... | p_n)
      to enumerate all alternatives in the counter-example
      search. These or-patterns occur at every choice point, possibly
      deep inside the pattern.
      Parmatch does not use type information, so this pattern may
      exhibit two issues:
      - some parts of the pattern may be ill-typed due to GADTs, and
      - some wildcard patterns may not match any values: their type is
        empty.
      The aim of [type_pat] in the [Counter_example] mode is to refine
      this syntactic pattern into a well-typed pattern, and ensure
      that it matches at least one concrete value.
      - It filters ill-typed branches of or-patterns.
        (see {!splitting_mode} below)
      - It tries to check that wildcard patterns are non-empty.
        (see {!explosion_fuel})
  *)
and counter_example_checking_info = {
    explosion_fuel: int;
    (* remaining depth budget for exploding wildcards, see below *)
    splitting_mode: splitting_mode;
    (* strategy for exploring or-pattern alternatives, see below *)
    constrs: (string, Types.constructor_description) Hashtbl.t;
    (* synthetic constructor names -> typed descriptions (from Parmatch) *)
    labels: (string, Types.label_description) Hashtbl.t;
    (* synthetic label names -> typed descriptions (from Parmatch) *)
  }
(**
   [explosion_fuel] controls the checking of wildcard patterns.  We
   eliminate potentially-empty wildcard patterns by exploding them
   into concrete sub-patterns, for example (K1 _ | K2 _) or
   { l1: _; l2: _ }. [explosion_fuel] is the depth limit on wildcard
   explosion. Such depth limit is required to avoid non-termination
   and compilation-time blowups.
   [splitting_mode] controls the handling of or-patterns.  In
   [Counter_example] mode, we only need to select one branch that
   leads to a well-typed pattern. Checking all branches is expensive,
   we use different search strategies (see {!splitting_mode}) to
   reduce the number of explored alternatives.
   [constrs] and [labels] contain metadata produced by [Parmatch] to
   type-check the given syntactic pattern. [Parmatch] produces
   counter-examples by turning typed patterns into
   [Parsetree.pattern]. In this process, constructor and label paths
   are lost, and are replaced by generated strings. [constrs] and
   [labels] map those synthetic names back to the typed descriptions
   of the original names.
 *)
(** Due to GADT constraints, an or-pattern produced within
    a counter-example may have ill-typed branches. Consider for example
      {[
        type _ tag = Int : int tag | Bool : bool tag
      ]}
    then [Parmatch] will propose the or-pattern [Int | Bool] whenever
    a pattern of type [tag] is required to form a counter-example. For
    example, a function expects a (int tag option) and only [None] is
    handled by the user-written pattern. [Some (Int | Bool)] is not
    well-typed in this context, only the sub-pattern [Some Int] is.
    In this example, the expected type coming from the context
    suffices to know which or-pattern branch must be chosen.
    In the general case, choosing a branch can have non-local effects
    on the typability of the term. For example, consider a tuple type
    ['a tag * ...'a...], where the first component is a GADT.  All
    constructor choices for this GADT lead to a well-typed branch in
    isolation (['a] is unconstrained), but choosing one of them adds
    a constraint on ['a] that may make the other tuple elements
    ill-typed.
    In general, after choosing each possible branch of the or-pattern,
    [type_pat] has to check the rest of the pattern to tell if this
    choice leads to a well-typed term. This may lead to an explosion
    of typing/search work -- the rest of the term may in turn contain
    alternatives.
    We use careful strategies to try to limit counterexample-checking
    time; [splitting_mode] represents those strategies.
*)
and splitting_mode =
  | Backtrack_or
  (** Always backtrack in or-patterns.
      [Backtrack_or] selects a single alternative from an or-pattern
      by using backtracking, trying to choose each branch in turn, and
      to complete it into a valid sub-pattern. We call this
      "splitting" the or-pattern.
      We use this mode when looking for unused patterns or sub-patterns,
      in particular to check a refutation clause (p -> .).
    *)
  | Refine_or of { inside_nonsplit_or: bool; }
  (** Only backtrack when needed.
      [Refine_or] tries another approach for refining or-pattern.
      Instead of always splitting each or-pattern, It first attempts to
      find branches that do not introduce new constraints (because they
      do not contain GADT constructors). Those branches are such that,
      if they fail, all other branches will fail.
      If we find one such branch, we attempt to complete the subpattern
      (checking what's outside the or-pattern), ignoring other
      branches -- we never consider another branch choice again. If all
      branches are constrained, it falls back to splitting the
      or-pattern.
      We use this mode when checking exhaustivity of pattern matching.
  *)
(** This exception is only used internally within [type_pat_aux], in
    counter-example mode, to jump back to the parent or-pattern in the
    [Refine_or] strategy.
    Such a parent exists precisely when [inside_nonsplit_or = true];
    it's an invariant that we always setup an exception handler for
    [Need_backtrack] when we set this flag. *)
exception Need_backtrack
(** This exception is only used internally within [type_pat_aux], in
    counter-example mode. We use it to discard counter-example candidates
    that do not match any value. *)
exception Empty_branch
(* Why typing a counter-example branch failed: it would add GADT
   constraints ([Need_backtrack]) or it matches no value ([Empty_branch]);
   see [type_pat_result] inside [type_pat_aux]. *)
type abort_reason = Adds_constraints | Empty
(** Remember current typing state for backtracking.
    No variable information, as we only backtrack on
    patterns without variables (cf. assert statements). *)
type state =
 { snapshot: snapshot;    (* type-graph snapshot, restored via [Btype.backtrack] *)
   levels: Ctype.levels;  (* generalization levels, restored via [Ctype.set_levels] *)
   env: Env.t; }          (* the typing environment at capture time *)
(* Capture the current typing state (type-graph snapshot, levels and
   environment) so that [set_state] can restore it later. *)
let save_state env =
  let snapshot = Btype.snapshot () in
  let levels = Ctype.save_levels () in
  { snapshot; levels; env = !env }
(* Restore a typing state previously captured by [save_state]. *)
let set_state { snapshot; levels; env = saved_env } env =
  Btype.backtrack snapshot;
  Ctype.set_levels levels;
  env := saved_env
(** Find the first alternative in the tree of or-patterns for which
    [f] does not raise an error. If all fail, the last error is
    propagated *)
let rec find_valid_alternative f pat =
  match pat.ppat_desc with
  | Ppat_or (left, right) ->
      begin match find_valid_alternative f left with
      | result -> result
      | exception (Empty_branch | Error _) -> find_valid_alternative f right
      end
  | _ -> f pat
(* Disable wildcard explosion for sub-patterns where exploding is not
   wanted (e.g. under lazy, cf. PR#7421, or intervals). *)
let no_explosion mode =
  match mode with
  | Normal -> Normal
  | Counter_example info -> Counter_example { info with explosion_fuel = 0 }
(* The or-pattern splitting strategy of [mode], if any (counter-example
   mode only). *)
let get_splitting_mode mode =
  match mode with
  | Normal -> None
  | Counter_example { splitting_mode; _ } -> Some splitting_mode
(* Record that we are descending into an or-pattern that is not being
   split.  Only [Refine_or] can reach this point: in [Backtrack_or]
   mode every or-pattern is split, so that case is impossible. *)
let enter_nonsplit_or mode =
  match mode with
  | Normal -> Normal
  | Counter_example info ->
      let splitting_mode =
        match info.splitting_mode with
        | Backtrack_or ->
            (* in Backtrack_or mode, or-patterns are always split *)
            assert false
        | Refine_or _ ->
            Refine_or { inside_nonsplit_or = true }
      in
      Counter_example { info with splitting_mode }
(** The typedtree has two distinct syntactic categories for patterns,
"value" patterns, matching on values, and "computation" patterns
that match on the effect of a computation -- typically, exception
patterns (exception p).
On the other hand, the parsetree has an unstructured representation
where all categories of patterns are mixed together. The
decomposition according to the value/computation structure has to
happen during type-checking.
We don't want to duplicate the type-checking logic in two different
functions, depending on the kind of pattern to be produced. In
particular, there are both value and computation or-patterns, and
the type-checking logic for or-patterns is horribly complex; having
   it in two different places would be twice as horrible.
The solution is to pass a GADT tag to [type_pat] to indicate whether
a value or computation pattern is expected. This way, there is a single
place where [Ppat_or] nodes are type-checked, the checking logic is shared,
and only at the end do we inspect the tag to decide to produce a value
or computation pattern.
*)
(* Inject a value pattern into the requested pattern category: a value
   pattern is trivially also a computation pattern. *)
let pure
  : type k . k pattern_category -> value general_pattern -> k general_pattern
  = fun category pat ->
  match category with
  | Value -> pat
  | Computation -> as_computation_pattern pat
(* Use a computation pattern where category [k] is expected; in a value
   position this is an error (computation patterns such as exception
   patterns are not allowed there). *)
let only_impure
  : type k . k pattern_category ->
    computation general_pattern -> k general_pattern
  = fun category pat ->
  match category with
  | Value ->
     (* LATER: this exception could be renamed/generalized *)
     raise (Error (pat.pat_loc, pat.pat_env,
                   Exception_pattern_disallowed))
  | Computation -> pat
(* View any pattern of category [k] as a computation pattern. *)
let as_comp_pattern
  : type k . k pattern_category ->
    k general_pattern -> computation general_pattern
  = fun category pat ->
  match category with
  | Value -> as_computation_pattern pat
  | Computation -> pat
(* type_pat propagates the expected type.
   Unification may update the typing environment.
   In counter-example mode, [Empty_branch] is raised when the counter-example
   does not match any value. *)
let rec type_pat
  : type k r . k pattern_category ->
      no_existentials: existential_restriction option ->
      mode: pattern_checking_mode -> env: Env.t ref -> Parsetree.pattern ->
      type_expr -> (k general_pattern -> r) -> r
  = fun category ~no_existentials ~mode
      ~env sp expected_ty k ->
  (* Install the pattern's attributes as the ambient warning scope, then
     delegate all the real work to [type_pat_aux].  [k] is the
     continuation receiving the typed pattern (CPS style). *)
  Builtin_attributes.warning_scope sp.ppat_attributes
    (fun () ->
       type_pat_aux category ~no_existentials ~mode
         ~env sp expected_ty k
    )
and type_pat_aux
  : type k r . k pattern_category -> no_existentials:_ -> mode:_ ->
      env:_ -> _ -> _ -> (k general_pattern -> r) -> r
  = fun category ~no_existentials ~mode
      ~env sp expected_ty k ->
  (* Recursive calls default to the ambient [mode] and [env] unless a
     case overrides them explicitly. *)
  let type_pat category ?(mode=mode) ?(env=env) =
    type_pat category ~no_existentials ~mode ~env
  in
  let loc = sp.ppat_loc in
  (* In counter-example mode, unification is allowed to refine the
     environment (e.g. with GADT equations). *)
  let refine =
    match mode with Normal -> None | Counter_example _ -> Some true in
  let solve_expected (x : pattern) : pattern =
    unify_pat ~refine env x (instance expected_ty);
    x
  in
  (* [rvp]/[rcp] finish a value (resp. computation) pattern and hand it
     to the continuation; the [rp]/[rcp] wrappers from the outer scope
     are applied in Normal mode only (counter-example candidates are
     not recorded). *)
  let rp x =
    let crp (x : k general_pattern) : k general_pattern =
      match category with
      | Value -> rp x
      | Computation -> rcp x in
    if mode = Normal then crp x else x in
  let rp k x = k (rp x)
  and rvp k x = k (rp (pure category x))
  and rcp k x = k (rp (only_impure category x)) in
  let construction_not_used_in_counterexamples = (mode = Normal) in
  (* In [Refine_or {inside_nonsplit_or = true}] a GADT constructor must
     abort back to the enclosing or-pattern (see {!splitting_mode}). *)
  let must_backtrack_on_gadt = match get_splitting_mode mode with
    | None -> false
    | Some Backtrack_or -> false
    | Some (Refine_or {inside_nonsplit_or}) -> inside_nonsplit_or
  in
  match sp.ppat_desc with
    Ppat_any ->
      let k' d = rvp k {
        pat_desc = d;
        pat_loc = loc; pat_extra=[];
        pat_type = instance expected_ty;
        pat_attributes = sp.ppat_attributes;
        pat_env = !env }
      in
      begin match mode with
      | Normal -> k' Tpat_any
      | Counter_example {explosion_fuel; _} when explosion_fuel <= 0 ->
          k' Tpat_any
      | Counter_example ({explosion_fuel; _} as info) ->
         (* explode the wildcard into concrete sub-patterns to check
            that its type is inhabited (see {!explosion_fuel}) *)
         let open Parmatch in
         begin match ppat_of_type !env expected_ty with
         | PT_empty -> raise Empty_branch
         | PT_any -> k' Tpat_any
         | PT_pattern (explosion, sp, constrs, labels) ->
            let explosion_fuel =
              match explosion with
              | PE_single -> explosion_fuel - 1
              | PE_gadt_cases ->
                  if must_backtrack_on_gadt then raise Need_backtrack;
                  explosion_fuel - 5
            in
            let mode =
              Counter_example { info with explosion_fuel; constrs; labels }
            in
            type_pat category ~mode sp expected_ty k
         end
      end
  | Ppat_var name ->
      let ty = instance expected_ty in
      let id = (* PR#7330 *)
        if name.txt = "*extension*" then
          Ident.create_local name.txt
        else
          enter_variable loc name ty sp.ppat_attributes
      in
      rvp k {
        pat_desc = Tpat_var (id, name);
        pat_loc = loc; pat_extra=[];
        pat_type = ty;
        pat_attributes = sp.ppat_attributes;
        pat_env = !env }
  | Ppat_unpack name ->
      (* first-class module pattern: (module _) or (module M) *)
      assert construction_not_used_in_counterexamples;
      let t = instance expected_ty in
      begin match name.txt with
      | None ->
          rvp k {
            pat_desc = Tpat_any;
            pat_loc = sp.ppat_loc;
            pat_extra=[Tpat_unpack, name.loc, sp.ppat_attributes];
            pat_type = t;
            pat_attributes = [];
            pat_env = !env }
      | Some s ->
          let v = { name with txt = s } in
          let id = enter_variable loc v t ~is_module:true sp.ppat_attributes in
          rvp k {
            pat_desc = Tpat_var (id, v);
            pat_loc = sp.ppat_loc;
            pat_extra=[Tpat_unpack, loc, sp.ppat_attributes];
            pat_type = t;
            pat_attributes = [];
            pat_env = !env }
      end
  | Ppat_constraint(
      {ppat_desc=Ppat_var name; ppat_loc=lloc; ppat_attributes = attrs},
      ({ptyp_desc=Ptyp_poly _} as sty)) ->
      (* explicitly polymorphic type *)
      assert construction_not_used_in_counterexamples;
      let cty, ty, ty' =
        solve_Ppat_poly_constraint ~refine env lloc sty expected_ty in
      let id = enter_variable lloc name ty' attrs in
      rvp k { pat_desc = Tpat_var (id, name);
              pat_loc = lloc;
              pat_extra = [Tpat_constraint cty, loc, sp.ppat_attributes];
              pat_type = ty;
              pat_attributes = [];
              pat_env = !env }
  | Ppat_alias(sq, name) ->
      assert construction_not_used_in_counterexamples;
      type_pat Value sq expected_ty (fun q ->
        let ty_var = solve_Ppat_alias ~refine env q in
        let id =
          enter_variable ~is_as_variable:true loc name ty_var sp.ppat_attributes
        in
        rvp k {
          pat_desc = Tpat_alias(q, id, name);
          pat_loc = loc; pat_extra=[];
          pat_type = q.pat_type;
          pat_attributes = sp.ppat_attributes;
          pat_env = !env })
  | Ppat_constant cst ->
      let cst = constant_or_raise !env loc cst in
      rvp k @@ solve_expected {
        pat_desc = Tpat_constant cst;
        pat_loc = loc; pat_extra=[];
        pat_type = type_constant cst;
        pat_attributes = sp.ppat_attributes;
        pat_env = !env }
  | Ppat_interval (Pconst_char c1, Pconst_char c2) ->
      (* desugar a character interval 'a'..'z' into an or-pattern of all
         the characters in the (inclusive) range *)
      let open Ast_helper.Pat in
      let gloc = {loc with Location.loc_ghost=true} in
      let rec loop c1 c2 =
        if c1 = c2 then constant ~loc:gloc (Pconst_char c1)
        else
          or_ ~loc:gloc
            (constant ~loc:gloc (Pconst_char c1))
            (loop (Char.chr(Char.code c1 + 1)) c2)
      in
      let p = if c1 <= c2 then loop c1 c2 else loop c2 c1 in
      let p = {p with ppat_loc=loc} in
      type_pat category ~mode:(no_explosion mode) p expected_ty k
        (* TODO: record 'extra' to remember about interval *)
  | Ppat_interval _ ->
      raise (Error (loc, !env, Invalid_interval))
  | Ppat_tuple spl ->
      assert (List.length spl >= 2);
      let expected_tys = solve_Ppat_tuple ~refine loc env spl expected_ty in
      let spl_ann = List.combine spl expected_tys in
      map_fold_cont (fun (p,t) -> type_pat Value p t) spl_ann (fun pl ->
        rvp k {
        pat_desc = Tpat_tuple pl;
        pat_loc = loc; pat_extra=[];
        pat_type = newty (Ttuple(List.map (fun p -> p.pat_type) pl));
        pat_attributes = sp.ppat_attributes;
        pat_env = !env })
  | Ppat_construct(lid, sarg) ->
      let expected_type =
        match extract_concrete_variant !env expected_ty with
        | Variant_type(p0, p, _) ->
            Some (p0, p, is_principal expected_ty)
        | Maybe_a_variant_type -> None
        | Not_a_variant_type ->
            let srt = wrong_kind_sort_of_constructor lid.txt in
            let error = Wrong_expected_kind(srt, Pattern, expected_ty) in
            raise (Error (loc, !env, error))
      in
      let constr =
        match lid.txt, mode with
        | Longident.Lident s, Counter_example {constrs; _} ->
           (* assert: cf. {!counter_example_checking_info} documentation *)
            assert (Hashtbl.mem constrs s);
            Hashtbl.find constrs s
        | _ ->
        let candidates =
          Env.lookup_all_constructors Env.Pattern ~loc:lid.loc lid.txt !env in
        wrap_disambiguate "This variant pattern is expected to have"
          (mk_expected expected_ty)
          (Constructor.disambiguate Env.Pattern lid !env expected_type)
          candidates
      in
      if constr.cstr_generalized && must_backtrack_on_gadt then
        raise Need_backtrack;
      begin match no_existentials, constr.cstr_existentials with
      | None, _ | _, [] -> ()
      | Some r, (_ :: _ as exs)  ->
          let exs = List.map (Ctype.existential_name constr) exs in
          let name = constr.cstr_name in
          raise (Error (loc, !env, Unexpected_existential (r, name, exs)))
      end;
      (* split the argument into the sub-pattern proper and an optional
         explicit existential type annotation (type a. ... syntax) *)
      let sarg', existential_styp =
        match sarg with
          None -> None, None
        | Some (vl, {ppat_desc = Ppat_constraint (sp, sty)})
          when vl <> [] || constr.cstr_arity > 1 ->
            Some sp, Some (vl, sty)
        | Some ([], sp) ->
            Some sp, None
        | Some (_, sp) ->
            raise (Error (sp.ppat_loc, !env, Missing_type_constraint))
      in
      let sargs =
        match sarg' with
          None -> []
        | Some {ppat_desc = Ppat_tuple spl} when
            constr.cstr_arity > 1 ||
            Builtin_attributes.explicit_arity sp.ppat_attributes
          -> spl
        | Some({ppat_desc = Ppat_any} as sp) when
            constr.cstr_arity = 0 && existential_styp = None
          ->
            Location.prerr_warning sp.ppat_loc
              Warnings.Wildcard_arg_to_constant_constr;
            []
        | Some({ppat_desc = Ppat_any} as sp) when constr.cstr_arity > 1 ->
            replicate_list sp constr.cstr_arity
        | Some sp -> [sp] in
      if Builtin_attributes.warn_on_literal_pattern constr.cstr_attributes then
        begin match List.filter has_literal_pattern sargs with
        | sp :: _ ->
           Location.prerr_warning sp.ppat_loc Warnings.Fragile_literal_pattern
        | _ -> ()
        end;
      if List.length sargs <> constr.cstr_arity then
        raise(Error(loc, !env, Constructor_arity_mismatch(lid.txt,
                                     constr.cstr_arity, List.length sargs)));
      let (ty_args, existential_ctyp) =
        solve_Ppat_construct ~refine env loc constr no_existentials
          existential_styp expected_ty
      in
      (* inline-record arguments must not escape the constructor; reject
         constrained sub-patterns that could let them *)
      let rec check_non_escaping p =
        match p.ppat_desc with
        | Ppat_or (p1, p2) ->
            check_non_escaping p1;
            check_non_escaping p2
        | Ppat_alias (p, _) ->
            check_non_escaping p
        | Ppat_constraint _ ->
            raise (Error (p.ppat_loc, !env, Inlined_record_escape))
        | _ ->
            ()
      in
      if constr.cstr_inlined <> None then begin
        List.iter check_non_escaping sargs;
        Option.iter (fun (_, sarg) -> check_non_escaping sarg) sarg
      end;
      map_fold_cont
        (fun (p,t) -> type_pat Value p t)
        (List.combine sargs ty_args)
        (fun args ->
          rvp k {
            pat_desc=Tpat_construct(lid, constr, args, existential_ctyp);
            pat_loc = loc; pat_extra=[];
            pat_type = instance expected_ty;
            pat_attributes = sp.ppat_attributes;
            pat_env = !env })
  | Ppat_variant(tag, sarg) ->
      if tag = Parmatch.some_private_tag then
        assert (match mode with Normal -> false | Counter_example _ -> true);
      let constant = (sarg = None) in
      let arg_type, row, pat_type =
        solve_Ppat_variant ~refine loc env tag constant expected_ty in
      let k arg =
        rvp k {
          pat_desc = Tpat_variant(tag, arg, ref row);
          pat_loc = loc; pat_extra = [];
          pat_type = pat_type;
          pat_attributes = sp.ppat_attributes;
          pat_env = !env }
      in begin
        (* PR#6235: propagate type information *)
        match sarg, arg_type with
          Some p, [ty] -> type_pat Value p ty (fun p -> k (Some p))
        | _            -> k None
      end
  | Ppat_record(lid_sp_list, closed) ->
      assert (lid_sp_list <> []);
      let expected_type, record_ty =
        match extract_concrete_record !env expected_ty with
        | Record_type(p0, p, _) ->
            let ty = generic_instance expected_ty in
            Some (p0, p, is_principal expected_ty), ty
        | Maybe_a_record_type -> None, newvar ()
        | Not_a_record_type ->
          let error = Wrong_expected_kind(Record, Pattern, expected_ty) in
          raise (Error (loc, !env, error))
      in
      let type_label_pat (label_lid, label, sarg) k =
        let ty_arg =
          solve_Ppat_record_field ~refine loc env label label_lid record_ty in
        type_pat Value sarg ty_arg (fun arg ->
          k (label_lid, label, arg))
      in
      let make_record_pat lbl_pat_list =
        check_recordpat_labels loc lbl_pat_list closed;
        {
          pat_desc = Tpat_record (lbl_pat_list, closed);
          pat_loc = loc; pat_extra=[];
          pat_type = instance record_ty;
          pat_attributes = sp.ppat_attributes;
          pat_env = !env;
        }
      in
      let k' pat = rvp k @@ solve_expected pat in
      begin match mode with
      | Normal ->
          k' (wrap_disambiguate "This record pattern is expected to have"
               (mk_expected expected_ty)
               (type_label_a_list loc false !env Env.Projection
                  type_label_pat expected_type lid_sp_list)
               make_record_pat)
      | Counter_example {labels; _} ->
          type_label_a_list ~labels loc false !env Env.Projection
            type_label_pat expected_type lid_sp_list
            (fun lbl_pat_list -> k' (make_record_pat lbl_pat_list))
      end
  | Ppat_array spl ->
      let ty_elt = solve_Ppat_array ~refine loc env expected_ty in
      map_fold_cont (fun p -> type_pat Value p ty_elt) spl (fun pl ->
        rvp k {
        pat_desc = Tpat_array pl;
        pat_loc = loc; pat_extra=[];
        pat_type = instance expected_ty;
        pat_attributes = sp.ppat_attributes;
        pat_env = !env })
  | Ppat_or(sp1, sp2) ->
      begin match mode with
      | Normal ->
          let initial_pattern_variables = !pattern_variables in
          let initial_module_variables = !module_variables in
          let equation_level = !gadt_equations_level in
          let outter_lev = get_current_level () in
          (* introduce a new scope *)
          begin_def ();
          let lev = get_current_level () in
          gadt_equations_level := Some lev;
          let type_pat_rec env sp =
            type_pat category sp expected_ty ~env (fun x -> x) in
          (* each side is typed in its own copy of the environment and
             with its own set of bound variables *)
          let env1 = ref !env in
          let p1 = type_pat_rec env1 sp1 in
          let p1_variables = !pattern_variables in
          let p1_module_variables = !module_variables in
          pattern_variables := initial_pattern_variables;
          module_variables := initial_module_variables;
          let env2 = ref !env in
          let p2 = type_pat_rec env2 sp2 in
          end_def ();
          gadt_equations_level := equation_level;
          let p2_variables = !pattern_variables in
          (* Make sure no variable with an ambiguous type gets added to the
             environment. *)
          List.iter (fun { pv_type; pv_loc; _ } ->
            check_scope_escape pv_loc !env1 outter_lev pv_type
          ) p1_variables;
          List.iter (fun { pv_type; pv_loc; _ } ->
            check_scope_escape pv_loc !env2 outter_lev pv_type
          ) p2_variables;
          (* rename the right-hand side's variables to those of the
             left-hand side (both sides must bind the same names) *)
          let alpha_env =
            enter_orpat_variables loc !env p1_variables p2_variables in
          let p2 = alpha_pat alpha_env p2 in
          pattern_variables := p1_variables;
          module_variables := p1_module_variables;
          rp k { pat_desc = Tpat_or (p1, p2, None);
                 pat_loc = loc; pat_extra = [];
                 pat_type = instance expected_ty;
                 pat_attributes = sp.ppat_attributes;
                 pat_env = !env }
      | Counter_example {splitting_mode; _} ->
          (* We are in counter-example mode, but try to avoid backtracking *)
          let must_split =
            match splitting_mode with
            | Backtrack_or -> true
            | Refine_or _ -> false in
          let state = save_state env in
          let split_or sp =
            let typ pat = type_pat category pat expected_ty k in
            find_valid_alternative (fun pat -> set_state state env; typ pat) sp
          in
          if must_split then split_or sp else
          let type_pat_result env sp : (_, abort_reason) result =
            let mode = enter_nonsplit_or mode in
            match type_pat category ~mode sp expected_ty ~env (fun x -> x) with
            | res -> Ok res
            | exception Need_backtrack -> Error Adds_constraints
            | exception Empty_branch -> Error Empty
          in
          let p1 = type_pat_result (ref !env) sp1 in
          let p2 = type_pat_result (ref !env) sp2 in
          match p1, p2 with
          | Error Empty, Error Empty ->
              raise Empty_branch
          | Error Adds_constraints, Error _
          | Error _, Error Adds_constraints ->
              let inside_nonsplit_or =
                match splitting_mode with
                | Backtrack_or -> false
                | Refine_or {inside_nonsplit_or} -> inside_nonsplit_or in
              if inside_nonsplit_or
              then raise Need_backtrack
              else split_or sp
          | Ok p, Error _
          | Error _, Ok p ->
              rp k p
          | Ok p1, Ok p2 ->
              rp k { pat_desc = Tpat_or (p1, p2, None);
                     pat_loc = loc; pat_extra = [];
                     pat_type = instance expected_ty;
                     pat_attributes = sp.ppat_attributes;
                     pat_env = !env }
      end
  | Ppat_lazy sp1 ->
      let nv = solve_Ppat_lazy ~refine loc env expected_ty in
      (* do not explode under lazy: PR#7421 *)
      type_pat Value ~mode:(no_explosion mode) sp1 nv (fun p1 ->
        rvp k {
        pat_desc = Tpat_lazy p1;
        pat_loc = loc; pat_extra=[];
        pat_type = instance expected_ty;
        pat_attributes = sp.ppat_attributes;
        pat_env = !env })
  | Ppat_constraint(sp, sty) ->
      assert construction_not_used_in_counterexamples;
      (* Pretend separate = true *)
      let cty, ty, expected_ty' =
        solve_Ppat_constraint ~refine loc env sty expected_ty in
      type_pat category sp expected_ty' (fun p ->
        (*Format.printf "%a@.%a@."
          Printtyp.raw_type_expr ty
          Printtyp.raw_type_expr p.pat_type;*)
        let extra = (Tpat_constraint cty, loc, sp.ppat_attributes) in
        let p : k general_pattern =
          match category, (p : k general_pattern) with
          | Value, {pat_desc = Tpat_var (id,s); _} ->
            {p with
              pat_type = ty;
              pat_desc =
                Tpat_alias
                  ({p with pat_desc = Tpat_any; pat_attributes = []}, id,s);
              pat_extra = [extra];
            }
          | _, p ->
             { p with pat_type = ty; pat_extra = extra::p.pat_extra }
        in k p)
  | Ppat_type lid ->
      assert construction_not_used_in_counterexamples;
      let (path, p) = build_or_pat !env loc lid in
      k @@ pure category @@ solve_expected
        { p with pat_extra = (Tpat_type (path, lid), loc, sp.ppat_attributes)
        :: p.pat_extra }
  | Ppat_open (lid,p) ->
      assert construction_not_used_in_counterexamples;
      let path, new_env =
        !type_open Asttypes.Fresh !env sp.ppat_loc lid in
      env := new_env;
      type_pat category ~env p expected_ty ( fun p ->
        let new_env = !env in
        begin match Env.remove_last_open path new_env with
        | None -> assert false
        | Some closed_env -> env := closed_env
        end;
        k { p with pat_extra = (Tpat_open (path,lid,new_env),
                                loc, sp.ppat_attributes) :: p.pat_extra }
      )
  | Ppat_exception p ->
      type_pat Value p Predef.type_exn (fun p_exn ->
      rcp k {
        pat_desc = Tpat_exception p_exn;
        pat_loc = sp.ppat_loc;
        pat_extra = [];
        pat_type = expected_ty;
        pat_env = !env;
        pat_attributes = sp.ppat_attributes;
      })
  | Ppat_extension ext ->
      raise (Error_forward (Builtin_attributes.error_of_extension ext))
(* Non-CPS entry point: install the GADT equations level for the duration
   of the call (via [Misc.protect_refs]) and run the CPS [type_pat] with
   the identity continuation. *)
let type_pat category ?no_existentials ?(mode=Normal)
    ?(lev=get_current_level()) env sp expected_ty =
  Misc.protect_refs [Misc.R (gadt_equations_level, Some lev)] (fun () ->
      type_pat category ~no_existentials ~mode
        ~env sp expected_ty (fun x -> x)
    )
(* this function is passed to Partial.parmatch
   to type check gadt nonexhaustiveness *)
(* Returns [Some typed_p] iff the syntactic counter-example [p] can be
   typed against [expected_ty] in counter-example mode; the typing state
   is rolled back in either case, so only the success/failure matters. *)
let partial_pred ~lev ~splitting_mode ?(explode=0)
      env expected_ty constrs labels p =
  let env = ref env in
  let state = save_state env in
  let mode =
    Counter_example {
        splitting_mode;
        explosion_fuel = explode;
        constrs; labels;
      } in
  try
    reset_pattern true;
    let typed_p = type_pat Value ~lev ~mode env p expected_ty in
    set_state state env;
    (* types are invalidated but we don't need them here *)
    Some typed_p
  with Error _ | Empty_branch ->
    set_state state env;
    None
(* Exhaustiveness check for a match.  When there is a single case we
   allow some wildcard explosion (fuel 5) for a more precise verdict. *)
let check_partial ?(lev=get_current_level ()) env expected_ty loc cases =
  let explode = match cases with [_] -> 5 | _ -> 0 in
  let splitting_mode = Refine_or {inside_nonsplit_or = false} in
  Parmatch.check_partial
    (partial_pred ~lev ~splitting_mode ~explode env expected_ty) loc cases
(* Unused-case check.  For a refutation clause (p -> .), if the
   counter-example is actually well-typed the clause is wrong: report
   [Unrefuted_pattern]. *)
let check_unused ?(lev=get_current_level ()) env expected_ty cases =
  Parmatch.check_unused
    (fun refute constrs labels spat ->
      match
        partial_pred ~lev ~splitting_mode:Backtrack_or ~explode:5
          env expected_ty constrs labels spat
      with
        Some pat when refute ->
          raise (Error (spat.ppat_loc, env, Unrefuted_pattern pat))
      | r -> r)
    cases
(* Apply [f] to the type of every bound pattern variable. *)
let iter_pattern_variables_type f : pattern_variable list -> unit =
  fun pvs -> List.iter (fun pv -> f pv.pv_type) pvs
(* Add the pattern variables [pv] to [env] as regular values.  The
   optional [check]/[check_as] warnings apply respectively to ordinary
   variables and to [as]-bound variables. *)
let add_pattern_variables ?check ?check_as env pv =
  let add {pv_id; pv_type; pv_loc; pv_as_var; pv_attributes} acc =
    let check = if pv_as_var then check_as else check in
    let desc =
      { val_type = pv_type; val_kind = Val_reg; Types.val_loc = pv_loc;
        val_attributes = pv_attributes;
        val_uid = Uid.mk ~current_unit:(Env.get_unit_name ());
      }
    in
    Env.add_value ?check pv_id desc acc
  in
  List.fold_right add pv env
(* Type-check a single pattern from a fresh pattern state.  Returns the
   typed pattern, the (possibly extended) environment, the deferred
   pattern-forcing functions, the bound variables and the module
   variables. *)
let type_pattern category ~lev env spat expected_ty =
  reset_pattern true;
  let new_env = ref env in
  let pat = type_pat category ~lev new_env spat expected_ty in
  let pvs = get_ref pattern_variables in
  let unpacks = get_ref module_variables in
  (pat, !new_env, get_ref pattern_force, pvs, unpacks)
(* Type-check a list of patterns (e.g. the left-hand sides of a [let])
   against the corresponding expected types.  The variables bound by all
   patterns are added to the returned environment. *)
let type_pattern_list
    category no_existentials env spatl expected_tys allow
  =
  reset_pattern allow;
  let new_env = ref env in
  let type_pat (attrs, pat) ty =
    (* each pattern is checked under its own attribute warning scope *)
    Builtin_attributes.warning_scope ~ppwarning:false attrs
      (fun () ->
         type_pat category ~no_existentials new_env pat ty
      )
  in
  let patl = List.map2 type_pat spatl expected_tys in
  let pvs = get_ref pattern_variables in
  let unpacks =
    List.map (fun (name, loc) ->
        {tu_name = name; tu_loc = loc;
         tu_uid = Uid.mk ~current_unit:(Env.get_unit_name ())}
      ) (get_ref module_variables)
  in
  let new_env = add_pattern_variables !new_env pvs in
  (patl, new_env, get_ref pattern_force, pvs, unpacks)
(* Type-check the pattern of a class function argument.  Each bound
   variable is entered twice: under its original ident in the value
   environment (Val_reg), and under a renamed ident in the method
   environment as an instance variable (Val_ivar), so methods can refer
   to it. *)
let type_class_arg_pattern cl_num val_env met_env l spat =
  reset_pattern false;
  let nv = newvar () in
  let pat =
    type_pat Value ~no_existentials:In_class_args (ref val_env) spat nv in
  if has_variants pat then begin
    Parmatch.pressure_variants val_env [pat];
    finalize_variants pat;
  end;
  List.iter (fun f -> f()) (get_ref pattern_force);
  (* an optional-label argument must have type ['a option] *)
  if is_optional l then unify_pat (ref val_env) pat (type_option (newvar ()));
  let (pv, val_env, met_env) =
    List.fold_right
      (fun {pv_id; pv_type; pv_loc; pv_as_var; pv_attributes}
        (pv, val_env, met_env) ->
         let check s =
           if pv_as_var then Warnings.Unused_var s
           else Warnings.Unused_var_strict s in
         let id' = Ident.rename pv_id in
         let val_uid = Uid.mk ~current_unit:(Env.get_unit_name ()) in
         let val_env =
          Env.add_value pv_id
            { val_type = pv_type
            ; val_kind = Val_reg
            ; val_attributes = pv_attributes
            ; val_loc = pv_loc
            ; val_uid
            }
            val_env
         in
         let met_env =
          Env.add_value id' ~check
            { val_type = pv_type
            ; val_kind = Val_ivar (Immutable, cl_num)
            ; val_attributes = pv_attributes
            ; val_loc = pv_loc
            ; val_uid
            }
            met_env
         in
         ((id', pv_id, pv_type)::pv, val_env, met_env))
      !pattern_variables ([], val_env, met_env)
  in
  (pat, pv, val_env, met_env)
(* Type-check the self pattern of a class body.  The user pattern is
   wrapped in an alias named "selfpat-*"; returns the typed pattern and
   the variables it binds (the global variable list is cleared). *)
let type_self_pattern env spat =
  let open Ast_helper in
  let spat = Pat.mk(Ppat_alias (spat, mknoloc "selfpat-*")) in
  reset_pattern false;
  let nv = newvar() in
  let pat =
    type_pat Value ~no_existentials:In_self_pattern (ref env) spat nv in
  List.iter (fun f -> f()) (get_ref pattern_force);
  let pv = !pattern_variables in
  pattern_variables := [];
  pat, pv
(* Checks deferred to the end of type-checking.  Each check is paired
   with the warning settings active at registration time and runs under
   them when forced. *)
let delayed_checks = ref []
let reset_delayed_checks () = delayed_checks := []
let add_delayed_check f =
  delayed_checks := (f, Warnings.backup ()) :: !delayed_checks
let force_delayed_checks () =
  (* checks may change type levels *)
  let snap = Btype.snapshot () in
  let w_old = Warnings.backup () in
  (* run the checks in registration order, each under its own warning
     settings, then restore the caller's settings *)
  List.iter
    (fun (f, w) -> Warnings.restore w; f ())
    (List.rev !delayed_checks);
  Warnings.restore w_old;
  reset_delayed_checks ();
  Btype.backtrack snap
(* Descend into the syntactic tail position of [exp] (the expression
   whose value becomes the value of [exp]), e.g. the body of a [let] or
   the first branch of a [match]. *)
let rec final_subexpression exp =
  match exp.exp_desc with
    Texp_let (_, _, e)
  | Texp_sequence (_, e)
  | Texp_try (e, _)
  | Texp_ifthenelse (_, e, _)
  | Texp_match (_, {c_rhs=e} :: _, _)
  | Texp_letmodule (_, _, _, _, e)
  | Texp_letexception (_, e)
  | Texp_open (_, e)
    -> final_subexpression e
  | _ -> exp
(* Generalization criterion for expressions *)
(* [is_nonexpansive exp] decides whether the type of [exp] may be
   generalized (value restriction): evaluating [exp] must not create
   observable mutable state. *)
let rec is_nonexpansive exp =
  match exp.exp_desc with
  | Texp_ident _
  | Texp_constant _
  | Texp_unreachable
  | Texp_function _
  | Texp_array [] -> true
  | Texp_let(_rec_flag, pat_exp_list, body) ->
      List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list &&
      is_nonexpansive body
  | Texp_apply(e, (_,None)::el) ->
      is_nonexpansive e && List.for_all is_nonexpansive_opt (List.map snd el)
  | Texp_match(e, cases, _) ->
     (* Not sure this is necessary, if [e] is nonexpansive then we shouldn't
         care if there are exception patterns. But the previous version enforced
         that there be none, so... *)
      let contains_exception_pat pat =
        exists_general_pattern { f = fun (type k) (p : k general_pattern) ->
          match p.pat_desc with
          | Tpat_exception _ -> true
          | _ -> false } pat
      in
      is_nonexpansive e &&
      List.for_all
        (fun {c_lhs; c_guard; c_rhs} ->
           is_nonexpansive_opt c_guard && is_nonexpansive c_rhs
           && not (contains_exception_pat c_lhs)
        ) cases
  | Texp_tuple el ->
      List.for_all is_nonexpansive el
  | Texp_construct( _, _, el) ->
      List.for_all is_nonexpansive el
  | Texp_variant(_, arg) -> is_nonexpansive_opt arg
  | Texp_record { fields; extended_expression } ->
      (* a record is nonexpansive only if all its fields are immutable *)
      Array.for_all
        (fun (lbl, definition) ->
           match definition with
           | Overridden (_, exp) ->
               lbl.lbl_mut = Immutable && is_nonexpansive exp
           | Kept _ -> true)
        fields
      && is_nonexpansive_opt extended_expression
  | Texp_field(exp, _, _) -> is_nonexpansive exp
  | Texp_ifthenelse(_cond, ifso, ifnot) ->
      is_nonexpansive ifso && is_nonexpansive_opt ifnot
  | Texp_sequence (_e1, e2) -> is_nonexpansive e2  (* PR#4354 *)
  | Texp_new (_, _, cl_decl) -> Btype.class_type_arity cl_decl.cty_type > 0
  (* Note: nonexpansive only means no _observable_ side effects *)
  | Texp_lazy e -> is_nonexpansive e
  | Texp_object ({cstr_fields=fields; cstr_type = { csig_vars=vars}}, _) ->
      (* an object literal is nonexpansive only if it has no instance
         variables and no inherits; [count] tracks declared vals *)
      let count = ref 0 in
      List.for_all
        (fun field -> match field.cf_desc with
            Tcf_method _ -> true
          | Tcf_val (_, _, _, Tcfk_concrete (_, e), _) ->
              incr count; is_nonexpansive e
          | Tcf_val (_, _, _, Tcfk_virtual _, _) ->
              incr count; true
          | Tcf_initializer e -> is_nonexpansive e
          | Tcf_constraint _ -> true
          | Tcf_inherit _ -> false
          | Tcf_attribute _ -> true)
        fields &&
      Vars.fold (fun _ (mut,_,_) b -> decr count; b && mut = Immutable)
        vars true &&
      !count = 0
  | Texp_letmodule (_, _, _, mexp, e)
  | Texp_open ({ open_expr = mexp; _}, e) ->
      is_nonexpansive_mod mexp && is_nonexpansive e
  | Texp_pack mexp ->
      is_nonexpansive_mod mexp
  (* Computations which raise exceptions are nonexpansive, since (raise e) is
     equivalent to (raise e; diverge), and a nonexpansive "diverge" can be
     produced using lazy values or the relaxed value restriction.
     See GPR#1142 *)
  | Texp_assert exp ->
      is_nonexpansive exp
  | Texp_apply (
      { exp_desc = Texp_ident (_, _, {val_kind =
             Val_prim {Primitive.prim_name =
                         ("%raise" | "%reraise" | "%raise_notrace")}}) },
      [Nolabel, Some e]) ->
      is_nonexpansive e
  | Texp_array (_ :: _)
  | Texp_apply _
  | Texp_try _
  | Texp_setfield _
  | Texp_while _
  | Texp_for _
  | Texp_send _
  | Texp_instvar _
  | Texp_setinstvar _
  | Texp_override _
  | Texp_letexception _
  | Texp_letop _
  | Texp_extension_constructor _ ->
    false
and is_nonexpansive_mod mexp =
match mexp.mod_desc with
| Tmod_ident _
| Tmod_functor _ -> true
| Tmod_unpack (e, _) -> is_nonexpansive e
| Tmod_constraint (m, _, _, _) -> is_nonexpansive_mod m
| Tmod_structure str ->
List.for_all
(fun item -> match item.str_desc with
| Tstr_eval _ | Tstr_primitive _ | Tstr_type _
| Tstr_modtype _ | Tstr_class_type _ -> true
| Tstr_value (_, pat_exp_list) ->
List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list
| Tstr_module {mb_expr=m;_}
| Tstr_open {open_expr=m;_}
| Tstr_include {incl_mod=m;_} -> is_nonexpansive_mod m
| Tstr_recmodule id_mod_list ->
List.for_all (fun {mb_expr=m;_} -> is_nonexpansive_mod m)
id_mod_list
| Tstr_exception {tyexn_constructor = {ext_kind = Text_decl _}} ->
false (* true would be unsound *)
| Tstr_exception {tyexn_constructor = {ext_kind = Text_rebind _}} ->
true
| Tstr_typext te ->
List.for_all
(function {ext_kind = Text_decl _} -> false
| {ext_kind = Text_rebind _} -> true)
te.tyext_constructors
| Tstr_class _ -> false (* could be more precise *)
| Tstr_attribute _ -> true
)
str.str_items
| Tmod_apply _ -> false
and is_nonexpansive_opt = function
| None -> true
| Some e -> is_nonexpansive e
let maybe_expansive e = not (is_nonexpansive e)
(* Reject [let rec] bindings whose right-hand sides use the recursively bound
   identifiers in an unsafe way (per [Rec_check]).  Raises
   [Illegal_letrec_expr] at the offending binding's location. *)
let check_recursive_bindings env valbinds =
  let bound_ids = let_bound_idents valbinds in
  let check_binding {vb_expr; _} =
    if not (Rec_check.is_valid_recursive_expression bound_ids vb_expr)
    then raise (Error (vb_expr.exp_loc, env, Illegal_letrec_expr))
  in
  List.iter check_binding valbinds
(* Same check as [check_recursive_bindings], but for recursive class
   expressions; raises [Illegal_class_expr] on failure. *)
let check_recursive_class_bindings env ids exprs =
  let check_class_expr expr =
    if not (Rec_check.is_valid_class_expr ids expr)
    then raise (Error (expr.cl_loc, env, Illegal_class_expr))
  in
  List.iter check_class_expr exprs
(* [is_prim ~name funct] is true iff [funct] is an identifier bound to the
   compiler primitive called [name]. *)
let is_prim ~name funct =
  match funct.exp_desc with
  | Texp_ident (_, _, {val_kind = Val_prim {Primitive.prim_name = pn; _}}) ->
      String.equal pn name
  | _ -> false
(* Approximate the type of an expression, for better recursion *)
(* Build an approximation of the type denoted by the syntactic type [sty]:
   arrows, tuples and fully applied constructors are kept, everything else
   becomes a fresh variable. *)
let rec approx_type env sty =
  match sty.ptyp_desc with
    Ptyp_arrow (p, _, sty) ->
      (* An optional argument really has type ['a option]. *)
      let ty1 = if is_optional p then type_option (newvar ()) else newvar () in
      newty (Tarrow (p, ty1, approx_type env sty, commu_ok))
  | Ptyp_tuple args ->
      newty (Ttuple (List.map (approx_type env) args))
  | Ptyp_constr (lid, ctl) ->
      let path, decl = Env.lookup_type ~use:false ~loc:lid.loc lid.txt env in
      (* Wrong arity: give up on this constructor rather than fail here. *)
      if List.length ctl <> decl.type_arity then newvar ()
      else begin
        let tyl = List.map (approx_type env) ctl in
        newconstr path tyl
      end
  | Ptyp_poly (_, sty) ->
      approx_type env sty
  | _ -> newvar ()
(* Approximate the type of the expression [sexp] without fully typing it,
   by descending into the "result position" of the syntax tree.  Used to
   seed recursive definitions with a useful type. *)
let rec type_approx env sexp =
  match sexp.pexp_desc with
    Pexp_let (_, _, e) -> type_approx env e
  | Pexp_fun (p, _, _, e) ->
      let ty = if is_optional p then type_option (newvar ()) else newvar () in
      newty (Tarrow(p, ty, type_approx env e, commu_ok))
  (* For [function], approximate from the first case's right-hand side. *)
  | Pexp_function ({pc_rhs=e}::_) ->
      newty (Tarrow(Nolabel, newvar (), type_approx env e, commu_ok))
  | Pexp_match (_, {pc_rhs=e}::_) -> type_approx env e
  | Pexp_try (e, _) -> type_approx env e
  | Pexp_tuple l -> newty (Ttuple(List.map (type_approx env) l))
  | Pexp_ifthenelse (_,e,_) -> type_approx env e
  | Pexp_sequence (_,e) -> type_approx env e
  | Pexp_constraint (e, sty) ->
      (* Unify the approximation with the annotation and trust the
         annotation's shape. *)
      let ty = type_approx env e in
      let ty1 = approx_type env sty in
      begin try unify env ty ty1 with Unify err ->
        raise(Error(sexp.pexp_loc, env, Expr_type_clash (err, None, None)))
      end;
      ty1
  | Pexp_coerce (e, sty1, sty2) ->
      let approx_ty_opt = function
        | None -> newvar ()
        | Some sty -> approx_type env sty
      in
      let ty = type_approx env e
      and ty1 = approx_ty_opt sty1
      and ty2 = approx_type env sty2 in
      begin try unify env ty ty1 with Unify err ->
        raise(Error(sexp.pexp_loc, env, Expr_type_clash (err, None, None)))
      end;
      (* The coercion's target type is the result. *)
      ty2
  | _ -> newvar ()
(* List labels in a function type, and whether return type is a variable *)
(* Walk a (possibly cyclic) arrow type, accumulating its argument labels.
   [visited] guards against cycles; the boolean result says whether the
   final return type is a type variable (i.e. more arrows may appear). *)
let rec list_labels_aux env visited ls ty_fun =
  let ty = expand_head env ty_fun in
  if TypeSet.mem ty visited then
    List.rev ls, false
  else match get_desc ty with
    Tarrow (l, _, ty_res, _) ->
      list_labels_aux env (TypeSet.add ty visited) (l::ls) ty_res
  | _ ->
      List.rev ls, is_Tvar ty

(* Entry point: lists the argument labels of the function type [ty]. *)
let list_labels env ty =
  wrap_trace_gadt_instances env (list_labels_aux env TypeSet.empty []) ty
(* Check that all univars are safe in a type. Both exp.exp_type and
ty_expected should already be generalized. *)
(* Verify that the universal variables [vars] of a polymorphic annotation are
   really generalizable in [exp]'s type; raise [Less_general] otherwise.
   Both [exp.exp_type] and [ty_expected] must already be generalized. *)
let check_univars env kind exp ty_expected vars =
  let pty = instance ty_expected in
  (* Raise the level so that the instantiated variables can be generalized
     back after unification. *)
  begin_def ();
  let exp_ty, vars =
    match get_desc pty with
      Tpoly (body, tl) ->
        (* Enforce scoping for type_let:
           since body is not generic, instance_poly only makes
           copies of nodes that have a Tvar as descendant *)
        let _, ty' = instance_poly true tl body in
        let vars, exp_ty = instance_parameterized_type vars exp.exp_type in
        unify_exp_types exp.exp_loc env exp_ty ty';
        exp_ty, vars
    | _ -> assert false
  in
  end_def ();
  generalize exp_ty;
  List.iter generalize vars;
  (* [polyfy] tells us whether all of [vars] remained polymorphic. *)
  let ty, complete = polyfy env exp_ty vars in
  if not complete then
    let ty_expected = instance ty_expected in
    raise (Error(exp.exp_loc,
                 env,
                 Less_general(kind,
                              Errortrace.unification_error
                                ~trace:[Ctype.expanded_diff env
                                          ~got:ty ~expected:ty_expected])))
(* Generalize the expression type, the expected type and the universal
   variables, then run [check_univars] on the result. *)
let generalize_and_check_univars env kind exp ty_expected vars =
  List.iter generalize (exp.exp_type :: ty_expected :: vars);
  check_univars env kind exp ty_expected vars
(* [check_statement] implements the [non-unit-statement] check.
This check is called in contexts where the value of the expression is known
to be discarded (eg. the lhs of a sequence). We check that [exp] has type
unit, or has an explicit type annotation; otherwise we raise the
[non-unit-statement] warning. *)
(* Emit the [non-unit-statement] warning when [exp]'s value is discarded but
   its type is neither [unit] nor a variable, and the expression carries no
   explicit type annotation. *)
let check_statement exp =
  let ty = get_desc (expand_head exp.exp_env exp.exp_type) in
  match ty with
  | Tconstr (p, _, _) when Path.same p Predef.path_unit -> ()
  | Tvar _ -> ()
  | _ ->
      (* Descend to the expression in result position so the warning points
         at the value actually being discarded. *)
      let rec loop {exp_loc; exp_desc; exp_extra; _} =
        match exp_desc with
        | Texp_let (_, _, e)
        | Texp_sequence (_, e)
        | Texp_letexception (_, e)
        | Texp_letmodule (_, _, _, _, e) ->
            loop e
        | _ ->
            (* Prefer the location of an explicit annotation, if any. *)
            let loc =
              match List.find_opt (function
                  | (Texp_constraint _, _, _) -> true
                  | _ -> false) exp_extra
              with
              | Some (_, loc, _) -> loc
              | None -> exp_loc
            in
            Location.prerr_warning loc Warnings.Non_unit_statement
      in
      loop exp
(* [check_partial_application] implements the [ignored-partial-application]
warning (and if [statement] is [true], also [non-unit-statement]).
If [exp] has a function type, we check that it is not syntactically the
result of a function application, as this is often a bug in certain contexts
(eg the rhs of a let-binding or in the argument of [ignore]). For example,
[ignore (List.map print_int)] written by mistake instead of [ignore (List.map
print_int li)].
The check can be disabled by explicitly annotating the expression with a type
constraint, eg [(e : _ -> _)].
If [statement] is [true] and the [ignored-partial-application] is {em not}
triggered, then the [non-unit-statement] check is performed (see
[check_statement]).
If the type of [exp] is not known at the time this function is called, the
check is retried again after typechecking. *)
(* See the comment above: warn on an ignored partial application (an
   expression of arrow type in a discarded position); when [statement] is
   true and that warning does not fire, fall back to [check_statement]. *)
let check_partial_application ~statement exp =
  let check_statement () = if statement then check_statement exp in
  let doit () =
    let ty = get_desc (expand_head exp.exp_env exp.exp_type) in
    match ty with
    | Tarrow _ ->
        (* Walk into result positions; only a syntactic application (or
           send/new/letop) in tail position triggers the warning. *)
        let rec check {exp_desc; exp_loc; exp_extra; _} =
          (* An explicit type constraint disables the check. *)
          if List.exists (function
              | (Texp_constraint _, _, _) -> true
              | _ -> false) exp_extra then check_statement ()
          else begin
            match exp_desc with
            | Texp_ident _ | Texp_constant _ | Texp_tuple _
            | Texp_construct _ | Texp_variant _ | Texp_record _
            | Texp_field _ | Texp_setfield _ | Texp_array _
            | Texp_while _ | Texp_for _ | Texp_instvar _
            | Texp_setinstvar _ | Texp_override _ | Texp_assert _
            | Texp_lazy _ | Texp_object _ | Texp_pack _ | Texp_unreachable
            | Texp_extension_constructor _ | Texp_ifthenelse (_, _, None)
            | Texp_function _ ->
                check_statement ()
            | Texp_match (_, cases, _) ->
                List.iter (fun {c_rhs; _} -> check c_rhs) cases
            | Texp_try (e, cases) ->
                check e; List.iter (fun {c_rhs; _} -> check c_rhs) cases
            | Texp_ifthenelse (_, e1, Some e2) ->
                check e1; check e2
            | Texp_let (_, _, e) | Texp_sequence (_, e) | Texp_open (_, e)
            | Texp_letexception (_, e) | Texp_letmodule (_, _, _, _, e) ->
                check e
            | Texp_apply _ | Texp_send _ | Texp_new _ | Texp_letop _ ->
                Location.prerr_warning exp_loc
                  Warnings.Ignored_partial_application
          end
        in
        check exp
    | _ ->
        check_statement ()
  in
  let ty = get_desc (expand_head exp.exp_env exp.exp_type) in
  match ty with
  | Tvar _ ->
      (* The type of [exp] is not known. Delay the check until after
         typechecking in order to give a chance for the type to become known
         through unification. *)
      add_delayed_check doit
  | _ ->
      doit ()
(* Check that a type is generalizable at some level *)
(* [generalizable level ty] is true iff every node of [ty] lives strictly
   above [level], i.e. the whole type may be generalized at that level.
   Uses the node-marking protocol; marks are undone before returning. *)
let generalizable level ty =
  let rec check ty =
    if not_marked_node ty then
      if get_level ty <= level then raise Exit else
      (flip_mark_node ty; iter_type_expr check ty)
  in
  try check ty; unmark_type ty; true
  with Exit -> unmark_type ty; false
(* Hack to allow coercion of self. Will clean-up later. *)
let self_coercion = ref ([] : (Path.t * Location.t list ref) list)
(* Helpers for type_cases *)
(* True iff [ty] contains a non-fixed polymorphic variant row with a field
   still in the [Reither] (undecided presence) state.  Uses the marking
   protocol; marks are undone before returning. *)
let contains_variant_either ty =
  let rec loop ty =
    if try_mark_node ty then
      begin match get_desc ty with
        Tvariant row ->
          if not (is_fixed row) then
            List.iter
              (fun (_,f) ->
                match row_field_repr f with Reither _ -> raise Exit | _ -> ())
              (row_fields row);
          iter_row loop row
      | _ ->
          iter_type_expr loop ty
      end
  in
  try loop ty; unmark_type ty; false
  with Exit -> unmark_type ty; true
(* Apply [f] to the immediate sub-patterns of [p] (one level only, no
   recursion). *)
let shallow_iter_ppat f p =
  match p.ppat_desc with
  | Ppat_any | Ppat_var _ | Ppat_constant _ | Ppat_interval _
  | Ppat_construct (_, None)
  | Ppat_extension _
  | Ppat_type _ | Ppat_unpack _ -> ()
  | Ppat_array pats -> List.iter f pats
  | Ppat_or (p1,p2) -> f p1; f p2
  | Ppat_variant (_, arg) -> Option.iter f arg
  | Ppat_tuple lst ->  List.iter f lst
  | Ppat_construct (_, Some (_, p))
  | Ppat_exception p | Ppat_alias (p,_)
  | Ppat_open (_,p)
  | Ppat_constraint (p,_) | Ppat_lazy p -> f p
  | Ppat_record (args, _flag) -> List.iter (fun (_,p) -> f p) args
(* [exists_ppat f p] is true iff some (deep) sub-pattern of [p], including
   [p] itself, satisfies [f].  Stops at the first hit. *)
let exists_ppat f p =
  let exception Found in
  let rec search pat =
    if f pat then raise Found;
    shallow_iter_ppat search pat
  in
  try search p; false with Found -> true
(* True iff the pattern mentions a polymorphic variant ([`A ...] or [#t]). *)
let contains_polymorphic_variant p =
  let is_variant_pat pat =
    match pat.ppat_desc with
    | Ppat_variant _ | Ppat_type _ -> true
    | _ -> false
  in
  exists_ppat is_variant_pat p
(* True iff the (typed) pattern contains a GADT constructor, i.e. one whose
   description has [cstr_generalized] set. *)
let contains_gadt p =
  exists_general_pattern { f = fun (type k) (p : k general_pattern) ->
     match p.pat_desc with
     | Tpat_construct (_, cd, _, _) when cd.cstr_generalized -> true
     | _ -> false } p
(* There are various things that we need to do in presence of GADT constructors
that aren't required if there are none.
However, because of disambiguation, we can't know for sure whether the
patterns contain some GADT constructors. So we conservatively assume that
any constructor might be a GADT constructor. *)
(* Conservative syntactic test: any constructor pattern might turn out to be
   a GADT constructor after disambiguation (see the comment above). *)
let may_contain_gadts p =
  let is_constructor_pat pat =
    match pat.ppat_desc with
    | Ppat_construct _ -> true
    | _ -> false
  in
  exists_ppat is_constructor_pat p
(* For each variant pattern matching a tag that is absent from its row,
   provoke a unification failure so the user gets an error rather than a
   silently dead branch. *)
let check_absent_variant env =
  iter_general_pattern { f = fun (type k) (pat : k general_pattern) ->
    match pat.pat_desc with
    | Tpat_variant (s, arg, row) ->
      let row = !row in
      (* Nothing to do if the tag is (possibly) present, or if the row can
         still grow. *)
      if List.exists (fun (s',fi) -> s = s' && row_field_repr fi <> Rabsent)
          (row_fields row)
      || not (is_fixed row) && not (static_row row)  (* same as Ctype.poly *)
      then () else
      let ty_arg =
        match arg with None -> [] | Some p -> [correct_levels p.pat_type] in
      let fields = [s, rf_either ty_arg ~no_arg:(arg=None) ~matched:true] in
      let row' =
        create_row ~fields
          ~more:(newvar ()) ~closed:false ~fixed:None ~name:None in
      (* Should fail *)
      unify_pat (ref env) {pat with pat_type = newty (Tvariant row')}
        (correct_levels pat.pat_type)
    | _ -> () }
(* Getting proper location of already typed expressions.
Used to avoid confusing locations on type error messages in presence of
type constraints.
For example:
(* Before patch *)
# let x : string = (5 : int);;
^
(* After patch *)
# let x : string = (5 : int);;
^^^^^^^^^
*)
(* Location to report for [exp]: the location of its first constraint or
   coercion annotation if there is one, the expression's own otherwise. *)
let proper_exp_loc exp =
  let annotation_loc = function
    | (Texp_constraint _ | Texp_coerce _), loc, _ -> Some loc
    | _ -> None
  in
  match List.find_map annotation_loc exp.exp_extra with
  | Some loc -> loc
  | None -> exp.exp_loc
(* To find reasonable names for let-bound and lambda-bound idents *)
(* Pick a name for a lambda/let-bound value: the identifier of the first
   variable or alias pattern in the list, or a fresh ident named [default]. *)
let name_pattern default pats =
  let bound_ident p =
    match p.pat_desc with
    | Tpat_var (id, _) | Tpat_alias (_, id, _) -> Some id
    | _ -> None
  in
  match List.find_map bound_ident pats with
  | Some id -> id
  | None -> Ident.create_local default
(* Like [name_pattern], applied to the left-hand sides of a list of cases. *)
let name_cases default cases =
  cases |> List.map (fun case -> case.c_lhs) |> name_pattern default
(* Typing of expressions *)
(* Unify [exp]'s type with [expected_ty], rewrapping a resulting
   [Expr_type_clash] error so it carries the offending expression (for
   better messages) and the annotation-aware location. *)
let unify_exp env exp expected_ty =
  let loc = proper_exp_loc exp in
  try
    unify_exp_types loc env exp.exp_type expected_ty
  with Error(loc, env, Expr_type_clash(err, tfc, None)) ->
    raise (Error(loc, env, Expr_type_clash(err, tfc, Some exp.exp_desc)))
(* If [is_inferred e] is true, [e] will be typechecked without using
the "expected type" provided by the context. *)
(* [is_inferred sexp] is true when [sexp] will be typechecked without using
   the "expected type" provided by the context (see comment above). *)
let rec is_inferred sexp =
  match sexp.pexp_desc with
  | Pexp_ident _ | Pexp_apply _ | Pexp_field _ | Pexp_constraint _
  | Pexp_coerce _ | Pexp_send _ | Pexp_new _ ->
      true
  | Pexp_sequence (_, body) | Pexp_open (_, body) ->
      is_inferred body
  | Pexp_ifthenelse (_, branch_then, Some branch_else) ->
      is_inferred branch_then && is_inferred branch_else
  | _ ->
      false
(* check if the type of %apply or %revapply matches the type expected by
the specialized typing rule for those primitives.
*)
(* Which application primitive we are specializing: [( |> )] or [( @@ )]. *)
type apply_prim =
  | Apply
  | Revapply

(* Check that [typ] (the declared type of %apply or %revapply) has exactly
   the shape ['a -> 'b -> 'c] where the function, argument and result pieces
   are all distinct variables correctly shared — i.e. the shape the
   specialized typing rule assumes. *)
let check_apply_prim_type prim typ =
  match get_desc typ with
  | Tarrow (Nolabel,a,b,_) ->
      begin match get_desc b with
      | Tarrow(Nolabel,c,d,_) ->
          (* Which operand is the function depends on the primitive. *)
          let f, x, res =
            match prim with
            | Apply -> a, c, d
            | Revapply -> c, a, d
          in
          begin match get_desc f with
          | Tarrow(Nolabel,fl,fr,_) ->
              is_Tvar fl && is_Tvar fr && is_Tvar x && is_Tvar res
              && Types.eq_type fl x && Types.eq_type fr res
          | _ -> false
          end
      | _ -> false
      end
  | _ -> false
(* Merge explanation to type clash error *)
(* Run [f], attaching [explanation] to any [Expr_type_clash] error it raises
   that does not already carry one (and whose location is not ghost). *)
let with_explanation explanation f =
  match explanation with
  | None -> f ()
  | Some explanation ->
      try f ()
      with Error (loc', env', Expr_type_clash(err', None, exp'))
        when not loc'.Location.loc_ghost ->
        let err = Expr_type_clash(err', Some explanation, exp') in
        raise (Error (loc', env', err))
let rec type_exp ?recarg env sexp =
(* We now delegate everything to type_expect *)
type_expect ?recarg env sexp (mk_expected (newvar ()))
(* Typing of an expression with an expected type.
This provide better error messages, and allows controlled
propagation of return type information.
In the principal case, [type_expected'] may be at generic_level.
*)
and type_expect ?in_function ?recarg env sexp ty_expected_explained =
let previous_saved_types = Cmt_format.get_saved_types () in
let exp =
Builtin_attributes.warning_scope sexp.pexp_attributes
(fun () ->
type_expect_ ?in_function ?recarg env sexp ty_expected_explained
)
in
Cmt_format.set_saved_types
(Cmt_format.Partial_expression exp :: previous_saved_types);
exp
and type_expect_
?in_function ?(recarg=Rejected)
env sexp ty_expected_explained =
let { ty = ty_expected; explanation } = ty_expected_explained in
let loc = sexp.pexp_loc in
(* Record the expression type before unifying it with the expected type *)
let with_explanation = with_explanation explanation in
let rue exp =
with_explanation (fun () ->
unify_exp env (re exp) (instance ty_expected));
exp
in
match sexp.pexp_desc with
| Pexp_ident lid ->
let path, desc = type_ident env ~recarg lid in
let exp_desc =
match desc.val_kind with
| Val_ivar (_, cl_num) ->
let (self_path, _) =
Env.find_value_by_name
(Longident.Lident ("self-" ^ cl_num)) env
in
Texp_instvar(self_path, path,
match lid.txt with
Longident.Lident txt -> { txt; loc = lid.loc }
| _ -> assert false)
| Val_self (_, _, _, cl_num) ->
let (path, _) =
Env.find_value_by_name (Longident.Lident ("self-" ^ cl_num)) env
in
Texp_ident(path, lid, desc)
| _ ->
Texp_ident(path, lid, desc)
in
rue {
exp_desc; exp_loc = loc; exp_extra = [];
exp_type = instance desc.val_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_constant(Pconst_string (str, _, _) as cst) -> (
let cst = constant_or_raise env loc cst in
(* Terrible hack for format strings *)
let ty_exp = expand_head env (protect_expansion env ty_expected) in
let fmt6_path =
Path.(Pdot (Pident (Ident.create_persistent "CamlinternalFormatBasics"),
"format6"))
in
let is_format = match get_desc ty_exp with
| Tconstr(path, _, _) when Path.same path fmt6_path ->
if !Clflags.principal && get_level ty_exp <> generic_level then
Location.prerr_warning loc
(Warnings.Not_principal "this coercion to format6");
true
| _ -> false
in
if is_format then
let format_parsetree =
{ (type_format loc str env) with pexp_loc = sexp.pexp_loc } in
type_expect ?in_function env format_parsetree ty_expected_explained
else
rue {
exp_desc = Texp_constant cst;
exp_loc = loc; exp_extra = [];
exp_type = instance Predef.type_string;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
)
| Pexp_constant cst ->
let cst = constant_or_raise env loc cst in
rue {
exp_desc = Texp_constant cst;
exp_loc = loc; exp_extra = [];
exp_type = type_constant cst;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_let(Nonrecursive,
[{pvb_pat=spat; pvb_expr=sval; pvb_attributes=[]}], sbody)
when may_contain_gadts spat ->
(* TODO: allow non-empty attributes? *)
type_expect ?in_function env
{sexp with
pexp_desc = Pexp_match (sval, [Ast_helper.Exp.case spat sbody])}
ty_expected_explained
| Pexp_let(rec_flag, spat_sexp_list, sbody) ->
let existential_context =
if rec_flag = Recursive then In_rec
else if List.compare_length_with spat_sexp_list 1 > 0 then In_group
else With_attributes in
let (pat_exp_list, new_env, unpacks) =
type_let existential_context env rec_flag spat_sexp_list true in
let body = type_unpacks new_env unpacks sbody ty_expected_explained in
let () =
if rec_flag = Recursive then
check_recursive_bindings env pat_exp_list
in
re {
exp_desc = Texp_let(rec_flag, pat_exp_list, body);
exp_loc = loc; exp_extra = [];
exp_type = body.exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_fun (l, Some default, spat, sbody) ->
assert(is_optional l); (* default allowed only with optional argument *)
let open Ast_helper in
let default_loc = default.pexp_loc in
let scases = [
Exp.case
(Pat.construct ~loc:default_loc
(mknoloc (Longident.(Ldot (Lident "*predef*", "Some"))))
(Some ([], Pat.var ~loc:default_loc (mknoloc "*sth*"))))
(Exp.ident ~loc:default_loc (mknoloc (Longident.Lident "*sth*")));
Exp.case
(Pat.construct ~loc:default_loc
(mknoloc (Longident.(Ldot (Lident "*predef*", "None"))))
None)
default;
]
in
let sloc =
{ Location.loc_start = spat.ppat_loc.Location.loc_start;
loc_end = default_loc.Location.loc_end;
loc_ghost = true }
in
let smatch =
Exp.match_ ~loc:sloc
(Exp.ident ~loc (mknoloc (Longident.Lident "*opt*")))
scases
in
let pat = Pat.var ~loc:sloc (mknoloc "*opt*") in
let body =
Exp.let_ ~loc Nonrecursive
~attrs:[Attr.mk (mknoloc "#default") (PStr [])]
[Vb.mk spat smatch] sbody
in
type_function ?in_function loc sexp.pexp_attributes env
ty_expected_explained l [Exp.case pat body]
| Pexp_fun (l, None, spat, sbody) ->
type_function ?in_function loc sexp.pexp_attributes env
ty_expected_explained l [Ast_helper.Exp.case spat sbody]
| Pexp_function caselist ->
type_function ?in_function
loc sexp.pexp_attributes env ty_expected_explained Nolabel caselist
| Pexp_apply(sfunct, sargs) ->
assert (sargs <> []);
let rec lower_args seen ty_fun =
let ty = expand_head env ty_fun in
if TypeSet.mem ty seen then () else
match get_desc ty with
Tarrow (_l, ty_arg, ty_fun, _com) ->
(try unify_var env (newvar()) ty_arg
with Unify _ -> assert false);
lower_args (TypeSet.add ty seen) ty_fun
| _ -> ()
in
let type_sfunct sfunct =
begin_def (); (* one more level for non-returning functions *)
if !Clflags.principal then begin_def ();
let funct = type_exp env sfunct in
if !Clflags.principal then begin
end_def ();
generalize_structure funct.exp_type
end;
let ty = instance funct.exp_type in
end_def ();
wrap_trace_gadt_instances env (lower_args TypeSet.empty) ty;
funct
in
let funct, sargs =
let funct = type_sfunct sfunct in
match funct.exp_desc, sargs with
| Texp_ident (_, _,
{val_kind = Val_prim {prim_name="%revapply"}; val_type}),
[Nolabel, sarg; Nolabel, actual_sfunct]
when is_inferred actual_sfunct
&& check_apply_prim_type Revapply val_type ->
type_sfunct actual_sfunct, [Nolabel, sarg]
| Texp_ident (_, _,
{val_kind = Val_prim {prim_name="%apply"}; val_type}),
[Nolabel, actual_sfunct; Nolabel, sarg]
when check_apply_prim_type Apply val_type ->
type_sfunct actual_sfunct, [Nolabel, sarg]
| _ ->
funct, sargs
in
begin_def ();
let (args, ty_res) = type_application env funct sargs in
end_def ();
unify_var env (newvar()) funct.exp_type;
let exp =
{ exp_desc = Texp_apply(funct, args);
exp_loc = loc; exp_extra = [];
exp_type = ty_res;
exp_attributes = sexp.pexp_attributes;
exp_env = env } in
begin
try rue exp
with Error (_, _, Expr_type_clash _) as err ->
Misc.reraise_preserving_backtrace err (fun () ->
check_partial_application ~statement:false exp)
end
| Pexp_match(sarg, caselist) ->
begin_def ();
let arg = type_exp env sarg in
end_def ();
if maybe_expansive arg then lower_contravariant env arg.exp_type;
generalize arg.exp_type;
let cases, partial =
type_cases Computation env
arg.exp_type ty_expected_explained true loc caselist in
re {
exp_desc = Texp_match(arg, cases, partial);
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_try(sbody, caselist) ->
let body = type_expect env sbody ty_expected_explained in
let cases, _ =
type_cases Value env
Predef.type_exn ty_expected_explained false loc caselist in
re {
exp_desc = Texp_try(body, cases);
exp_loc = loc; exp_extra = [];
exp_type = body.exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_tuple sexpl ->
assert (List.length sexpl >= 2);
let subtypes = List.map (fun _ -> newgenvar ()) sexpl in
let to_unify = newgenty (Ttuple subtypes) in
with_explanation (fun () ->
unify_exp_types loc env to_unify (generic_instance ty_expected));
let expl =
List.map2 (fun body ty -> type_expect env body (mk_expected ty))
sexpl subtypes
in
re {
exp_desc = Texp_tuple expl;
exp_loc = loc; exp_extra = [];
(* Keep sharing *)
exp_type = newty (Ttuple (List.map (fun e -> e.exp_type) expl));
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_construct(lid, sarg) ->
type_construct env loc lid sarg ty_expected_explained sexp.pexp_attributes
| Pexp_variant(l, sarg) ->
(* Keep sharing *)
let ty_expected1 = protect_expansion env ty_expected in
let ty_expected0 = instance ty_expected in
begin try match
sarg, get_desc (expand_head env ty_expected1),
get_desc (expand_head env ty_expected0)
with
| Some sarg, Tvariant row, Tvariant row0 ->
begin match
row_field_repr (get_row_field l row),
row_field_repr (get_row_field l row0)
with
Rpresent (Some ty), Rpresent (Some ty0) ->
let arg = type_argument env sarg ty ty0 in
re { exp_desc = Texp_variant(l, Some arg);
exp_loc = loc; exp_extra = [];
exp_type = ty_expected0;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| _ -> raise Exit
end
| _ -> raise Exit
with Exit ->
let arg = Option.map (type_exp env) sarg in
let arg_type = Option.map (fun arg -> arg.exp_type) arg in
let row =
create_row
~fields: [l, rf_present arg_type]
~more: (newvar ())
~closed: false
~fixed: None
~name: None
in
rue {
exp_desc = Texp_variant(l, arg);
exp_loc = loc; exp_extra = [];
exp_type = newty (Tvariant row);
exp_attributes = sexp.pexp_attributes;
exp_env = env }
end
| Pexp_record(lid_sexp_list, opt_sexp) ->
assert (lid_sexp_list <> []);
let opt_exp =
match opt_sexp with
None -> None
| Some sexp ->
if !Clflags.principal then begin_def ();
let exp = type_exp ~recarg env sexp in
if !Clflags.principal then begin
end_def ();
generalize_structure exp.exp_type
end;
Some exp
in
let ty_record, expected_type =
let expected_opath =
match extract_concrete_record env ty_expected with
| Record_type (p0, p, _) -> Some (p0, p, is_principal ty_expected)
| Maybe_a_record_type -> None
| Not_a_record_type ->
let error =
Wrong_expected_kind(Record, Expression explanation, ty_expected)
in
raise (Error (loc, env, error))
in
let opt_exp_opath =
match opt_exp with
| None -> None
| Some exp ->
match extract_concrete_record env exp.exp_type with
| Record_type (p0, p, _) -> Some (p0, p, is_principal exp.exp_type)
| Maybe_a_record_type -> None
| Not_a_record_type ->
let error = Expr_not_a_record_type exp.exp_type in
raise (Error (exp.exp_loc, env, error))
in
match expected_opath, opt_exp_opath with
| None, None -> newvar (), None
| Some _, None -> ty_expected, expected_opath
| Some(_, _, true), Some _ -> ty_expected, expected_opath
| (None | Some (_, _, false)), Some (_, p', _) ->
let decl = Env.find_type p' env in
begin_def ();
let ty = newconstr p' (instance_list decl.type_params) in
end_def ();
generalize_structure ty;
ty, opt_exp_opath
in
let closed = (opt_sexp = None) in
let lbl_exp_list =
wrap_disambiguate "This record expression is expected to have"
(mk_expected ty_record)
(type_label_a_list loc closed env Env.Construct
(fun e k -> k (type_label_exp true env loc ty_record e))
expected_type lid_sexp_list)
(fun x -> x)
in
with_explanation (fun () ->
unify_exp_types loc env (instance ty_record) (instance ty_expected));
(* type_label_a_list returns a list of labels sorted by lbl_pos *)
(* note: check_duplicates would better be implemented in
type_label_a_list directly *)
let rec check_duplicates = function
| (_, lbl1, _) :: (_, lbl2, _) :: _ when lbl1.lbl_pos = lbl2.lbl_pos ->
raise(Error(loc, env, Label_multiply_defined lbl1.lbl_name))
| _ :: rem ->
check_duplicates rem
| [] -> ()
in
check_duplicates lbl_exp_list;
let opt_exp, label_definitions =
let (_lid, lbl, _lbl_exp) = List.hd lbl_exp_list in
let matching_label lbl =
List.find
(fun (_, lbl',_) -> lbl'.lbl_pos = lbl.lbl_pos)
lbl_exp_list
in
match opt_exp with
None ->
let label_definitions =
Array.map (fun lbl ->
match matching_label lbl with
| (lid, _lbl, lbl_exp) ->
Overridden (lid, lbl_exp)
| exception Not_found ->
let present_indices =
List.map (fun (_, lbl, _) -> lbl.lbl_pos) lbl_exp_list
in
let label_names = extract_label_names env ty_expected in
let rec missing_labels n = function
[] -> []
| lbl :: rem ->
if List.mem n present_indices
then missing_labels (n + 1) rem
else lbl :: missing_labels (n + 1) rem
in
let missing = missing_labels 0 label_names in
raise(Error(loc, env, Label_missing missing)))
lbl.lbl_all
in
None, label_definitions
| Some exp ->
let ty_exp = instance exp.exp_type in
let unify_kept lbl =
let _, ty_arg1, ty_res1 = instance_label false lbl in
unify_exp_types exp.exp_loc env ty_exp ty_res1;
match matching_label lbl with
| lid, _lbl, lbl_exp ->
(* do not connect result types for overridden labels *)
Overridden (lid, lbl_exp)
| exception Not_found -> begin
let _, ty_arg2, ty_res2 = instance_label false lbl in
unify_exp_types loc env ty_arg1 ty_arg2;
with_explanation (fun () ->
unify_exp_types loc env (instance ty_expected) ty_res2);
Kept (ty_arg1, lbl.lbl_mut)
end
in
let label_definitions = Array.map unify_kept lbl.lbl_all in
Some {exp with exp_type = ty_exp}, label_definitions
in
let num_fields =
match lbl_exp_list with [] -> assert false
| (_, lbl,_)::_ -> Array.length lbl.lbl_all in
if opt_sexp <> None && List.length lid_sexp_list = num_fields then
Location.prerr_warning loc Warnings.Useless_record_with;
let label_descriptions, representation =
let (_, { lbl_all; lbl_repres }, _) = List.hd lbl_exp_list in
lbl_all, lbl_repres
in
let fields =
Array.map2 (fun descr def -> descr, def)
label_descriptions label_definitions
in
re {
exp_desc = Texp_record {
fields; representation;
extended_expression = opt_exp
};
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_field(srecord, lid) ->
let (record, label, _) =
type_label_access env srecord Env.Projection lid
in
let (_, ty_arg, ty_res) = instance_label false label in
unify_exp env record ty_res;
rue {
exp_desc = Texp_field(record, lid, label);
exp_loc = loc; exp_extra = [];
exp_type = ty_arg;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_setfield(srecord, lid, snewval) ->
let (record, label, expected_type) =
type_label_access env srecord Env.Mutation lid in
let ty_record =
if expected_type = None then newvar () else record.exp_type in
let (label_loc, label, newval) =
type_label_exp false env loc ty_record (lid, label, snewval) in
unify_exp env record ty_record;
if label.lbl_mut = Immutable then
raise(Error(loc, env, Label_not_mutable lid.txt));
rue {
exp_desc = Texp_setfield(record, label_loc, label, newval);
exp_loc = loc; exp_extra = [];
exp_type = instance Predef.type_unit;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_array(sargl) ->
let ty = newgenvar() in
let to_unify = Predef.type_array ty in
with_explanation (fun () ->
unify_exp_types loc env to_unify (generic_instance ty_expected));
let argl =
List.map (fun sarg -> type_expect env sarg (mk_expected ty)) sargl in
re {
exp_desc = Texp_array argl;
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_ifthenelse(scond, sifso, sifnot) ->
let cond = type_expect env scond
(mk_expected ~explanation:If_conditional Predef.type_bool) in
begin match sifnot with
None ->
let ifso = type_expect env sifso
(mk_expected ~explanation:If_no_else_branch Predef.type_unit) in
rue {
exp_desc = Texp_ifthenelse(cond, ifso, None);
exp_loc = loc; exp_extra = [];
exp_type = ifso.exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Some sifnot ->
let ifso = type_expect env sifso ty_expected_explained in
let ifnot = type_expect env sifnot ty_expected_explained in
(* Keep sharing *)
unify_exp env ifnot ifso.exp_type;
re {
exp_desc = Texp_ifthenelse(cond, ifso, Some ifnot);
exp_loc = loc; exp_extra = [];
exp_type = ifso.exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
end
| Pexp_sequence(sexp1, sexp2) ->
let exp1 = type_statement ~explanation:Sequence_left_hand_side
env sexp1 in
let exp2 = type_expect env sexp2 ty_expected_explained in
re {
exp_desc = Texp_sequence(exp1, exp2);
exp_loc = loc; exp_extra = [];
exp_type = exp2.exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_while(scond, sbody) ->
let cond = type_expect env scond
(mk_expected ~explanation:While_loop_conditional Predef.type_bool) in
let body = type_statement ~explanation:While_loop_body env sbody in
rue {
exp_desc = Texp_while(cond, body);
exp_loc = loc; exp_extra = [];
exp_type = instance Predef.type_unit;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_for(param, slow, shigh, dir, sbody) ->
let low = type_expect env slow
(mk_expected ~explanation:For_loop_start_index Predef.type_int) in
let high = type_expect env shigh
(mk_expected ~explanation:For_loop_stop_index Predef.type_int) in
let id, new_env =
match param.ppat_desc with
| Ppat_any -> Ident.create_local "_for", env
| Ppat_var {txt} ->
Env.enter_value txt
{val_type = instance Predef.type_int;
val_attributes = [];
val_kind = Val_reg;
val_loc = loc;
val_uid = Uid.mk ~current_unit:(Env.get_unit_name ());
} env
~check:(fun s -> Warnings.Unused_for_index s)
| _ ->
raise (Error (param.ppat_loc, env, Invalid_for_loop_index))
in
let body = type_statement ~explanation:For_loop_body new_env sbody in
rue {
exp_desc = Texp_for(id, param, low, high, dir, body);
exp_loc = loc; exp_extra = [];
exp_type = instance Predef.type_unit;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_constraint (sarg, sty) ->
(* Pretend separate = true, 1% slowdown for lablgtk *)
begin_def ();
let cty = Typetexp.transl_simple_type env false sty in
let ty = cty.ctyp_type in
end_def ();
generalize_structure ty;
let (arg, ty') = (type_argument env sarg ty (instance ty), instance ty) in
rue {
exp_desc = arg.exp_desc;
exp_loc = arg.exp_loc;
exp_type = ty';
exp_attributes = arg.exp_attributes;
exp_env = env;
exp_extra =
(Texp_constraint cty, loc, sexp.pexp_attributes) :: arg.exp_extra;
}
| Pexp_coerce(sarg, sty, sty') ->
(* Pretend separate = true, 1% slowdown for lablgtk *)
(* Also see PR#7199 for a problem with the following:
let separate = !Clflags.principal || Env.has_local_constraints env in*)
let (arg, ty',cty,cty') =
match sty with
| None ->
let (cty', ty', force) =
Typetexp.transl_simple_type_delayed env sty'
in
begin_def ();
let arg = type_exp env sarg in
end_def ();
let tv = newvar () in
let gen = generalizable (get_level tv) arg.exp_type in
unify_var env tv arg.exp_type;
begin match arg.exp_desc, !self_coercion, get_desc ty' with
Texp_ident(_, _, {val_kind=Val_self _}), (path,r) :: _,
Tconstr(path',_,_) when Path.same path path' ->
(* prerr_endline "self coercion"; *)
r := loc :: !r;
force ()
| _ when free_variables ~env arg.exp_type = []
&& free_variables ~env ty' = [] ->
if not gen && (* first try a single coercion *)
let snap = snapshot () in
let ty, _b = enlarge_type env ty' in
try
force (); Ctype.unify env arg.exp_type ty; true
with Unify _ ->
backtrack snap; false
then ()
else begin try
let force' = subtype env arg.exp_type ty' in
force (); force' ();
if not gen && !Clflags.principal then
Location.prerr_warning loc
(Warnings.Not_principal "this ground coercion");
with Subtype err ->
(* prerr_endline "coercion failed"; *)
raise (Error(loc, env, Not_subtype err))
end;
| _ ->
let ty, b = enlarge_type env ty' in
force ();
begin try Ctype.unify env arg.exp_type ty with Unify err ->
let expanded = full_expand ~may_forget_scope:true env ty' in
raise(Error(sarg.pexp_loc, env,
Coercion_failure({ty = ty'; expanded}, err, b)))
end
end;
(arg, ty', None, cty')
| Some sty ->
begin_def ();
let (cty, ty, force) =
Typetexp.transl_simple_type_delayed env sty
and (cty', ty', force') =
Typetexp.transl_simple_type_delayed env sty'
in
end_def ();
generalize_structure ty;
generalize_structure ty';
begin try
let force'' = subtype env (instance ty) (instance ty') in
force (); force' (); force'' ()
with Subtype err ->
raise (Error(loc, env, Not_subtype err))
end;
(type_argument env sarg ty (instance ty),
instance ty', Some cty, cty')
in
rue {
exp_desc = arg.exp_desc;
exp_loc = arg.exp_loc;
exp_type = ty';
exp_attributes = arg.exp_attributes;
exp_env = env;
exp_extra = (Texp_coerce (cty, cty'), loc, sexp.pexp_attributes) ::
arg.exp_extra;
}
| Pexp_send (e, {txt=met}) ->
if !Clflags.principal then begin_def ();
let obj = type_exp env e in
let (meth, typ) =
match obj.exp_desc with
| Texp_ident(_, _, {val_kind = Val_self(sign, meths, _, _)}) ->
let id, typ =
match meths with
| Self_concrete meths ->
let id =
match Meths.find met meths with
| id -> id
| exception Not_found ->
let valid_methods =
Meths.fold (fun lab _ acc -> lab :: acc) meths []
in
raise (Error(e.pexp_loc, env,
Undefined_self_method (met, valid_methods)))
in
let typ = Btype.method_type met sign in
id, typ
| Self_virtual meths_ref -> begin
match Meths.find met !meths_ref with
| id -> id, Btype.method_type met sign
| exception Not_found ->
let id = Ident.create_local met in
let ty = newvar () in
meths_ref := Meths.add met id !meths_ref;
add_method env met Private Virtual ty sign;
Location.prerr_warning loc
(Warnings.Undeclared_virtual_method met);
id, ty
end
in
Tmeth_val id, typ
| Texp_ident(_, _, {val_kind = Val_anc (sign, meths, cl_num)}) ->
let id =
match Meths.find met meths with
| id -> id
| exception Not_found ->
let valid_methods =
Meths.fold (fun lab _ acc -> lab :: acc) meths []
in
raise (Error(e.pexp_loc, env,
Undefined_self_method (met, valid_methods)))
in
let typ = Btype.method_type met sign in
let (self_path, _) =
Env.find_value_by_name
(Longident.Lident ("self-" ^ cl_num)) env
in
Tmeth_ancestor(id, self_path), typ
| _ ->
let ty =
match filter_method env met obj.exp_type with
| ty -> ty
| exception Filter_method_failed err ->
let error =
match err with
| Unification_error err ->
Expr_type_clash(err, explanation, None)
| Not_an_object ty ->
Not_an_object(ty, explanation)
| Not_a_method ->
let valid_methods =
match get_desc (expand_head env obj.exp_type) with
| Tobject (fields, _) ->
let (fields, _) = Ctype.flatten_fields fields in
let collect_fields li (meth, meth_kind, _meth_ty) =
if field_kind_repr meth_kind = Fpublic
then meth::li else li
in
Some (List.fold_left collect_fields [] fields)
| _ -> None
in
Undefined_method(obj.exp_type, met, valid_methods)
in
raise (Error(e.pexp_loc, env, error))
in
Tmeth_name met, ty
in
if !Clflags.principal then begin
end_def ();
generalize_structure typ;
end;
let typ =
match get_desc typ with
| Tpoly (ty, []) ->
instance ty
| Tpoly (ty, tl) ->
if !Clflags.principal && get_level typ <> generic_level then
Location.prerr_warning loc
(Warnings.Not_principal "this use of a polymorphic method");
snd (instance_poly false tl ty)
| Tvar _ ->
let ty' = newvar () in
unify env (instance typ) (newty(Tpoly(ty',[])));
(* if not !Clflags.nolabels then
Location.prerr_warning loc (Warnings.Unknown_method met); *)
ty'
| _ ->
assert false
in
rue {
exp_desc = Texp_send(obj, meth);
exp_loc = loc; exp_extra = [];
exp_type = typ;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_new cl ->
let (cl_path, cl_decl) = Env.lookup_class ~loc:cl.loc cl.txt env in
begin match cl_decl.cty_new with
None ->
raise(Error(loc, env, Virtual_class cl.txt))
| Some ty ->
rue {
exp_desc = Texp_new (cl_path, cl, cl_decl);
exp_loc = loc; exp_extra = [];
exp_type = instance ty;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
end
| Pexp_setinstvar (lab, snewval) -> begin
let (path, mut, cl_num, ty) =
Env.lookup_instance_variable ~loc lab.txt env
in
match mut with
| Mutable ->
let newval =
type_expect env snewval (mk_expected (instance ty))
in
let (path_self, _) =
Env.find_value_by_name (Longident.Lident ("self-" ^ cl_num)) env
in
rue {
exp_desc = Texp_setinstvar(path_self, path, lab, newval);
exp_loc = loc; exp_extra = [];
exp_type = instance Predef.type_unit;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| _ ->
raise(Error(loc, env, Instance_variable_not_mutable lab.txt))
end
| Pexp_override lst ->
let _ =
List.fold_right
(fun (lab, _) l ->
if List.exists (fun l -> l.txt = lab.txt) l then
raise(Error(loc, env,
Value_multiply_overridden lab.txt));
lab::l)
lst
[] in
begin match
try
Env.find_value_by_name (Longident.Lident "selfpat-*") env,
Env.find_value_by_name (Longident.Lident "self-*") env
with Not_found ->
raise(Error(loc, env, Outside_class))
with
(_, {val_type = self_ty; val_kind = Val_self (sign, _, vars, _)}),
(path_self, _) ->
let type_override (lab, snewval) =
begin try
let id = Vars.find lab.txt vars in
let ty = Btype.instance_variable_type lab.txt sign in
(id, lab, type_expect env snewval (mk_expected (instance ty)))
with
Not_found ->
let vars = Vars.fold (fun var _ li -> var::li) vars [] in
raise(Error(loc, env,
Unbound_instance_variable (lab.txt, vars)))
end
in
let modifs = List.map type_override lst in
rue {
exp_desc = Texp_override(path_self, modifs);
exp_loc = loc; exp_extra = [];
exp_type = self_ty;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| _ ->
assert false
end
| Pexp_letmodule(name, smodl, sbody) ->
let ty = newvar() in
(* remember original level *)
begin_def ();
let context = Typetexp.narrow () in
let modl, md_shape = !type_module env smodl in
Mtype.lower_nongen (get_level ty) modl.mod_type;
let pres =
match modl.mod_type with
| Mty_alias _ -> Mp_absent
| _ -> Mp_present
in
let scope = create_scope () in
let md =
{ md_type = modl.mod_type; md_attributes = []; md_loc = name.loc;
md_uid = Uid.mk ~current_unit:(Env.get_unit_name ()); }
in
let (id, new_env) =
match name.txt with
| None -> None, env
| Some name ->
let id, env =
Env.enter_module_declaration ~scope ~shape:md_shape name pres md env
in
Some id, env
in
Typetexp.widen context;
(* ideally, we should catch Expr_type_clash errors
in type_expect triggered by escaping identifiers from the local module
and refine them into Scoping_let_module errors
*)
let body = type_expect new_env sbody ty_expected_explained in
(* go back to original level *)
end_def ();
Ctype.unify_var new_env ty body.exp_type;
re {
exp_desc = Texp_letmodule(id, name, pres, modl, body);
exp_loc = loc; exp_extra = [];
exp_type = ty;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_letexception(cd, sbody) ->
let (cd, newenv) = Typedecl.transl_exception env cd in
let body = type_expect newenv sbody ty_expected_explained in
re {
exp_desc = Texp_letexception(cd, body);
exp_loc = loc; exp_extra = [];
exp_type = body.exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_assert (e) ->
let cond = type_expect env e
(mk_expected ~explanation:Assert_condition Predef.type_bool) in
let exp_type =
match cond.exp_desc with
| Texp_construct(_, {cstr_name="false"}, _) ->
instance ty_expected
| _ ->
instance Predef.type_unit
in
rue {
exp_desc = Texp_assert cond;
exp_loc = loc; exp_extra = [];
exp_type;
exp_attributes = sexp.pexp_attributes;
exp_env = env;
}
| Pexp_lazy e ->
let ty = newgenvar () in
let to_unify = Predef.type_lazy_t ty in
with_explanation (fun () ->
unify_exp_types loc env to_unify (generic_instance ty_expected));
let arg = type_expect env e (mk_expected ty) in
re {
exp_desc = Texp_lazy arg;
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_attributes = sexp.pexp_attributes;
exp_env = env;
}
| Pexp_object s ->
let desc, meths = !type_object env loc s in
rue {
exp_desc = Texp_object (desc, meths);
exp_loc = loc; exp_extra = [];
exp_type = desc.cstr_type.csig_self;
exp_attributes = sexp.pexp_attributes;
exp_env = env;
}
| Pexp_poly(sbody, sty) ->
if !Clflags.principal then begin_def ();
let ty, cty =
match sty with None -> protect_expansion env ty_expected, None
| Some sty ->
let sty = Ast_helper.Typ.force_poly sty in
let cty = Typetexp.transl_simple_type env false sty in
cty.ctyp_type, Some cty
in
if !Clflags.principal then begin
end_def ();
generalize_structure ty
end;
if sty <> None then
with_explanation (fun () ->
unify_exp_types loc env (instance ty) (instance ty_expected));
let exp =
match get_desc (expand_head env ty) with
Tpoly (ty', []) ->
let exp = type_expect env sbody (mk_expected ty') in
{ exp with exp_type = instance ty }
| Tpoly (ty', tl) ->
(* One more level to generalize locally *)
begin_def ();
if !Clflags.principal then begin_def ();
let vars, ty'' = instance_poly true tl ty' in
if !Clflags.principal then begin
end_def ();
generalize_structure ty''
end;
let exp = type_expect env sbody (mk_expected ty'') in
end_def ();
generalize_and_check_univars env "method" exp ty_expected vars;
{ exp with exp_type = instance ty }
| Tvar _ ->
let exp = type_exp env sbody in
let exp = {exp with exp_type = newty (Tpoly (exp.exp_type, []))} in
unify_exp env exp ty;
exp
| _ -> assert false
in
re { exp with exp_extra =
(Texp_poly cty, loc, sexp.pexp_attributes) :: exp.exp_extra }
| Pexp_newtype({txt=name}, sbody) ->
let ty =
if Typetexp.valid_tyvar_name name then
newvar ~name ()
else
newvar ()
in
(* remember original level *)
begin_def ();
(* Create a fake abstract type declaration for name. *)
let decl = new_local_type ~loc () in
let scope = create_scope () in
let (id, new_env) = Env.enter_type ~scope name decl env in
let body = type_exp new_env sbody in
(* Replace every instance of this type constructor in the resulting
type. *)
let seen = Hashtbl.create 8 in
let rec replace t =
if Hashtbl.mem seen (get_id t) then ()
else begin
Hashtbl.add seen (get_id t) ();
match get_desc t with
| Tconstr (Path.Pident id', _, _) when id == id' -> link_type t ty
| _ -> Btype.iter_type_expr replace t
end
in
let ety = Subst.type_expr Subst.identity body.exp_type in
replace ety;
(* back to original level *)
end_def ();
(* lower the levels of the result type *)
(* unify_var env ty ety; *)
(* non-expansive if the body is non-expansive, so we don't introduce
any new extra node in the typed AST. *)
rue { body with exp_loc = loc; exp_type = ety;
exp_extra =
(Texp_newtype name, loc, sexp.pexp_attributes) :: body.exp_extra }
| Pexp_pack m ->
let (p, fl) =
match get_desc (Ctype.expand_head env (instance ty_expected)) with
Tpackage (p, fl) ->
if !Clflags.principal &&
get_level (Ctype.expand_head env
(protect_expansion env ty_expected))
< Btype.generic_level
then
Location.prerr_warning loc
(Warnings.Not_principal "this module packing");
(p, fl)
| Tvar _ ->
raise (Error (loc, env, Cannot_infer_signature))
| _ ->
raise (Error (loc, env, Not_a_packed_module ty_expected))
in
let (modl, fl') = !type_package env m p fl in
rue {
exp_desc = Texp_pack modl;
exp_loc = loc; exp_extra = [];
exp_type = newty (Tpackage (p, fl'));
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| Pexp_open (od, e) ->
let tv = newvar () in
let (od, _, newenv) = !type_open_decl env od in
let exp = type_expect newenv e ty_expected_explained in
(* Force the return type to be well-formed in the original
environment. *)
unify_var newenv tv exp.exp_type;
re {
exp_desc = Texp_open (od, exp);
exp_type = exp.exp_type;
exp_loc = loc;
exp_extra = [];
exp_attributes = sexp.pexp_attributes;
exp_env = env;
}
| Pexp_letop{ let_ = slet; ands = sands; body = sbody } ->
let rec loop spat_acc ty_acc sands =
match sands with
| [] -> spat_acc, ty_acc
| { pbop_pat = spat; _} :: rest ->
let ty = newvar () in
let loc = { slet.pbop_op.loc with Location.loc_ghost = true } in
let spat_acc = Ast_helper.Pat.tuple ~loc [spat_acc; spat] in
let ty_acc = newty (Ttuple [ty_acc; ty]) in
loop spat_acc ty_acc rest
in
if !Clflags.principal then begin_def ();
let let_loc = slet.pbop_op.loc in
let op_path, op_desc = type_binding_op_ident env slet.pbop_op in
let op_type = instance op_desc.val_type in
let spat_params, ty_params = loop slet.pbop_pat (newvar ()) sands in
let ty_func_result = newvar () in
let ty_func =
newty (Tarrow(Nolabel, ty_params, ty_func_result, commu_ok)) in
let ty_result = newvar () in
let ty_andops = newvar () in
let ty_op =
newty (Tarrow(Nolabel, ty_andops,
newty (Tarrow(Nolabel, ty_func, ty_result, commu_ok)), commu_ok))
in
begin try
unify env op_type ty_op
with Unify err ->
raise(Error(let_loc, env, Letop_type_clash(slet.pbop_op.txt, err)))
end;
if !Clflags.principal then begin
end_def ();
generalize_structure ty_andops;
generalize_structure ty_params;
generalize_structure ty_func_result;
generalize_structure ty_result
end;
let exp, ands = type_andops env slet.pbop_exp sands ty_andops in
let scase = Ast_helper.Exp.case spat_params sbody in
let cases, partial =
type_cases Value env
ty_params (mk_expected ty_func_result) true loc [scase]
in
let body =
match cases with
| [case] -> case
| _ -> assert false
in
let param = name_cases "param" cases in
let let_ =
{ bop_op_name = slet.pbop_op;
bop_op_path = op_path;
bop_op_val = op_desc;
bop_op_type = op_type;
bop_exp = exp;
bop_loc = slet.pbop_loc; }
in
let desc =
Texp_letop{let_; ands; param; body; partial}
in
rue { exp_desc = desc;
exp_loc = sexp.pexp_loc;
exp_extra = [];
exp_type = instance ty_result;
exp_env = env;
exp_attributes = sexp.pexp_attributes; }
| Pexp_extension ({ txt = ("ocaml.extension_constructor"
|"extension_constructor"); _ },
payload) ->
begin match payload with
| PStr [ { pstr_desc =
Pstr_eval ({ pexp_desc = Pexp_construct (lid, None); _ }, _)
} ] ->
let path =
let cd =
Env.lookup_constructor Env.Positive ~loc:lid.loc lid.txt env
in
match cd.cstr_tag with
| Cstr_extension (path, _) -> path
| _ -> raise (Error (lid.loc, env, Not_an_extension_constructor))
in
rue {
exp_desc = Texp_extension_constructor (lid, path);
exp_loc = loc; exp_extra = [];
exp_type = instance Predef.type_extension_constructor;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
| _ ->
raise (Error (loc, env, Invalid_extension_constructor_payload))
end
| Pexp_extension ext ->
raise (Error_forward (Builtin_attributes.error_of_extension ext))
| Pexp_unreachable ->
re { exp_desc = Texp_unreachable;
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
and type_ident env ?(recarg=Rejected) lid =
  (* Look up the value [lid] and enforce the inlined-record escape rule:
     a value whose type is an inlined-record constructor type may only be
     used where such an argument is expected ([recarg] tells us which). *)
  let (path, desc) = Env.lookup_value ~loc:lid.loc lid.txt env in
  let ty_desc = get_desc desc.val_type in
  let is_recarg =
    match ty_desc with
    | Tconstr (p, _, _) -> Path.is_constructor_typath p
    | _ -> false
  in
  (match recarg, is_recarg with
   | Allowed, _ | Required, true | Rejected, false ->
       ()
   | Rejected, true ->
       raise (Error (lid.loc, env, Inlined_record_escape))
   | Required, false ->
       begin match ty_desc with
       | Tvar _ | Tconstr _ ->
           raise (Error (lid.loc, env, Inlined_record_escape))
       | _ -> () (* will fail later *)
       end);
  (path, desc)
and type_binding_op_ident env s =
  (* Resolve the identifier of a binding operator (as in [let+]/[and+]).
     A method of the current object is rerouted through the "self-<n>"
     binding; an instance variable is not a legal operator name. *)
  let lid = Location.mkloc (Longident.Lident s.txt) s.loc in
  let (path, desc) = type_ident env lid in
  let resolved_path =
    match desc.val_kind with
    | Val_ivar _ ->
        fatal_error "Illegal name for instance variable"
    | Val_self (_, _, _, cl_num) ->
        fst (Env.find_value_by_name
               (Longident.Lident ("self-" ^ cl_num)) env)
    | _ ->
        path
  in
  (resolved_path, desc)
and type_function ?(in_function : (Location.t * type_expr) option)
    loc attrs env ty_expected_explained arg_label caselist =
  (* Type-check one [fun]/[function] layer taking an [arg_label] argument
     and matching with [caselist].  [in_function], when present, is the
     location and type of the whole enclosing multi-argument function, so
     arity errors can point at the full function rather than this layer. *)
  let { ty = ty_expected; explanation } = ty_expected_explained in
  let (loc_fun, ty_fun) =
    match in_function with Some p -> p
    | None -> (loc, instance ty_expected)
  in
  (* In principal mode (or with local constraints) check the arrow type at
     a raised level so its structure can be generalized afterwards. *)
  let separate = !Clflags.principal || Env.has_local_constraints env in
  if separate then begin_def ();
  let (ty_arg, ty_res) =
    try filter_arrow env (instance ty_expected) arg_label
    with Filter_arrow_failed err ->
      (* Translate the low-level filtering failure into a user-facing
         error, distinguishing "not a function at all" from "too many
         arguments" using [in_function]. *)
      let err = match err with
        | Unification_error unif_err ->
            Expr_type_clash(unif_err, explanation, None)
        | Label_mismatch { got; expected; expected_type} ->
            Abstract_wrong_label { got; expected; expected_type; explanation }
        | Not_a_function -> begin
            match in_function with
            | Some _ -> Too_many_arguments(ty_fun, explanation)
            | None -> Not_a_function(ty_fun, explanation)
          end
      in
      raise (Error(loc_fun, env, err))
  in
  let ty_arg =
    (* An optional argument is passed as an option underneath; the pattern
       in [caselist] sees [type_option tv], not [ty_arg] itself. *)
    if is_optional arg_label then
      let tv = newvar() in
      begin
        try unify env ty_arg (type_option tv)
        with Unify _ -> assert false
      end;
      type_option tv
    else ty_arg
  in
  if separate then begin
    end_def ();
    generalize_structure ty_arg;
    generalize_structure ty_res
  end;
  let cases, partial =
    type_cases Value ~in_function:(loc_fun,ty_fun) env
      ty_arg (mk_expected ty_res) true loc caselist in
  (* Warn when an optional argument can never be erased because every
     remaining arrow in the result type is labelled. *)
  let not_nolabel_function ty =
    let ls, tvar = list_labels env ty in
    List.for_all ((<>) Nolabel) ls && not tvar
  in
  if is_optional arg_label && not_nolabel_function ty_res then
    Location.prerr_warning (List.hd cases).c_lhs.pat_loc
      Warnings.Unerasable_optional_argument;
  let param = name_cases "param" cases in
  re {
    exp_desc = Texp_function { arg_label; param; cases; partial; };
    exp_loc = loc; exp_extra = [];
    exp_type =
      instance (newgenty (Tarrow(arg_label, ty_arg, ty_res, commu_ok)));
    exp_attributes = attrs;
    exp_env = env }
and type_label_access env srecord usage lid =
  (* Type the record expression of a field access/assignment and
     disambiguate the label [lid] against the record's type.
     Returns the typed record, the chosen label description, and the
     expected record type (if one could be extracted). *)
  if !Clflags.principal then begin_def ();
  (* [recarg:Allowed]: the record expression may itself be an inlined
     record constructor argument. *)
  let record = type_exp ~recarg:Allowed env srecord in
  if !Clflags.principal then begin
    end_def ();
    generalize_structure record.exp_type
  end;
  let ty_exp = record.exp_type in
  let expected_type =
    match extract_concrete_record env ty_exp with
    | Record_type(p0, p, _) ->
        (* Principality of the record type decides whether disambiguation
           may rely on it without warning. *)
        Some(p0, p, is_principal ty_exp)
    | Maybe_a_record_type -> None
    | Not_a_record_type ->
        let error = Expr_not_a_record_type ty_exp in
        raise (Error (record.exp_loc, env, error))
  in
  let labels = Env.lookup_all_labels ~loc:lid.loc usage lid.txt env in
  let label =
    wrap_disambiguate "This expression has" (mk_expected ty_exp)
      (Label.disambiguate usage lid env expected_type) labels in
  (record, label, expected_type)
(* Typing format strings for printing or reading.
These formats are used by functions in modules Printf, Format, and Scanf.
(Handling of * modifiers contributed by Thorsten Ohl.) *)
and type_format loc str env =
  (* Elaborate the format-string literal [str] into the Parsetree
     expression that builds the corresponding
     [CamlinternalFormatBasics.format6] value: the string is parsed by
     [CamlinternalFormat.fmt_ebb_of_string], and the resulting [fmt] GADT
     is reflected, constructor by constructor, into applications of the
     [CamlinternalFormatBasics] constructors.  [Failure] from the parser
     is turned into an [Invalid_format] error at [loc]. *)
  let loc = {loc with Location.loc_ghost = true} in
  try
    CamlinternalFormatBasics.(CamlinternalFormat.(
      (* Smart constructors for ghost-located Parsetree fragments. *)
      let mk_exp_loc pexp_desc = {
        pexp_desc = pexp_desc;
        pexp_loc = loc;
        pexp_loc_stack = [];
        pexp_attributes = [];
      } and mk_lid_loc lid = {
        txt = lid;
        loc = loc;
      } in
      (* [mk_constr name args] builds
         [CamlinternalFormatBasics.name (arg1, ..., argn)], tupling the
         arguments when there are two or more. *)
      let mk_constr name args =
        let lid = Longident.(Ldot(Lident "CamlinternalFormatBasics", name)) in
        let arg = match args with
          | [] -> None
          | [ e ] -> Some e
          | _ :: _ :: _ -> Some (mk_exp_loc (Pexp_tuple args)) in
        mk_exp_loc (Pexp_construct (mk_lid_loc lid, arg)) in
      let mk_cst cst = mk_exp_loc (Pexp_constant cst) in
      let mk_int n = mk_cst (Pconst_integer (Int.to_string n, None))
      and mk_string str = mk_cst (Pconst_string (str, loc, None))
      and mk_char chr = mk_cst (Pconst_char chr) in
      (* One reflection function per auxiliary type of the format GADT;
         each mirrors the constructors of its type one-to-one. *)
      let rec mk_formatting_lit fmting = match fmting with
        | Close_box ->
            mk_constr "Close_box" []
        | Close_tag ->
            mk_constr "Close_tag" []
        | Break (org, ns, ni) ->
            mk_constr "Break" [ mk_string org; mk_int ns; mk_int ni ]
        | FFlush ->
            mk_constr "FFlush" []
        | Force_newline ->
            mk_constr "Force_newline" []
        | Flush_newline ->
            mk_constr "Flush_newline" []
        | Magic_size (org, sz) ->
            mk_constr "Magic_size" [ mk_string org; mk_int sz ]
        | Escaped_at ->
            mk_constr "Escaped_at" []
        | Escaped_percent ->
            mk_constr "Escaped_percent" []
        | Scan_indic c ->
            mk_constr "Scan_indic" [ mk_char c ]
      and mk_formatting_gen : type a b c d e f .
          (a, b, c, d, e, f) formatting_gen -> Parsetree.expression =
        fun fmting -> match fmting with
        | Open_tag (Format (fmt', str')) ->
            mk_constr "Open_tag" [ mk_format fmt' str' ]
        | Open_box (Format (fmt', str')) ->
            mk_constr "Open_box" [ mk_format fmt' str' ]
      and mk_format : type a b c d e f .
          (a, b, c, d, e, f) CamlinternalFormatBasics.fmt -> string ->
          Parsetree.expression = fun fmt str ->
        mk_constr "Format" [ mk_fmt fmt; mk_string str ]
      and mk_side side = match side with
        | Left -> mk_constr "Left" []
        | Right -> mk_constr "Right" []
        | Zeros -> mk_constr "Zeros" []
      and mk_iconv iconv = match iconv with
        | Int_d -> mk_constr "Int_d" [] | Int_pd -> mk_constr "Int_pd" []
        | Int_sd -> mk_constr "Int_sd" [] | Int_i -> mk_constr "Int_i" []
        | Int_pi -> mk_constr "Int_pi" [] | Int_si -> mk_constr "Int_si" []
        | Int_x -> mk_constr "Int_x" [] | Int_Cx -> mk_constr "Int_Cx" []
        | Int_X -> mk_constr "Int_X" [] | Int_CX -> mk_constr "Int_CX" []
        | Int_o -> mk_constr "Int_o" [] | Int_Co -> mk_constr "Int_Co" []
        | Int_u -> mk_constr "Int_u" [] | Int_Cd -> mk_constr "Int_Cd" []
        | Int_Ci -> mk_constr "Int_Ci" [] | Int_Cu -> mk_constr "Int_Cu" []
      and mk_fconv fconv =
        (* A float conversion is a pair (flag, kind); reflect it as a
           Parsetree tuple of the two reflected components. *)
        let flag = match fst fconv with
        | Float_flag_ -> mk_constr "Float_flag_" []
        | Float_flag_p -> mk_constr "Float_flag_p" []
        | Float_flag_s -> mk_constr "Float_flag_s" [] in
        let kind = match snd fconv with
        | Float_f -> mk_constr "Float_f" []
        | Float_e -> mk_constr "Float_e" []
        | Float_E -> mk_constr "Float_E" []
        | Float_g -> mk_constr "Float_g" []
        | Float_G -> mk_constr "Float_G" []
        | Float_h -> mk_constr "Float_h" []
        | Float_H -> mk_constr "Float_H" []
        | Float_F -> mk_constr "Float_F" []
        | Float_CF -> mk_constr "Float_CF" [] in
        mk_exp_loc (Pexp_tuple [flag; kind])
      and mk_counter cnt = match cnt with
        | Line_counter -> mk_constr "Line_counter" []
        | Char_counter -> mk_constr "Char_counter" []
        | Token_counter -> mk_constr "Token_counter" []
      and mk_int_opt n_opt = match n_opt with
        | None ->
            let lid_loc = mk_lid_loc (Longident.Lident "None") in
            mk_exp_loc (Pexp_construct (lid_loc, None))
        | Some n ->
            let lid_loc = mk_lid_loc (Longident.Lident "Some") in
            mk_exp_loc (Pexp_construct (lid_loc, Some (mk_int n)))
      and mk_fmtty : type a b c d e f g h i j k l .
          (a, b, c, d, e, f, g, h, i, j, k, l) fmtty_rel -> Parsetree.expression
          =
        fun fmtty -> match fmtty with
        | Char_ty rest -> mk_constr "Char_ty" [ mk_fmtty rest ]
        | String_ty rest -> mk_constr "String_ty" [ mk_fmtty rest ]
        | Int_ty rest -> mk_constr "Int_ty" [ mk_fmtty rest ]
        | Int32_ty rest -> mk_constr "Int32_ty" [ mk_fmtty rest ]
        | Nativeint_ty rest -> mk_constr "Nativeint_ty" [ mk_fmtty rest ]
        | Int64_ty rest -> mk_constr "Int64_ty" [ mk_fmtty rest ]
        | Float_ty rest -> mk_constr "Float_ty" [ mk_fmtty rest ]
        | Bool_ty rest -> mk_constr "Bool_ty" [ mk_fmtty rest ]
        | Alpha_ty rest -> mk_constr "Alpha_ty" [ mk_fmtty rest ]
        | Theta_ty rest -> mk_constr "Theta_ty" [ mk_fmtty rest ]
        | Any_ty rest -> mk_constr "Any_ty" [ mk_fmtty rest ]
        | Reader_ty rest -> mk_constr "Reader_ty" [ mk_fmtty rest ]
        | Ignored_reader_ty rest ->
            mk_constr "Ignored_reader_ty" [ mk_fmtty rest ]
        | Format_arg_ty (sub_fmtty, rest) ->
            mk_constr "Format_arg_ty" [ mk_fmtty sub_fmtty; mk_fmtty rest ]
        | Format_subst_ty (sub_fmtty1, sub_fmtty2, rest) ->
            mk_constr "Format_subst_ty"
              [ mk_fmtty sub_fmtty1; mk_fmtty sub_fmtty2; mk_fmtty rest ]
        | End_of_fmtty -> mk_constr "End_of_fmtty" []
      and mk_ignored : type a b c d e f .
          (a, b, c, d, e, f) ignored -> Parsetree.expression =
        fun ign -> match ign with
        | Ignored_char ->
            mk_constr "Ignored_char" []
        | Ignored_caml_char ->
            mk_constr "Ignored_caml_char" []
        | Ignored_string pad_opt ->
            mk_constr "Ignored_string" [ mk_int_opt pad_opt ]
        | Ignored_caml_string pad_opt ->
            mk_constr "Ignored_caml_string" [ mk_int_opt pad_opt ]
        | Ignored_int (iconv, pad_opt) ->
            mk_constr "Ignored_int" [ mk_iconv iconv; mk_int_opt pad_opt ]
        | Ignored_int32 (iconv, pad_opt) ->
            mk_constr "Ignored_int32" [ mk_iconv iconv; mk_int_opt pad_opt ]
        | Ignored_nativeint (iconv, pad_opt) ->
            mk_constr "Ignored_nativeint" [ mk_iconv iconv; mk_int_opt pad_opt ]
        | Ignored_int64 (iconv, pad_opt) ->
            mk_constr "Ignored_int64" [ mk_iconv iconv; mk_int_opt pad_opt ]
        | Ignored_float (pad_opt, prec_opt) ->
            mk_constr "Ignored_float" [ mk_int_opt pad_opt; mk_int_opt prec_opt ]
        | Ignored_bool pad_opt ->
            mk_constr "Ignored_bool" [ mk_int_opt pad_opt ]
        | Ignored_format_arg (pad_opt, fmtty) ->
            mk_constr "Ignored_format_arg" [ mk_int_opt pad_opt; mk_fmtty fmtty ]
        | Ignored_format_subst (pad_opt, fmtty) ->
            mk_constr "Ignored_format_subst" [
              mk_int_opt pad_opt; mk_fmtty fmtty ]
        | Ignored_reader ->
            mk_constr "Ignored_reader" []
        | Ignored_scan_char_set (width_opt, char_set) ->
            mk_constr "Ignored_scan_char_set" [
              mk_int_opt width_opt; mk_string char_set ]
        | Ignored_scan_get_counter counter ->
            mk_constr "Ignored_scan_get_counter" [
              mk_counter counter
            ]
        | Ignored_scan_next_char ->
            mk_constr "Ignored_scan_next_char" []
      and mk_padding : type x y . (x, y) padding -> Parsetree.expression =
        fun pad -> match pad with
        | No_padding -> mk_constr "No_padding" []
        | Lit_padding (s, w) -> mk_constr "Lit_padding" [ mk_side s; mk_int w ]
        | Arg_padding s -> mk_constr "Arg_padding" [ mk_side s ]
      and mk_precision : type x y . (x, y) precision -> Parsetree.expression =
        fun prec -> match prec with
        | No_precision -> mk_constr "No_precision" []
        | Lit_precision w -> mk_constr "Lit_precision" [ mk_int w ]
        | Arg_precision -> mk_constr "Arg_precision" []
      and mk_fmt : type a b c d e f .
          (a, b, c, d, e, f) fmt -> Parsetree.expression =
        fun fmt -> match fmt with
        | Char rest ->
            mk_constr "Char" [ mk_fmt rest ]
        | Caml_char rest ->
            mk_constr "Caml_char" [ mk_fmt rest ]
        | String (pad, rest) ->
            mk_constr "String" [ mk_padding pad; mk_fmt rest ]
        | Caml_string (pad, rest) ->
            mk_constr "Caml_string" [ mk_padding pad; mk_fmt rest ]
        | Int (iconv, pad, prec, rest) ->
            mk_constr "Int" [
              mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ]
        | Int32 (iconv, pad, prec, rest) ->
            mk_constr "Int32" [
              mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ]
        | Nativeint (iconv, pad, prec, rest) ->
            mk_constr "Nativeint" [
              mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ]
        | Int64 (iconv, pad, prec, rest) ->
            mk_constr "Int64" [
              mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ]
        | Float (fconv, pad, prec, rest) ->
            mk_constr "Float" [
              mk_fconv fconv; mk_padding pad; mk_precision prec; mk_fmt rest ]
        | Bool (pad, rest) ->
            mk_constr "Bool" [ mk_padding pad; mk_fmt rest ]
        | Flush rest ->
            mk_constr "Flush" [ mk_fmt rest ]
        | String_literal (s, rest) ->
            mk_constr "String_literal" [ mk_string s; mk_fmt rest ]
        | Char_literal (c, rest) ->
            mk_constr "Char_literal" [ mk_char c; mk_fmt rest ]
        | Format_arg (pad_opt, fmtty, rest) ->
            mk_constr "Format_arg" [
              mk_int_opt pad_opt; mk_fmtty fmtty; mk_fmt rest ]
        | Format_subst (pad_opt, fmtty, rest) ->
            mk_constr "Format_subst" [
              mk_int_opt pad_opt; mk_fmtty fmtty; mk_fmt rest ]
        | Alpha rest ->
            mk_constr "Alpha" [ mk_fmt rest ]
        | Theta rest ->
            mk_constr "Theta" [ mk_fmt rest ]
        | Formatting_lit (fmting, rest) ->
            mk_constr "Formatting_lit" [ mk_formatting_lit fmting; mk_fmt rest ]
        | Formatting_gen (fmting, rest) ->
            mk_constr "Formatting_gen" [ mk_formatting_gen fmting; mk_fmt rest ]
        | Reader rest ->
            mk_constr "Reader" [ mk_fmt rest ]
        | Scan_char_set (width_opt, char_set, rest) ->
            mk_constr "Scan_char_set" [
              mk_int_opt width_opt; mk_string char_set; mk_fmt rest ]
        | Scan_get_counter (cnt, rest) ->
            mk_constr "Scan_get_counter" [ mk_counter cnt; mk_fmt rest ]
        | Scan_next_char rest ->
            mk_constr "Scan_next_char" [ mk_fmt rest ]
        | Ignored_param (ign, rest) ->
            mk_constr "Ignored_param" [ mk_ignored ign; mk_fmt rest ]
        | End_of_format ->
            mk_constr "End_of_format" []
        | Custom _ ->
            (* Custom formatters have no syntax so they will never appear
               in formats parsed from strings. *)
            assert false
      in
      (* Accept legacy lax format syntax unless -strict-formats was given. *)
      let legacy_behavior = not !Clflags.strict_formats in
      let Fmt_EBB fmt = fmt_ebb_of_string ~legacy_behavior str in
      mk_constr "Format" [ mk_fmt fmt; mk_string str ]
    ))
  with Failure msg ->
    raise (Error (loc, env, Invalid_format msg))
and type_label_exp create env loc ty_expected
          (lid, label, sarg) =
  (* Type-check one [label = sarg] field of a record.  [create] is true
     when building a record (as opposed to mutating one): it changes which
     error a private type produces.  Returns the triple with [sarg]
     replaced by its typed expression. *)
  (* Here also ty_expected may be at generic_level *)
  begin_def ();
  let separate = !Clflags.principal || Env.has_local_constraints env in
  (* Two extra levels in principal mode: one for the label instance, one
     for the information merged from [ty_expected]; each is closed by a
     matching [end_def] below. *)
  if separate then (begin_def (); begin_def ());
  (* [vars] are the universally quantified variables of a polymorphic
     field's type. *)
  let (vars, ty_arg, ty_res) = instance_label true label in
  if separate then begin
    end_def ();
    (* Generalize label information *)
    generalize_structure ty_arg;
    generalize_structure ty_res
  end;
  begin try
    unify env (instance ty_res) (instance ty_expected)
  with Unify err ->
    raise (Error(lid.loc, env, Label_mismatch(lid.txt, err)))
  end;
  (* Instantiate so that we can generalize internal nodes *)
  let ty_arg = instance ty_arg in
  if separate then begin
    end_def ();
    (* Generalize information merged from ty_expected *)
    generalize_structure ty_arg
  end;
  if label.lbl_private = Private then
    if create then
      raise (Error(loc, env, Private_type ty_expected))
    else
      raise (Error(lid.loc, env, Private_label(lid.txt, ty_expected)));
  let arg =
    (* For a polymorphic field, snapshot the unification state so we can
       retry from scratch if the first attempt is not general enough. *)
    let snap = if vars = [] then None else Some (Btype.snapshot ()) in
    let arg = type_argument env sarg ty_arg (instance ty_arg) in
    end_def ();
    try
      if (vars = []) then arg
      else begin
        if maybe_expansive arg then
          lower_contravariant env arg.exp_type;
        generalize_and_check_univars env "field value" arg label.lbl_arg vars;
        {arg with exp_type = instance arg.exp_type}
      end
    with exn when maybe_expansive arg -> try
      (* Try to retype without propagating ty_arg, cf PR#4862 *)
      Option.iter Btype.backtrack snap;
      begin_def ();
      let arg = type_exp env sarg in
      end_def ();
      lower_contravariant env arg.exp_type;
      begin_def ();
      let arg = {arg with exp_type = instance arg.exp_type} in
      unify_exp env arg (instance ty_arg);
      end_def ();
      generalize_and_check_univars env "field value" arg label.lbl_arg vars;
      {arg with exp_type = instance arg.exp_type}
    with Error (_, _, Less_general _) as e -> raise e
    | _ -> raise exn (* In case of failure return the first error *)
  in
  (lid, label, arg)
(* Type-check [sarg] against an expected type.  [ty_expected'] may be
   generic; [ty_expected] is an instance of it.  When the argument is an
   inferred function and the expected type is an unlabelled arrow, this
   may insert the missing optional arguments by eta-expanding (wrapped in
   a [let] so side effects of evaluating [sarg] still happen once, first). *)
and type_argument ?explanation ?recarg env sarg ty_expected' ty_expected =
  (* ty_expected' may be generic *)
  let no_labels ty =
    let ls, tvar = list_labels env ty in
    not tvar && List.for_all ((=) Nolabel) ls
  in
  (* [may_coerce] is [Some (safe_expect, lv)] when the expected type is
     an unlabelled arrow and [sarg]'s type is inferred, i.e. when the
     optional-argument elimination below may apply. *)
  let may_coerce =
    if not (is_inferred sarg) then None else
    let work () =
      let te = expand_head env ty_expected' in
      match get_desc te with
        Tarrow(Nolabel,_,ty_res0,_) ->
          Some (no_labels ty_res0, get_level te)
      | _ -> None
    in
    (* Need to be careful not to expand local constraints here *)
    if Env.has_local_constraints env then
      let snap = Btype.snapshot () in
      try_finally ~always:(fun () -> Btype.backtrack snap) work
    else work ()
  in
  match may_coerce with
    Some (safe_expect, lv) ->
      (* apply optional arguments when expected type is "" *)
      (* we must be very careful about not breaking the semantics *)
      if !Clflags.principal then begin_def ();
      let texp = type_exp env sarg in
      if !Clflags.principal then begin
        end_def ();
        generalize_structure texp.exp_type
      end;
      (* Collect the leading optional parameters of [texp]'s type,
         pairing each with a [None] argument. *)
      let rec make_args args ty_fun =
        match get_desc (expand_head env ty_fun) with
        | Tarrow (l,ty_arg,ty_fun,_) when is_optional l ->
            let ty = option_none env (instance ty_arg) sarg.pexp_loc in
            make_args ((l, Some ty) :: args) ty_fun
        | Tarrow (l,_,ty_res',_) when l = Nolabel || !Clflags.classic ->
            List.rev args, ty_fun, no_labels ty_res'
        | Tvar _ -> List.rev args, ty_fun, false
        | _ -> [], texp.exp_type, false
      in
      let args, ty_fun', simple_res = make_args [] texp.exp_type
      and texp = {texp with exp_type = instance texp.exp_type} in
      if not (simple_res || safe_expect) then begin
        unify_exp env texp ty_expected;
        texp
      end else begin
      let warn = !Clflags.principal &&
        (lv <> generic_level || get_level ty_fun' <> generic_level)
      and ty_fun = instance ty_fun' in
      let ty_arg, ty_res =
        match get_desc (expand_head env ty_expected) with
          Tarrow(Nolabel,ty_arg,ty_res,_) -> ty_arg, ty_res
        | _ -> assert false
      in
      unify_exp env {texp with exp_type = ty_fun} ty_expected;
      if args = [] then texp else
      (* eta-expand to avoid side effects *)
      (* [var_pair name ty] builds a fresh identifier of type [ty] as both
         a pattern and a variable expression. *)
      let var_pair name ty =
        let id = Ident.create_local name in
        let desc =
          { val_type = ty; val_kind = Val_reg;
            val_attributes = [];
            val_loc = Location.none;
            val_uid = Uid.mk ~current_unit:(Env.get_unit_name ());
          }
        in
        let exp_env = Env.add_value id desc env in
        {pat_desc = Tpat_var (id, mknoloc name); pat_type = ty;pat_extra=[];
         pat_attributes = [];
         pat_loc = Location.none; pat_env = env},
        {exp_type = ty; exp_loc = Location.none; exp_env = exp_env;
         exp_extra = []; exp_attributes = [];
         exp_desc =
         Texp_ident(Path.Pident id, mknoloc (Longident.Lident name), desc)}
      in
      let eta_pat, eta_var = var_pair "eta" ty_arg in
      (* [func texp] wraps [texp] into [fun eta -> texp <defaults> eta]. *)
      let func texp =
        let e =
          {texp with exp_type = ty_res; exp_desc =
           Texp_apply
             (texp,
              args @ [Nolabel, Some eta_var])}
        in
        let cases = [case eta_pat e] in
        let param = name_cases "param" cases in
        { texp with exp_type = ty_fun; exp_desc =
            Texp_function { arg_label = Nolabel; param; cases;
              partial = Total; } }
      in
      Location.prerr_warning texp.exp_loc
        (Warnings.Eliminated_optional_arguments
           (List.map (fun (l, _) -> Printtyp.string_of_label l) args));
      if warn then Location.prerr_warning texp.exp_loc
          (Warnings.Non_principal_labels "eliminated optional argument");
      (* let-expand to have side effects *)
      let let_pat, let_var = var_pair "arg" texp.exp_type in
      re { texp with exp_type = ty_fun; exp_desc =
           Texp_let (Nonrecursive,
                     [{vb_pat=let_pat; vb_expr=texp; vb_attributes=[];
                       vb_loc=Location.none;
                      }],
                     func let_var) }
      end
  | None ->
      let texp = type_expect ?recarg env sarg
          (mk_expected ?explanation ty_expected') in
      unify_exp env texp ty_expected;
      texp
(* Type-check the application of [funct] to the labelled arguments
   [sargs].  Handles label commutation, elimination of omitted optional
   arguments, and abstraction over parameters not supplied.  Returns the
   typed argument list (with [None] for abstracted parameters) and the
   result type.  [funct.exp_type] may be generic. *)
and type_application env funct sargs =
  (* funct.exp_type may be generic *)
  (* Rebuild a function type from the parameters recorded in [omitted]. *)
  let result_type omitted ty_fun =
    List.fold_left
      (fun ty_fun (l,ty,lv) -> newty2 ~level:lv (Tarrow(l,ty,ty_fun,commu_ok)))
      ty_fun omitted
  in
  let has_label l ty_fun =
    let ls, tvar = list_labels env ty_fun in
    tvar || List.mem l ls
  in
  (* Optional parameters defaulted because an unlabelled argument follows,
     and parameters abstracted over because no argument was supplied.
     Both are folded back into the result type by [result_type]. *)
  let eliminated_optional_arguments = ref [] in
  let omitted_parameters = ref [] in
  (* Type one argument once the function type is no longer a known arrow:
     either unify with a fresh arrow (Tvar) or report an error. *)
  let type_unknown_arg (ty_fun, typed_args) (lbl, sarg) =
    let (ty_arg, ty_res) =
      let ty_fun = expand_head env ty_fun in
      match get_desc ty_fun with
      | Tvar _ ->
          let t1 = newvar () and t2 = newvar () in
          if get_level ty_fun >= get_level t1 &&
             not (is_prim ~name:"%identity" funct)
          then
            Location.prerr_warning sarg.pexp_loc
              Warnings.Ignored_extra_argument;
          unify env ty_fun (newty (Tarrow(lbl,t1,t2,commu_var ())));
          (t1, t2)
      | Tarrow (l,t1,t2,_) when l = lbl
        || !Clflags.classic && lbl = Nolabel && not (is_optional l) ->
          (t1, t2)
      | td ->
          let ty_fun = match td with Tarrow _ -> newty td | _ -> ty_fun in
          let ty_res =
            result_type (!omitted_parameters @ !eliminated_optional_arguments)
              ty_fun
          in
          match get_desc ty_res with
          | Tarrow _ ->
              if !Clflags.classic || not (has_label lbl ty_fun) then
                raise (Error(sarg.pexp_loc, env,
                             Apply_wrong_label(lbl, ty_res, false)))
              else
                raise (Error(funct.exp_loc, env, Incoherent_label_order))
          | _ ->
              raise(Error(funct.exp_loc, env, Apply_non_function
                            (expand_head env funct.exp_type)))
    in
    let arg () =
      let arg = type_expect env sarg (mk_expected ty_arg) in
      if is_optional lbl then
        unify_exp env arg (type_option(newvar()));
      arg
    in
    (ty_res, (lbl, Some arg) :: typed_args)
  in
  (* In classic mode, or when all actual arguments are unlabelled but the
     function expects labels (warning emitted), labels are ignored and
     arguments are matched positionally. *)
  let ignore_labels =
    !Clflags.classic ||
    begin
      let ls, tvar = list_labels env funct.exp_type in
      not tvar &&
      let labels = List.filter (fun l -> not (is_optional l)) ls in
      List.length labels = List.length sargs &&
      List.for_all (fun (l,_) -> l = Nolabel) sargs &&
      List.exists (fun l -> l <> Nolabel) labels &&
      (Location.prerr_warning
         funct.exp_loc
         (Warnings.Labels_omitted
            (List.map Printtyp.string_of_label
               (List.filter ((<>) Nolabel) labels)));
       true)
    end
  in
  let warned = ref false in
  let rec type_args args ty_fun ty_fun0 sargs =
    let type_unknown_args () =
      (* We're not looking at a *known* function type anymore, or there are no
         arguments left. *)
      let ty_fun, typed_args =
        List.fold_left type_unknown_arg (ty_fun0, args) sargs
      in
      let args =
        (* Force typing of arguments.
           Careful: the order matters here. Using [List.rev_map] would be
           incorrect. *)
        List.map
          (function
            | l, None -> l, None
            | l, Some f -> l, Some (f ()))
          (List.rev typed_args)
      in
      let result_ty = instance (result_type !omitted_parameters ty_fun) in
      args, result_ty
    in
    if sargs = [] then type_unknown_args () else
    let ty_fun' = expand_head env ty_fun in
    match get_desc ty_fun', get_desc (expand_head env ty_fun0) with
    | Tarrow (l, ty, ty_fun, com), Tarrow (_, ty0, ty_fun0, _)
      when is_commu_ok com ->
        let lv = get_level ty_fun' in
        (* At most one principality warning per application. *)
        let may_warn loc w =
          if not !warned && !Clflags.principal && lv <> generic_level
          then begin
            warned := true;
            Location.prerr_warning loc w
          end
        in
        let name = label_name l
        and optional = is_optional l in
        (* Delay actual typing of the argument; delayed closures are run
           in order by [type_unknown_args]. *)
        let use_arg sarg l' =
          Some (
            if not optional || is_optional l' then
              (fun () -> type_argument env sarg ty ty0)
            else begin
              may_warn sarg.pexp_loc
                (Warnings.Not_principal "using an optional argument here");
              (fun () -> option_some env (type_argument env sarg
                                            (extract_option_type env ty)
                                            (extract_option_type env ty0)))
            end
          )
        in
        let eliminate_optional_arg () =
          may_warn funct.exp_loc
            (Warnings.Non_principal_labels "eliminated optional argument");
          eliminated_optional_arguments :=
            (l,ty,lv) :: !eliminated_optional_arguments;
          Some (fun () -> option_none env (instance ty) Location.none)
        in
        let remaining_sargs, arg =
          if ignore_labels then begin
            (* No reordering is allowed, process arguments in order *)
            match sargs with
            | [] -> assert false
            | (l', sarg) :: remaining_sargs ->
                if name = label_name l' || (not optional && l' = Nolabel) then
                  (remaining_sargs, use_arg sarg l')
                else if
                  optional &&
                  not (List.exists (fun (l, _) -> name = label_name l)
                         remaining_sargs) &&
                  List.exists (function (Nolabel, _) -> true | _ -> false)
                    sargs
                then
                  (sargs, eliminate_optional_arg ())
                else
                  raise(Error(sarg.pexp_loc, env,
                              Apply_wrong_label(l', ty_fun', optional)))
          end else
            (* Arguments can be commuted, try to fetch the argument
               corresponding to the first parameter. *)
            match extract_label name sargs with
            | Some (l', sarg, commuted, remaining_sargs) ->
                if commuted then begin
                  may_warn sarg.pexp_loc
                    (Warnings.Not_principal "commuting this argument")
                end;
                if not optional && is_optional l' then
                  Location.prerr_warning sarg.pexp_loc
                    (Warnings.Nonoptional_label (Printtyp.string_of_label l));
                remaining_sargs, use_arg sarg l'
            | None ->
                sargs,
                if optional && List.mem_assoc Nolabel sargs then
                  eliminate_optional_arg ()
                else begin
                  (* No argument was given for this parameter, we abstract over
                     it. *)
                  may_warn funct.exp_loc
                    (Warnings.Non_principal_labels "commuted an argument");
                  omitted_parameters := (l,ty,lv) :: !omitted_parameters;
                  None
                end
        in
        type_args ((l,arg)::args) ty_fun ty_fun0 remaining_sargs
    | _ ->
        type_unknown_args ()
  in
  let is_ignore funct =
    is_prim ~name:"%ignore" funct &&
    (try ignore (filter_arrow env (instance funct.exp_type) Nolabel); true
     with Filter_arrow_failed _ -> false)
  in
  match sargs with
  | (* Special case for ignore: avoid discarding warning *)
    [Nolabel, sarg] when is_ignore funct ->
      let ty_arg, ty_res = filter_arrow env (instance funct.exp_type) Nolabel in
      let exp = type_expect env sarg (mk_expected ty_arg) in
      check_partial_application ~statement:false exp;
      ([Nolabel, Some exp], ty_res)
  | _ ->
      let ty = funct.exp_type in
      type_args [] ty (instance ty) sargs
(* Type-check a constructor application [lid sarg] at the expected type.
   Disambiguates the constructor against the expected variant type,
   checks arity, handles principality bracketing, inline records, and
   private constructors. *)
and type_construct env loc lid sarg ty_expected_explained attrs =
  let { ty = ty_expected; explanation } = ty_expected_explained in
  (* Use the expected type to guide constructor disambiguation, when it
     is (principally) known to be a variant type. *)
  let expected_type =
    match extract_concrete_variant env ty_expected with
    | Variant_type(p0, p,_) ->
        Some(p0, p, is_principal ty_expected)
    | Maybe_a_variant_type -> None
    | Not_a_variant_type ->
        let srt = wrong_kind_sort_of_constructor lid.txt in
        let ctx = Expression explanation in
        let error = Wrong_expected_kind(srt, ctx, ty_expected) in
        raise (Error (loc, env, error))
  in
  let constrs =
    Env.lookup_all_constructors ~loc:lid.loc Env.Positive lid.txt env
  in
  let constr =
    wrap_disambiguate "This variant expression is expected to have"
      ty_expected_explained
      (Constructor.disambiguate Env.Positive lid env expected_type) constrs
  in
  (* A tuple argument is split into separate arguments for a multi-arity
     constructor (or when [@explicit_arity] is present). *)
  let sargs =
    match sarg with
      None -> []
    | Some {pexp_desc = Pexp_tuple sel} when
        constr.cstr_arity > 1 || Builtin_attributes.explicit_arity attrs
      -> sel
    | Some se -> [se] in
  if List.length sargs <> constr.cstr_arity then
    raise(Error(loc, env, Constructor_arity_mismatch
                  (lid.txt, constr.cstr_arity, List.length sargs)));
  let separate = !Clflags.principal || Env.has_local_constraints env in
  if separate then (begin_def (); begin_def ());
  let (ty_args, ty_res, _) =
    instance_constructor Keep_existentials_flexible constr
  in
  (* Provisional expression with no arguments; the typed arguments are
     filled in at the end. *)
  let texp =
    re {
      exp_desc = Texp_construct(lid, constr, []);
      exp_loc = loc; exp_extra = [];
      exp_type = ty_res;
      exp_attributes = attrs;
      exp_env = env } in
  if separate then begin
    end_def ();
    generalize_structure ty_res;
    with_explanation explanation (fun () ->
      unify_exp env {texp with exp_type = instance ty_res}
        (instance ty_expected));
    end_def ();
    List.iter generalize_structure ty_args;
    generalize_structure ty_res;
  end;
  let ty_args0, ty_res =
    match instance_list (ty_res :: ty_args) with
      t :: tl -> tl, t
    | _ -> assert false
  in
  let texp = {texp with exp_type = ty_res} in
  if not separate then unify_exp env texp (instance ty_expected);
  (* An inlined-record constructor only accepts a record expression or a
     variable as its argument. *)
  let recarg =
    match constr.cstr_inlined with
    | None -> Rejected
    | Some _ ->
        begin match sargs with
        | [{pexp_desc =
              Pexp_ident _ |
              Pexp_record (_, (Some {pexp_desc = Pexp_ident _}| None))}] ->
            Required
        | _ ->
            raise (Error(loc, env, Inlined_record_expected))
        end
  in
  let args =
    List.map2 (fun e (t,t0) -> type_argument ~recarg env e t t0) sargs
      (List.combine ty_args ty_args0) in
  if constr.cstr_private = Private then
    begin match constr.cstr_tag with
    | Cstr_extension _ ->
        raise(Error(loc, env, Private_constructor (constr, ty_res)))
    | Cstr_constant _ | Cstr_block _ | Cstr_unboxed ->
        raise (Error(loc, env, Private_type ty_res));
    end;
  (* NOTE: shouldn't we call "re" on this final expression? -- AF *)
  { texp with
    exp_desc = Texp_construct(lid, constr, args) }
(* Typing of statements (expressions whose values are discarded) *)
(* Type-check an expression in statement position (its value is
   discarded).  Warns when the statement provably does not return
   (its type is a fresh variable); with -strict-sequence the type must
   unify with unit. *)
and type_statement ?explanation env sexp =
  (* Type in a local level: if [ty] is a variable whose level is deeper
     than [tv]'s, it was created while typing [sexp] and is unconstrained,
     i.e. the statement never returns normally. *)
  begin_def();
  let exp = type_exp env sexp in
  end_def();
  let ty = expand_head env exp.exp_type and tv = newvar() in
  if is_Tvar ty && get_level ty > get_level tv then
    Location.prerr_warning
      (final_subexpression exp).exp_loc
      Warnings.Nonreturning_statement;
  if !Clflags.strict_sequence then
    let expected_ty = instance Predef.type_unit in
    with_explanation explanation (fun () ->
      unify_exp env exp expected_ty);
    exp
  else begin
    (* Otherwise only warn on partial applications and non-unit types. *)
    check_partial_application ~statement:true exp;
    unify_var env tv ty;
    exp
  end
(* Type-check [sbody] under the first-class-module unpacks collected from
   patterns ([to_unpack] list), wrapping the result in one ghost
   [Texp_letmodule] per unpack.  Each unpack opens a level/context
   (begin_def/narrow) that is closed in the final fold (end_def/widen). *)
and type_unpacks ?(in_function : (Location.t * type_expr) option)
    env (unpacks : to_unpack list) sbody expected_ty =
  if unpacks = [] then type_expect ?in_function env sbody expected_ty else
  let ty = newvar() in
  (* remember original level *)
  let extended_env, tunpacks =
    List.fold_left (fun (env, tunpacks) unpack ->
        begin_def ();
        let context = Typetexp.narrow () in
        (* Re-type the named value as a module: (module <name>). *)
        let modl, md_shape =
          !type_module env
            Ast_helper.(
              Mod.unpack ~loc:unpack.tu_loc
                (Exp.ident ~loc:unpack.tu_name.loc
                   (mkloc (Longident.Lident unpack.tu_name.txt)
                      unpack.tu_name.loc)))
        in
        Mtype.lower_nongen (get_level ty) modl.mod_type;
        let pres =
          match modl.mod_type with
          | Mty_alias _ -> Mp_absent
          | _ -> Mp_present
        in
        let scope = create_scope () in
        let md =
          { md_type = modl.mod_type; md_attributes = [];
            md_loc = unpack.tu_name.loc;
            md_uid = unpack.tu_uid; }
        in
        let (id, env) =
          Env.enter_module_declaration ~scope ~shape:md_shape
            unpack.tu_name.txt pres md env
        in
        Typetexp.widen context;
        env, (id, unpack.tu_name, pres, modl) :: tunpacks
      ) (env, []) unpacks
  in
  (* ideally, we should catch Expr_type_clash errors
     in type_expect triggered by escaping identifiers from the local module
     and refine them into Scoping_let_module errors
  *)
  let body = type_expect ?in_function extended_env sbody expected_ty in
  let exp_loc = { body.exp_loc with loc_ghost = true } in
  let exp_attributes = [Ast_helper.Attr.mk (mknoloc "#modulepat") (PStr [])] in
  List.fold_left (fun body (id, name, pres, modl) ->
      (* go back to parent level *)
      end_def ();
      Ctype.unify_var extended_env ty body.exp_type;
      re {
        exp_desc = Texp_letmodule(Some id, { name with txt = Some name.txt },
                                  pres, modl, body);
        exp_loc;
        exp_attributes;
        exp_extra = [];
        exp_type = ty;
        exp_env = env }
    ) body tunpacks
(* Typing of match cases *)
(* Type-check the cases of a match/function/try.  [category] says whether
   exception cases may appear (Computation) or not (Value).  [ty_arg] is
   the (fully generalized) scrutinee type; [partial_flag] requests an
   exhaustiveness check.  Returns the typed cases and the partiality. *)
and type_cases
    : type k . k pattern_category ->
           ?in_function:_ -> _ -> _ -> _ -> _ -> _ -> Parsetree.case list ->
           k case list * partial
  = fun category ?in_function env
        ty_arg ty_res_explained partial_flag loc caselist ->
  (* ty_arg is _fully_ generalized *)
  let { ty = ty_res; explanation } = ty_res_explained in
  let patterns = List.map (fun {pc_lhs=p} -> p) caselist in
  let contains_polyvars = List.exists contains_polymorphic_variant patterns in
  let erase_either = contains_polyvars && contains_variant_either ty_arg in
  let may_contain_gadts = List.exists may_contain_gadts patterns in
  let ty_arg =
    if (may_contain_gadts || erase_either) && not !Clflags.principal
    then correct_levels ty_arg else ty_arg
  in
  let rec is_var spat =
    match spat.ppat_desc with
      Ppat_any | Ppat_var _ -> true
    | Ppat_alias (spat, _) -> is_var spat
    | _ -> false in
  (* A single variable/wildcard case needs no exhaustiveness check. *)
  let needs_exhaust_check =
    match caselist with
      [{pc_rhs = {pexp_desc = Pexp_unreachable}}] -> true
    | [{pc_lhs}] when is_var pc_lhs -> false
    | _ -> true
  in
  let outer_level = get_current_level () in
  (* With GADTs, patterns are typed at a deeper level [lev] so that
     existentials cannot escape; the level is closed near the end. *)
  let lev =
    if may_contain_gadts then begin_def ();
    get_current_level ()
  in
  let take_partial_instance =
    if erase_either
    then Some false else None
  in
  begin_def (); (* propagation of the argument *)
  let pattern_force = ref [] in
  (* Format.printf "@[%i %i@ %a@]@." lev (get_current_level())
     Printtyp.raw_type_expr ty_arg; *)
  (* First pass: type all patterns (bodies come later, so unifications
     between branches stay order-free). *)
  let half_typed_cases =
    List.map
      (fun ({pc_lhs; pc_guard = _; pc_rhs = _} as case) ->
        if !Clflags.principal then begin_def (); (* propagation of pattern *)
        begin_def ();
        let ty_arg = instance ?partial:take_partial_instance ty_arg in
        end_def ();
        generalize_structure ty_arg;
        let (pat, ext_env, force, pvs, unpacks) =
          type_pattern category ~lev env pc_lhs ty_arg
        in
        pattern_force := force @ !pattern_force;
        let pat =
          if !Clflags.principal then begin
            end_def ();
            iter_pattern_variables_type generalize_structure pvs;
            { pat with pat_type = instance pat.pat_type }
          end else pat
        in
        (* Ensure that no ambivalent pattern type escapes its branch *)
        check_scope_escape pat.pat_loc env outer_level ty_arg;
        { typed_pat = pat;
          pat_type_for_unif = ty_arg;
          untyped_case = case;
          branch_env = ext_env;
          pat_vars = pvs;
          unpacks;
          contains_gadt = contains_gadt (as_comp_pattern category pat); }
        )
      caselist in
  let patl = List.map (fun { typed_pat; _ } -> typed_pat) half_typed_cases in
  let does_contain_gadt =
    List.exists (fun { contains_gadt; _ } -> contains_gadt) half_typed_cases
  in
  let ty_res, do_copy_types =
    if does_contain_gadt && not !Clflags.principal then
      correct_levels ty_res, Env.make_copy_of_types env
    else ty_res, (fun env -> env)
  in
  (* Unify all cases (delayed to keep it order-free) *)
  let ty_arg' = newvar () in
  let unify_pats ty =
    List.iter (fun { typed_pat = pat; pat_type_for_unif = pat_ty; _ } ->
      unify_pat_types pat.pat_loc (ref env) pat_ty ty
    ) half_typed_cases
  in
  unify_pats ty_arg';
  (* Check for polymorphic variants to close *)
  if List.exists has_variants patl then begin
    Parmatch.pressure_variants_in_computation_pattern env
      (List.map (as_comp_pattern category) patl);
    List.iter finalize_variants patl
  end;
  (* `Contaminating' unifications start here *)
  List.iter (fun f -> f()) !pattern_force;
  (* Post-processing and generalization *)
  if take_partial_instance <> None then unify_pats (instance ty_arg);
  List.iter (fun { pat_vars; _ } ->
    iter_pattern_variables_type (fun t -> unify_var env (newvar()) t) pat_vars
  ) half_typed_cases;
  end_def ();
  generalize ty_arg';
  List.iter (fun { pat_vars; _ } ->
    iter_pattern_variables_type generalize pat_vars
  ) half_typed_cases;
  (* type bodies *)
  let in_function = if List.length caselist = 1 then in_function else None in
  let ty_res' = instance ty_res in
  if !Clflags.principal then begin_def ();
  let cases =
    List.map
      (fun { typed_pat = pat; branch_env = ext_env; pat_vars = pvs; unpacks;
             untyped_case = {pc_lhs = _; pc_guard; pc_rhs};
             contains_gadt; _ } ->
        let ext_env =
          if contains_gadt then
            do_copy_types ext_env
          else
            ext_env
        in
        let ext_env =
          add_pattern_variables ext_env pvs
            ~check:(fun s -> Warnings.Unused_var_strict s)
            ~check_as:(fun s -> Warnings.Unused_var s)
        in
        let unpacks =
          List.map (fun (name, loc) ->
            {tu_name = name; tu_loc = loc;
             tu_uid = Uid.mk ~current_unit:(Env.get_unit_name ())}
          ) unpacks
        in
        let ty_expected =
          if contains_gadt && not !Clflags.principal then
            (* allow propagation from preceding branches *)
            correct_levels ty_res
          else ty_res in
        let guard =
          match pc_guard with
          | None -> None
          | Some scond ->
              Some
                (type_unpacks ext_env unpacks scond
                   (mk_expected ~explanation:When_guard Predef.type_bool))
        in
        let exp =
          type_unpacks ?in_function ext_env
            unpacks pc_rhs (mk_expected ?explanation ty_expected)
        in
        {
          c_lhs = pat;
          c_guard = guard;
          c_rhs = {exp with exp_type = ty_res'}
        }
      )
      half_typed_cases
  in
  if !Clflags.principal then end_def ();
  let do_init = may_contain_gadts || needs_exhaust_check in
  let ty_arg_check =
    if do_init then
      (* Hack: use for_saving to copy variables too *)
      Subst.type_expr (Subst.for_saving Subst.identity) ty_arg'
    else ty_arg'
  in
  let val_cases, exn_cases =
    match category with
      | Value -> (cases : value case list), []
      | Computation -> split_cases env cases in
  if val_cases = [] && exn_cases <> [] then
    raise (Error (loc, env, No_value_clauses));
  let partial =
    if partial_flag then
      check_partial ~lev env ty_arg_check loc val_cases
    else
      Partial
  in
  let unused_check delayed =
    List.iter (fun { typed_pat; branch_env; _ } ->
      check_absent_variant branch_env (as_comp_pattern category typed_pat)
    ) half_typed_cases;
    if delayed then (begin_def (); init_def lev);
    check_unused ~lev env ty_arg_check val_cases ;
    check_unused ~lev env Predef.type_exn exn_cases ;
    if delayed then end_def ();
    Parmatch.check_ambiguous_bindings val_cases ;
    Parmatch.check_ambiguous_bindings exn_cases
  in
  if contains_polyvars then
    add_delayed_check (fun () -> unused_check true)
  else
    (* Check for unused cases, do not delay because of gadts *)
    unused_check false;
  if may_contain_gadts then begin
    end_def ();
    (* Ensure that existential types do not escape *)
    unify_exp_types loc env ty_res' (newvar ()) ;
  end;
  cases, partial
(* Typing of let bindings *)
(* Type-check a (possibly recursive) group of let-bindings.
   [check]/[check_strict] build the warnings used for unused bindings;
   [allow] is forwarded to pattern typing.  Returns the typed bindings,
   the environment extended with the bound variables, and the pending
   first-class-module unpacks. *)
and type_let
    ?(check = fun s -> Warnings.Unused_var s)
    ?(check_strict = fun s -> Warnings.Unused_var_strict s)
    existential_context
    env rec_flag spat_sexp_list allow =
  let open Ast_helper in
  begin_def();
  if !Clflags.principal then begin_def ();
  let is_fake_let =
    match spat_sexp_list with
    | [{pvb_expr={pexp_desc=Pexp_match(
           {pexp_desc=Pexp_ident({ txt = Longident.Lident "*opt*"})},_)}}] ->
        true (* the fake let-declaration introduced by fun ?(x = e) -> ... *)
    | _ ->
        false
  in
  let check = if is_fake_let then check_strict else check in
  (* Pair each pattern with its attributes; push an explicit type
     annotation from the expression onto the pattern when useful. *)
  let spatl =
    List.map
      (fun {pvb_pat=spat; pvb_expr=sexp; pvb_attributes=attrs} ->
        attrs,
        match spat.ppat_desc, sexp.pexp_desc with
          (Ppat_any | Ppat_constraint _), _ -> spat
        | _, Pexp_coerce (_, _, sty)
        | _, Pexp_constraint (_, sty) when !Clflags.principal ->
            (* propagate type annotation to pattern,
               to allow it to be generalized in -principal mode *)
            Pat.constraint_
              ~loc:{spat.ppat_loc with Location.loc_ghost=true}
              spat
              sty
        | _ -> spat)
      spat_sexp_list in
  let nvs = List.map (fun _ -> newvar ()) spatl in
  let (pat_list, new_env, force, pvs, unpacks) =
    type_pattern_list Value existential_context env spatl nvs allow in
  let attrs_list = List.map fst spatl in
  let is_recursive = (rec_flag = Recursive) in
  (* If recursive, first unify with an approximation of the expression *)
  if is_recursive then
    List.iter2
      (fun pat binding ->
        let pat =
          match get_desc pat.pat_type with
          | Tpoly (ty, tl) ->
              {pat with pat_type =
               snd (instance_poly ~keep_names:true false tl ty)}
          | _ -> pat
        in unify_pat (ref env) pat (type_approx env binding.pvb_expr))
      pat_list spat_sexp_list;
  (* Polymorphic variant processing *)
  List.iter
    (fun pat ->
      if has_variants pat then begin
        Parmatch.pressure_variants env [pat];
        finalize_variants pat
      end)
    pat_list;
  (* Generalize the structure *)
  let pat_list =
    if !Clflags.principal then begin
      end_def ();
      iter_pattern_variables_type generalize_structure pvs;
      List.map (fun pat ->
        generalize_structure pat.pat_type;
        {pat with pat_type = instance pat.pat_type}
      ) pat_list
    end else
      pat_list
  in
  (* Only bind pattern variables after generalizing *)
  List.iter (fun f -> f()) force;
  let sexp_is_fun { pvb_expr = sexp; _ } =
    match sexp.pexp_desc with
    | Pexp_fun _ | Pexp_function _ -> true
    | _ -> false
  in
  let exp_env =
    if is_recursive then new_env
    else if List.for_all sexp_is_fun spat_sexp_list
    then begin
      (* Add ghost bindings to help detecting missing "rec" keywords.
         We only add those if the body of the definition is obviously a
         function. The rationale is that, in other cases, the hint is probably
         wrong (and the user is using "advanced features" anyway (lazy,
         recursive values...)).
         [pvb_loc] (below) is the location of the first let-binding (in case of
         a let .. and ..), and is where the missing "rec" hint suggests to add a
         "rec" keyword. *)
      match spat_sexp_list with
      | {pvb_loc; _} :: _ -> maybe_add_pattern_variables_ghost pvb_loc env pvs
      | _ -> assert false
    end
    else env in
  let current_slot = ref None in
  let rec_needed = ref false in
  let warn_about_unused_bindings =
    List.exists
      (fun attrs ->
        Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () ->
          Warnings.is_active (check "") || Warnings.is_active (check_strict "")
          || (is_recursive && (Warnings.is_active Warnings.Unused_rec_flag))))
      attrs_list
  in
  let pat_slot_list =
    (* Algorithm to detect unused declarations in recursive bindings:
       - During type checking of the definitions, we capture the 'value_used'
         events on the bound identifiers and record them in a slot corresponding
         to the current definition (!current_slot).
         In effect, this creates a dependency graph between definitions.
       - After type checking the definition (!current_slot = None),
         when one of the bound identifier is effectively used, we trigger
         again all the events recorded in the corresponding slot.
         The effect is to traverse the transitive closure of the graph created
         in the first step.
       We also keep track of whether *all* variables in a given pattern
       are unused. If this is the case, for local declarations, the issued
       warning is 26, not 27.
     *)
    List.map2
      (fun attrs pat ->
        Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () ->
          if not warn_about_unused_bindings then pat, None
          else
            let some_used = ref false in
            (* has one of the identifier of this pattern been used? *)
            let slot = ref [] in
            List.iter
              (fun id ->
                let vd = Env.find_value (Path.Pident id) new_env in
                (* note: Env.find_value does not trigger the value_used
                   event *)
                let name = Ident.name id in
                let used = ref false in
                if not (name = "" || name.[0] = '_' || name.[0] = '#') then
                  add_delayed_check
                    (fun () ->
                      if not !used then
                        Location.prerr_warning vd.Types.val_loc
                          ((if !some_used then check_strict else check) name)
                    );
                Env.set_value_used_callback
                  vd
                  (fun () ->
                    match !current_slot with
                    | Some slot ->
                        slot := vd.val_uid :: !slot; rec_needed := true
                    | None ->
                        List.iter Env.mark_value_used (get_ref slot);
                        used := true;
                        some_used := true
                  )
              )
              (Typedtree.pat_bound_idents pat);
            pat, Some slot
          ))
      attrs_list
      pat_list
  in
  (* Type the right-hand sides. *)
  let exp_list =
    List.map2
      (fun {pvb_expr=sexp; pvb_attributes; _} (pat, slot) ->
        if is_recursive then current_slot := slot;
        match get_desc pat.pat_type with
        | Tpoly (ty, tl) ->
            if !Clflags.principal then begin_def ();
            let vars, ty' = instance_poly ~keep_names:true true tl ty in
            if !Clflags.principal then begin
              end_def ();
              generalize_structure ty'
            end;
            let exp =
              Builtin_attributes.warning_scope pvb_attributes (fun () ->
                if rec_flag = Recursive then
                  type_unpacks exp_env unpacks sexp (mk_expected ty')
                else
                  type_expect exp_env sexp (mk_expected ty')
              )
            in
            exp, Some vars
        | _ ->
            let exp =
              Builtin_attributes.warning_scope pvb_attributes (fun () ->
                if rec_flag = Recursive then
                  type_unpacks exp_env unpacks sexp (mk_expected pat.pat_type)
                else
                  type_expect exp_env sexp (mk_expected pat.pat_type))
            in
            exp, None)
      spat_sexp_list pat_slot_list in
  current_slot := None;
  if is_recursive && not !rec_needed then begin
    let {pvb_pat; pvb_attributes} = List.hd spat_sexp_list in
    (* See PR#6677 *)
    Builtin_attributes.warning_scope ~ppwarning:false pvb_attributes
      (fun () ->
        Location.prerr_warning pvb_pat.ppat_loc Warnings.Unused_rec_flag
      )
  end;
  List.iter2
    (fun pat (attrs, exp) ->
      Builtin_attributes.warning_scope ~ppwarning:false attrs
        (fun () ->
          ignore(check_partial env pat.pat_type pat.pat_loc
                   [case pat exp])
        )
    )
    pat_list
    (List.map2 (fun (attrs, _) (e, _) -> attrs, e) spatl exp_list);
  let pvs = List.map (fun pv -> { pv with pv_type = instance pv.pv_type}) pvs in
  end_def();
  List.iter2
    (fun pat (exp, _) ->
      if maybe_expansive exp then
        lower_contravariant env pat.pat_type)
    pat_list exp_list;
  iter_pattern_variables_type generalize pvs;
  List.iter2
    (fun pat (exp, vars) ->
      match vars with
      | None ->
          (* We generalize expressions even if they are not bound to a variable
             and do not have an explicit polymorphic type annotation. This is
             not needed in general, however those types may be shown by the
             interactive toplevel, for example:
             {[
               let _ = Array.get;;
               - : 'a array -> int -> 'a = <fun>
             ]}
             so we do it anyway. *)
          generalize exp.exp_type
      | Some vars ->
          if maybe_expansive exp then
            lower_contravariant env exp.exp_type;
          generalize_and_check_univars env "definition" exp pat.pat_type vars)
    pat_list exp_list;
  let l = List.combine pat_list exp_list in
  let l =
    List.map2
      (fun (p, (e, _)) pvb ->
        {vb_pat=p; vb_expr=e; vb_attributes=pvb.pvb_attributes;
         vb_loc=pvb.pvb_loc;
        })
      l spat_sexp_list
  in
  (* Recursive bindings must bind a plain variable (possibly aliased to
     a wildcard). *)
  if is_recursive then
    List.iter
      (fun {vb_pat=pat} -> match pat.pat_desc with
           Tpat_var _ -> ()
         | Tpat_alias ({pat_desc=Tpat_any}, _, _) -> ()
         | _ -> raise(Error(pat.pat_loc, env, Illegal_letrec_pat)))
      l;
  List.iter (function
      | {vb_pat = {pat_desc = Tpat_any; pat_extra; _}; vb_expr; _} ->
          if not (List.exists (function (Tpat_constraint _, _, _) -> true
                                      | _ -> false) pat_extra) then
            check_partial_application ~statement:false vb_expr
      | _ -> ()) l;
  (l, new_env, unpacks)
(* Type-check the [and+]-style operators of a binding-operator
   expression.  [sarg] is the first (let-bound) argument, [sands] the
   subsequent [pbop] clauses.  Each operator is required to have type
   'rest -> 'arg -> 'result; the list is processed from the last clause
   inward (hence the List.rev).  Returns the typed first argument and
   the typed operator applications, in source order. *)
and type_andops env sarg sands expected_ty =
  let rec loop env let_sarg rev_sands expected_ty =
    match rev_sands with
    | [] -> type_expect env let_sarg (mk_expected expected_ty), []
    | { pbop_op = sop; pbop_exp = sexp; pbop_loc = loc; _ } :: rest ->
        if !Clflags.principal then begin_def ();
        let op_path, op_desc = type_binding_op_ident env sop in
        let op_type = instance op_desc.val_type in
        let ty_arg = newvar () in
        let ty_rest = newvar () in
        let ty_result = newvar() in
        (* Constrain the operator to: ty_rest -> ty_arg -> ty_result. *)
        let ty_rest_fun =
          newty (Tarrow(Nolabel, ty_arg, ty_result, commu_ok)) in
        let ty_op = newty (Tarrow(Nolabel, ty_rest, ty_rest_fun, commu_ok)) in
        begin try
          unify env op_type ty_op
        with Unify err ->
          raise(Error(sop.loc, env, Andop_type_clash(sop.txt, err)))
        end;
        if !Clflags.principal then begin
          end_def ();
          generalize_structure ty_rest;
          generalize_structure ty_arg;
          generalize_structure ty_result
        end;
        let let_arg, rest = loop env let_sarg rest ty_rest in
        let exp = type_expect env sexp (mk_expected ty_arg) in
        begin try
          unify env (instance ty_result) (instance expected_ty)
        with Unify err ->
          raise(Error(loc, env, Bindings_type_clash(err)))
        end;
        let andop =
          { bop_op_name = sop;
            bop_op_path = op_path;
            bop_op_val = op_desc;
            bop_op_type = op_type;
            bop_exp = exp;
            bop_loc = loc }
        in
        let_arg, andop :: rest
  in
  let let_arg, rev_ands = loop env sarg (List.rev sands) expected_ty in
  let_arg, List.rev rev_ands
(* Typing of toplevel bindings *)
(* Typing of toplevel bindings.  At toplevel, both ordinary and strict
   unused-binding checks report an unused value declaration. *)
let type_binding env rec_flag spat_sexp_list =
  Typetexp.reset_type_variables();
  let unused s = Warnings.Unused_value_declaration s in
  let (pat_exp_list, new_env, _unpacks) =
    type_let ~check:unused ~check_strict:unused
      At_toplevel env rec_flag spat_sexp_list false
  in
  (pat_exp_list, new_env)
(* Non-toplevel entry point: use the default unused-variable warnings and
   drop the module unpacks from the result. *)
let type_let existential_ctx env rec_flag spat_sexp_list =
  match type_let existential_ctx env rec_flag spat_sexp_list false with
  | (pat_exp_list, new_env, _unpacks) -> (pat_exp_list, new_env)
(* Typing of toplevel expressions *)
(* Type-check a toplevel phrase expression: type, then generalize its
   type (lowering contravariant parts first if the expression may be
   expansive). *)
let type_expression env sexp =
  Typetexp.reset_type_variables();
  begin_def();
  let exp = type_exp env sexp in
  end_def();
  if maybe_expansive exp then lower_contravariant env exp.exp_type;
  generalize exp.exp_type;
  match sexp.pexp_desc with
    Pexp_ident lid ->
      let loc = sexp.pexp_loc in
      (* Special case for keeping type variables when looking-up a variable *)
      let (_path, desc) = Env.lookup_value ~use:false ~loc lid.txt env in
      {exp with exp_type = desc.val_type}
  | _ -> exp
(* Error report *)
(* Print a "Hint: Did you mean ...?" suggestion for [unbound_name],
   choosing candidates from [valid_names]. *)
let spellcheck ppf unbound_name valid_names =
  let suggestions () = Misc.spellcheck valid_names unbound_name in
  Misc.did_you_mean ppf suggestions
(* Same as [spellcheck], applied to identifiers rather than strings. *)
let spellcheck_idents ppf unbound valid_idents =
  let candidates = List.map Ident.name valid_idents in
  spellcheck ppf (Ident.name unbound) candidates
open Format

(* Shorthand for the long-identifier printer used by the error messages
   below. *)
let longident = Printtyp.longident
(* Returns the first diff of the trace *)
(* Returns the first diff of the trace *)
let type_clash_of_trace trace =
  let first_diff ~prev:_ elt =
    match elt with
    | Errortrace.Diff diff -> Some diff
    | _ -> None
  in
  Errortrace.explain trace first_diff
(* Hint on type error on integer literals
To avoid confusion, it is disabled on float literals
and when the expected type is `int` *)
(* Hint on type error on integer literals
   To avoid confusion, it is disabled on float literals
   and when the expected type is `int` *)
let report_literal_type_constraint expected_type const =
  (* Render the literal, for integer constants only. *)
  let const_str =
    match const with
    | Const_int n -> Some (Int.to_string n)
    | Const_int32 n -> Some (Int32.to_string n)
    | Const_int64 n -> Some (Int64.to_string n)
    | Const_nativeint n -> Some (Nativeint.to_string n)
    | _ -> None
  in
  (* The literal suffix (or trailing '.') that would yield the expected
     type, if any. *)
  let suffix =
    List.find_map
      (fun (path, c) ->
         if Path.same expected_type path then Some c else None)
      [ Predef.path_int32, 'l';
        Predef.path_int64, 'L';
        Predef.path_nativeint, 'n';
        Predef.path_float, '.' ]
  in
  match const_str, suffix with
  | Some c, Some s -> [ Location.msg "@[Hint: Did you mean `%s%c'?@]" c s ]
  | _, _ -> []
(* As above, but driven by an optional trace diff: the hint applies only when
   the expected type is a nullary type constructor. *)
let report_literal_type_constraint const = function
  | None -> []
  | Some tr ->
      match get_desc Errortrace.(tr.expected.ty) with
      | Tconstr (typ, [], _) -> report_literal_type_constraint typ const
      | _ -> []
(* Literal-suffix hints for an expression/type clash, when the offending
   expression is a constant. *)
let report_expr_type_clash_hints exp diff =
  match exp with
  | Some (Texp_constant const) -> report_literal_type_constraint const diff
  | Some _ | None -> []
(* Literal-suffix hints for a pattern/type clash, when the offending pattern
   is a constant.  The locally abstract type [k] lets this accept both
   value and computation pattern descriptions. *)
let report_pattern_type_clash_hints
      (type k) (pat : k pattern_desc option) diff =
  match pat with
  | Some (Tpat_constant const) -> report_literal_type_constraint const diff
  | _ -> []
(* Append "because it is in <context>" to the current diagnostic, naming the
   syntactic context that imposed the expected type. *)
let report_type_expected_explanation expl ppf =
  let context =
    match expl with
    | If_conditional -> "the condition of an if-statement"
    | If_no_else_branch -> "the result of a conditional with no else branch"
    | While_loop_conditional -> "the condition of a while-loop"
    | While_loop_body -> "the body of a while-loop"
    | For_loop_start_index -> "a for-loop start index"
    | For_loop_stop_index -> "a for-loop stop index"
    | For_loop_body -> "the body of a for-loop"
    | Assert_condition -> "the condition of an assertion"
    | Sequence_left_hand_side -> "the left-hand side of a sequence"
    | When_guard -> "a when-guard"
  in
  fprintf ppf "@ because it is in %s" context
(* Optional variant: print nothing when there is no explanation. *)
let report_type_expected_explanation_opt expl ppf =
  Option.iter (fun e -> report_type_expected_explanation e ppf) expl
(* Build a located error from a unification failure, delegating the trace
   rendering to [Printtyp.report_unification_error].  [txt1]/[txt2] print the
   sentences framing the two clashing types. *)
let report_unification_error ~loc ?sub env err
    ?type_expected_explanation txt1 txt2 =
  Location.error_of_printer ~loc ?sub (fun ppf () ->
    Printtyp.report_unification_error ppf env err
      ?type_expected_explanation txt1 txt2
  ) ()
(* Main diagnostic printer: maps every type-checking error constructor to a
   located, formatted message.  Wrapped below by a shadowing definition that
   sets up the printing environment.
   Fix (review): "This expression is packed module" was missing the article
   ("a packed module") in the [Not_a_packed_module] message. *)
let report_error ~loc env = function
  | Constructor_arity_mismatch(lid, expected, provided) ->
      Location.errorf ~loc
        "@[The constructor %a@ expects %i argument(s),@ \
         but is applied here to %i argument(s)@]"
        longident lid expected provided
  | Label_mismatch(lid, err) ->
      report_unification_error ~loc env err
        (function ppf ->
           fprintf ppf "The record field %a@ belongs to the type"
                   longident lid)
        (function ppf ->
           fprintf ppf "but is mixed here with fields of type")
  | Pattern_type_clash (err, pat) ->
      let diff = type_clash_of_trace err.trace in
      let sub = report_pattern_type_clash_hints pat diff in
      report_unification_error ~loc ~sub env err
        (function ppf ->
          fprintf ppf "This pattern matches values of type")
        (function ppf ->
          fprintf ppf "but a pattern was expected which matches values of \
                       type");
  | Or_pattern_type_clash (id, err) ->
      report_unification_error ~loc env err
        (function ppf ->
          fprintf ppf "The variable %s on the left-hand side of this \
                       or-pattern has type" (Ident.name id))
        (function ppf ->
          fprintf ppf "but on the right-hand side it has type")
  | Multiply_bound_variable name ->
      Location.errorf ~loc
        "Variable %s is bound several times in this matching"
        name
  | Orpat_vars (id, valid_idents) ->
      Location.error_of_printer ~loc (fun ppf () ->
        fprintf ppf
          "Variable %s must occur on both sides of this | pattern"
          (Ident.name id);
        spellcheck_idents ppf id valid_idents
      ) ()
  | Expr_type_clash (err, explanation, exp) ->
      let diff = type_clash_of_trace err.trace in
      let sub = report_expr_type_clash_hints exp diff in
      report_unification_error ~loc ~sub env err
        ~type_expected_explanation:
          (report_type_expected_explanation_opt explanation)
        (function ppf ->
           fprintf ppf "This expression has type")
        (function ppf ->
           fprintf ppf "but an expression was expected of type");
  | Apply_non_function typ ->
      (* Distinguish "too many arguments" from "not a function at all". *)
      begin match get_desc typ with
        Tarrow _ ->
          Location.errorf ~loc
            "@[<v>@[<2>This function has type@ %a@]\
             @ @[It is applied to too many arguments;@ %s@]@]"
            Printtyp.type_expr typ "maybe you forgot a `;'.";
      | _ ->
          Location.errorf ~loc "@[<v>@[<2>This expression has type@ %a@]@ %s@]"
            Printtyp.type_expr typ
            "This is not a function; it cannot be applied."
      end
  | Apply_wrong_label (l, ty, extra_info) ->
      let print_label ppf = function
        | Nolabel -> fprintf ppf "without label"
        | l -> fprintf ppf "with label %s" (prefixed_label_name l)
      in
      let extra_info =
        if not extra_info then
          []
        else
          [ Location.msg
              "Since OCaml 4.11, optional arguments do not commute when \
               -nolabels is given" ]
      in
      Location.errorf ~loc ~sub:extra_info
        "@[<v>@[<2>The function applied to this argument has type@ %a@]@.\
         This argument cannot be applied %a@]"
        Printtyp.type_expr ty print_label l
  | Label_multiply_defined s ->
      Location.errorf ~loc "The record field label %s is defined several times"
        s
  | Label_missing labels ->
      let print_labels ppf =
        List.iter (fun lbl -> fprintf ppf "@ %s" (Ident.name lbl)) in
      Location.errorf ~loc "@[<hov>Some record fields are undefined:%a@]"
        print_labels labels
  | Label_not_mutable lid ->
      Location.errorf ~loc "The record field %a is not mutable" longident lid
  | Wrong_name (eorp, ty_expected, { type_path; kind; name; valid_names; }) ->
      Location.error_of_printer ~loc (fun ppf () ->
        Printtyp.wrap_printing_env ~error:true env (fun () ->
          let { ty; explanation } = ty_expected in
          if Path.is_constructor_typath type_path then begin
            fprintf ppf
              "@[The field %s is not part of the record \
               argument for the %a constructor@]"
              name.txt
              Printtyp.type_path type_path;
          end else begin
            fprintf ppf
              "@[@[<2>%s type@ %a%t@]@ \
               There is no %s %s within type %a@]"
              eorp Printtyp.type_expr ty
              (report_type_expected_explanation_opt explanation)
              (Datatype_kind.label_name kind)
              name.txt (*kind*) Printtyp.type_path type_path;
          end;
          spellcheck ppf name.txt valid_names
      )) ()
  | Name_type_mismatch (kind, lid, tp, tpl) ->
      let type_name = Datatype_kind.type_name kind in
      let name = Datatype_kind.label_name kind in
      Location.error_of_printer ~loc (fun ppf () ->
        Printtyp.report_ambiguous_type_error ppf env tp tpl
          (function ppf ->
             fprintf ppf "The %s %a@ belongs to the %s type"
               name longident lid type_name)
          (function ppf ->
             fprintf ppf "The %s %a@ belongs to one of the following %s types:"
               name longident lid type_name)
          (function ppf ->
             fprintf ppf "but a %s was expected belonging to the %s type"
               name type_name)
      ) ()
  | Invalid_format msg ->
      Location.errorf ~loc "%s" msg
  | Not_an_object (ty, explanation) ->
      Location.error_of_printer ~loc (fun ppf () ->
        fprintf ppf "This expression is not an object;@ \
                     it has type %a"
          Printtyp.type_expr ty;
        report_type_expected_explanation_opt explanation ppf
      ) ()
  | Undefined_method (ty, me, valid_methods) ->
      Location.error_of_printer ~loc (fun ppf () ->
        Printtyp.wrap_printing_env ~error:true env (fun () ->
          fprintf ppf
            "@[<v>@[This expression has type@;<1 2>%a@]@,\
             It has no method %s@]" Printtyp.type_expr ty me;
          begin match valid_methods with
            | None -> ()
            | Some valid_methods -> spellcheck ppf me valid_methods
          end
      )) ()
  | Undefined_self_method (me, valid_methods) ->
      Location.error_of_printer ~loc (fun ppf () ->
        fprintf ppf "This expression has no method %s" me;
        spellcheck ppf me valid_methods;
      ) ()
  | Virtual_class cl ->
      Location.errorf ~loc "Cannot instantiate the virtual class %a"
        longident cl
  | Unbound_instance_variable (var, valid_vars) ->
      Location.error_of_printer ~loc (fun ppf () ->
        fprintf ppf "Unbound instance variable %s" var;
        spellcheck ppf var valid_vars;
      ) ()
  | Instance_variable_not_mutable v ->
      Location.errorf ~loc "The instance variable %s is not mutable" v
  | Not_subtype err ->
      Location.error_of_printer ~loc (fun ppf () ->
        Printtyp.Subtype.report_error ppf env err "is not a subtype of"
      ) ()
  | Outside_class ->
      Location.errorf ~loc
        "This object duplication occurs outside a method definition"
  | Value_multiply_overridden v ->
      Location.errorf ~loc
        "The instance variable %s is overridden several times"
        v
  | Coercion_failure (ty_exp, err, b) ->
      Location.error_of_printer ~loc (fun ppf () ->
        Printtyp.report_unification_error ppf env err
          (function ppf ->
             let ty_exp = Printtyp.prepare_expansion ty_exp in
             fprintf ppf "This expression cannot be coerced to type@;<1 2>%a;@ \
                          it has type"
             (Printtyp.type_expansion Type) ty_exp)
          (function ppf ->
             fprintf ppf "but is here used with type");
        if b then
          fprintf ppf ".@.@[<hov>%s@ %s@ %s@]"
            "This simple coercion was not fully general."
            "Hint: Consider using a fully explicit coercion"
            "of the form: `(foo : ty1 :> ty2)'."
      ) ()
  | Not_a_function (ty, explanation) ->
      Location.errorf ~loc
        "This expression should not be a function,@ \
         the expected type is@ %a%t"
        Printtyp.type_expr ty
        (report_type_expected_explanation_opt explanation)
  | Too_many_arguments (ty, explanation) ->
      Location.errorf ~loc
        "This function expects too many arguments,@ \
         it should have type@ %a%t"
        Printtyp.type_expr ty
        (report_type_expected_explanation_opt explanation)
  | Abstract_wrong_label {got; expected; expected_type; explanation} ->
      let label ~long = function
        | Nolabel -> "unlabeled"
        | l -> (if long then "labeled " else "") ^ prefixed_label_name l
      in
      let second_long = match got, expected with
        | Nolabel, _ | _, Nolabel -> true
        | _ -> false
      in
      Location.errorf ~loc
        "@[<v>@[<2>This function should have type@ %a%t@]@,\
         @[but its first argument is %s@ instead of %s%s@]@]"
        Printtyp.type_expr expected_type
        (report_type_expected_explanation_opt explanation)
        (label ~long:true got)
        (if second_long then "being " else "")
        (label ~long:second_long expected)
  | Scoping_let_module(id, ty) ->
      Location.errorf ~loc
        "This `let module' expression has type@ %a@ \
         In this type, the locally bound module name %s escapes its scope"
        Printtyp.type_expr ty id
  | Private_type ty ->
      Location.errorf ~loc "Cannot create values of the private type %a"
        Printtyp.type_expr ty
  | Private_label (lid, ty) ->
      Location.errorf ~loc "Cannot assign field %a of the private type %a"
        longident lid Printtyp.type_expr ty
  | Private_constructor (constr, ty) ->
      Location.errorf ~loc
        "Cannot use private constructor %s to create values of type %a"
        constr.cstr_name Printtyp.type_expr ty
  | Not_a_polymorphic_variant_type lid ->
      Location.errorf ~loc "The type %a@ is not a variant type" longident lid
  | Incoherent_label_order ->
      Location.errorf ~loc
        "This function is applied to arguments@ \
         in an order different from other calls.@ \
         This is only allowed when the real type is known."
  | Less_general (kind, err) ->
      report_unification_error ~loc env err
        (fun ppf -> fprintf ppf "This %s has type" kind)
        (fun ppf -> fprintf ppf "which is less general than")
  | Modules_not_allowed ->
      Location.errorf ~loc "Modules are not allowed in this pattern."
  | Cannot_infer_signature ->
      Location.errorf ~loc
        "The signature for this packaged module couldn't be inferred."
  | Not_a_packed_module ty ->
      Location.errorf ~loc
        "This expression is a packed module, but the expected type is@ %a"
        Printtyp.type_expr ty
  | Unexpected_existential (reason, name, types) ->
      let reason_str =
        match reason with
        | In_class_args ->
            "Existential types are not allowed in class arguments"
        | In_class_def ->
            "Existential types are not allowed in bindings inside \
             class definition"
        | In_self_pattern ->
            "Existential types are not allowed in self patterns"
        | At_toplevel ->
            "Existential types are not allowed in toplevel bindings"
        | In_group ->
            "Existential types are not allowed in \"let ... and ...\" bindings"
        | In_rec ->
            "Existential types are not allowed in recursive bindings"
        | With_attributes ->
            "Existential types are not allowed in presence of attributes"
      in
      (* Prefer naming a concrete escaping type when one exists besides the
         generated "$<name>" type. *)
      begin match List.find (fun ty -> ty <> "$" ^ name) types with
      | example ->
          Location.errorf ~loc
            "%s,@ but this pattern introduces the existential type %s."
            reason_str example
      | exception Not_found ->
          Location.errorf ~loc
            "%s,@ but the constructor %s introduces existential types."
            reason_str name
      end
  | Invalid_interval ->
      Location.errorf ~loc
        "@[Only character intervals are supported in patterns.@]"
  | Invalid_for_loop_index ->
      Location.errorf ~loc
        "@[Invalid for-loop index: only variables and _ are allowed.@]"
  | No_value_clauses ->
      Location.errorf ~loc
        "None of the patterns in this 'match' expression match values."
  | Exception_pattern_disallowed ->
      Location.errorf ~loc
        "@[Exception patterns are not allowed in this position.@]"
  | Mixed_value_and_exception_patterns_under_guard ->
      Location.errorf ~loc
        "@[Mixing value and exception patterns under when-guards is not \
         supported.@]"
  | Inlined_record_escape ->
      Location.errorf ~loc
        "@[This form is not allowed as the type of the inlined record could \
         escape.@]"
  | Inlined_record_expected ->
      Location.errorf ~loc
        "@[This constructor expects an inlined record argument.@]"
  | Unrefuted_pattern pat ->
      Location.errorf ~loc
        "@[%s@ %s@ %a@]"
        "This match case could not be refuted."
        "Here is an example of a value that would reach it:"
        Printpat.top_pretty pat
  | Invalid_extension_constructor_payload ->
      Location.errorf ~loc
        "Invalid [%%extension_constructor] payload, a constructor is expected."
  | Not_an_extension_constructor ->
      Location.errorf ~loc
        "This constructor is not an extension constructor."
  | Literal_overflow ty ->
      Location.errorf ~loc
        "Integer literal exceeds the range of representable integers of type %s"
        ty
  | Unknown_literal (n, m) ->
      Location.errorf ~loc "Unknown modifier '%c' for literal %s%c" m n m
  | Illegal_letrec_pat ->
      Location.errorf ~loc
        "Only variables are allowed as left-hand side of `let rec'"
  | Illegal_letrec_expr ->
      Location.errorf ~loc
        "This kind of expression is not allowed as right-hand side of `let rec'"
  | Illegal_class_expr ->
      Location.errorf ~loc
        "This kind of recursive class expression is not allowed"
  | Letop_type_clash(name, err) ->
      report_unification_error ~loc env err
        (function ppf ->
          fprintf ppf "The operator %s has type" name)
        (function ppf ->
          fprintf ppf "but it was expected to have type")
  | Andop_type_clash(name, err) ->
      report_unification_error ~loc env err
        (function ppf ->
          fprintf ppf "The operator %s has type" name)
        (function ppf ->
          fprintf ppf "but it was expected to have type")
  | Bindings_type_clash(err) ->
      report_unification_error ~loc env err
        (function ppf ->
          fprintf ppf "These bindings have type")
        (function ppf ->
          fprintf ppf "but bindings were expected of type")
  | Unbound_existential (ids, ty) ->
      Location.errorf ~loc
        "@[<2>%s:@ @[type %s.@ %a@]@]"
        "This type does not bind all existentials in the constructor"
        (String.concat " " (List.map Ident.name ids))
        Printtyp.type_expr ty
  | Missing_type_constraint ->
      Location.errorf ~loc
        "@[%s@ %s@]"
        "Existential types introduced in a constructor pattern"
        "must be bound by a type constraint on the argument."
  | Wrong_expected_kind(sort, ctx, ty) ->
      let ctx, explanation =
        match ctx with
        | Expression explanation -> "expression", explanation
        | Pattern -> "pattern", None
      in
      let sort =
        match sort with
        | Constructor -> "constructor"
        | Boolean -> "boolean literal"
        | List -> "list literal"
        | Unit -> "unit literal"
        | Record -> "record"
      in
      Location.errorf ~loc
        "This %s should not be a %s,@ \
         the expected type is@ %a%t"
        ctx sort Printtyp.type_expr ty
        (report_type_expected_explanation_opt explanation)
  | Expr_not_a_record_type ty ->
      Location.errorf ~loc
        "This expression has type %a@ \
         which is not a record type."
        Printtyp.type_expr ty
(* Public wrapper: run the printer inside [env] so that type and module
   paths are resolved and abbreviated consistently with that environment. *)
let report_error ~loc env err =
  Printtyp.wrap_printing_env ~error:true env
    (fun () -> report_error ~loc env err)
(* Hook this module's errors into the generic [Location] error-reporting
   machinery. *)
let () =
  Location.register_error_of_exn
    (function
      | Error (loc, env, err) ->
        Some (report_error ~loc env err)
      | Error_forward err ->
        Some err
      | _ ->
        None
    )
(* Install [add_delayed_check] as the forward reference used by the
   environment modules (breaks a module dependency cycle). *)
let () =
  Persistent_env.add_delayed_check_forward := add_delayed_check;
  Env.add_delayed_check_forward := add_delayed_check;
  ()
(* drop ?recarg argument from the external API *)
(* Eta-expansions re-exporting the entry points without internal optional
   arguments, so the public interface stays minimal. *)
let type_expect ?in_function env e ty = type_expect ?in_function env e ty
let type_exp env e = type_exp env e
let type_argument env e t1 t2 = type_argument env e t1 t2
| (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
|
time.ml |
(* Time-zone offset, in seconds east of UTC. *)
type tz_offset_s = int

(* Offset of UTC itself. *)
let tz_offset_s_utc = 0

(* Days of the week, as polymorphic variants. *)
type weekday =
  [ `Sun
  | `Mon
  | `Tue
  | `Wed
  | `Thu
  | `Fri
  | `Sat
  ]

(* Months of the year. *)
type month =
  [ `Jan
  | `Feb
  | `Mar
  | `Apr
  | `May
  | `Jun
  | `Jul
  | `Aug
  | `Sep
  | `Oct
  | `Nov
  | `Dec
  ]

(* Ranges over weekdays and month days (range semantics defined by
   [Range.range]). *)
type weekday_range = weekday Range.range

type month_day_range = int Range.range

type day_range =
  | Weekday_range of weekday_range
  | Month_day_range of month_day_range

(* Month days are 1-based. *)
let first_mday = 1

(* [Unix.tm.tm_year] counts years since 1900. *)
let tm_year_offset = 1900
(* Conversion factors from calendar units to seconds, as [int64]. *)
module Int64_multipliers = struct
  let minute_to_seconds = 60L

  let hour_to_seconds = Int64.mul minute_to_seconds 60L

  let day_to_seconds = Int64.mul hour_to_seconds 24L
end
(* Float counterparts of [Int64_multipliers], for float-based arithmetic. *)
module Float_multipliers = struct
  let minute_to_seconds = Int64.to_float Int64_multipliers.minute_to_seconds

  let hour_to_seconds = Int64.to_float Int64_multipliers.hour_to_seconds

  let day_to_seconds = Int64.to_float Int64_multipliers.day_to_seconds
end
(* Use the given offset if any; otherwise default to UTC (0). *)
let resolve_current_tz_offset_s (x : tz_offset_s option) : tz_offset_s =
  match x with
  | None -> 0
  | Some offset -> offset
(* Successor of [wday] in the week, wrapping Sat -> Sun. *)
let next_weekday (wday : weekday) : weekday =
  match wday with
  | `Sat -> `Sun
  | `Sun -> `Mon
  | `Mon -> `Tue
  | `Tue -> `Wed
  | `Wed -> `Thu
  | `Thu -> `Fri
  | `Fri -> `Sat
(* [Unix.tm_wday] encoding: Sunday is 0, Saturday is 6. *)
let tm_int_of_weekday (wday : weekday) : int =
  match wday with
  | `Sun -> 0
  | `Mon -> 1
  | `Tue -> 2
  | `Wed -> 3
  | `Thu -> 4
  | `Fri -> 5
  | `Sat -> 6
(* Inverse of [tm_int_of_weekday]; [Error ()] outside 0..6. *)
let weekday_of_tm_int (x : int) : (weekday, unit) result =
  if x < 0 || x > 6 then Error ()
  else
    Ok
      (match x with
       | 0 -> `Sun
       | 1 -> `Mon
       | 2 -> `Tue
       | 3 -> `Wed
       | 4 -> `Thu
       | 5 -> `Fri
       | _ -> `Sat)
(* [Unix.tm_mon] encoding: January is 0, December is 11. *)
let tm_int_of_month (month : month) : int =
  match month with
  | `Jan -> 0
  | `Feb -> 1
  | `Mar -> 2
  | `Apr -> 3
  | `May -> 4
  | `Jun -> 5
  | `Jul -> 6
  | `Aug -> 7
  | `Sep -> 8
  | `Oct -> 9
  | `Nov -> 10
  | `Dec -> 11
(* Inverse of [tm_int_of_month]; [Error ()] outside 0..11. *)
let month_of_tm_int (x : int) : (month, unit) result =
  if x < 0 || x > 11 then Error ()
  else
    Ok
      (match x with
       | 0 -> `Jan
       | 1 -> `Feb
       | 2 -> `Mar
       | 3 -> `Apr
       | 4 -> `May
       | 5 -> `Jun
       | 6 -> `Jul
       | 7 -> `Aug
       | 8 -> `Sep
       | 9 -> `Oct
       | 10 -> `Nov
       | _ -> `Dec)
(* Human month numbering: January is 1, December is 12. *)
let human_int_of_month (month : month) : int = tm_int_of_month month + 1

let month_of_human_int (x : int) : (month, unit) result = month_of_tm_int (x - 1)

(* Total order on months, via their tm encoding. *)
let compare_month (m1 : month) (m2 : month) : int =
  compare (tm_int_of_month m1) (tm_int_of_month m2)

let month_lt m1 m2 = tm_int_of_month m1 < tm_int_of_month m2

let month_le m1 m2 = tm_int_of_month m1 <= tm_int_of_month m2

let month_gt m1 m2 = tm_int_of_month m1 > tm_int_of_month m2

let month_ge m1 m2 = tm_int_of_month m1 >= tm_int_of_month m2

(* Total order on weekdays, via their tm encoding (Sunday first). *)
let compare_weekday (d1 : weekday) (d2 : weekday) : int =
  compare (tm_int_of_weekday d1) (tm_int_of_weekday d2)

let weekday_lt d1 d2 = tm_int_of_weekday d1 < tm_int_of_weekday d2

let weekday_le d1 d2 = tm_int_of_weekday d1 <= tm_int_of_weekday d2

let weekday_gt d1 d2 = tm_int_of_weekday d1 > tm_int_of_weekday d2

let weekday_ge d1 d2 = tm_int_of_weekday d1 >= tm_int_of_weekday d2

(* Copy of [tm] with the seconds field cleared. *)
let zero_tm_sec tm = Unix.{ tm with tm_sec = 0 }
(* let tm_of_date_time (x : date_time) : Unix.tm =
{
tm_sec = x.second;
tm_min = x.minute;
tm_hour = x.hour;
tm_mday = x.day;
tm_mon = tm_int_of_month x.month;
tm_year = x.year;
tm_wday = 0;
tm_yday = 0;
tm_isdst = false;
} *)
(* let tm_of_unix_second ~(time_zone_of_tm : time_zone) (time : int64) : (Unix.tm, unit) result =
let time = Int64.to_float time in
match time_zone_of_tm with
| `Local -> Ok (Unix.localtime time)
| `UTC -> Ok (Unix.gmtime time)
| `UTC_plus_sec tz_offset_s ->
match Ptime.of_float_s time with
| None -> Error ()
| Ok x ->
x
|> Ptime.to_date_time ~tz_offset_s
|> date_time_of_ptime_date_time
let date_time = Ptime.of_float_s time in
CalendarLib.Calendar.convert date_time CalendarLib.Time_Zone.UTC
CalendarLib.Time_Zone.(UTC_Plus x)
|> CalendarLib.Calendar.to_unixtm
let unix_second_of_tm ~(time_zone_of_tm : time_zone) (tm : Unix.tm) : int64 =
tm
|> (fun x ->
match time_zone_of_tm with
| `Local ->
let time, _ = Unix.mktime tm in
time
| `UTC ->
x
|> CalendarLib.Calendar.from_unixtm
|> CalendarLib.Calendar.from_gmt
|> CalendarLib.Calendar.to_unixfloat
| `UTC_plus _ ->
let date_time = CalendarLib.Calendar.from_unixtm tm in
let tz = cal_time_zone_of_time_zone time_zone_of_tm in
CalendarLib.Calendar.convert date_time tz CalendarLib.Time_Zone.UTC
|> CalendarLib.Calendar.to_unixfloat)
|> fun time -> time |> Int64.of_float *)
(* let normalize_tm tm =
tm
|> zero_tm_sec
|> CalendarLib.Calendar.from_unixtm
|> CalendarLib.Calendar.to_unixtm
let tm_change_time_zone ~(from_time_zone : time_zone)
~(to_time_zone : time_zone) (tm : Unix.tm) : Unix.tm =
if from_time_zone = to_time_zone then tm
else
let time = unix_second_of_tm ~time_zone_of_tm:from_time_zone tm in
tm_of_unix_second ~time_zone_of_tm:to_time_zone time *)
(* Gregorian leap-year rule: every 4th year, except centuries that are not
   divisible by 400.  Only defined for non-negative years. *)
let is_leap_year ~year =
  assert (year >= 0);
  year mod 4 = 0 && (year mod 100 <> 0 || year mod 400 = 0)
(* Number of days in [year]: 366 for leap years, 365 otherwise. *)
let day_count_of_year ~year = if is_leap_year ~year then 366 else 365

(* Number of days in [month] of [year] (February depends on leap years). *)
let day_count_of_month ~year ~(month : month) =
  match month with
  | `Jan -> 31
  | `Feb -> if is_leap_year ~year then 29 else 28
  | `Mar -> 31
  | `Apr -> 30
  | `May -> 31
  | `Jun -> 30
  | `Jul -> 31
  | `Aug -> 31
  | `Sep -> 30
  | `Oct -> 31
  | `Nov -> 30
  | `Dec -> 31
(* Weekday of the given calendar date, via Ptime; [Error ()] when the
   date is invalid (e.g. Feb 30). *)
let weekday_of_month_day ~(year : int) ~(month : month) ~(mday : int) :
  (weekday, unit) result =
  match Ptime.(of_date (year, human_int_of_month month, mday)) with
  | None -> Error ()
  | Some date -> Ok (Ptime.weekday date)
(* let local_tm_to_utc_tm (tm : Unix.tm) : Unix.tm =
let timestamp, _ = Unix.mktime tm in
Unix.gmtime timestamp *)
(* Range modules for each time component, instantiated from the
   [Ranges_small.Make] functor.  [modulo = Some n] marks a cyclic domain
   (ranges may wrap around); [None] marks a linear one. *)

module Second_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = None

  let to_int x = x

  let of_int x = x
end)

module Minute_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = None

  let to_int x = x

  let of_int x = x
end)

module Hour_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = None

  let to_int x = x

  let of_int x = x
end)

(* Weekdays are cyclic with period 7. *)
module Weekday_tm_int_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = Some 7

  let to_int x = x

  let of_int x = x
end)

module Weekday_ranges = Ranges_small.Make (struct
  type t = weekday

  let modulo = Some 7

  let to_int = tm_int_of_weekday

  (* Safe: the functor only feeds back ints produced by [to_int] (0..6). *)
  let of_int x = x |> weekday_of_tm_int |> Result.get_ok
end)

module Month_day_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = None

  let to_int x = x

  let of_int x = x
end)

module Month_tm_int_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = None

  let to_int x = x

  let of_int x = x
end)

(* Months use the human 1..12 encoding here. *)
module Month_ranges = Ranges_small.Make (struct
  type t = month

  let modulo = None

  let to_int = human_int_of_month

  let of_int x = x |> month_of_human_int |> Result.get_ok
end)

module Year_ranges = Ranges_small.Make (struct
  type t = int

  let modulo = None

  let to_int x = x

  let of_int x = x
end)
(* Broken-down date-time carrying its own time-zone offset, with
   conversions to/from Ptime and unix seconds. *)
module Date_time = struct
  type t = {
    year : int;
    month : month;
    day : int;            (* day of month, 1-based *)
    hour : int;
    minute : int;
    second : int;
    tz_offset_s : int;    (* seconds east of UTC *)
  }

  let to_ptime_date_time (x : t) : Ptime.date * Ptime.time =
    ( (x.year, human_int_of_month x.month, x.day),
      ((x.hour, x.minute, x.second), x.tz_offset_s) )

  (* Fails only when the Ptime month number is out of 1..12. *)
  let of_ptime_date_time
      (((year, month, day), ((hour, minute, second), tz_offset_s)) :
         Ptime.date * Ptime.time) : (t, unit) result =
    match month_of_human_int month with
    | Ok month -> Ok { year; month; day; hour; minute; second; tz_offset_s }
    | Error () -> Error ()

  (* Seconds since the unix epoch; [Error ()] when the fields do not form a
     valid Ptime date-time.  NOTE(review): fractional seconds are truncated
     by [Int64.of_float]. *)
  let to_unix_second (x : t) : (int64, unit) result =
    match Ptime.of_date_time (to_ptime_date_time x) with
    | None -> Error ()
    | Some x -> x |> Ptime.to_float_s |> Int64.of_float |> Result.ok

  (* Broken-down time of a unix second, rendered in the requested zone
     (UTC when [tz_offset_s_of_date_time] is [None]). *)
  let of_unix_second ~(tz_offset_s_of_date_time : tz_offset_s option)
      (x : int64) : (t, unit) result =
    match Ptime.of_float_s (Int64.to_float x) with
    | None -> Error ()
    | Some x ->
      let tz_offset_s =
        resolve_current_tz_offset_s tz_offset_s_of_date_time
      in
      x |> Ptime.to_date_time ~tz_offset_s |> of_ptime_date_time

  (* Extremes of the representable Ptime range, in UTC. *)
  let min =
    Ptime.min |> Ptime.to_date_time |> of_ptime_date_time |> Result.get_ok

  let max =
    Ptime.max |> Ptime.to_date_time |> of_ptime_date_time |> Result.get_ok

  (* Lexicographic field-wise comparison.  NOTE(review): ignores
     [tz_offset_s], so it compares wall-clock fields, not instants. *)
  let compare (x : t) (y : t) : int =
    match compare x.year y.year with
    | 0 -> (
        match
          compare (human_int_of_month x.month) (human_int_of_month y.month)
        with
        | 0 -> (
            match compare x.day y.day with
            | 0 -> (
                match compare x.hour y.hour with
                | 0 -> (
                    match compare x.minute y.minute with
                    | 0 -> compare x.second y.second
                    | n -> n )
                | n -> n )
            | n -> n )
        | n -> n )
    | n -> n

  (* Clamp helpers: snap a date-time to the first/last representable point
     of the enclosing second/minute/hour/day/month/year. *)
  let set_to_first_sec (x : t) : t = { x with second = 0 }

  let set_to_last_sec (x : t) : t = { x with second = 59 }

  let set_to_first_min_sec (x : t) : t =
    { x with minute = 0 } |> set_to_first_sec

  let set_to_last_min_sec (x : t) : t =
    { x with minute = 59 } |> set_to_last_sec

  let set_to_first_hour_min_sec (x : t) : t =
    { x with hour = 0 } |> set_to_first_min_sec

  let set_to_last_hour_min_sec (x : t) : t =
    { x with hour = 23 } |> set_to_last_min_sec

  let set_to_first_day_hour_min_sec (x : t) : t =
    { x with day = 1 } |> set_to_first_hour_min_sec

  let set_to_last_day_hour_min_sec (x : t) : t =
    { x with day = day_count_of_month ~year:x.year ~month:x.month }
    |> set_to_last_hour_min_sec

  let set_to_first_month_day_hour_min_sec (x : t) : t =
    { x with month = `Jan } |> set_to_first_day_hour_min_sec

  let set_to_last_month_day_hour_min_sec (x : t) : t =
    { x with month = `Dec } |> set_to_last_day_hour_min_sec
end
(* Validation helpers for timestamps and broken-down time fields. *)
module Check = struct
  (* A unix second is valid iff it is representable as a [Date_time.t]. *)
  let unix_second_is_valid (x : int64) : bool =
    Result.is_ok (Date_time.of_unix_second ~tz_offset_s_of_date_time:None x)

  let second_is_valid ~(second : int) : bool = second >= 0 && second <= 59

  let minute_second_is_valid ~(minute : int) ~(second : int) : bool =
    minute >= 0 && minute <= 59 && second_is_valid ~second

  let hour_minute_second_is_valid ~(hour : int) ~(minute : int) ~(second : int)
    : bool =
    hour >= 0 && hour <= 23 && minute_second_is_valid ~minute ~second

  (* A [Date_time.t] is valid iff it converts to a unix second. *)
  let date_time_is_valid (x : Date_time.t) : bool =
    Result.is_ok (Date_time.to_unix_second x)
end
(* Next minute on a 24-hour clock: increments the minute, carrying into the
   hour and wrapping 23:59 -> 00:00.  [Error ()] on invalid input. *)
let next_hour_minute ~(hour : int) ~(minute : int) : (int * int, unit) result =
  if not (Check.hour_minute_second_is_valid ~hour ~minute ~second:0) then
    Error ()
  else if minute < 59 then Ok (hour, minute + 1)
  else Ok ((hour + 1) mod 24, 0)
(* Accessors for the current wall-clock time (side-effecting: read the
   system clock). *)
module Current = struct
  let cur_unix_second () : int64 = Unix.time () |> Int64.of_float

  let cur_date_time ~tz_offset_s_of_date_time : (Date_time.t, unit) result =
    cur_unix_second () |> Date_time.of_unix_second ~tz_offset_s_of_date_time

  let cur_tm_local () : Unix.tm = Unix.time () |> Unix.localtime

  let cur_tm_utc () : Unix.tm = Unix.time () |> Unix.gmtime
end
(* Parsing of weekday/month names.  Matching is by unambiguous prefix
   (via [Misc_utils.prefix_string_match]): e.g. "tue" -> [`Tue], but
   "j" is ambiguous among January/June/July and yields [Error ()].
   NOTE(review): inputs appear to be expected in lowercase — confirm
   whether [prefix_string_match] is case-insensitive. *)
module Of_string = struct
  let weekdays : (string * weekday) list =
    [
      ("sunday", `Sun);
      ("monday", `Mon);
      ("tuesday", `Tue);
      ("wednesday", `Wed);
      ("thursday", `Thu);
      ("friday", `Fri);
      ("saturday", `Sat);
    ]

  let months : (string * month) list =
    [
      ("january", `Jan);
      ("february", `Feb);
      ("march", `Mar);
      ("april", `Apr);
      ("may", `May);
      ("june", `Jun);
      ("july", `Jul);
      ("august", `Aug);
      ("september", `Sep);
      ("october", `Oct);
      ("november", `Nov);
      ("december", `Dec);
    ]

  let weekday_of_string (s : string) : (weekday, unit) result =
    match Misc_utils.prefix_string_match weekdays s with
    | [ (_, x) ] -> Ok x
    | _ -> Error ()

  let month_of_string (s : string) : (month, unit) result =
    match Misc_utils.prefix_string_match months s with
    | [ (_, x) ] -> Ok x
    | _ -> Error ()
end
(* Timestamp arithmetic. *)
module Add = struct
  (* Shift a unix timestamp by [days] whole days ([days] may be negative). *)
  let add_days_unix_second ~(days : int) (x : int64) : int64 =
    let seconds =
      Int64.mul (Int64.of_int days) Int64_multipliers.day_to_seconds
    in
    Int64.add seconds x
end
(* Conversions to/from the serialized representation ([Time_t]).  The
   serialized types coincide with the in-memory ones, so these are
   identities kept for interface stability. *)
module Serialize = struct
  let pack_weekday (x : weekday) : Time_t.weekday = x

  let pack_month (x : month) : Time_t.month = x
end

module Deserialize = struct
  let unpack_weekday (x : Time_t.weekday) : weekday = x

  let unpack_month (x : Time_t.month) : month = x
end
module To_string = struct
  (* Case requested for one rendered character. *)
  type case =
    | Upper
    | Lower

  (* Rendering width of a name: [Abbreviated] gives three characters with
     per-character casing; [Full] gives the whole name with a casing for the
     first character and one for the rest. *)
  type size_and_casing =
    | Abbreviated of case * case * case
    | Full of case * case

  let map_char_to_case (case : case) (c : char) =
    match case with
    | Upper -> Char.uppercase_ascii c
    | Lower -> Char.lowercase_ascii c
  (* Apply a [size_and_casing] spec to [s].
     NOTE(review): [Abbreviated] indexes s.[0..2] and so raises
     [Invalid_argument] on strings shorter than 3 characters — callers pass
     full weekday/month names, which are always long enough. *)
  let map_string_to_size_and_casing (x : size_and_casing) (s : string) : string
    =
    match x with
    | Abbreviated (case1, case2, case3) ->
      let c1 = map_char_to_case case1 s.[0] in
      let c2 = map_char_to_case case2 s.[1] in
      let c3 = map_char_to_case case3 s.[2] in
      Printf.sprintf "%c%c%c" c1 c2 c3
    | Full (case1, case2) ->
      String.mapi
        (fun i c ->
           if i = 0 then map_char_to_case case1 c else map_char_to_case case2 c)
        s
let pad_int (c : char option) (x : int) : string =
match c with
| None -> string_of_int x
| Some c -> if x < 10 then Printf.sprintf "%c%d" c x else string_of_int x
let full_string_of_weekday (wday : weekday) : string =
match wday with
| `Sun -> "Sunday"
| `Mon -> "Monday"
| `Tue -> "Tuesday"
| `Wed -> "Wednesday"
| `Thu -> "Thursday"
| `Fri -> "Friday"
| `Sat -> "Saturday"
let abbreviated_string_of_weekday (wday : weekday) : string =
String.sub (full_string_of_weekday wday) 0 3
let full_string_of_month (month : month) : string =
match month with
| `Jan -> "January"
| `Feb -> "February"
| `Mar -> "March"
| `Apr -> "April"
| `May -> "May"
| `Jun -> "June"
| `Jul -> "July"
| `Aug -> "August"
| `Sep -> "September"
| `Oct -> "October"
| `Nov -> "November"
| `Dec -> "December"
let abbreviated_string_of_month (month : month) : string =
String.sub (full_string_of_month month) 0 3
  (* "YYYY Mon DD HH:MM:SS" rendering of a [Unix.tm] (month abbreviated).
     [Error ()] only when [tm_mon] is outside 0..11. *)
  let yyyymondd_hhmmss_string_of_tm (tm : Unix.tm) : (string, unit) result =
    match month_of_tm_int tm.tm_mon with
    | Ok mon ->
      let mon = abbreviated_string_of_month mon in
      Ok
        (Printf.sprintf "%04d %s %02d %02d:%02d:%02d"
           (tm.tm_year + tm_year_offset)
           mon tm.tm_mday tm.tm_hour tm.tm_min tm.tm_sec)
    | Error () -> Error ()

  (* Same rendering for a [Date_time.t] (always succeeds). *)
  let yyyymondd_hhmmss_string_of_date_time (x : Date_time.t) : string =
    let mon = abbreviated_string_of_month x.month in
    Printf.sprintf "%04d %s %02d %02d:%02d:%02d" x.year mon x.day x.hour
      x.minute x.second

  (* Same rendering for a unix second, displayed in the given zone
     (UTC when [None]). *)
  let yyyymondd_hhmmss_string_of_unix_second
      ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) :
    (string, unit) result =
    Date_time.of_unix_second ~tz_offset_s_of_date_time:display_using_tz_offset_s
      time
    |> Result.map yyyymondd_hhmmss_string_of_date_time
(* let yyyymmdd_hhmmss_string_of_tm (tm : Unix.tm) : (string, unit) result =
match month_of_tm_int tm.tm_mon with
| Ok mon ->
let mon = human_int_of_month mon in
Ok
(Printf.sprintf "%04d-%02d-%02d %02d:%02d:%02d"
(tm.tm_year + tm_year_offset)
mon tm.tm_mday tm.tm_hour tm.tm_min tm.tm_sec)
| Error () -> Error () *)
  (* "YYYY-MM-DD HH:MM:SS" rendering of a [Date_time.t] (numeric month). *)
  let yyyymmdd_hhmmss_string_of_date_time (x : Date_time.t) : string =
    let mon = human_int_of_month x.month in
    Printf.sprintf "%04d-%02d-%02d %02d:%02d:%02d" x.year mon x.day x.hour
      x.minute x.second

  (* Same rendering for a unix second, displayed in the given zone
     (UTC when [None]). *)
  let yyyymmdd_hhmmss_string_of_unix_second
      ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) :
    (string, unit) result =
    Date_time.of_unix_second ~tz_offset_s_of_date_time:display_using_tz_offset_s
      time
    |> Result.map yyyymmdd_hhmmss_string_of_date_time
(*let yyyymondd_hhmm_string_of_tm (tm : Unix.tm) : (string, unit) result =
match month_of_tm_int tm.tm_mon with
| Ok mon ->
let mon = string_of_month mon in
Ok
(Printf.sprintf "%04d %s %02d %02d:%02d"
(tm.tm_year + tm_year_offset)
mon tm.tm_mday tm.tm_hour tm.tm_min)
| Error () -> Error ()
*)
  (* "YYYY Mon DD HH:MM" rendering (abbreviated month, no seconds). *)
  let yyyymondd_hhmm_string_of_date_time (x : Date_time.t) : string =
    let mon = abbreviated_string_of_month x.month in
    Printf.sprintf "%04d %s %02d %02d:%02d" x.year mon x.day x.hour x.minute

  let yyyymondd_hhmm_string_of_unix_second
      ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) :
    (string, unit) result =
    Date_time.of_unix_second ~tz_offset_s_of_date_time:display_using_tz_offset_s
      time
    |> Result.map yyyymondd_hhmm_string_of_date_time
(* let yyyymmdd_hhmm_string_of_tm (tm : Unix.tm) : (string, unit) result =
match month_of_tm_int tm.tm_mon with
| Ok mon ->
let mon = human_int_of_month mon in
Ok
(Printf.sprintf "%04d-%02d-%02d %02d:%02d"
(tm.tm_year + tm_year_offset)
mon tm.tm_mday tm.tm_hour tm.tm_min)
| Error () -> Error () *)
let yyyymmdd_hhmm_string_of_date_time (x : Date_time.t) : string =
let mon = human_int_of_month x.month in
Printf.sprintf "%04d-%02d-%02d %02d:%02d" x.year mon x.day x.hour x.minute
(* "YYYY-MM-DD HH:MM" rendering of a unix timestamp; [Error ()] if the
   timestamp cannot be converted with the given time zone offset. *)
let yyyymmdd_hhmm_string_of_unix_second
    ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) :
  (string, unit) result =
  Date_time.of_unix_second ~tz_offset_s_of_date_time:display_using_tz_offset_s
    time
  |> Result.map yyyymmdd_hhmm_string_of_date_time
(* MParser combinators for the "{...}" directives of the format strings
   accepted by [string_of_date_time] and [string_of_time_slot]. *)
module Format_string_parsers = struct
  open MParser

  (* One casing flag: 'x' -> Lower, 'X' -> Upper. *)
  let case : (case, unit) t =
    attempt (char 'x' >> return Lower) <|> (char 'X' >> return Upper)

  (* Two casing flags followed by '*' select the full form; three casing
     flags select the abbreviated form. *)
  let size_and_casing : (size_and_casing, unit) t =
    case
    >>= fun c1 ->
    case
    >>= fun c2 ->
    attempt (char '*' >> return (Full (c1, c2)))
    <|> (case >>= fun c3 -> return (Abbreviated (c1, c2, c3)))

  (* "cX" -> pad with character [c]; a bare "X" -> no padding. *)
  let padding : (char option, unit) t =
    attempt
      ( satisfy (fun _ -> true)
        >>= fun padding -> char 'X' >> return (Some padding) )
    <|> (char 'X' >> return None)

  (* Parse a single directive (the text between '{' and '}') and return
     its rendering for [date_time].  Directives: year, mon:, mday:,
     wday:, hour:, 12hour:, min:, sec:, unix. *)
  let inner (date_time : Date_time.t) : (string, unit) t =
    choice
      [
        attempt (string "year") >> return (string_of_int date_time.year);
        ( attempt (string "mon:")
          >> size_and_casing
          >>= fun x ->
          return
            (map_string_to_size_and_casing x
               (full_string_of_month date_time.month)) );
        ( attempt (string "mday:")
          >> padding
          >>= fun padding -> return (pad_int padding date_time.day) );
        ( attempt (string "wday:")
          >> size_and_casing
          >>= fun x ->
          (* The weekday is recomputed from the civil date; an invalid
             date makes the whole parse fail. *)
          match
            weekday_of_month_day ~year:date_time.year ~month:date_time.month
              ~mday:date_time.day
          with
          | Error () -> fail "Invalid date time"
          | Ok wday ->
            return
              (map_string_to_size_and_casing x (full_string_of_weekday wday))
        );
        attempt
          ( string "hour:"
            >> padding
            >>= fun padding -> return (pad_int padding date_time.hour) );
        attempt
          ( string "12hour:"
            >> padding
            >>= fun padding ->
            (* 0 o'clock is displayed as 12 on a 12-hour clock. *)
            let hour =
              if date_time.hour = 0 then 12 else date_time.hour mod 12
            in
            return (pad_int padding hour) );
        attempt
          ( string "min:"
            >> padding
            >>= fun padding -> return (pad_int padding date_time.minute) );
        attempt
          ( string "sec:"
            >> padding
            >>= fun padding -> return (pad_int padding date_time.second) );
        ( string "unix"
          >>
          match Date_time.to_unix_second date_time with
          | Error () -> fail "Invalid date time"
          | Ok sec -> return (Int64.to_string sec) );
      ]
end
(* Render [x] according to [format].  "{{" escapes a literal '{';
   "{...}" holds a directive handled by [Format_string_parsers.inner];
   everything else is copied verbatim.  Parse errors are returned as a
   human-readable [Error] message. *)
let string_of_date_time ~(format : string) (x : Date_time.t) :
  (string, string) result =
  let open MParser in
  let open Parser_components in
  let single (date_time : Date_time.t) : (string, unit) t =
    choice
      [
        attempt (string "{{" >> return "{");
        attempt (char '{')
        >> Format_string_parsers.inner date_time
        << char '}';
        (many1_satisfy (function '{' -> false | _ -> true) |>> fun s -> s);
      ]
  in
  let p (date_time : Date_time.t) : (string list, unit) t =
    many (single date_time)
  in
  parse_string (p x << eof) format ()
  |> result_of_mparser_result
  |> Result.map (fun l -> String.concat "" l)
(* [string_of_date_time] applied to a unix timestamp; the timestamp is
   first converted with the given time zone offset. *)
let string_of_unix_second ~format
    ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) :
  (string, string) result =
  match
    Date_time.of_unix_second
      ~tz_offset_s_of_date_time:display_using_tz_offset_s time
  with
  | Error () -> Error "Invalid unix second"
  | Ok dt -> string_of_date_time ~format dt
(* Render a time slot [(s, e)] according to [format].  Same syntax as
   [string_of_date_time], except each directive must pick a side first:
   "{s...}" formats the start time and "{e...}" the end time. *)
let string_of_time_slot ~(format : string)
    ~(display_using_tz_offset_s : tz_offset_s option) ((s, e) : Time_slot.t) :
  (string, string) result =
  let open MParser in
  let open Parser_components in
  let single (start_date_time : Date_time.t) (end_date_time : Date_time.t) :
    (string, unit) t =
    choice
      [
        attempt (string "{{" >> return "{");
        ( attempt (char '{')
          >> ( attempt (char 's' >> return start_date_time)
               <|> (char 'e' >> return end_date_time) )
          >>= fun date_time -> Format_string_parsers.inner date_time << char '}'
        );
        ( many1_satisfy (function '{' -> false | _ -> true)
          >>= fun s -> return s );
      ]
  in
  let p (start_date_time : Date_time.t) (end_date_time : Date_time.t) :
    (string list, unit) t =
    many (single start_date_time end_date_time)
  in
  (* Both endpoints are converted before the format is parsed, so a bad
     timestamp is reported ahead of any format error. *)
  match
    Date_time.of_unix_second
      ~tz_offset_s_of_date_time:display_using_tz_offset_s s
  with
  | Error () -> Error "Invalid start unix time"
  | Ok s -> (
      match
        Date_time.of_unix_second
          ~tz_offset_s_of_date_time:display_using_tz_offset_s e
      with
      | Error () -> Error "Invalid end unix time"
      | Ok e ->
        parse_string
          ( p s e
            >>= fun s ->
            get_pos
            >>= fun pos ->
            attempt eof
            >> return s
               <|> fail
                 (Printf.sprintf "Expected EOI, pos: %s" (string_of_pos pos))
          )
          format ()
        |> result_of_mparser_result
        |> Result.map (fun l -> String.concat "" l) )
(* Append a "YYYY Mon DD HH:MM:SS" line (or "Invalid time") for [time]
   to [buffer] at the given indentation and return the buffer contents. *)
let debug_string_of_time ?(indent_level = 0) ?(buffer = Buffer.create 4096)
    ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) : string
  =
  ( match
      yyyymondd_hhmmss_string_of_unix_second ~display_using_tz_offset_s time
    with
    | Error () -> Debug_print.bprintf ~indent_level buffer "Invalid time\n"
    | Ok s -> Debug_print.bprintf ~indent_level buffer "%s\n" s );
  Buffer.contents buffer
end
(* Console counterpart of [To_string]: print instead of returning. *)
module Print = struct
  (* Print the debug rendering of [time] to stdout. *)
  let debug_print_time ?(indent_level = 0)
      ~(display_using_tz_offset_s : tz_offset_s option) (time : int64) : unit =
    print_string
      (To_string.debug_string_of_time ~indent_level ~display_using_tz_offset_s
         time)
end
(* Sets of date-times ordered by [Date_time.compare]. *)
module Date_time_set = Set.Make (struct
  type t = Date_time.t

  let compare = Date_time.compare
end)
| |
test.ml |
(* Separator emitted between JSON array elements and object bindings. *)
let comma =
  let open Farfadet in
  ((fun enc () -> string enc ","), ())
(* Blit [len] bytes of [s], starting at [src_off], into the bigarray [d]
   at [dst_off]. *)
let memcpy s ~src_off d ~dst_off ~len =
  let rec copy i =
    if i < len then begin
      Bigarray.Array1.set d (dst_off + i) (String.get s (src_off + i));
      copy (i + 1)
    end
  in
  copy 0
(* XXX(dinosaure): this code is WRONG. It's just a test and we don't care if we
respect the standard. Don't copy/paste. *)
(* Serializer for an arbitrary JSON value.  As warned above, strings are
   emitted raw between double quotes — no escaping is performed. *)
let rec value : Ezjsonm.value Farfadet.t = fun e x ->
  let open Farfadet in
  (* One object binding: "key":value (key unescaped). *)
  let binding e (k, v) = eval e [ char $ '"'; !!string; char $ '"'; char $ ':'; !!value ] k v in
  let arr = list ~sep:comma value in
  let obj = list ~sep:comma binding in
  match x with
  | `Bool true -> string e "true"
  | `Bool false -> string e "false"
  | `Float f -> string e (Format.sprintf "%.16g" f)
  | `Null -> string e "null"
  | `String s -> eval e [ char $ '"'; !!(whole @@ blitter String.length memcpy); char $ '"' ] s (* just for fun *)
  | `A a -> eval e [ char $ '['; !!arr; char $ ']' ] a
  | `O o -> eval e [ char $ '{'; !!obj; char $ '}' ] o
(* Serializer for a toplevel JSON document.  The match is deliberately
   partial: only arrays and objects are accepted at toplevel, a bare
   scalar raises [Match_failure]. *)
let json e x =
  let open Farfadet in
  let binding e (k, v) = eval e [ char $ '"'; !!string; char $ '"'; char $ ':'; !!value ] k v in
  let arr = list ~sep:comma value in
  let obj = list ~sep:comma binding in
  match x with
  | `A a -> eval e [ char $ '['; !!arr; char $ ']' ] a
  | `O o -> eval e [ char $ '{'; !!obj; char $ '}' ] o
(* Round-trip fixtures; toplevel values are arrays/objects only, as
   required by [json]. *)
let tests =
  [ `A [ `Bool true ]
  ; `O [ ("a", `A [ `Bool true; `Bool false]) ]
  ; `A [ `O [ ("a", `Bool true); ("b", `Bool false) ] ] ]
(* Alcotest testable for Ezjsonm values: printed with
   [Ezjsonm.to_string], compared with polymorphic equality. *)
let ezjsonm =
  Alcotest.testable
    (fun fmt value -> Format.fprintf fmt "%s" (Ezjsonm.to_string value))
    (=)
(* Build one Alcotest case: serialize [value] with Farfadet, parse the
   bytes back with Ezjsonm and check the round-trip is the identity. *)
let make_test value =
  Format.sprintf "%s" (Ezjsonm.to_string value),
  `Slow,
  (fun () ->
    let enc = Faraday.create 0x800 in
    Farfadet.(eval enc [ !!json ] value);
    let str = Faraday.serialize_to_string enc in
    let res = Ezjsonm.from_string str in
    Alcotest.(check ezjsonm) str value res)
(* Entry point: run the round-trip suite. *)
let () =
  Alcotest.run "farfadet test"
    [ "simple", List.map make_test tests ]
| |
smt2_parser.c |
/*
* Parser for the SMT-LIB 2.0 language
*/
#include <setjmp.h>
#include <inttypes.h>
#include "frontend/smt2/smt2_commands.h"
#include "frontend/smt2/smt2_lexer.h"
#include "frontend/smt2/smt2_parse_tables.h"
#include "frontend/smt2/smt2_parser.h"
#include "frontend/smt2/smt2_term_stack.h"
/*
* Short cuts to save typing
*/
/* Text of the current token, as held by the lexer. */
static inline char *tkval(lexer_t *lex) {
  return current_token_value(lex);
}
/* Length of the current token's text. */
static inline uint32_t tklen(lexer_t *lex) {
  return current_token_length(lex);
}
/*
* Marker for the bottom of the state stack
*/
/* NSTATES is one past the largest real state id, so `done` cannot
   collide with any parser state. */
enum {
  done = NSTATES,
};
/*
* Read action from the tables in smt2_parse_tables.h
*/
/*
 * Look up the action for (state s, token tk) in the compressed parse
 * tables: row s starts at base[s]; the entry at base[s]+tk is valid
 * only if check[] maps it back to s, otherwise the state's default
 * action applies.
 */
static smt2_action_t get_action(state_t s, smt2_token_t tk) {
  int32_t i;

  i = base[s] + tk;
  if (check[i] == s) {
    return value[i];
  } else {
    return default_value[s];
  }
}
/*
* Main parsing procedure
* - start = initial state
* return -1 on error, 0 otherwise
*/
/*
 * Table-driven automaton written with explicit labels: `loop` fetches
 * the next token, `skip_token` re-dispatches on the current one.
 * Semantic work is pushed onto the term stack (tstack); term-stack
 * exceptions longjmp back into the setjmp below.
 */
static int32_t smt2_parse(parser_t *parser, state_t start) {
  smt2_token_t token;
  smt2_keyword_t kw;
  parser_state_t state;
  parser_stack_t *stack;
  lexer_t *lex;
  tstack_t *tstack;
  int exception;
  loc_t loc;
  loc_t saved_loc; // used to store location of (as ...
  bool keep_tokens;
  etk_queue_t *token_queue;

  stack = &parser->pstack;
  lex = parser->lex;
  tstack = parser->tstack;

  assert(parser_stack_is_empty(stack));
  assert(tstack_is_empty(tstack));

  /*
   * keep_tokens: when true, all tokens received from the lexer are
   * pushed into the SMT2 global token queue. This enables SMT2
   * commands to print SMT2 expressions as they appear in the input.
   */
  keep_tokens = false;
  token_queue = NULL;

  // To catch exceptions in term-stack operations
  exception = setjmp(tstack->env);
  if (exception == 0) {
    parser_push_state(stack, done);
    state = start;

  loop:
    // jump here for actions that consume the current token
    token = next_smt2_token(lex);
    loc.line = current_token_line(lex);
    loc.column = current_token_column(lex);
    if (keep_tokens) {
      assert(token_queue != NULL);
      push_smt2_token(token_queue, token, tkval(lex), tklen(lex));
    }

  skip_token:
    // jump here for actions that don't consume the token
    switch (get_action(state, token)) {
    case next_goto_c1:
      state = c1;
      goto loop;

    case empty_command_return:
      tstack_push_op(tstack, SMT2_SILENT_EXIT, &loc);
      tstack_eval(tstack);
      state = parser_pop_state(stack);
      assert(state == done);
      goto the_end;

    case check_sat_next_goto_r0:
      tstack_push_op(tstack, SMT2_CHECK_SAT, &loc);
      state = r0;
      goto loop;

    case get_assertions_next_goto_r0:
      tstack_push_op(tstack, SMT2_GET_ASSERTIONS, &loc);
      state = r0;
      goto loop;

    case get_proof_next_goto_r0:
      tstack_push_op(tstack, SMT2_GET_PROOF, &loc);
      state = r0;
      goto loop;

    case get_unsat_core_next_goto_r0:
      tstack_push_op(tstack, SMT2_GET_UNSAT_CORE, &loc);
      state = r0;
      goto loop;

    case get_assignment_next_goto_r0:
      tstack_push_op(tstack, SMT2_GET_ASSIGNMENT, &loc);
      state = r0;
      goto loop;

    case exit_next_goto_r0:
      tstack_push_op(tstack, SMT2_EXIT, &loc);
      state = r0;
      goto loop;

    case push_next_goto_c3:
      tstack_push_op(tstack, SMT2_PUSH, &loc);
      state = c3;
      goto loop;

    case pop_next_goto_c3:
      tstack_push_op(tstack, SMT2_POP, &loc);
      state = c3;
      goto loop;

    case get_option_next_goto_c4:
      tstack_push_op(tstack, SMT2_GET_OPTION, &loc);
      state = c4;
      goto loop;

    case get_info_next_goto_c4:
      tstack_push_op(tstack, SMT2_GET_INFO, &loc);
      state = c4;
      goto loop;

    case set_logic_next_goto_c5:
      tstack_push_op(tstack, SMT2_SET_LOGIC, &loc);
      state = c5;
      goto loop;

    case set_option_next_goto_c6:
      tstack_push_op(tstack, SMT2_SET_OPTION, &loc);
      state = c6;
      goto loop;

    case set_info_next_goto_c6:
      tstack_push_op(tstack, SMT2_SET_INFO, &loc);
      state = c6;
      goto loop;

    case assert_next_push_r0_goto_t0:
      tstack_push_op(tstack, SMT2_ASSERT, &loc);
      parser_push_state(stack, r0);
      state = t0;
      goto loop;

    case declare_sort_next_goto_c8:
      tstack_push_op(tstack, SMT2_DECLARE_SORT, &loc);
      state = c8;
      goto loop;

    case define_sort_next_goto_c9:
      tstack_push_op(tstack, SMT2_DEFINE_SORT, &loc);
      state = c9;
      goto loop;

    case declare_fun_next_goto_c10:
      tstack_push_op(tstack, SMT2_DECLARE_FUN, &loc);
      state = c10;
      goto loop;

    case define_fun_next_goto_c11:
      tstack_push_op(tstack, SMT2_DEFINE_FUN, &loc);
      state = c11;
      goto loop;

    case get_value_next_goto_c12:
      /*
       * Activate the keep_tokens hack here
       * We push the two tokens '(' 'get-value' into the token_queue
       */
      keep_tokens = true;
      token_queue = smt2_token_queue();
      push_smt2_token(token_queue, SMT2_TK_LP, NULL, 0);
      push_smt2_token(token_queue, token, tkval(lex), tklen(lex));
      // now proceed as normal: push the command
      tstack_push_op(tstack, SMT2_GET_VALUE, &loc);
      state = c12;
      goto loop;

    case get_model_next_goto_r0:
      tstack_push_op(tstack, SMT2_GET_MODEL, &loc);
      state = r0;
      goto loop;

    case echo_next_goto_c13:
      tstack_push_op(tstack, SMT2_ECHO, &loc);
      state = c13;
      goto loop;

    case reset_next_goto_r0:
      tstack_push_op(tstack, SMT2_RESET, &loc);
      state = r0;
      goto loop;

    case numeral_next_goto_r0:
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = r0;
      goto loop;

    case keyword_next_goto_r0:
    case symbol_next_goto_r0:
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      state = r0;
      goto loop;

    case keyword_next_goto_c6a:
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      state = c6a;
      goto loop;

    case next_return:
      // eval current command
      assert(! parser_stack_is_empty(stack));
      tstack_eval(tstack);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case push_r0_goto_a0:
      parser_push_state(stack, r0);
      state = a0;
      goto skip_token;

    case symbol_next_goto_c3:
      // in (declare-sort <symbol> ..)
      //      tstack_push_free_type_or_macro_name(tstack, tkval(lex), tklen(lex), &loc);
      tstack_push_free_sort_name(tstack, tkval(lex), tklen(lex), &loc);
      state = c3;
      goto loop;

    case symbol_next_goto_c9a:
      // in (define-sort <symbol> ...)
      //      tstack_push_free_type_or_macro_name(tstack, tkval(lex), tklen(lex), &loc);
      tstack_push_free_sort_name(tstack, tkval(lex), tklen(lex), &loc);
      state = c9a;
      goto loop;

    case next_goto_c9b:
      state = c9b;
      goto loop;

    case next_push_r0_goto_s0:
      parser_push_state(stack, r0);
      state = s0;
      goto loop;

    case symbol_next_goto_c9b:
      // in (define-sort .. (... <symbol> ...) ...)
      // type variable
      tstack_push_op(tstack, DECLARE_TYPE_VAR, &loc);
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      tstack_eval(tstack); // eval DECLARE_TYPE_VAR
      state = c9b;
      goto loop;

    case symbol_next_goto_c10a:
      // in (declare-fun <symbol> ...)
      //      tstack_push_free_termname(tstack, tkval(lex), tklen(lex), &loc);
      tstack_push_free_fun_name(tstack, tkval(lex), tklen(lex), &loc);
      state = c10a;
      goto loop;

    case next_goto_c10b:
      state = c10b;
      goto loop;

    case push_c10b_goto_s0:
      parser_push_state(stack, c10b);
      state = s0;
      goto skip_token;

    case symbol_next_goto_c11a:
      // in (define-fun <symbol> ...)
      //      tstack_push_free_termname(tstack, tkval(lex), tklen(lex), &loc);
      tstack_push_free_fun_name(tstack, tkval(lex), tklen(lex), &loc);
      state = c11a;
      goto loop;

    case next_goto_c11b:
      state = c11b;
      goto loop;

    case next_push_r0_push_t0_goto_s0:
      parser_push_state(stack, r0);
      parser_push_state(stack, t0);
      state = s0;
      goto loop;

    case next_goto_c11d:
      state = c11d;
      goto loop;

    case symbol_next_push_c11f_goto_s0:
      // in (define-fun ... ( .. (<symbol> <sort> ) ... ) ...)
      // variable of the given <sort>
      tstack_push_op(tstack, DECLARE_VAR, &loc);
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, c11f);
      state = s0;
      goto loop;

    case eval_next_goto_c11b:
      // evaluate the DECLARE_VAR
      tstack_eval(tstack);
      state = c11b;
      goto loop;

    case next_push_c12b_goto_t0:
      parser_push_state(stack, c12b);
      state = t0;
      goto loop;

    case string_next_goto_r0:
      // string argument to echo
      tstack_push_string(tstack, tkval(lex), tklen(lex), &loc);
      state = r0;
      goto loop;

    case next_goto_r0:
      state = r0;
      goto loop;

    case push_c12b_goto_t0:
      parser_push_state(stack, c12b);
      state = t0;
      goto skip_token;

    case numeral_next_return:
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case decimal_next_return:
      tstack_push_float(tstack, tkval(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case hexadecimal_next_return:
      // skip the prefix '#x'
      assert(tklen(lex) > 2);
      tstack_push_bvhex(tstack, tkval(lex) + 2, tklen(lex) - 2, &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case binary_next_return:
      // skip the prefix '#b'
      assert(tklen(lex) > 2);
      tstack_push_bvbin(tstack, tkval(lex) + 2, tklen(lex) - 2, &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case string_next_return:
      tstack_push_string(tstack, tkval(lex), tklen(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case symbol_next_return:
      // in attribute value
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case next_goto_a1:
      // start of s-expression as attribute value
      tstack_push_op(tstack, SMT2_MAKE_ATTR_LIST, &loc);
      state = a1;
      goto loop;

    case push_a1_goto_v0:
      parser_push_state(stack, a1);
      state = v0;
      goto skip_token;

    case keyword_next_return:
      // in attribute value
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case sort_symbol_next_return:
      // sort name
      tstack_push_sort_name(tstack, tkval(lex), tklen(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case next_goto_s1:
      state = s1;
      goto loop;

    case next_goto_s2:
      state = s2;
      goto loop;

    case next_goto_s5:
      state = s5;
      goto loop;

    case symbol_next_push_s10_goto_s0:
      // sort constructor in ( <symbol> <sort> ... <sort> )
      tstack_push_sort_constructor(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, s10);
      state = s0;
      goto loop;

    case symbol_next_goto_s3:
      // indexed sort in (_ <symbol> <idx> .. <idx> )
      tstack_push_idx_sort(tstack, tkval(lex), tklen(lex), &loc);
      state = s3;
      goto loop;

    case numeral_next_goto_s4:
      // index in indexed sort
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = s4;
      goto loop;

    case next_goto_s6:
      state = s6;
      goto loop;

    case symbol_next_goto_s7:
      // indexed sort constructor
      // in ((_ <symbol> <idx> ... <idx>) <sort> ... <sort>)
      tstack_push_idx_sort_constructor(tstack, tkval(lex), tklen(lex), &loc);
      state = s7;
      goto loop;

    case numeral_next_goto_s8:
      // <idx> in indexed sort constructor
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = s8;
      goto loop;

    case next_push_s10_goto_s0:
      parser_push_state(stack, s10);
      state = s0;
      goto loop;

    case push_s10_goto_s0:
      parser_push_state(stack, s10);
      state = s0;
      goto skip_token;

    case term_symbol_next_return:
      // term name
      tstack_push_term_name(tstack, tkval(lex), tklen(lex), &loc);
      state = parser_pop_state(stack);
      if (state == done) {
        goto the_end;
      }
      goto loop;

    case next_goto_t1:
      state = t1;
      goto loop;

    case next_goto_t2:
      // (let
      tstack_push_op(tstack, LET, &loc);
      state = t2;
      goto loop;

    case forall_next_goto_t3:
      // (forall
      tstack_push_op(tstack, MK_FORALL, &loc);
      state = t3;
      goto loop;

    case exists_next_goto_t3:
      // (exists
      tstack_push_op(tstack, MK_EXISTS, &loc);
      state = t3;
      goto loop;

    case next_push_t4a_goto_t0:
      // (!
      tstack_push_op(tstack, SMT2_ADD_ATTRIBUTES, &loc);
      parser_push_state(stack, t4a);
      state = t0;
      goto loop;

    case next_goto_t5:
      // (as
      saved_loc = loc;
      state = t5;
      goto loop;

    case next_goto_t6:
      // ((
      state = t6;
      goto loop;

    case next_goto_t7:
      // (_
      state = t7;
      goto loop;

    case symbol_next_push_t8a_goto_t0:
      // function name in (<symbol> <term> .... <term>)
      tstack_push_smt2_op(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, t8a);
      state = t0;
      goto loop;

    case next_goto_t2a:
      state = t2a;
      goto loop;

    case next_goto_t2b:
      state = t2b;
      goto loop;

    case symbol_next_push_t2d_goto_t0:
      // in (let (.. (<symbol> <term>) ...) ...)
      tstack_push_op(tstack, BIND, &loc);
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, t2d);
      state = t0;
      goto loop;

    case next_goto_t2e:
      tstack_eval(tstack); // eval the BIND
      state = t2e;
      goto loop;

    case next_push_r0_goto_t0:
      parser_push_state(stack, r0);
      state = t0;
      goto loop;

    case next_goto_t3a:
      state = t3a;
      goto loop;

    case next_goto_t3b:
      state = t3b;
      goto loop;

    case symbol_next_push_t3d_goto_s0:
      // in (exists/forall (.. (<symbol <sort>) ...) ...)
      tstack_push_op(tstack, DECLARE_VAR, &loc);
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, t3d);
      state = s0;
      goto loop;

    case next_goto_t3e:
      tstack_eval(tstack); // eval DECLARE_VAR
      state = t3e;
      goto loop;

    case check_keyword_then_branch:
      // in (! <term> .. <keyword> <attribute-value> ...)
      // We push the keyword in all cases as tstack's add_attributes requires a keyword.
      // We ignore anything other than :named or :pattern
      kw = smt2_string_to_keyword(tkval(lex), tklen(lex));
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      if (kw == SMT2_KW_NAMED) {
        state = t4d;
      } else if (kw == SMT2_KW_PATTERN) {
        state = t4e;
      } else {
        state = t4b;
      }
      goto loop;

    case push_t4c_goto_a0:
      parser_push_state(stack, t4c);
      state = a0;
      goto skip_token;

    case symbol_next_goto_t4c:
      // <symbol> as :named attribute
      // in (! <term> ... :named <symbol> ...)
      tstack_push_symbol(tstack, tkval(lex), tklen(lex), &loc);
      state = t4c;
      goto loop;

    case next_push_t4g_goto_t0:
      parser_push_state(stack, t4g);
      state = t0;
      goto loop;

    case next_goto_t4c:
      state = t4c;
      goto loop;

    case push_t4g_goto_t0:
      parser_push_state(stack, t4g);
      state = t0;
      goto skip_token;

    case next_goto_t5a:
      state = t5a;
      goto loop;

    case symbol_next_push_r0_goto_s0:
      // in (as <symbol> <sort> )
      tstack_push_op(tstack, SMT2_SORTED_TERM, &saved_loc);
      tstack_push_qual_term_name(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, r0);
      state = s0;
      goto loop;

    case next_goto_t5b:
      state = t5b;
      goto loop;

    case symbol_next_goto_t5c:
      // in (as (_ <symbol> ...) <sort> )
      tstack_push_op(tstack, SMT2_SORTED_INDEXED_TERM, &saved_loc);
      tstack_push_qual_idx_term_name(tstack, tkval(lex), tklen(lex), &loc);
      state = t5c;
      goto loop;

    case numeral_next_goto_t5d:
      // push number
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = t5d;
      goto loop;

    case next_goto_t6a:
      // ((as
      saved_loc = loc;
      state = t6a;
      goto loop;

    case next_goto_t6h:
      state = t6h;
      goto loop;

    case next_goto_t6b:
      state = t6b;
      goto loop;

    case symbol_next_push_t6g_goto_s0:
      // in ((as <symbol> <sort>) <arg> ... <arg>)
      tstack_push_op(tstack, SMT2_SORTED_APPLY, &saved_loc);
      tstack_push_qual_smt2_op(tstack, tkval(lex), tklen(lex), &loc);
      parser_push_state(stack, t6g);
      state = s0;
      goto loop;

    case next_goto_t6c:
      state = t6c;
      goto loop;

    case symbol_next_goto_t6d:
      // in ((as (_ <symbol> ...) <sort> ) <arg> ... <arg> )
      tstack_push_op(tstack, SMT2_SORTED_INDEXED_APPLY, &saved_loc);
      tstack_push_qual_smt2_idx_op(tstack, tkval(lex), tklen(lex), &loc);
      state = t6d;
      goto loop;

    case numeral_next_goto_t6e:
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = t6e;
      goto loop;

    case next_push_t6g_goto_s0:
      parser_push_state(stack, t6g);
      state = s0;
      goto loop;

    case next_push_t8a_goto_t0:
      parser_push_state(stack, t8a);
      state = t0;
      goto loop;

    case symbol_next_goto_t6i:
      // in ((_ <symbol> ,,, ) <arg> ... <arg> )
      tstack_push_smt2_idx_op(tstack, tkval(lex), tklen(lex), &loc);
      state = t6i;
      goto loop;

    case numeral_next_goto_t6j:
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = t6j;
      goto loop;

    case symbol_next_goto_t7a:
      // in (_ <symbol> <idx> ... <idx> )
      tstack_push_idx_term(tstack, tkval(lex), tklen(lex), &loc);
      state = t7a;
      goto loop;

    case numeral_next_goto_t7b:
      tstack_push_rational(tstack, tkval(lex), &loc);
      state = t7b;
      goto loop;

    case push_t8a_goto_t0:
      parser_push_state(stack, t8a);
      state = t0;
      goto skip_token;

    case error_lp_expected:
      smt2_syntax_error(lex, SMT2_TK_LP);
      goto cleanup;

    case error_string_expected:
      smt2_syntax_error(lex, SMT2_TK_STRING);
      goto cleanup;

    case error_symbol_expected:
      smt2_syntax_error(lex, SMT2_TK_SYMBOL);
      goto cleanup;

    case error_numeral_expected:
      smt2_syntax_error(lex, SMT2_TK_NUMERAL);
      goto cleanup;

    case error_keyword_expected:
      smt2_syntax_error(lex, SMT2_TK_KEYWORD);
      goto cleanup;

    case error_rp_expected:
      smt2_syntax_error(lex, SMT2_TK_RP);
      goto cleanup;

    case error_underscore_expected:
      smt2_syntax_error(lex, SMT2_TK_UNDERSCORE);
      goto cleanup;

    case error_command_expected:
      smt2_syntax_error(lex, -2);
      goto cleanup;

    case error:
      smt2_syntax_error(lex, -1);
      goto cleanup;
    }

  } else {
    // exception from term_stack
    smt2_tstack_error(tstack, exception);
    goto cleanup;
  }

 cleanup:
  // error exit: discard any partially-built command
  tstack_reset(tstack);
  parser_stack_reset(stack);
  if (keep_tokens) {
    reset_etk_queue(token_queue);
  }
  return -1;

 the_end:
  // normal exit: one full command was parsed and evaluated
  if (keep_tokens) {
    reset_etk_queue(token_queue);
  }
  return 0;
}
/* Parse (and evaluate) one SMT2 command from the parser's input.
   Returns 0 on success, -1 on syntax or evaluation error. */
int32_t parse_smt2_command(parser_t *parser) {
  return smt2_parse(parser, c0);
}
| /*
* The Yices SMT Solver. Copyright 2014 SRI International.
*
* This program may only be used subject to the noncommercial end user
* license agreement which is downloadable along with this program.
*/ |
welltyped.mli | (** Construction of wellformed contexts and welltyped terms. *)
open Fmlib
open Module_types
(** {1 Basics} *)
(** Type of a wellformed context. *)
type context
(** Type of a wellformed context with a term and its type. *)
type judgement
(** Printing of welltyped items. *)
module Print (PP: Pretty_printer.SIG):
sig
val judgement: judgement -> PP.t
end
val empty: context (** An empty context. *)
(** Extract the context from the encapsulation. *)
val extract_context: context -> Context.t
(** Extract the judgement from the encapsulation. *)
val extract_judgement: judgement -> Context.t * Term.t * Term.typ
(** {1 Term building} *)
(** A builder for welltyped terms in wellformed contexts. *)
module Builder (Info: ANY):
sig
type problem = Info.t * Type_error.t
(** ['a res] The result of a building process. *)
type 'a res = ('a, problem) result
type t
type name = Info.t * string
type formal_argument = name * t
type signature = formal_argument list * t
(** Combinators: Primitive and compound combinators to build terms or build
and add globals to the context. *)
val sort:
Info.t -> Sort.t -> t
val variable:
Info.t -> int -> t
val identifier: Info.t -> string -> t
(** [identifier info name] Build the term represented by [name]. *)
val unknown: Info.t -> t
(** Unknown term. The compiler is asked to derive. *)
val application:
Info.t -> t -> t -> t
(** [lambda name typ exp] Build the lambda term [\ (name: typ) := exp].
*)
val lambda:
Info.t -> name -> t -> t -> t
(** [pi name typ res] Build the product [all (name: typ): res].
*)
val pi:
Info.t -> name -> t -> t -> t
(** [make_term c term] Build the term [term] in context [c]. *)
val make_term: context -> t -> judgement res
val make_builtin:
context -> name -> signature -> context res
val make_definition:
Info.t -> name -> signature -> t -> context -> context res
end
(** {1 Type checking} *)
(** A type checker. *)
module Check:
sig
type 'a res = ('a, Type_error.t) result
(** [check term context] Check [term] in the wellformed [context] and return
a judgement, i.e. a welltyped term with its type in the same context or
return a type error. *)
val check_term: Term.t -> context -> judgement res
end
| (** Construction of wellformed contexts and welltyped terms. *)
|
fit_length.c |
#include <stdlib.h>
#include "nmod_poly.h"
/* Ensure [fac] can hold at least [len] factors.  Growth is geometric
   (at least doubling), so repeated appends stay amortized O(1). */
void nmod_poly_factor_fit_length(nmod_poly_factor_t fac, slong len)
{
    slong target;

    if (len <= fac->alloc)
        return;

    target = (len < 2 * fac->alloc) ? 2 * fac->alloc : len;
    nmod_poly_factor_realloc(fac, target);
}
| /*
Copyright (C) 2011 Sebastian Pancratz
Copyright (C) 2008, 2009 William Hart
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
vmod.ml |
(* Print a marker identifying this implementation of [run]. *)
let run () = "Impl's Vmd.run ()" |> print_endline
| |
BlockRule.ml |
(* Outcome of feeding one input line to a block rule.
   NOTE(review): the variant semantics below are inferred from the
   constructor names — confirm against the rule interpreter:
   [Go] keeps consuming lines with the updated [state]; [Stop] ends the
   block; [Die] abandons the rule.  [`Keep]/[`Discard] decide whether
   the current line belongs to the block; [`Left] presumably hands the
   line back for reprocessing. *)
type 'state res =
  | Go of { state : 'state; handle_line : [ `Keep | `Discard ] }
  | Stop of { state : 'state; handle_line : [ `Keep | `Discard | `Left ] }
  | Die

(* Translation callbacks handed to [construct] for turning collected raw
   text into spans and blocks. *)
type trans_f = {
  trans_spans : string -> Typ.span list;
  trans_spans_from_lines : string list -> Typ.span list;
  trans_blocks : string list -> Typ.block list;
}

(* Interface every block rule must implement. *)
module type S = sig
  val is_safe : bool

  val first_char : FirstChar.t

  type state

  (* [start line] attempts to begin a block at [line]. *)
  val start : string -> state res

  (* [continue state line] feeds the next [line] to a started block. *)
  val continue : state -> string -> state res

  (* [construct trans state lines] builds the final block from the
     collected [lines]. *)
  val construct : trans_f -> state -> string list -> Typ.block

  val force_construct : bool
end
| |
client_proto_utils.mli | val sign_message :
#Protocol_client_context.full ->
src_sk:Client_keys_v0.sk_uri ->
block:Block_hash.t ->
message:string ->
Tezos_crypto.Signature.V0.t tzresult Lwt.t
val check_message :
#Protocol_client_context.full ->
block:Block_hash.t ->
key_locator:Client_keys_v0.pk_uri ->
quiet:bool ->
message:string ->
signature:Tezos_crypto.Signature.V0.t ->
bool tzresult Lwt.t
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2020 Metastate AG <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
lTerm_term_stubs.c |
#include <lwt_unix.h>
#include <caml/alloc.h>
#include <caml/fail.h>
#include <caml/memory.h>
#if defined(_WIN32) || defined(_WIN64)
/* +-----------------------------------------------------------------+
| Terminal sizes on Windows |
+-----------------------------------------------------------------+ */
#include <windows.h>
#include <wincon.h>
/* ml: size of the console attached to [fd], as (rows, cols).
   Raises Unix_error on failure. */
CAMLprim value lt_term_get_size_from_fd(value fd)
{
  CONSOLE_SCREEN_BUFFER_INFO info;
  value result;

  if (!GetConsoleScreenBufferInfo(Handle_val(fd), &info)) {
    win32_maperr(GetLastError());
    uerror("GetConsoleScreenBufferInfo", Nothing);
  }

  result = caml_alloc_tuple(2);
  /* srWindow bounds are inclusive, hence the +1.  Only immediate ints
     are stored, and no allocation happens between the alloc and the
     stores, so plain Field assignment is GC-safe here. */
  Field(result, 0) = Val_int(info.srWindow.Bottom - info.srWindow.Top + 1);
  Field(result, 1) = Val_int(info.srWindow.Right - info.srWindow.Left + 1);
  return result;
}
/* ml: resize the console window attached to [fd] to the (rows, cols)
   pair in [val_size].  Raises Unix_error on failure. */
CAMLprim value lt_term_set_size_from_fd(value fd, value val_size)
{
  CONSOLE_SCREEN_BUFFER_INFO info;
  SMALL_RECT rect;

  if (!GetConsoleScreenBufferInfo(Handle_val(fd), &info)) {
    win32_maperr(GetLastError());
    uerror("GetConsoleScreenBufferInfo", Nothing);
  }

  /* Keep the current top-left corner and move the bottom-right corner
     so the window spans the requested rows and columns (bounds are
     inclusive, hence the -1).  A stray no-op statement `rect;` was
     removed here. */
  rect.Top = info.srWindow.Top;
  rect.Left = info.srWindow.Left;
  rect.Bottom = rect.Top + Int_val(Field(val_size, 0)) - 1;
  rect.Right = rect.Left + Int_val(Field(val_size, 1)) - 1;

  if (!SetConsoleWindowInfo(Handle_val(fd), TRUE, &rect)) {
    win32_maperr(GetLastError());
    uerror("SetConsoleWindowInfo", Nothing);
  }

  return Val_unit;
}
#else
/* +-----------------------------------------------------------------+
| Terminal sizes on Unix |
+-----------------------------------------------------------------+ */
#include <unistd.h>
#include <sys/ioctl.h>
#include <termios.h>
#include <errno.h>
/* ml: size of the terminal attached to [fd], as (rows, cols), read via
   the TIOCGWINSZ ioctl.  Raises Unix_error on failure. */
CAMLprim value lt_term_get_size_from_fd(value fd)
{
  struct winsize size;
  if (ioctl(Int_val(fd), TIOCGWINSZ, &size) < 0)
    uerror("ioctl", Nothing);
  /* Immediate ints only, no allocation between alloc and stores:
     plain Field assignment is GC-safe. */
  value result = caml_alloc_tuple(2);
  Field(result, 0) = Val_int(size.ws_row);
  Field(result, 1) = Val_int(size.ws_col);
  return result;
}
/* ml: set the terminal attached to [fd] to the (rows, cols) pair in
   [val_size] via TIOCSWINSZ.  Raises Unix_error on failure. */
CAMLprim value lt_term_set_size_from_fd(value fd, value val_size)
{
  struct winsize size;
  if (ioctl(Int_val(fd), TIOCGWINSZ, &size) < 0)
    uerror("ioctl", Nothing);
  int row = Int_val(Field(val_size, 0));
  int col = Int_val(Field(val_size, 1));
  /* Scale the reported pixel dimensions proportionally so the cell size
     stays consistent.  Some terminals report 0 for ws_row/ws_col; skip
     the scaling then to avoid dividing by zero. */
  if (size.ws_row > 0 && size.ws_col > 0) {
    size.ws_xpixel = size.ws_xpixel * col / size.ws_col;
    size.ws_ypixel = size.ws_ypixel * row / size.ws_row;
  }
  size.ws_row = row;
  size.ws_col = col;
  if (ioctl(Int_val(fd), TIOCSWINSZ, &size) < 0)
    uerror("ioctl", Nothing);
  return Val_unit;
}
#endif
| /*
* lTerm_term_stubs.c
* ------------------
* Copyright : (c) 2011, Jeremie Dimino <[email protected]>
* Licence : BSD3
*
* This file is a part of Lambda-Term.
*/ |
sdlrender_stub.c | #define CAML_NAME_SPACE
#include <caml/mlvalues.h>
#include <caml/memory.h>
#include <caml/alloc.h>
#include <caml/fail.h>
#include <SDL_render.h>
#include "sdlrender_stub.h"
#include "sdlwindow_stub.h"
#include "sdltexture_stub.h"
#include "sdlsurface_stub.h"
#include "sdlrect_stub.h"
#include "sdlpoint_stub.h"
#include "sdlblendMode_stub.h"
#define Val_none Val_int(0)
#define Some_val(v) Field(v,0)
/* Create a window and its renderer in one SDL call and return them to
   OCaml as the pair (window, renderer). Raises Failure on SDL error. */
CAMLprim value
caml_SDL_CreateWindowAndRenderer(
        value width, value height, value _window_flags)
{
    CAMLparam2(width, height);
    CAMLlocal1(ret);
    SDL_Window *window;
    SDL_Renderer *renderer;
    /* _window_flags is fully consumed here, before any OCaml allocation,
       so leaving it out of CAMLparam is safe. */
    Uint32 window_flags =
        Val_SDL_WindowFlags(_window_flags);
    int r = SDL_CreateWindowAndRenderer(
        Int_val(width), Int_val(height), window_flags, &window, &renderer);
    if (r)
        caml_failwith("Sdlrender.create_window_and_renderer");
    ret = caml_alloc(2, 0);
    Store_field(ret, 0, Val_SDL_Window(window));
    Store_field(ret, 1, Val_SDL_Renderer(renderer));
    CAMLreturn(ret);
}
/* Table mapping the OCaml renderer-flag variant (by constructor index)
   to SDL flag values; order must match the OCaml type declaration. */
static const SDL_RendererFlags SDL_RendererFlags_table[] = {
    SDL_RENDERER_SOFTWARE,
    SDL_RENDERER_ACCELERATED,
    SDL_RENDERER_PRESENTVSYNC,
    SDL_RENDERER_TARGETTEXTURE,
};

/* Fold an OCaml list of renderer flags into the SDL bit mask. */
static inline Uint32
SDL_RendererFlags_val(value flag_list)
{
    int c_mask = 0;
    /* Walk the cons cells: Field 0 is the head, Field 1 the tail. */
    while (flag_list != Val_emptylist)
    {
        value head = Field(flag_list, 0);
        c_mask |= SDL_RendererFlags_table[Long_val(head)];
        flag_list = Field(flag_list, 1);
    }
    return c_mask;
}
/* Create an SDL renderer for [window], using driver [index] and the
   given flag list. Raises Failure if SDL reports an error. */
CAMLprim value
caml_SDL_CreateRenderer(value window, value index, value _flags)
{
    SDL_Renderer *renderer =
        SDL_CreateRenderer(
            SDL_Window_val(window),
            Int_val(index),
            SDL_RendererFlags_val(_flags));
    if (renderer == NULL)
        caml_failwith("Sdlrender.create_renderer");
    return Val_SDL_Renderer(renderer);
}
/* Set the device-independent resolution for rendering; [dims] is the
   OCaml pair (w, h). Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderSetLogicalSize(value renderer, value dims)
{
    value w = Field(dims,0);
    value h = Field(dims,1);
    int r = SDL_RenderSetLogicalSize(
                SDL_Renderer_val(renderer),
                Int_val(w), Int_val(h));
    if (r) caml_failwith("Sdlrender.set_logical_size");
    return Val_unit;
}
/* Variant of set_logical_size taking width and height as two separate
   arguments instead of a pair. Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderSetLogicalSize2(value renderer, value w, value h)
{
    if (SDL_RenderSetLogicalSize(SDL_Renderer_val(renderer),
                                 Int_val(w), Int_val(h)) != 0)
        caml_failwith("Sdlrender.set_logical_size2");
    return Val_unit;
}
/* Set the renderer's drawing area to the given OCaml rect.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderSetViewport(value renderer, value _rect)
{
    SDL_Rect rect;
    SDL_Rect_val(&rect, _rect);
    int r = SDL_RenderSetViewport(
                SDL_Renderer_val(renderer),
                &rect);
    if (r) caml_failwith("Sdlrender.set_viewport");
    return Val_unit;
}
/* Set the clip rectangle for rendering on the renderer's target.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderSetClipRect(value renderer, value _rect)
{
    SDL_Rect rect;
    SDL_Rect_val(&rect, _rect);
    int r = SDL_RenderSetClipRect(
                SDL_Renderer_val(renderer),
                &rect);
    if (r) caml_failwith("Sdlrender.set_clip_rect");
    return Val_unit;
}
/* Local alias: OCaml ints are passed through unchecked as Uint8;
   values outside 0..255 are silently truncated by the C conversion. */
#define Uint8_val Int_val

/* Set the draw color; [rgb] is an OCaml (r, g, b) triple and [a] the
   alpha component. Raises Failure on SDL error. */
CAMLprim value
caml_SDL_SetRenderDrawColor(
        value renderer, value rgb, value a)
{
    int s = SDL_SetRenderDrawColor(
                SDL_Renderer_val(renderer),
                Uint8_val(Field(rgb, 0)),
                Uint8_val(Field(rgb, 1)),
                Uint8_val(Field(rgb, 2)),
                Uint8_val(a));
    if (s) caml_failwith("Sdlrender.draw_color");
    return Val_unit;
}
/* Variant of set_render_draw_color taking r, g, b, a as four separate
   arguments. Raises Failure on SDL error. */
CAMLprim value
caml_SDL_SetRenderDrawColor3(
        value renderer, value r, value g, value b, value a)
{
    int s = SDL_SetRenderDrawColor(
                SDL_Renderer_val(renderer),
                Uint8_val(r), Uint8_val(g), Uint8_val(b), Uint8_val(a));
    if (s) caml_failwith("Sdlrender.draw_color3");
    return Val_unit;
}
/* Uint8_val is only needed by the two functions above. */
#undef Uint8_val
/* Set the blend mode used for subsequent draw operations.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_SetRenderDrawBlendMode(value renderer, value blendMode)
{
    int r = SDL_SetRenderDrawBlendMode(
                SDL_Renderer_val(renderer),
                SDL_BlendMode_val(blendMode));
    if (r)
        caml_failwith("Sdlrender.set_draw_blend_mode");
    return Val_unit;
}
/* Draw a single point; [p] is the OCaml pair (x, y).
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawPoint(value renderer, value p)
{
    int x = Int_val(Field(p, 0));
    int y = Int_val(Field(p, 1));
    if (SDL_RenderDrawPoint(SDL_Renderer_val(renderer), x, y) != 0)
        caml_failwith("Sdlrender.draw_point");
    return Val_unit;
}
/* Variant of draw_point taking x and y as two separate arguments.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawPoint2(value renderer, value x, value y)
{
    int r = SDL_RenderDrawPoint(
                SDL_Renderer_val(renderer),
                Int_val(x), Int_val(y));
    if (r) caml_failwith("Sdlrender.draw_point2");
    return Val_unit;
}
/* Draw an array of points; [ml_points] is an OCaml array of (x, y)
   pairs. Raises Out_of_memory if the temporary C array cannot be
   allocated and Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawPoints(value renderer, value ml_points)
{
    unsigned int i;
    unsigned int count = Wosize_val(ml_points);
    SDL_Point * points = malloc(count * sizeof(SDL_Point));
    /* The original code dereferenced the malloc result unchecked;
       malloc(0) may legally return NULL, hence the count test. */
    if (points == NULL && count != 0)
        caml_raise_out_of_memory();
    for (i = 0; i < count; i++) {
        value p = Field(ml_points, i);
        points[i].x = Int_val(Field(p, 0));
        points[i].y = Int_val(Field(p, 1));
    }
    int r = SDL_RenderDrawPoints(
                SDL_Renderer_val(renderer),
                points, count);
    free(points);
    if (r) caml_failwith("Sdlrender.draw_points");
    return Val_unit;
}
/* Draw a line segment; [line] is an OCaml pair of (x, y) endpoints.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawLine(value renderer, value line)
{
    value p1 = Field(line, 0);
    value p2 = Field(line, 1);
    int r = SDL_RenderDrawLine(
                SDL_Renderer_val(renderer),
                Int_val(Field(p1, 0)),
                Int_val(Field(p1, 1)),
                Int_val(Field(p2, 0)),
                Int_val(Field(p2, 1)));
    if (r) caml_failwith("Sdlrender.draw_line");
    return Val_unit;
}
/* Variant of draw_line taking the two (x, y) endpoints as separate
   arguments. Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawLine2(value renderer, value p1, value p2)
{
    int r = SDL_RenderDrawLine(
                SDL_Renderer_val(renderer),
                Int_val(Field(p1, 0)),
                Int_val(Field(p1, 1)),
                Int_val(Field(p2, 0)),
                Int_val(Field(p2, 1)));
    if (r) caml_failwith("Sdlrender.draw_line2");
    return Val_unit;
}
/* Draw a connected polyline through an OCaml array of (x, y) pairs.
   Raises Out_of_memory if the temporary C array cannot be allocated
   and Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawLines(value renderer, value ml_points)
{
    unsigned int i;
    unsigned int count = Wosize_val(ml_points);
    SDL_Point * points = malloc(count * sizeof(SDL_Point));
    /* The original code dereferenced the malloc result unchecked. */
    if (points == NULL && count != 0)
        caml_raise_out_of_memory();
    for (i = 0; i < count; i++) {
        value p = Field(ml_points, i);
        points[i].x = Int_val(Field(p, 0));
        points[i].y = Int_val(Field(p, 1));
    }
    int r = SDL_RenderDrawLines(
                SDL_Renderer_val(renderer),
                points, count);
    free(points);
    if (r) caml_failwith("Sdlrender.draw_lines");
    return Val_unit;
}
/* Draw the outline of a rectangle. Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawRect(value renderer, value _rect)
{
    SDL_Rect rect;
    SDL_Rect_val(&rect, _rect);
    int r = SDL_RenderDrawRect(
                SDL_Renderer_val(renderer),
                &rect);
    if (r) caml_failwith("Sdlrender.draw_rect");
    return Val_unit;
}
/* Draw the outlines of an OCaml array of rectangles.
   Raises Out_of_memory if the temporary C array cannot be allocated
   and Failure on SDL error. */
CAMLprim value
caml_SDL_RenderDrawRects(value renderer, value ml_rects)
{
    unsigned int i;
    unsigned int count = Wosize_val(ml_rects);
    SDL_Rect * rects = malloc(count * sizeof(SDL_Rect));
    /* The original code dereferenced the malloc result unchecked. */
    if (rects == NULL && count != 0)
        caml_raise_out_of_memory();
    for (i = 0; i < count; i++) {
        value _rect = Field(ml_rects, i);
        SDL_Rect_val(&(rects[i]), _rect);
    }
    int r = SDL_RenderDrawRects(
                SDL_Renderer_val(renderer),
                rects, count);
    free(rects);
    if (r) caml_failwith("Sdlrender.draw_rects");
    return Val_unit;
}
/* Fill a rectangle with the current draw color.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderFillRect(value renderer, value _rect)
{
    SDL_Rect rect;
    SDL_Rect_val(&rect, _rect);
    int r = SDL_RenderFillRect(
                SDL_Renderer_val(renderer),
                &rect);
    if (r) caml_failwith("Sdlrender.fill_rect");
    return Val_unit;
}
/* Fill an OCaml array of rectangles with the current draw color.
   Raises Out_of_memory if the temporary C array cannot be allocated
   and Failure on SDL error. */
CAMLprim value
caml_SDL_RenderFillRects(value renderer, value ml_rects)
{
    unsigned int i;
    unsigned int count = Wosize_val(ml_rects);
    SDL_Rect * rects = malloc(count * sizeof(SDL_Rect));
    /* The original code dereferenced the malloc result unchecked. */
    if (rects == NULL && count != 0)
        caml_raise_out_of_memory();
    for (i = 0; i < count; i++) {
        value _rect = Field(ml_rects, i);
        SDL_Rect_val(&(rects[i]), _rect);
    }
    int r = SDL_RenderFillRects(
                SDL_Renderer_val(renderer),
                rects, count);
    free(rects);
    if (r) caml_failwith("Sdlrender.fill_rects");
    return Val_unit;
}
/* Copy [texture] (or the sub-rectangle [_srcrect] of it) to the
   rendering target (into [_dstrect] if given). Both rects are OCaml
   options; None means "the whole texture" / "the whole target".
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderCopy(
        value renderer,
        value texture,
        value _srcrect,
        value _dstrect,
        value unit)
{
    SDL_Rect srcrect;
    SDL_Rect dstrect;
    SDL_Rect *srcrect_;
    SDL_Rect *dstrect_;
    /* Unwrap the two OCaml options into nullable C pointers. */
    if (_srcrect == Val_none) {
        srcrect_ = NULL;
    } else {
        SDL_Rect_val(&srcrect, Some_val(_srcrect));
        srcrect_ = &srcrect;
    }
    if (_dstrect == Val_none) {
        dstrect_ = NULL;
    } else {
        SDL_Rect_val(&dstrect, Some_val(_dstrect));
        dstrect_ = &dstrect;
    }
    int r = SDL_RenderCopy(
                SDL_Renderer_val(renderer),
                SDL_Texture_val(texture),
                srcrect_,
                dstrect_);
    if (r)
        caml_failwith("Sdlrender.copy");
    return Val_unit;
}
/* Mapping from the OCaml flip variant (by constructor index) to SDL
   flip values; order must match the OCaml type declaration. */
static const SDL_RendererFlip sdl_rendererflip_table[] = {
    SDL_FLIP_NONE,
    SDL_FLIP_HORIZONTAL,
    SDL_FLIP_VERTICAL,
};
#define SDL_RendererFlip_val(v) \
    sdl_rendererflip_table[Long_val(v)]
/* Extended texture copy: like caml_SDL_RenderCopy but with an optional
   rotation [angle] (degrees, default 0.0), an optional rotation
   [_center] (default: SDL uses the destination center when NULL) and an
   optional [flip] (default: no flip). All option arguments use the
   Val_none/Some_val encoding. Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderCopyEx(
        value renderer,
        value texture,
        value _srcrect,
        value _dstrect,
        value angle,
        value _center,
        value flip,
        value unit)
{
    SDL_Rect srcrect;
    SDL_Rect *srcrect_;
    SDL_Rect dstrect;
    SDL_Rect *dstrect_;
    SDL_Point center;
    SDL_Point *center_;
    double angle_;
    SDL_RendererFlip flip_;
    /* Unwrap the three OCaml options into nullable C pointers. */
    if (_srcrect == Val_none) {
        srcrect_ = NULL;
    } else {
        SDL_Rect_val(&srcrect, Some_val(_srcrect));
        srcrect_ = &srcrect;
    }
    if (_dstrect == Val_none) {
        dstrect_ = NULL;
    } else {
        SDL_Rect_val(&dstrect, Some_val(_dstrect));
        dstrect_ = &dstrect;
    }
    if (_center == Val_none) {
        center_ = NULL;
    } else {
        SDL_Point_val(&center, Some_val(_center));
        center_ = &center;
    }
    angle_ =
        (angle == Val_none
            ? 0.0
            : Double_val(Some_val(angle))
        );
    flip_ =
        (flip == Val_none
            ? SDL_FLIP_NONE
            : SDL_RendererFlip_val(Some_val(flip))
        );
    int r =
        SDL_RenderCopyEx(
            SDL_Renderer_val(renderer),
            SDL_Texture_val(texture),
            srcrect_,
            dstrect_,
            angle_,
            center_,
            flip_);
    if (r)
        caml_failwith("Sdlrender.copyEx");
    return Val_unit;
}
/* Bytecode trampoline for the 8-argument native primitive above
   (OCaml externals with more than 5 arguments need one). */
CAMLprim value
caml_SDL_RenderCopyEx_bc(value * argv, int argn)
{
    return caml_SDL_RenderCopyEx(
        argv[0], argv[1], argv[2], argv[3],
        argv[4], argv[5], argv[6], argv[7]);
}
/* Set the renderer's drawing scale; [scale] is an OCaml pair of floats
   (scale_x, scale_y). Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderSetScale(value renderer, value scale)
{
    int r = SDL_RenderSetScale(
                SDL_Renderer_val(renderer),
                Double_val(Field(scale,0)),
                Double_val(Field(scale,1)));
    if (r) caml_failwith("Sdlrender.set_scale");
    return Val_unit;
}
/* Present the composed back buffer to the screen (no error report:
   SDL_RenderPresent returns void). */
CAMLprim value
caml_SDL_RenderPresent(value renderer)
{
    SDL_RenderPresent(SDL_Renderer_val(renderer));
    return Val_unit;
}
/* Clear the current rendering target with the current draw color.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderClear(value renderer)
{
    if (SDL_RenderClear(SDL_Renderer_val(renderer)) != 0)
        caml_failwith("Sdlrender.clear");
    return Val_unit;
}
/* Convert an SDL_RendererInfo to the OCaml record
   (name, max_texture_width, max_texture_height). */
static value
Val_SDL_RendererInfo(SDL_RendererInfo * info)
{
/* Fields of SDL_RendererInfo deliberately not exposed to OCaml: */
#if 0
    Uint32 flags;               /**< Supported ::SDL_RendererFlags */
    Uint32 num_texture_formats; /**< The number of available texture formats */
    Uint32 texture_formats[16]; /**< The available texture formats */
#endif
    CAMLparam0();
    CAMLlocal1(ret);
    ret = caml_alloc(3, 0);
    Store_field(ret, 0, caml_copy_string(info->name));
    Store_field(ret, 1, Val_int(info->max_texture_width));
    Store_field(ret, 2, Val_int(info->max_texture_height));
    CAMLreturn(ret);
}
/* Return the array of available render-driver infos.
   Raises Failure if querying any driver fails. */
CAMLprim value
caml_SDL_GetRenderDrivers(value unit)
{
    CAMLparam0();
    CAMLlocal2(ret, dinf);
    unsigned int i, n;
    SDL_RendererInfo info;
    n = SDL_GetNumRenderDrivers();
    ret = caml_alloc(n, 0);
    for (i = 0; i < n; i++) {
        int r = SDL_GetRenderDriverInfo(i, &info);
        if (r) caml_failwith("Sdlrender.get_render_drivers");
        Store_field(ret, i, Val_SDL_RendererInfo(&info));
    }
    CAMLreturn(ret);
}
/* Read pixels back from the current rendering target into the pixel
   buffer of [surf], using the surface's own pixel format and pitch.
   [_rect] is an OCaml option: None reads the entire target.
   Raises Failure on SDL error. */
CAMLprim value
caml_SDL_RenderReadPixels(value renderer, value _rect, value surf)
{
    SDL_Rect rect;
    SDL_Rect *rect_;
    SDL_Surface *surface = SDL_Surface_val(surf);
    if (_rect == Val_none) {
        rect_ = NULL;
    } else {
        SDL_Rect_val(&rect, Some_val(_rect));
        rect_ = &rect;
    }
    int r = SDL_RenderReadPixels(
                SDL_Renderer_val(renderer),
                rect_,
                surface->format->format,
                surface->pixels,
                surface->pitch);
    if (r != 0) caml_failwith("Sdlrender.read_pixels");
    return Val_unit;
}
/* vim: set ts=4 sw=4 et: */
| /* OCamlSDL2 - An OCaml interface to the SDL2 library
Copyright (C) 2013 Florent Monnier
This software is provided "AS-IS", without any express or implied warranty.
In no event will the authors be held liable for any damages arising from
the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it freely.
*/ |
compile.ml | (** High-level compilation functions. *)
open Lplib
open Timed
open Common open Error open Library
open Parsing
open Core open Sign
(** [gen_obj] indicates whether we should generate object files when compiling
    source files. The default behaviour is not to generate them. *)
let gen_obj = Stdlib.ref false
(** [compile_with ~handle ~force mp] compiles the file corresponding to module
    path [mp] using function [~handle] to process commands. Module [mp] is
    processed when it is necessary, i.e. the corresponding object file does not
    exist, or it must be updated, or [~force] is [true]. In that case, the
    produced signature is stored in the corresponding object file if the option
    [--gen_obj] or [-c] is set. *)
let rec compile_with :
  handle:(Command.compiler -> Sig_state.t -> Syntax.p_command -> Sig_state.t)
  -> force:bool -> Command.compiler =
  fun ~handle ~force mp ->
  (* The ghost module is built in, never read from disk. *)
  if mp = Ghost.path then Ghost.sign else
  let base = file_of_path mp in
  (* Locate the source: prefer the ".lp" file when both syntaxes exist. *)
  let src =
    let lp_src = base ^ lp_src_extension in
    let dk_src = base ^ dk_src_extension in
    match (Sys.file_exists lp_src, Sys.file_exists dk_src) with
    | (false, false) ->
        fatal_no_pos "File \"%s.lp\" (or .dk) not found." base
    | (true , true ) ->
        wrn None "Both \"%s\" and \"%s\" exist. We take \"%s\"."
          lp_src dk_src lp_src; lp_src
    | (true , false) -> lp_src
    | (false, true ) -> dk_src
  in
  let obj = base ^ obj_extension in
  (* [loading] is the stack of modules currently being compiled; finding
     [mp] in it means we looped back to a module in progress. *)
  if List.mem mp !loading then
    begin
      fatal_msg "Circular dependencies detected in \"%s\".@." src;
      fatal_msg "Dependency stack for module %a:@." Path.pp mp;
      List.iter (fatal_msg "- %a@." Path.pp) !loading;
      fatal_no_pos "Build aborted."
    end;
  (* [loaded] caches already-compiled signatures. *)
  match Path.Map.find_opt mp !loaded with
  | Some sign -> sign
  | None ->
  if force || Extra.more_recent src obj then
    begin
      let forced = if force then " (forced)" else "" in
      Console.out 1 "Checking \"%s\"%s ..." src forced;
      loading := mp :: !loading;
      let sign = Sig_state.create_sign mp in
      let sig_st = Stdlib.ref (Sig_state.of_sign sign) in
      (* [sign] is added to [loaded] before processing the commands so that it
         is possible to qualify the symbols of the current modules. *)
      loaded := Path.Map.add mp sign !loaded;
      Tactic.reset_admitted();
      let compile = compile_with ~handle ~force in
      let consume cmd = Stdlib.(sig_st := handle compile !sig_st cmd) in
      Debug.stream_iter consume (Parser.parse_file src);
      Sign.strip_private sign;
      if Stdlib.(!gen_obj) then begin
        Console.out 1 "Writing \"%s\" ..." obj; Sign.write sign obj
      end;
      loading := List.tl !loading;
      sign
    end
  else
    begin
      Console.out 1 "Loading \"%s\" ..." obj;
      let sign = Sign.read obj in
      (* We recursively load every module [mp'] on which [mp] depends. *)
      let compile mp' _ = ignore (compile_with ~handle ~force:false mp') in
      Path.Map.iter compile !(sign.sign_deps);
      loaded := Path.Map.add mp sign !loaded;
      Sign.link sign;
      (* Since ghost signatures are always assumed to be already loaded,
         we need to explicitly update the decision tree of their symbols
         because it is not done in linking which normally follows loading. *)
      Ghost.iter (fun s -> Tree.update_dtree s []);
      sign
    end
(** [compile force mp] compiles module path [mp], forcing
    compilation of up-to-date files if [force] is true. *)
let compile : ?force:bool -> Path.t -> Sign.t =
  fun ?(force=false) mp ->
  compile_with ~handle:Command.handle ~force mp
(** [compile_file fname] looks for a package configuration file for
    [fname] and compiles [fname]. It is the main compiling function. It
    is called from the main program exclusively. *)
let compile_file : ?force:bool -> string -> Sign.t =
  fun ?(force=false) fname ->
  Package.apply_config fname;
  let mp = path_of_file LpLexer.escape fname in
  compile ~force mp
(** The functions provided in this module perform the same computations as the
    ones defined earlier, but restore the console state and the library
    mappings when they have finished. An optional library mapping or console
    state can be passed as argument to change the settings. *)
module PureUpToSign = struct

  (** [apply_cfg ?lm ?st f x] is the same as [f x] except that the console
      state and {!val:Library.lib_mappings} are restored after the evaluation
      of [f x]. [?lm] allows to set the library mappings and [?st] to set the
      console state. *)
  let apply_cfg :
    ?lm:Path.t*string -> ?st:Console.State.t -> ('a -> 'b) -> 'a -> 'b =
    fun ?lm ?st f x ->
    let lib_mappings = !Library.lib_mappings in
    Console.State.push ();
    Option.iter Library.add_mapping lm;
    Option.iter Console.State.apply st;
    (* [restore] is run on both the normal and the exceptional path. *)
    let restore () =
      Library.lib_mappings := lib_mappings;
      Console.State.pop ()
    in
    try let res = f x in restore (); res
    with e -> restore (); raise e

  let compile :
    ?lm:Path.t*string -> ?st:Console.State.t -> ?force:bool -> Path.t -> Sign.t
    = fun ?lm ?st ?(force=false) -> apply_cfg ?lm ?st (compile ~force)

  let compile_file :
    ?lm:Path.t*string -> ?st:Console.State.t -> ?force:bool -> string -> Sign.t
    = fun ?lm ?st ?(force=false) -> apply_cfg ?lm ?st (compile_file ~force)

end
| (** High-level compilation functions. *)
|
inlines.c |
/* Emit the static-inline functions of utils_flint.h as ordinary symbols
   in this translation unit (standard FLINT/Calcium inlines idiom). */
#define UTILS_FLINT_INLINES_C
#include "utils_flint.h"
| /*
Copyright (C) 2020 Fredrik Johansson
This file is part of Calcium.
Calcium is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <http://www.gnu.org/licenses/>.
*/ |
dune |
; Lwt-based HTTP layer for OCaml-RDF, built on cohttp-lwt-unix.
(library
 (name rdf_lwt)
 (public_name rdf_lwt)
 (wrapped false)
 (flags -g :standard -bin-annot -w -6-7-9-10-27-32-33-34-35-36-50-52 -no-strict-sequence)
 (libraries rdf lwt cohttp-lwt-unix)
 (preprocess (pps lwt_ppx))
)
| |
ppx_nop_from_scratch.ml |
open Ppxlib
(* Expansion function for the [%nop ...] extension: returns the payload
   expression unchanged (the extension is a no-op). *)
let expand ~ctxt payload =
  ignore ctxt; payload
(* Register [%nop e] as a context-free rewrite rule on expressions. *)
let () =
  let rule =
    Extension.V3.declare "nop"
      Extension.Context.expression
      Ast_pattern.(single_expr_payload __)
      expand
    |> Ppxlib.Context_free.Rule.extension
  in
  Driver.register_transformation ~rules:[rule] "ppx_nop_from_scratch"
| |
uint8.ml |
open Stdint

(* [uint8] re-exports Stdint's 8-bit unsigned integer type. *)
type uint8 = Uint8.t
type t = uint8

(* Arithmetic, bitwise and conversion operations, all re-exported
   from [Stdint.Uint8]. *)
let add = Uint8.add
let sub = Uint8.sub
let mul = Uint8.mul
let div = Uint8.div
let rem = Uint8.rem
let logand = Uint8.logand
let logor = Uint8.logor
let logxor = Uint8.logxor
let lognot = Uint8.lognot
let shift_left = Uint8.shift_left
let shift_right = Uint8.shift_right
let of_int = Uint8.of_int
let to_int = Uint8.to_int
let of_float = Uint8.of_float
let to_float = Uint8.to_float
let of_int32 = Uint8.of_int32
let to_int32 = Uint8.to_int32
(* NOTE(review): these are plain numeric conversions, not bit-pattern
   reinterpretations, so they do not match the usual "bits_of_float"
   contract — flagged by the original author already. *)
let bits_of_float = Uint8.of_float (* This may cause issues *)
let float_of_bits = Uint8.to_float (* This may cause issues *)
let zero = of_int 0
let one = of_int 1
let succ = Uint8.succ
let pred = Uint8.pred
let max_int = Uint8.max_int
let min_int = Uint8.min_int
(* String/printer conversions instantiated for the 8-bit type via
   Stdint's generic [Str_conv] functor. *)
module Conv = Uint.Str_conv.Make(struct
  type t = uint8
  let fmt = "Ul"
  let name = "Uint8"
  let zero = zero
  let max_int = max_int
  let bits = 8
  let of_int = of_int
  let to_int = to_int
  let add = add
  let mul = mul
  let divmod = (fun x y -> div x y, rem x y)
end)
(* Decimal/binary/octal/hex string conversions and top-level printers,
   re-exported from the [Conv] functor application above. *)
let of_string = Conv.of_string
let to_string = Conv.to_string
let to_string_bin = Conv.to_string_bin
let to_string_oct = Conv.to_string_oct
let to_string_hex = Conv.to_string_hex
let printer = Conv.printer
let printer_bin = Conv.printer_bin
let printer_oct = Conv.printer_oct
let printer_hex = Conv.printer_hex
(* NOTE(review): polymorphic compare on the underlying representation;
   presumably consistent with unsigned order for Uint8 — confirm against
   Stdint's representation. *)
let compare = Stdlib.compare
| |
ca_mat.h |
#ifndef CA_MAT_H
#define CA_MAT_H
#ifdef CA_MAT_INLINES_C
#define CA_MAT_INLINE
#else
#define CA_MAT_INLINE static __inline__
#endif
#include <stdio.h>
#include "flint/flint.h"
#include "flint/fmpz_mat.h"
#include "flint/fmpq_mat.h"
#include "flint/perm.h"
#include "arb_mat.h"
#include "acb_mat.h"
#include "antic/nf.h"
#include "antic/nf_elem.h"
#include "ca.h"
#include "ca_vec.h"
#include "ca_poly.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Matrix object */
typedef struct
{
ca_ptr entries;
slong r;
slong c;
ca_ptr * rows;
}
ca_mat_struct;
typedef ca_mat_struct ca_mat_t[1];
#define ca_mat_entry(mat,i,j) ((mat)->rows[i] + (j))
#define ca_mat_nrows(mat) ((mat)->r)
#define ca_mat_ncols(mat) ((mat)->c)
/* Function form of ca_mat_entry (useful where a macro cannot be used). */
CA_MAT_INLINE ca_ptr
ca_mat_entry_ptr(ca_mat_t mat, slong i, slong j)
{
    return ca_mat_entry(mat, i, j);
}
/* Memory management */
void ca_mat_init(ca_mat_t mat, slong r, slong c, ca_ctx_t ctx);
void ca_mat_clear(ca_mat_t mat, ca_ctx_t ctx);
/* Swap the two matrices efficiently by exchanging their headers
   (no entry data is copied). */
CA_MAT_INLINE void
ca_mat_swap(ca_mat_t mat1, ca_mat_t mat2, ca_ctx_t ctx)
{
    ca_mat_struct t = *mat1;
    *mat1 = *mat2;
    *mat2 = t;
}
/* Window matrices */
void ca_mat_window_init(ca_mat_t window, const ca_mat_t mat, slong r1, slong c1, slong r2, slong c2, ca_ctx_t ctx);
/* Free a window matrix: only the row-pointer array is released; the
   entries presumably belong to the parent matrix (see
   ca_mat_window_init) — they must not be freed here. */
CA_MAT_INLINE void
ca_mat_window_clear(ca_mat_t window, ca_ctx_t ctx)
{
    flint_free(window->rows);
}
/* Shape */
/* Nonzero iff the matrix has zero rows or zero columns. */
CA_MAT_INLINE int
ca_mat_is_empty(const ca_mat_t mat)
{
    return (mat->r == 0) || (mat->c == 0);
}
/* Nonzero iff the matrix has as many rows as columns. */
CA_MAT_INLINE int
ca_mat_is_square(const ca_mat_t mat)
{
    return (mat->r == mat->c);
}
/* Conversions */
void ca_mat_set(ca_mat_t dest, const ca_mat_t src, ca_ctx_t ctx);
void ca_mat_set_fmpz_mat(ca_mat_t dest, const fmpz_mat_t src, ca_ctx_t ctx);
void ca_mat_set_fmpq_mat(ca_mat_t dest, const fmpq_mat_t src, ca_ctx_t ctx);
void ca_mat_set_ca(ca_mat_t y, const ca_t x, ca_ctx_t ctx);
void ca_mat_transfer(ca_mat_t res, ca_ctx_t res_ctx, const ca_mat_t src, ca_ctx_t src_ctx);
/* Random generation */
void ca_mat_randtest(ca_mat_t mat, flint_rand_t state, slong len, slong bits, ca_ctx_t ctx);
void ca_mat_randtest_rational(ca_mat_t mat, flint_rand_t state, slong bits, ca_ctx_t ctx);
void ca_mat_randops(ca_mat_t mat, flint_rand_t state, slong count, ca_ctx_t ctx);
/* I/O */
void ca_mat_print(const ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_printn(const ca_mat_t mat, slong digits, ca_ctx_t ctx);
/* Special matrices */
void ca_mat_zero(ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_one(ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_ones(ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_pascal(ca_mat_t mat, int triangular, ca_ctx_t ctx);
void ca_mat_stirling(ca_mat_t mat, int kind, ca_ctx_t ctx);
void ca_mat_hilbert(ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_dft(ca_mat_t res, int type, ca_ctx_t ctx);
/* Comparisons and properties */
truth_t ca_mat_check_equal(const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
truth_t ca_mat_check_is_zero(const ca_mat_t A, ca_ctx_t ctx);
truth_t ca_mat_check_is_one(const ca_mat_t A, ca_ctx_t ctx);
/* Conjugate and transpose */
void ca_mat_transpose(ca_mat_t B, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_conj(ca_mat_t B, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_conj_transpose(ca_mat_t mat1, const ca_mat_t mat2, ca_ctx_t ctx);
/* Arithmetic */
void ca_mat_add_ca(ca_mat_t y, const ca_mat_t a, const ca_t x, ca_ctx_t ctx);
void ca_mat_sub_ca(ca_mat_t y, const ca_mat_t a, const ca_t x, ca_ctx_t ctx);
void ca_mat_addmul_ca(ca_mat_t y, const ca_mat_t a, const ca_t x, ca_ctx_t ctx);
void ca_mat_submul_ca(ca_mat_t y, const ca_mat_t a, const ca_t x, ca_ctx_t ctx);
void ca_mat_neg(ca_mat_t dest, const ca_mat_t src, ca_ctx_t ctx);
void ca_mat_add(ca_mat_t res, const ca_mat_t mat1, const ca_mat_t mat2, ca_ctx_t ctx);
void ca_mat_sub(ca_mat_t res, const ca_mat_t mat1, const ca_mat_t mat2, ca_ctx_t ctx);
void ca_mat_mul(ca_mat_t C, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
void ca_mat_mul_classical(ca_mat_t C, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
void ca_mat_mul_same_nf(ca_mat_t C, const ca_mat_t A, const ca_mat_t B, ca_field_t K, ca_ctx_t ctx);
/* B = A * c, entrywise multiplication by a machine integer.
   B must have the same dimensions as A. */
CA_MAT_INLINE void
ca_mat_mul_si(ca_mat_t B, const ca_mat_t A, slong c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_mul_si(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A * c, entrywise multiplication by an fmpz integer. */
CA_MAT_INLINE void
ca_mat_mul_fmpz(ca_mat_t B, const ca_mat_t A, const fmpz_t c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_mul_fmpz(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A * c, entrywise multiplication by an fmpq rational. */
CA_MAT_INLINE void
ca_mat_mul_fmpq(ca_mat_t B, const ca_mat_t A, const fmpq_t c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_mul_fmpq(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A * c, entrywise multiplication by a ca_t scalar. */
CA_MAT_INLINE void
ca_mat_mul_ca(ca_mat_t B, const ca_mat_t A, const ca_t c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_mul(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A / c, entrywise division by a machine integer. */
CA_MAT_INLINE void
ca_mat_div_si(ca_mat_t B, const ca_mat_t A, slong c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_div_si(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A / c, entrywise division by an fmpz integer. */
CA_MAT_INLINE void
ca_mat_div_fmpz(ca_mat_t B, const ca_mat_t A, const fmpz_t c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_div_fmpz(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A / c, entrywise division by an fmpq rational. */
CA_MAT_INLINE void
ca_mat_div_fmpq(ca_mat_t B, const ca_mat_t A, const fmpq_t c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_div_fmpq(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* B = A / c, entrywise division by a ca_t scalar. */
CA_MAT_INLINE void
ca_mat_div_ca(ca_mat_t B, const ca_mat_t A, const ca_t c, ca_ctx_t ctx)
{
    slong i, j;
    for (i = 0; i < ca_mat_nrows(A); i++)
        for (j = 0; j < ca_mat_ncols(A); j++)
            ca_div(ca_mat_entry(B, i, j), ca_mat_entry(A, i, j), c, ctx);
}
/* res = A^2; delegates to ca_mat_mul (which presumably supports
   aliasing of res with A — confirm against its implementation). */
CA_MAT_INLINE void
ca_mat_sqr(ca_mat_t res, const ca_mat_t A, ca_ctx_t ctx)
{
    ca_mat_mul(res, A, A, ctx);
}
void ca_mat_pow_ui_binexp(ca_mat_t B, const ca_mat_t A, ulong exp, ca_ctx_t ctx);
/* Polynomial evaluation */
void _ca_mat_ca_poly_evaluate(ca_mat_t y, ca_srcptr poly, slong len, const ca_mat_t x, ca_ctx_t ctx);
void ca_mat_ca_poly_evaluate(ca_mat_t res, const ca_poly_t f, const ca_mat_t a, ca_ctx_t ctx);
/* Trace */
void ca_mat_trace(ca_t trace, const ca_mat_t mat, ca_ctx_t ctx);
/* Gaussian elimination, solving and inverse */
truth_t ca_mat_find_pivot(slong * pivot_row, ca_mat_t mat, slong start_row, slong end_row, slong column, ca_ctx_t ctx);
/* Swap rows r and s of mat by exchanging row pointers (O(1), no entry
   data moved). If perm is non-NULL, the corresponding permutation
   entries are swapped as well to record the row exchange. */
CA_MAT_INLINE void
_ca_mat_swap_rows(ca_mat_t mat, slong * perm, slong r, slong s)
{
    if (r != s)
    {
        ca_ptr u;
        slong t;

        if (perm != NULL)
        {
            t = perm[s];
            perm[s] = perm[r];
            perm[r] = t;
        }

        u = mat->rows[s];
        mat->rows[s] = mat->rows[r];
        mat->rows[r] = u;
    }
}
int ca_mat_lu_classical(slong * rank, slong * P, ca_mat_t LU, const ca_mat_t A, int rank_check, ca_ctx_t ctx);
int ca_mat_lu_recursive(slong * rank, slong * P, ca_mat_t LU, const ca_mat_t A, int rank_check, ca_ctx_t ctx);
int ca_mat_lu(slong * rank, slong * P, ca_mat_t LU, const ca_mat_t A, int rank_check, ca_ctx_t ctx);
int ca_mat_fflu(slong * rank, slong * P, ca_mat_t LU, ca_t den, const ca_mat_t A, int rank_check, ca_ctx_t ctx);
int ca_mat_rref_fflu(slong * rank, ca_mat_t R, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_rref_lu(slong * rank, ca_mat_t R, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_rref(slong * rank, ca_mat_t R, const ca_mat_t A, ca_ctx_t ctx);
truth_t ca_mat_nonsingular_lu(slong * P, ca_mat_t LU, const ca_mat_t A, ca_ctx_t ctx);
truth_t ca_mat_nonsingular_fflu(slong * P, ca_mat_t LU, ca_t den, const ca_mat_t A, ca_ctx_t ctx);
truth_t ca_mat_nonsingular_solve_adjugate(ca_mat_t X, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
truth_t ca_mat_nonsingular_solve_fflu(ca_mat_t X, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
truth_t ca_mat_nonsingular_solve_lu(ca_mat_t X, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
truth_t ca_mat_nonsingular_solve(ca_mat_t X, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
truth_t ca_mat_inv(ca_mat_t X, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_solve_tril_classical(ca_mat_t X, const ca_mat_t L, const ca_mat_t B, int unit, ca_ctx_t ctx);
void ca_mat_solve_tril_recursive(ca_mat_t X, const ca_mat_t L, const ca_mat_t B, int unit, ca_ctx_t ctx);
void ca_mat_solve_tril(ca_mat_t X, const ca_mat_t L, const ca_mat_t B, int unit, ca_ctx_t ctx);
void ca_mat_solve_triu_classical(ca_mat_t X, const ca_mat_t U, const ca_mat_t B, int unit, ca_ctx_t ctx);
void ca_mat_solve_triu_recursive(ca_mat_t X, const ca_mat_t U, const ca_mat_t B, int unit, ca_ctx_t ctx);
void ca_mat_solve_triu(ca_mat_t X, const ca_mat_t U, const ca_mat_t B, int unit, ca_ctx_t ctx);
void ca_mat_solve_lu_precomp(ca_mat_t X, const slong * perm, const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
void ca_mat_solve_fflu_precomp(ca_mat_t X, const slong * perm, const ca_mat_t A, const ca_t den, const ca_mat_t B, ca_ctx_t ctx);
/* Rank and kernel */
int ca_mat_rank(slong * rank, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_right_kernel(ca_mat_t X, const ca_mat_t A, ca_ctx_t ctx);
/* Determinant */
void ca_mat_det_berkowitz(ca_t det, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_det_lu(ca_t det, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_det_bareiss(ca_t det, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_det_cofactor(ca_t det, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_det(ca_t det, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_adjugate_cofactor(ca_mat_t adj, ca_t det, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_adjugate_charpoly(ca_mat_t adj, ca_t det, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_adjugate(ca_mat_t adj, ca_t det, const ca_mat_t A, ca_ctx_t ctx);
/* Characteristic polynomial */
void _ca_mat_charpoly_berkowitz(ca_ptr cp, const ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_charpoly_berkowitz(ca_poly_t cp, const ca_mat_t mat, ca_ctx_t ctx);
int _ca_mat_charpoly_danilevsky(ca_ptr p, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_charpoly_danilevsky(ca_poly_t cp, const ca_mat_t mat, ca_ctx_t ctx);
void _ca_mat_charpoly(ca_ptr cp, const ca_mat_t mat, ca_ctx_t ctx);
void ca_mat_charpoly(ca_poly_t cp, const ca_mat_t mat, ca_ctx_t ctx);
int ca_mat_companion(ca_mat_t A, const ca_poly_t poly, ca_ctx_t ctx);
/* Eigenvalues and eigenvectors */
int ca_mat_eigenvalues(ca_vec_t lambda, ulong * exp, const ca_mat_t mat, ca_ctx_t ctx);
/* Diagonalization */
truth_t ca_mat_diagonalization(ca_mat_t D, ca_mat_t P, const ca_mat_t A, ca_ctx_t ctx);
void ca_mat_set_jordan_blocks(ca_mat_t mat, const ca_vec_t lambda, slong num_blocks, slong * block_lambda, slong * block_size, ca_ctx_t ctx);
int ca_mat_jordan_blocks(ca_vec_t lambda, slong * num_blocks, slong * block_lambda, slong * block_size, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_jordan_transformation(ca_mat_t mat, const ca_vec_t lambda, slong num_blocks, slong * block_lambda, slong * block_size, const ca_mat_t A, ca_ctx_t ctx);
int ca_mat_jordan_form(ca_mat_t J, ca_mat_t P, const ca_mat_t A, ca_ctx_t ctx);
/* Matrix functions */
int ca_mat_exp(ca_mat_t res, const ca_mat_t A, ca_ctx_t ctx);
truth_t ca_mat_log(ca_mat_t res, const ca_mat_t A, ca_ctx_t ctx);
/* Internal representation */
/* todo: document, make consistent */
ca_field_ptr _ca_mat_same_field(const ca_mat_t A, ca_ctx_t ctx);
ca_field_ptr _ca_mat_same_field2(const ca_mat_t A, const ca_mat_t B, ca_ctx_t ctx);
#ifdef __cplusplus
}
#endif
#endif
| /*
Copyright (C) 2020 Fredrik Johansson
This file is part of Calcium.
Calcium is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <http://www.gnu.org/licenses/>.
*/ |
dune |
; Test libraries and executables for irmin-git (generic + Unix variants).
(library
 (name test_git)
 (modules test_git)
 (libraries
  alcotest
  fmt
  fpath
  irmin
  irmin-test
  irmin.mem
  irmin-git
  git
  git-unix
  lwt
  lwt.unix)
 (preprocess
  (pps ppx_irmin ppx_irmin.internal)))

(library
 (name test_git_unix)
 (modules test_git_unix)
 (libraries test_git irmin.unix irmin-git.unix irmin-watcher)
 (preprocess
  (pps ppx_irmin ppx_irmin.internal)))

(executable
 (name test)
 (modules test)
 (libraries alcotest irmin irmin-test test_git))

(executable
 (name test_unix)
 (modules test_unix)
 (libraries alcotest irmin irmin-test test_git_unix))

; Run both test binaries under `dune runtest` for the irmin-git package.
(rule
 (alias runtest)
 (package irmin-git)
 (action
  (run %{exe:test.exe} -q --color=always)))

(rule
 (alias runtest)
 (package irmin-git)
 (action
  (run %{exe:test_unix.exe} -q --color=always)))
| |
cancelBundleTask.mli |
(** Interface for the [CancelBundleTask] AWS call: wires the
    request/response/error types from [Types] into the generic
    [Aws.Call] signature. *)
open Types

type input = CancelBundleTaskRequest.t
type output = CancelBundleTaskResult.t
type error = Errors_internal.t

include
  Aws.Call with type input := input and type output := output and type
    error := error
gPango.ml | (**************************************************************************)
(* Lablgtk *)
(* *)
(* This program is free software; you can redistribute it *)
(* and/or modify it under the terms of the GNU Library General *)
(* Public License as published by the Free Software Foundation *)
(* version 2, with the exception described in file COPYING which *)
(* comes with the library. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Library General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU Library General *)
(* Public License along with this program; if not, write to the *)
(* Free Software Foundation, Inc., 59 Temple Place, Suite 330, *)
(* Boston, MA 02111-1307 USA *)
(* *)
(* *)
(**************************************************************************)
(* $Id$ *)
open Gaux
open Pango
open Font
(* Convert Pango units to pixels.  [scale] is the number of Pango units
   per pixel; (x-1)/scale + 1 rounds up for positive [x]
   (NOTE(review): behaviour for x <= 0 relies on OCaml's
   truncating division -- confirm callers never pass negatives). *)
let to_pixels (x : units) = (x-1) / scale + 1
(* Convert pixels to Pango units (exact: a multiple of [scale]). *)
let from_pixels x : units = x * scale
(* Thin object wrapper around a Pango font-metrics value, exposing the
   four measurements used by the rest of lablgtk. *)
class metrics obj = object
  method ascent = get_ascent obj
  method descent = get_descent obj
  method approx_char_width = get_approximate_char_width obj
  method approx_digit_width = get_approximate_digit_width obj
end
(* Object wrapper around a [Pango.Font] description.  [fd] exposes the
   underlying value so it can be handed back to raw Pango calls. *)
class font_description fd = object
  method fd = fd
  method copy = new font_description (copy fd)
  method to_string = to_string fd
  method family = get_family fd
  method style = get_style fd
  method variant = get_variant fd
  method weight = get_weight fd
  method stretch = get_stretch fd
  method size = get_size fd
  method modify = modify fd
end
(* Parse a Pango font description string into a wrapper object. *)
let font_description_from_string s = new font_description (from_string s)
open Context
(* Object wrapper around [Pango.Layout]: a block of (possibly marked-up)
   text together with width/indent/wrap/justify attributes.  Mutually
   recursive with [context] because layouts reference their context. *)
class layout obj = object
  val obj = obj
  method as_layout = obj
  method copy = new layout (Layout.copy obj)
  method get_context = new context (Layout.get_context obj)
  method get_text = Layout.get_text obj
  method set_text s = Layout.set_text obj s
  method set_markup s = Layout.set_markup obj s
  method set_markup_with_accel s c = Layout.set_markup_with_accel obj s c
  method set_font_description (fd : font_description) = Layout.set_font_description obj fd#fd
  method get_font_description = new font_description (Layout.get_font_description obj)
  method get_width = Layout.get_width obj
  method set_width w = Layout.set_width obj w
  method get_indent = Layout.get_indent obj
  method set_indent i = Layout.set_indent obj i
  method get_spacing = Layout.get_spacing obj
  method set_spacing s = Layout.set_spacing obj s
  method get_wrap = Layout.get_wrap obj
  method set_wrap w = Layout.set_wrap obj w
  method get_justify = Layout.get_justify obj
  method set_justify b = Layout.set_justify obj b
  method get_single_paragraph_mode = Layout.get_single_paragraph_mode obj
  method set_single_paragraph_mode b = Layout.set_single_paragraph_mode obj b
  method context_changed = Layout.context_changed obj
  (* Sizes/extents come in Pango units; the pixel variants are rounded. *)
  method get_size = Layout.get_size obj
  method get_pixel_size = Layout.get_pixel_size obj
  method get_extent = Layout.get_extent obj
  method get_pixel_extent = Layout.get_pixel_extent obj
  method index_to_pos i = Layout.index_to_pos obj i
  method xy_to_index ~x ~y = Layout.xy_to_index obj ~x ~y
  method get_ellipsize = Layout.get_ellipsize obj
  method set_ellipsize m = Layout.set_ellipsize obj m
end
(* Object wrapper around a Pango rendering context.  The optional
   arguments of [load_fontset] and [get_metrics] default to the
   context's current font description and language. *)
and context obj = object (self)
  val obj = obj
  method as_context = obj
  method font_description = new font_description (get_font_description obj)
  method font_name = Font.to_string (get_font_description obj)
  method language = Language.to_string (get_language obj)
  method load_font desc = load_font obj (Font.from_string desc)
  method load_fontset
      ?(desc = self#font_description) ?(lang = self#language) () =
    load_fontset obj (desc#fd) (Language.from_string lang)
  method get_metrics
      ?(desc = self#font_description) ?(lang = self#language) () =
    new metrics (get_metrics obj (desc#fd) (Some (Language.from_string lang)))
  method create_layout = new layout (Layout.create obj)
  method set_font_description (desc : font_description) =
    set_font_description obj desc#fd
  method set_font_by_name desc =
    set_font_description obj (Font.from_string desc)
  method set_language lang = set_language obj (Language.from_string lang)
end
| (**************************************************************************)
(* Lablgtk *)
(* *)
(* This program is free software; you can redistribute it *)
(* and/or modify it under the terms of the GNU Library General *)
(* Public License as published by the Free Software Foundation *)
(* version 2, with the exception described in file COPYING which *)
(* comes with the library. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Library General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU Library General *)
(* Public License along with this program; if not, write to the *)
(* Free Software Foundation, Inc., 59 Temple Place, Suite 330, *)
(* Boston, MA 02111-1307 USA *)
(* *)
(* *)
(**************************************************************************)
|
taylor_shift.c |
#include "flint.h"
#include "nmod_poly.h"
void
_nmod_poly_taylor_shift(mp_ptr poly, mp_limb_t c, slong len, nmod_t mod)
{
    /* In-place Taylor shift poly(x) <- poly(x + c), dispatching between
       Horner's scheme and the convolution-based algorithm.
       Horner is used for short polynomials, when len exceeds the
       modulus (NOTE(review): presumably because the convolution method
       needs factorials below len to be invertible mod n -- confirm
       against the FLINT docs), and for the cheap shifts c = 1 and
       c = -1 below a larger cutoff. */
    if (len < 100 || len > mod.n)
        _nmod_poly_taylor_shift_horner(poly, c, len, mod);
    else if ((c == 1 || c == mod.n - 1) && len < 1000)
        _nmod_poly_taylor_shift_horner(poly, c, len, mod);
    else
        _nmod_poly_taylor_shift_convolution(poly, c, len, mod);
}
void
nmod_poly_taylor_shift(nmod_poly_t g, const nmod_poly_t f, mp_limb_t c)
{
    /* g(x) = f(x + c).  The underlying routine works in place, so copy
       f into g first unless the call is already aliased (f == g). *)
    if (f != g)
        nmod_poly_set(g, f);
    _nmod_poly_taylor_shift(g->coeffs, c, g->length, g->mod);
}
| /*
Copyright (C) 2012 Fredrik Johansson
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
bytes0.ml | (* [Bytes0] defines string functions that are primitives or can be simply
defined in terms of [Caml.Bytes]. [Bytes0] is intended to completely express
the part of [Caml.Bytes] that [Base] uses -- no other file in Base other
than bytes0.ml should use [Caml.Bytes]. [Bytes0] has few dependencies, and
so is available early in Base's build order.
All Base files that need to use strings and come before [Base.Bytes] in
build order should do:
{[
module Bytes = Bytes0
]}
Defining [module Bytes = Bytes0] is also necessary because it prevents
ocamldep from mistakenly causing a file to depend on [Base.Bytes]. *)
open! Import0
module Sys = Sys0
(* Compiler primitives for bytes access.  The %-prefixed externals are
   compiled to direct loads/stores (with or without bounds checks). *)
module Primitives = struct
  external get : bytes -> int -> char = "%bytes_safe_get"
  external length : bytes -> int = "%bytes_length"
  external unsafe_get : bytes -> int -> char = "%bytes_unsafe_get"
  external set : bytes -> int -> char -> unit = "%bytes_safe_set"
  external unsafe_set : bytes -> int -> char -> unit = "%bytes_unsafe_set"
  (* [unsafe_blit_string] is not exported in the [stdlib] so we export it here *)
  external unsafe_blit_string
    : src:string
    -> src_pos:int
    -> dst:bytes
    -> dst_pos:int
    -> len:int
    -> unit
    = "caml_blit_string"
  [@@noalloc]
  (* Unaligned 64-bit load/store; no bounds checks. *)
  external unsafe_get_int64 : bytes -> int -> int64 = "%caml_bytes_get64u"
  external unsafe_set_int64 : bytes -> int -> int64 -> unit = "%caml_bytes_set64u"
end
include Primitives
let max_length = Sys.max_string_length
(* Re-exports of [Caml.Bytes]; no other Base file should touch
   [Caml.Bytes] directly. *)
let blit = Caml.Bytes.blit
let blit_string = Caml.Bytes.blit_string
let compare = Caml.Bytes.compare
let copy = Caml.Bytes.copy
let create = Caml.Bytes.create
let fill = Caml.Bytes.fill
let make = Caml.Bytes.make
let map = Caml.Bytes.map
let mapi = Caml.Bytes.mapi
let sub = Caml.Bytes.sub
let unsafe_blit = Caml.Bytes.unsafe_blit
let to_string = Caml.Bytes.to_string
let of_string = Caml.Bytes.of_string
(* Zero-copy conversions.  The labelled argument names the caller's
   obligation: the bytes must never be mutated while the resulting
   string is reachable (and symmetrically for the reverse direction). *)
let unsafe_to_string ~no_mutation_while_string_reachable:s = Caml.Bytes.unsafe_to_string s
let unsafe_of_string_promise_no_mutation = Caml.Bytes.unsafe_of_string
| (* [Bytes0] defines string functions that are primitives or can be simply
defined in terms of [Caml.Bytes]. [Bytes0] is intended to completely express
the part of [Caml.Bytes] that [Base] uses -- no other file in Base other
than bytes0.ml should use [Caml.Bytes]. [Bytes0] has few dependencies, and
so is available early in Base's build order.
All Base files that need to use strings and come before [Base.Bytes] in
build order should do:
{[
module Bytes = Bytes0
]}
Defining [module Bytes = Bytes0] is also necessary because it prevents
ocamldep from mistakenly causing a file to depend on [Base.Bytes]. *) |
dune |
; DHCP server unit tests; cstruct accessors in the tests are generated
; by ppx_cstruct.
(test
 (name test)
 (package charrua-server)
 (preprocess (pps ppx_cstruct))
 (libraries cstruct-unix alcotest charrua charrua-server))
; Run the tests with the pcap fixtures present in the sandbox.
; NOTE(review): (:< test.exe) binds the executable as %{<} (first
; dependency) -- legacy jbuild-style syntax; confirm it is still
; accepted by the dune version in use.
(alias
 (name runtest)
 (package charrua-server)
 (deps
  (:< test.exe)
  dhcp.pcap
  dhcp2.pcap))
| |
m_2.ml |
(* Run [M_1.f] for its side effects when this module is loaded.
   (The former trailing [()] after the call was a redundant unit
   expression and has been removed; behaviour is unchanged.) *)
let () = M_1.f ()

(* Emit a completion marker via [Js.log]. *)
let print () = Js.log "done"
| |
mk_simplified_app.h | #pragma once
#include "ast/ast.h"
#include "util/params.h"
#include "ast/rewriter/rewriter_types.h"
class mk_simplified_app {
struct imp;
imp * m_imp;
public:
mk_simplified_app(ast_manager & m, params_ref const & p = params_ref());
~mk_simplified_app();
br_status mk_core(func_decl * decl, unsigned num, expr * const * args, expr_ref & result);
void operator()(func_decl * decl, unsigned num, expr * const * args, expr_ref & result);
};
| /*++
Copyright (c) 2011 Microsoft Corporation
Module Name:
mk_simplified_app.h
Abstract:
Functor for creating new simplified applications
Author:
Leonardo (leonardo) 2011-06-06
Notes:
--*/ |
scanner.c |
#include <tree_sitter/parser.h>
#include <wctype.h>
enum TokenType {
  PREPROC_DIRECTIVE_END,
};

/* This scanner is stateless, so the lifecycle hooks are no-ops and the
   serialized state is empty. */
void *tree_sitter_c_sharp_external_scanner_create() { return NULL; }
void tree_sitter_c_sharp_external_scanner_destroy(void *p) {}
void tree_sitter_c_sharp_external_scanner_reset(void *p) {}
unsigned tree_sitter_c_sharp_external_scanner_serialize(void *p, char *buffer) { return 0; }
void tree_sitter_c_sharp_external_scanner_deserialize(void *p, const char *b, unsigned n) {}
bool tree_sitter_c_sharp_external_scanner_scan(
  void *payload,
  TSLexer *lexer,
  const bool *valid_symbols
) {
  // The only token produced here is the end of a preprocessor
  // directive: a newline or end-of-file.  External scanners are
  // currently the only way to match against EOF.
  if (!valid_symbols[PREPROC_DIRECTIVE_END]) {
    return false;
  }
  lexer->result_symbol = PREPROC_DIRECTIVE_END;
  // Skip any whitespace preceding the terminator; fail on the first
  // non-whitespace character.
  while (lexer->lookahead != 0 && lexer->lookahead != '\n') {
    if (!iswspace(lexer->lookahead)) {
      return false;
    }
    lexer->advance(lexer, true);
  }
  // lookahead is now either 0 (EOF) or '\n'.
  return true;
}
| |
issue51.ml |
(* Regression snippet: [run] takes a thunk producing a [result] whose
   error is an open polymorphic variant including [`Msg of string]. *)
val run :
  unit
  -> (unit -> ('a, ([> `Msg of string] as 'b)) result)
  -> ('a, 'b) result
| |
interpolate_barycentric.c |
#include "acb_poly.h"
void
_acb_poly_interpolate_barycentric(acb_ptr poly,
    acb_srcptr xs, acb_srcptr ys, slong n, slong prec)
{
    acb_ptr P, Q, w;
    acb_t t;
    slong i, j;
    /* A single node gives the constant interpolant ys[0]. */
    if (n == 1)
    {
        acb_set(poly, ys);
        return;
    }
    P = _acb_vec_init(n + 1);
    Q = _acb_vec_init(n);
    w = _acb_vec_init(n);
    acb_init(t);
    /* P(x) = prod_i (x - xs[i]) -- the node polynomial (degree n). */
    _acb_poly_product_roots(P, xs, n, prec);
    /* Barycentric weights w[i] = 1 / prod_{j != i} (xs[i] - xs[j]).
       Nodes must be distinct or the inversion blows up. */
    for (i = 0; i < n; i++)
    {
        acb_one(w + i);
        for (j = 0; j < n; j++)
        {
            if (i != j)
            {
                acb_sub(t, xs + i, xs + j, prec);
                acb_mul(w + i, w + i, t, prec);
            }
        }
        acb_inv(w + i, w + i, prec);
    }
    /* poly = sum_i w[i] * ys[i] * P(x) / (x - xs[i]);
       Q holds the quotient P / (x - xs[i]) on each iteration. */
    _acb_vec_zero(poly, n);
    for (i = 0; i < n; i++)
    {
        _acb_poly_div_root(Q, t, P, n + 1, xs + i, prec);
        acb_mul(t, w + i, ys + i, prec);
        _acb_vec_scalar_addmul(poly, Q, n, t, prec);
    }
    _acb_vec_clear(P, n + 1);
    _acb_vec_clear(Q, n);
    _acb_vec_clear(w, n);
    acb_clear(t);
}
void
acb_poly_interpolate_barycentric(acb_poly_t poly,
    acb_srcptr xs, acb_srcptr ys, slong n, slong prec)
{
    /* No nodes: the interpolant is the zero polynomial. */
    if (n == 0)
    {
        acb_poly_zero(poly);
        return;
    }

    /* Make room for n coefficients, interpolate into them, then trim
       any trailing zero coefficients. */
    acb_poly_fit_length(poly, n);
    _acb_poly_set_length(poly, n);
    _acb_poly_interpolate_barycentric(poly->coeffs, xs, ys, n, prec);
    _acb_poly_normalise(poly);
}
| /*
Copyright (C) 2012 Fredrik Johansson
This file is part of Arb.
Arb is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <http://www.gnu.org/licenses/>.
*/ |
pickle_printer.h |
#ifndef GHERKIN_PICKLE_PRINTER_H_
#define GHERKIN_PICKLE_PRINTER_H_

#include "pickle.h"
#include <stdio.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Print a textual representation of [pickle] to [file]. */
void PicklePrinter_print_pickle(FILE* file, const Pickle* pickle);

#ifdef __cplusplus
}
#endif

#endif /* GHERKIN_PICKLE_PRINTER_H_ */
| |
michelson_v1_gas.ml | open Alpha_context
open Gas
module Cost_of = struct
let log2 =
let rec help acc = function
| 0 -> acc
| n -> help (acc + 1) (n / 2)
in help 1
  (* Number of bytes needed to store [z]: (7 + numbits) / 8 rounds the
     bit count up to whole bytes. *)
  let z_bytes (z : Z.t) =
    let bits = Z.numbits z in
    (7 + bits) / 8
  (* Byte size of a Michelson integer, via its arbitrary-precision value. *)
  let int_bytes (z : 'a Script_int.num) =
    z_bytes (Script_int.to_zint z)
  (* Byte size of a timestamp's underlying integer representation. *)
  let timestamp_bytes (t : Script_timestamp.t) =
    let z = Script_timestamp.to_zint t in
    z_bytes z
  (* For now, returns size in bytes, but this could get more complicated... *)
  (* Approximate in-memory size of a comparable value, driven by the
     GADT witness.  Hashes and fixed-width values use constants; pairs
     recurse over both components. *)
  let rec size_of_comparable : type a b. (a, b) Script_typed_ir.comparable_struct -> a -> int =
    fun wit v ->
      match wit with
      | Int_key _ -> int_bytes v
      | Nat_key _ -> int_bytes v
      | String_key _ -> String.length v
      | Bytes_key _ -> MBytes.length v
      | Bool_key _ -> 8
      | Key_hash_key _ -> Signature.Public_key_hash.size
      | Timestamp_key _ -> timestamp_bytes v
      | Address_key _ -> Signature.Public_key_hash.size
      | Mutez_key _ -> 8
      | Pair_key ((l, _), (r, _), _) ->
          let (lval, rval) = v in
          size_of_comparable l lval + size_of_comparable r rval
  (* Allocation cost of a string/bytes value of the given length. *)
  let string length =
    alloc_bytes_cost length
  let bytes length =
    alloc_mbytes_cost length
  (* Flat cost charged per manager operation. *)
  let manager_operation = step_cost 10_000
  (* Older cost model kept for protocol compatibility; still used by
     the Unparse module below. *)
  module Legacy = struct
    let zint z =
      alloc_bits_cost (Z.numbits z)
    (* Converting a set to a list allocates proportionally to its size. *)
    let set_to_list : type item. item Script_typed_ir.set -> cost
      = fun (module Box) ->
        alloc_cost @@ Pervasives.(Box.size * 2)
    let map_to_list : type key value. (key, value) Script_typed_ir.map -> cost
      = fun (module Box) ->
        let size = snd Box.boxed in
        3 *@ alloc_cost size
    let z_to_int64 = step_cost 2 +@ alloc_cost 1
    let hash data len = 10 *@ step_cost (MBytes.length data) +@ bytes len
    (* Lookup depth bound: log2 of the set's cardinality. *)
    let set_access : type elt. elt -> elt Script_typed_ir.set -> int
      = fun _key (module Box) ->
        log2 @@ Box.size
    let set_update key _presence set =
      set_access key set *@ alloc_cost 3
  end
module Interpreter = struct
let cycle = atomic_step_cost 10
let nop = free
let stack_op = atomic_step_cost 10
let push = atomic_step_cost 10
let wrap = atomic_step_cost 10
let variant_no_data = atomic_step_cost 10
let branch = atomic_step_cost 10
let pair = atomic_step_cost 10
let pair_access = atomic_step_cost 10
let cons = atomic_step_cost 10
let loop_size = atomic_step_cost 5
let loop_cycle = atomic_step_cost 10
let loop_iter = atomic_step_cost 20
let loop_map = atomic_step_cost 30
let empty_set = atomic_step_cost 10
let set_to_list : type elt. elt Script_typed_ir.set -> cost =
fun (module Box) ->
atomic_step_cost (Box.size * 20)
let set_mem : type elt. elt -> elt Script_typed_ir.set -> cost =
fun elt (module Box) ->
let elt_bytes = size_of_comparable Box.elt_ty elt in
atomic_step_cost ((1 + (elt_bytes / 82)) * log2 Box.size)
let set_update : type elt. elt -> bool -> elt Script_typed_ir.set -> cost =
fun elt _ (module Box) ->
let elt_bytes = size_of_comparable Box.elt_ty elt in
atomic_step_cost ((1 + (elt_bytes / 82)) * log2 Box.size)
let set_size = atomic_step_cost 10
let empty_map = atomic_step_cost 10
let map_to_list : type key value. (key, value) Script_typed_ir.map -> cost =
fun (module Box) ->
let size = snd Box.boxed in
atomic_step_cost (size * 20)
let map_access : type key value. key -> (key, value) Script_typed_ir.map -> cost
= fun key (module Box) ->
let map_card = snd Box.boxed in
let key_bytes = size_of_comparable Box.key_ty key in
atomic_step_cost ((1 + (key_bytes / 70)) * log2 map_card)
let map_mem = map_access
let map_get = map_access
let map_update : type key value. key -> value option -> (key, value) Script_typed_ir.map -> cost
= fun key _value (module Box) ->
let map_card = snd Box.boxed in
let key_bytes = size_of_comparable Box.key_ty key in
atomic_step_cost ((1 + (key_bytes / 38)) * log2 map_card)
let map_size = atomic_step_cost 10
let add_timestamp (t1 : Script_timestamp.t) (t2 : 'a Script_int.num) =
let bytes1 = timestamp_bytes t1 in
let bytes2 = int_bytes t2 in
atomic_step_cost (51 + (Compare.Int.max bytes1 bytes2 / 62))
let sub_timestamp = add_timestamp
let diff_timestamps (t1 : Script_timestamp.t) (t2 : Script_timestamp.t) =
let bytes1 = timestamp_bytes t1 in
let bytes2 = timestamp_bytes t2 in
atomic_step_cost (51 + (Compare.Int.max bytes1 bytes2 / 62))
let rec concat_loop l acc =
match l with
| [] -> 30
| _ :: tl -> concat_loop tl (acc + 30)
let concat_string string_list =
atomic_step_cost (concat_loop string_list 0)
let slice_string string_length =
atomic_step_cost (40 + (string_length / 70))
let concat_bytes bytes_list =
atomic_step_cost (concat_loop bytes_list 0)
let int64_op = atomic_step_cost 61
let z_to_int64 = atomic_step_cost 20
let int64_to_z = atomic_step_cost 20
let bool_binop _ _ = atomic_step_cost 10
let bool_unop _ = atomic_step_cost 10
let abs int = atomic_step_cost (61 + ((int_bytes int) / 70))
let int _int = free
let neg = abs
let add i1 i2 = atomic_step_cost (51 + (Compare.Int.max (int_bytes i1) (int_bytes i2) / 62))
let sub = add
let mul i1 i2 =
let bytes = Compare.Int.max (int_bytes i1) (int_bytes i2) in
atomic_step_cost (51 + (bytes / 6 * log2 bytes))
let indic_lt x y = if Compare.Int.(x < y) then 1 else 0
let div i1 i2 =
let bytes1 = int_bytes i1 in
let bytes2 = int_bytes i2 in
let cost = indic_lt bytes2 bytes1 * (bytes1 - bytes2) * bytes2 in
atomic_step_cost (51 + (cost / 3151))
let shift_left _i _shift_bits = atomic_step_cost 30
let shift_right _i _shift_bits = atomic_step_cost 30
let logor i1 i2 =
let bytes1 = int_bytes i1 in
let bytes2 = int_bytes i2 in
atomic_step_cost (51 + ((Compare.Int.max bytes1 bytes2) / 70))
let logand i1 i2 =
let bytes1 = int_bytes i1 in
let bytes2 = int_bytes i2 in
atomic_step_cost (51 + ((Compare.Int.min bytes1 bytes2) / 70))
let logxor = logor
let lognot i = atomic_step_cost (51 + ((int_bytes i) / 20))
let exec = atomic_step_cost 10
let compare_bool _ _ = atomic_step_cost 30
let compare_string s1 s2 =
let bytes1 = String.length s1 in
let bytes2 = String.length s2 in
atomic_step_cost (30 + ((Compare.Int.min bytes1 bytes2) / 123))
let compare_bytes b1 b2 =
let bytes1 = MBytes.length b1 in
let bytes2 = MBytes.length b2 in
atomic_step_cost (30 + ((Compare.Int.min bytes1 bytes2) / 123))
let compare_tez _ _ = atomic_step_cost 30
let compare_zint i1 i2 =
atomic_step_cost (51 + ((Compare.Int.min (int_bytes i1) (int_bytes i2)) / 82))
let compare_key_hash _ _ = atomic_step_cost 92
let compare_timestamp t1 t2 =
let bytes1 = timestamp_bytes t1 in
let bytes2 = timestamp_bytes t2 in
atomic_step_cost (51 + ((Compare.Int.min bytes1 bytes2) / 82))
let compare_address _ _ = atomic_step_cost 92
let compare_res = atomic_step_cost 30
let unpack_failed bytes =
(* We cannot instrument failed deserialization,
so we take worst case fees: a set of size 1 bytes values. *)
let len = MBytes.length bytes in
(len *@ alloc_mbytes_cost 1) +@
(len *@ (log2 len *@ (alloc_cost 3 +@ step_cost 1)))
let address = atomic_step_cost 10
let contract = step_cost 10000
let transfer = step_cost 10
let create_account = step_cost 10
let create_contract = step_cost 10
let implicit_account = step_cost 10
let set_delegate = step_cost 10 +@ write_bytes_cost (Z.of_int 32)
let balance = atomic_step_cost 10
let now = atomic_step_cost 10
let check_signature_secp256k1 bytes = atomic_step_cost (10342 + (bytes / 5))
let check_signature_ed25519 bytes = atomic_step_cost (36864 + (bytes / 5))
let check_signature_p256 bytes = atomic_step_cost (36864 + (bytes / 5))
let check_signature (pkey : Signature.public_key) bytes =
match pkey with
| Ed25519 _ -> check_signature_ed25519 (MBytes.length bytes)
| Secp256k1 _ -> check_signature_secp256k1 (MBytes.length bytes)
| P256 _ -> check_signature_p256 (MBytes.length bytes)
let hash_key = atomic_step_cost 30
let hash_blake2b b = atomic_step_cost (102 + ((MBytes.length b) / 5))
let hash_sha256 b = atomic_step_cost (409 + (MBytes.length b))
let hash_sha512 b =
let bytes = MBytes.length b in atomic_step_cost (409 + ((bytes lsr 1) + (bytes lsr 4)))
let steps_to_quota = atomic_step_cost 10
let source = atomic_step_cost 10
let self = atomic_step_cost 10
let amount = atomic_step_cost 10
let chain_id = step_cost 1
let stack_n_op n = atomic_step_cost (20 + (((n lsr 1) + (n lsr 2)) + (n lsr 4)))
let apply = alloc_cost 8 +@ step_cost 1
let rec compare : type a s. (a, s) Script_typed_ir.comparable_struct -> a -> a -> cost = fun ty x y ->
match ty with
| Bool_key _ -> compare_bool x y
| String_key _ -> compare_string x y
| Bytes_key _ -> compare_bytes x y
| Mutez_key _ -> compare_tez x y
| Int_key _ -> compare_zint x y
| Nat_key _ -> compare_zint x y
| Key_hash_key _ -> compare_key_hash x y
| Timestamp_key _ -> compare_timestamp x y
| Address_key _ -> compare_address x y
| Pair_key ((tl, _), (tr, _), _) ->
(* Reasonable over-approximation of the cost of lexicographic comparison. *)
let (xl, xr) = x and (yl, yr) = y in
compare tl xl yl +@ compare tr xr yr
end
module Typechecking = struct
let cycle = step_cost 1
let bool = free
let unit = free
let string = string
let bytes = bytes
let z = Legacy.zint
let int_of_string str =
alloc_cost @@ (Pervasives.(/) (String.length str) 5)
let tez = step_cost 1 +@ alloc_cost 1
let string_timestamp = step_cost 3 +@ alloc_cost 3
let key = step_cost 3 +@ alloc_cost 3
let key_hash = step_cost 1 +@ alloc_cost 1
let signature = step_cost 1 +@ alloc_cost 1
let chain_id = step_cost 1 +@ alloc_cost 1
let contract = step_cost 5
let get_script = step_cost 20 +@ alloc_cost 5
let contract_exists = step_cost 15 +@ alloc_cost 5
let pair = alloc_cost 2
let union = alloc_cost 1
let lambda = alloc_cost 5 +@ step_cost 3
let some = alloc_cost 1
let none = alloc_cost 0
let list_element = alloc_cost 2 +@ step_cost 1
let set_element size = log2 size *@ (alloc_cost 3 +@ step_cost 2)
let map_element size = log2 size *@ (alloc_cost 4 +@ step_cost 2)
let primitive_type = alloc_cost 1
let one_arg_type = alloc_cost 2
let two_arg_type = alloc_cost 3
let operation b = bytes b
let type_ nb_args = alloc_cost (nb_args + 1)
(* Cost of parsing instruction, is cost of allocation of
constructor + cost of contructor parameters + cost of
allocation on the stack type *)
let instr
: type b a. (b, a) Script_typed_ir.instr -> cost
= fun i ->
let open Script_typed_ir in
alloc_cost 1 +@ (* cost of allocation of constructor *)
match i with
| Drop -> alloc_cost 0
| Dup -> alloc_cost 1
| Swap -> alloc_cost 0
| Const _ -> alloc_cost 1
| Cons_pair -> alloc_cost 2
| Car -> alloc_cost 1
| Cdr -> alloc_cost 1
| Cons_some -> alloc_cost 2
| Cons_none _ -> alloc_cost 3
| If_none _ -> alloc_cost 2
| Left -> alloc_cost 3
| Right -> alloc_cost 3
| If_left _ -> alloc_cost 2
| Cons_list -> alloc_cost 1
| Nil -> alloc_cost 1
| If_cons _ -> alloc_cost 2
| List_map _ -> alloc_cost 5
| List_iter _ -> alloc_cost 4
| List_size -> alloc_cost 1
| Empty_set _ -> alloc_cost 1
| Set_iter _ -> alloc_cost 4
| Set_mem -> alloc_cost 1
| Set_update -> alloc_cost 1
| Set_size -> alloc_cost 1
| Empty_map _ -> alloc_cost 2
| Map_map _ -> alloc_cost 5
| Map_iter _ -> alloc_cost 4
| Map_mem -> alloc_cost 1
| Map_get -> alloc_cost 1
| Map_update -> alloc_cost 1
| Map_size -> alloc_cost 1
| Empty_big_map _ -> alloc_cost 2
| Big_map_mem -> alloc_cost 1
| Big_map_get -> alloc_cost 1
| Big_map_update -> alloc_cost 1
| Concat_string -> alloc_cost 1
| Concat_string_pair -> alloc_cost 1
| Concat_bytes -> alloc_cost 1
| Concat_bytes_pair -> alloc_cost 1
| Slice_string -> alloc_cost 1
| Slice_bytes -> alloc_cost 1
| String_size -> alloc_cost 1
| Bytes_size -> alloc_cost 1
| Add_seconds_to_timestamp -> alloc_cost 1
| Add_timestamp_to_seconds -> alloc_cost 1
| Sub_timestamp_seconds -> alloc_cost 1
| Diff_timestamps -> alloc_cost 1
| Add_tez -> alloc_cost 1
| Sub_tez -> alloc_cost 1
| Mul_teznat -> alloc_cost 1
| Mul_nattez -> alloc_cost 1
| Ediv_teznat -> alloc_cost 1
| Ediv_tez -> alloc_cost 1
| Or -> alloc_cost 1
| And -> alloc_cost 1
| Xor -> alloc_cost 1
| Not -> alloc_cost 1
| Is_nat -> alloc_cost 1
| Neg_nat -> alloc_cost 1
| Neg_int -> alloc_cost 1
| Abs_int -> alloc_cost 1
| Int_nat -> alloc_cost 1
| Add_intint -> alloc_cost 1
| Add_intnat -> alloc_cost 1
| Add_natint -> alloc_cost 1
| Add_natnat -> alloc_cost 1
| Sub_int -> alloc_cost 1
| Mul_intint -> alloc_cost 1
| Mul_intnat -> alloc_cost 1
| Mul_natint -> alloc_cost 1
| Mul_natnat -> alloc_cost 1
| Ediv_intint -> alloc_cost 1
| Ediv_intnat -> alloc_cost 1
| Ediv_natint -> alloc_cost 1
| Ediv_natnat -> alloc_cost 1
| Lsl_nat -> alloc_cost 1
| Lsr_nat -> alloc_cost 1
| Or_nat -> alloc_cost 1
| And_nat -> alloc_cost 1
| And_int_nat -> alloc_cost 1
| Xor_nat -> alloc_cost 1
| Not_nat -> alloc_cost 1
| Not_int -> alloc_cost 1
| Seq _ -> alloc_cost 8
| If _ -> alloc_cost 8
| Loop _ -> alloc_cost 4
| Loop_left _ -> alloc_cost 5
| Dip _ -> alloc_cost 4
| Exec -> alloc_cost 1
| Apply _ -> alloc_cost 1
| Lambda _ -> alloc_cost 2
| Failwith _ -> alloc_cost 1
| Nop -> alloc_cost 0
| Compare _ -> alloc_cost 1
| Eq -> alloc_cost 1
| Neq -> alloc_cost 1
| Lt -> alloc_cost 1
| Gt -> alloc_cost 1
| Le -> alloc_cost 1
| Ge -> alloc_cost 1
| Address -> alloc_cost 1
| Contract _ -> alloc_cost 2
| Transfer_tokens -> alloc_cost 1
| Create_account -> alloc_cost 2
| Implicit_account -> alloc_cost 1
| Create_contract _ -> alloc_cost 8
(* Deducted the cost of removed arguments manager, spendable and delegatable:
- manager: key_hash = 1
- spendable: bool = 0
- delegatable: bool = 0
*)
| Create_contract_2 _ -> alloc_cost 7
| Set_delegate -> alloc_cost 1
| Now -> alloc_cost 1
| Balance -> alloc_cost 1
| Check_signature -> alloc_cost 1
| Hash_key -> alloc_cost 1
| Pack _ -> alloc_cost 2
| Unpack _ -> alloc_cost 2
| Blake2b -> alloc_cost 1
| Sha256 -> alloc_cost 1
| Sha512 -> alloc_cost 1
| Steps_to_quota -> alloc_cost 1
| Source -> alloc_cost 1
| Sender -> alloc_cost 1
| Self _ -> alloc_cost 2
| Amount -> alloc_cost 1
| Dig (n,_) -> n *@ alloc_cost 1 (* _ is a unary development of n *)
| Dug (n,_) -> n *@ alloc_cost 1
| Dipn (n,_,_) -> n *@ alloc_cost 1
| Dropn (n,_) -> n *@ alloc_cost 1
| ChainId -> alloc_cost 1
end
  (* Costs of converting typed values back into Micheline nodes. *)
  module Unparse = struct
    let prim_cost l annot = Script.prim_node_cost_nonrec_of_length l annot
    let seq_cost = Script.seq_node_cost_nonrec_of_length
    let string_cost length = Script.string_node_cost_of_length length
    let cycle = step_cost 1
    let bool = prim_cost 0 []
    let unit = prim_cost 0 []
    (* We count the length of strings and bytes to prevent hidden
       miscalculations due to non detectable expansion of sharing. *)
    let string s = Script.string_node_cost s
    let bytes s = Script.bytes_node_cost s
    let z i = Script.int_node_cost i
    let int i = Script.int_node_cost (Script_int.to_zint i)
    let tez = Script.int_node_cost_of_numbits 60 (* int64 bound *)
    let timestamp x = Script_timestamp.to_zint x |> Script_int.of_zint |> int
    let operation bytes = Script.bytes_node_cost bytes
    let chain_id bytes = Script.bytes_node_cost bytes
    (* NOTE(review): the constants below look like the lengths of the
       textual (base58check) encodings of each kind of value --
       confirm against the codec before changing them. *)
    let key = string_cost 54
    let key_hash = string_cost 36
    let signature = string_cost 128
    let contract = string_cost 36
    let pair = prim_cost 2 []
    let union = prim_cost 1 []
    let some = prim_cost 1 []
    let none = prim_cost 0 []
    let list_element = alloc_cost 2
    let set_element = alloc_cost 2
    let map_element = alloc_cost 2
    let one_arg_type = prim_cost 1
    let two_arg_type = prim_cost 2
    let set_to_list = Legacy.set_to_list
    let map_to_list = Legacy.map_to_list
  end
end
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
docstrings.ml | open Location
(* Docstrings *)
(* A docstring is "attached" if it has been inserted in the AST. This
is used for generating unexpected docstring warnings. *)
type ds_attached =
| Unattached (* Not yet attached anything.*)
| Info (* Attached to a field or constructor. *)
| Docs (* Attached to an item or as floating text. *)
(* A docstring is "associated" with an item if there are no blank lines between
them. This is used for generating docstring ambiguity warnings. *)
type ds_associated =
| Zero (* Not associated with an item *)
| One (* Associated with one item *)
| Many (* Associated with multiple items (ambiguity) *)
(* A single documentation comment.  The two mutable fields are
   bookkeeping updated as the parser consumes the docstring; they feed
   the warnings emitted by [warn_bad_docstrings]. *)
type docstring =
  { ds_body: string;
    ds_loc: Location.t;
    mutable ds_attached: ds_attached;
    mutable ds_associated: ds_associated; }
(* List of docstrings *)
(* Every docstring registered so far, most recent first. *)
let docstrings : docstring list ref = ref []
(* Warn for unused and ambiguous docstrings *)
(* Emit Bad_docstring warnings over all registered docstrings:
   [true] for a docstring that was never attached anywhere, [false] for
   a floating docstring ambiguously associated with several items.
   Docstrings consumed as Info (field/constructor docs) never warn. *)
let warn_bad_docstrings () =
  if Warnings.is_active (Warnings.Bad_docstring true) then begin
    List.iter
      (fun ds ->
         match ds.ds_attached with
         | Info -> ()
         | Unattached ->
           prerr_warning ds.ds_loc (Warnings.Bad_docstring true)
         | Docs ->
           match ds.ds_associated with
           | Zero | One -> ()
           | Many ->
             prerr_warning ds.ds_loc (Warnings.Bad_docstring false))
      (List.rev !docstrings)
  end
(* Docstring constructors and destructors *)

(* Build a fresh docstring for [body] at [loc]; it starts unattached
   and unassociated. *)
let docstring body loc =
  { ds_body = body;
    ds_loc = loc;
    ds_attached = Unattached;
    ds_associated = Zero; }
(* Record [ds] so it is considered by [warn_bad_docstrings]. *)
let register ds =
  docstrings := ds :: !docstrings
let docstring_body ds = ds.ds_body
let docstring_loc ds = ds.ds_loc
(* Docstrings attached to items *)
type docs =
  { docs_pre: docstring option;
    docs_post: docstring option; }
let empty_docs = { docs_pre = None; docs_post = None }
let doc_loc = {txt = "ocaml.doc"; loc = Location.none}
(* Build an "ocaml.doc" attribute whose payload is the docstring body
   as a string constant located at the docstring's position. *)
let docs_attr ds =
  let open Parsetree in
  let exp =
    { pexp_desc = Pexp_constant (Pconst_string(ds.ds_body, None));
      pexp_loc = ds.ds_loc;
      pexp_loc_stack = [];
      pexp_attributes = []; }
  in
  let item =
    { pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc }
  in
  { attr_name = doc_loc;
    attr_payload = PStr [item];
    attr_loc = Location.none }
(* Wrap [attrs] with [docs]: the pre-docstring is prepended, the
   post-docstring appended; empty bodies are dropped. *)
let add_docs_attrs docs attrs =
  let attrs =
    match docs.docs_pre with
    | None | Some { ds_body=""; _ } -> attrs
    | Some ds -> docs_attr ds :: attrs
  in
  let attrs =
    match docs.docs_post with
    | None | Some { ds_body=""; _ } -> attrs
    | Some ds -> attrs @ [docs_attr ds]
  in
  attrs
(* Docstrings attached to constructors or fields *)
type info = docstring option
let empty_info = None
let info_attr = docs_attr
(* Append an "ocaml.doc" attribute for [info] to [attrs]; empty or
   absent docstrings are dropped. *)
let add_info_attrs info attrs =
  match info with
  | None | Some {ds_body=""; _} -> attrs
  | Some ds -> attrs @ [info_attr ds]
(* Docstrings not attached to a specific item *)
type text = docstring list
let empty_text = []
let empty_text_lazy = lazy []
let text_loc = {txt = "ocaml.text"; loc = Location.none}
(* Build an "ocaml.text" attribute (floating documentation) from [ds];
   same payload shape as [docs_attr] but under the "ocaml.text" name. *)
let text_attr ds =
  let open Parsetree in
  let exp =
    { pexp_desc = Pexp_constant (Pconst_string(ds.ds_body, None));
      pexp_loc = ds.ds_loc;
      pexp_loc_stack = [];
      pexp_attributes = []; }
  in
  let item =
    { pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc }
  in
  { attr_name = text_loc;
    attr_payload = PStr [item];
    attr_loc = Location.none }
(* Prepend an "ocaml.text" attribute for each non-empty docstring in
   [dsl] to [attrs], preserving the order of [dsl]. *)
let add_text_attrs dsl attrs =
  let non_empty ds = ds.ds_body <> "" in
  List.map text_attr (List.filter non_empty dsl) @ attrs
(* Find the first non-info docstring in a list, attach it and return it *)
(* Docstrings already consumed as Info are skipped; the first remaining
   one is claimed (marked Info or Docs according to [info]). *)
let get_docstring ~info dsl =
  let rec loop = function
    | [] -> None
    | {ds_attached = Info; _} :: rest -> loop rest
    | ds :: _ ->
        ds.ds_attached <- if info then Info else Docs;
        Some ds
  in
  loop dsl
(* Find all the non-info docstrings in a list, attach them and return them *)
(* Same skipping rule as [get_docstring], but claims (as Docs) and
   returns every remaining docstring, in order. *)
let get_docstrings dsl =
  let rec loop acc = function
    | [] -> List.rev acc
    | {ds_attached = Info; _} :: rest -> loop acc rest
    | ds :: rest ->
        ds.ds_attached <- Docs;
        loop (ds :: acc) rest
  in
  loop [] dsl
(* "Associate" all the docstrings in a list *)
(* Bump each docstring's association count: Zero -> One on its first
   association, anything further -> Many (which later triggers the
   ambiguity warning in [warn_bad_docstrings]). *)
let associate_docstrings dsl =
  let bump ds =
    ds.ds_associated <-
      (match ds.ds_associated with
       | Zero -> One
       | One | Many -> Many)
  in
  List.iter bump dsl
(* Map from positions to pre docstrings *)

let pre_table : (Lexing.position, docstring list) Hashtbl.t =
  Hashtbl.create 50

(* Remember the docstrings occurring just before [pos]. *)
let set_pre_docstrings pos dsl =
  match dsl with
  | [] -> ()
  | _ :: _ -> Hashtbl.add pre_table pos dsl

(* Retrieve, associate and attach the docstring preceding [pos], if any. *)
let get_pre_docs pos =
  match Hashtbl.find_opt pre_table pos with
  | None -> None
  | Some dsl ->
      associate_docstrings dsl;
      get_docstring ~info:false dsl

(* Associate the docstrings preceding [pos] without attaching them. *)
let mark_pre_docs pos =
  match Hashtbl.find_opt pre_table pos with
  | None -> ()
  | Some dsl -> associate_docstrings dsl
(* Map from positions to post docstrings *)

let post_table : (Lexing.position, docstring list) Hashtbl.t =
  Hashtbl.create 50

(* Remember the docstrings occurring just after [pos]. *)
let set_post_docstrings pos dsl =
  match dsl with
  | [] -> ()
  | _ :: _ -> Hashtbl.add post_table pos dsl

(* Retrieve, associate and attach the docstring following [pos], if any. *)
let get_post_docs pos =
  match Hashtbl.find_opt post_table pos with
  | None -> None
  | Some dsl ->
      associate_docstrings dsl;
      get_docstring ~info:false dsl

(* Associate the docstrings following [pos] without attaching them. *)
let mark_post_docs pos =
  match Hashtbl.find_opt post_table pos with
  | None -> ()
  | Some dsl -> associate_docstrings dsl

(* Fetch the docstring following [pos] as an "info" docstring, without
   bumping its association count. *)
let get_info pos =
  match Hashtbl.find_opt post_table pos with
  | None -> None
  | Some dsl -> get_docstring ~info:true dsl
(* Map from positions to floating docstrings *)

let floating_table : (Lexing.position, docstring list) Hashtbl.t =
  Hashtbl.create 50

let set_floating_docstrings pos dsl =
  match dsl with
  | [] -> ()
  | _ :: _ -> Hashtbl.add floating_table pos dsl

(* All floating docstrings at [pos], attached as documentation. *)
let get_text pos =
  match Hashtbl.find_opt floating_table pos with
  | None -> []
  | Some dsl -> get_docstrings dsl

(* All post docstrings at [pos], attached as documentation. *)
let get_post_text pos =
  match Hashtbl.find_opt post_table pos with
  | None -> []
  | Some dsl -> get_docstrings dsl
(* Maps from positions to extra docstrings *)

let pre_extra_table : (Lexing.position, docstring list) Hashtbl.t =
  Hashtbl.create 50

let set_pre_extra_docstrings pos dsl =
  match dsl with
  | [] -> ()
  | _ :: _ -> Hashtbl.add pre_extra_table pos dsl

let get_pre_extra_text pos =
  match Hashtbl.find_opt pre_extra_table pos with
  | None -> []
  | Some dsl -> get_docstrings dsl

let post_extra_table : (Lexing.position, docstring list) Hashtbl.t =
  Hashtbl.create 50

let set_post_extra_docstrings pos dsl =
  match dsl with
  | [] -> ()
  | _ :: _ -> Hashtbl.add post_extra_table pos dsl

let get_post_extra_text pos =
  match Hashtbl.find_opt post_extra_table pos with
  | None -> []
  | Some dsl -> get_docstrings dsl
(* Docstrings from parser actions *)

(* Accessors for use with the ocamlyacc runtime: positions are read from
   the [Parsing] module's current symbol/rhs state. *)
module WithParsing = struct
let symbol_docs () =
  { docs_pre = get_pre_docs (Parsing.symbol_start_pos ());
    docs_post = get_post_docs (Parsing.symbol_end_pos ()); }
(* Positions are captured eagerly, before building the lazy value —
   presumably because [Parsing]'s current-symbol state is transient and
   would have changed by the time the thunk is forced (TODO confirm). *)
let symbol_docs_lazy () =
  let p1 = Parsing.symbol_start_pos () in
  let p2 = Parsing.symbol_end_pos () in
    lazy { docs_pre = get_pre_docs p1;
           docs_post = get_post_docs p2; }
let rhs_docs pos1 pos2 =
  { docs_pre = get_pre_docs (Parsing.rhs_start_pos pos1);
    docs_post = get_post_docs (Parsing.rhs_end_pos pos2); }
let rhs_docs_lazy pos1 pos2 =
  let p1 = Parsing.rhs_start_pos pos1 in
  let p2 = Parsing.rhs_end_pos pos2 in
    lazy { docs_pre = get_pre_docs p1;
           docs_post = get_post_docs p2; }
let mark_symbol_docs () =
  mark_pre_docs (Parsing.symbol_start_pos ());
  mark_post_docs (Parsing.symbol_end_pos ())
let mark_rhs_docs pos1 pos2 =
  mark_pre_docs (Parsing.rhs_start_pos pos1);
  mark_post_docs (Parsing.rhs_end_pos pos2)
let symbol_info () =
  get_info (Parsing.symbol_end_pos ())
let rhs_info pos =
  get_info (Parsing.rhs_end_pos pos)
let symbol_text () =
  get_text (Parsing.symbol_start_pos ())
let symbol_text_lazy () =
  let pos = Parsing.symbol_start_pos () in
    lazy (get_text pos)
let rhs_text pos =
  get_text (Parsing.rhs_start_pos pos)
let rhs_post_text pos =
  get_post_text (Parsing.rhs_end_pos pos)
let rhs_text_lazy pos =
  let pos = Parsing.rhs_start_pos pos in
    lazy (get_text pos)
let symbol_pre_extra_text () =
  get_pre_extra_text (Parsing.symbol_start_pos ())
let symbol_post_extra_text () =
  get_post_extra_text (Parsing.symbol_end_pos ())
let rhs_pre_extra_text pos =
  get_pre_extra_text (Parsing.rhs_start_pos pos)
let rhs_post_extra_text pos =
  get_post_extra_text (Parsing.rhs_end_pos pos)
end

(* The ocamlyacc flavour is the default at the module's top level. *)
include WithParsing
(* Accessors for use with Menhir: positions are passed explicitly by the
   generated parser instead of being read from [Parsing]'s state, so no
   eager capture is needed before building lazy values. *)
module WithMenhir = struct
let symbol_docs (startpos, endpos) =
  { docs_pre = get_pre_docs startpos;
    docs_post = get_post_docs endpos; }
let symbol_docs_lazy (p1, p2) =
  lazy { docs_pre = get_pre_docs p1;
         docs_post = get_post_docs p2; }
let rhs_docs pos1 pos2 =
  { docs_pre = get_pre_docs pos1;
    docs_post = get_post_docs pos2; }
let rhs_docs_lazy p1 p2 =
  lazy { docs_pre = get_pre_docs p1;
         docs_post = get_post_docs p2; }
let mark_symbol_docs (startpos, endpos) =
  mark_pre_docs startpos;
  mark_post_docs endpos;
  ()
let mark_rhs_docs pos1 pos2 =
  mark_pre_docs pos1;
  mark_post_docs pos2;
  ()
let symbol_info endpos =
  get_info endpos
let rhs_info endpos =
  get_info endpos
let symbol_text startpos =
  get_text startpos
let symbol_text_lazy startpos =
  lazy (get_text startpos)
let rhs_text pos =
  get_text pos
let rhs_post_text pos =
  get_post_text pos
let rhs_text_lazy pos =
  lazy (get_text pos)
let symbol_pre_extra_text startpos =
  get_pre_extra_text startpos
let symbol_post_extra_text endpos =
  get_post_extra_text endpos
let rhs_pre_extra_text pos =
  get_pre_extra_text pos
let rhs_post_extra_text pos =
  get_post_extra_text pos
end
(* (Re)Initialise all comment state *)

let init () =
  docstrings := [];
  (* All five tables share the same type, so reset them uniformly. *)
  List.iter
    Hashtbl.reset
    [pre_table; post_table; floating_table; pre_extra_table; post_extra_table]
| (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Leo White *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
|
sort.mli |
(** [list order l] sorts the list [l] according to the comparison
    predicate [order]; presumably [order x y] holds when [x] should come
    before [y] — TODO confirm against the implementation. *)
val list : ('a -> 'a -> bool) -> 'a list -> 'a list

(** [array order a] sorts the array [a] in place. *)
val array : ('a -> 'a -> bool) -> 'a array -> unit

(** [merge order l1 l2] merges two lists; presumably both inputs must
    already be sorted w.r.t. [order] — verify against callers. *)
val merge : ('a -> 'a -> bool) -> 'a list -> 'a list -> 'a list
| |
p-compose.c |
#include <stdio.h>
#include <stdlib.h>
#include <gmp.h>
#include <float.h>
#include "flint.h"
#include "fmpz.h"
#include "fmpz_poly.h"
#include "ulong_extras.h"
#include "profiler.h"
/*
   Definitions for the parameters of the timing process.

   len1lo   Minimum length of the outer polynomial f
   len1hi   Maximum length of f
   len1h    Step size for the length of f
   len2lo   Minimum length of the inner polynomial g
   len2hi   Maximum length of g
   len2h    Step size for the length of g
   bits     Bit size of the coefficients
   cols     Number of different lengths for f
   rows     Number of different lengths for g
   cpumin   Minimum number of ms spent on each test case
   ncases   Number of test cases per point (length, bit size)
   nalgs    Number of algorithms compared

   NOTE(review): an earlier version of this comment also described
   img/imgname (RGB image output), but no such parameters exist in this
   program — only a plain ASCII map is printed at the end.
 */
#define len1lo 1
#define len1hi 30
#define len1h 1
#define len2lo 1
#define len2hi 30
#define len2h 1
#define bits 112
#define cols ((len1hi + 1 - len1lo + (len1h - 1)) / len1h)
#define rows ((len2hi + 1 - len2lo + (len2h - 1)) / len2h)
#define cpumin 10
#define ncases 1
#define nalgs 2
/* Timing experiment: compares fmpz_poly_compose_horner against
   fmpz_poly_compose_divconquer over a grid of (len1, len2) operand
   lengths and prints an ASCII map of which algorithm won each cell. */
int
main(void)
{
    int i, j, len1, len2;
    int X[rows][cols];           /* X[i][j] = index of the winning algorithm */
    double T[rows][cols][nalgs]; /* T[i][j][c] = average cpu time per rep */
    fmpz_poly_t f, g, h;
    FLINT_TEST_INIT(state);
    fmpz_poly_init2(f, len1hi);
    fmpz_poly_init2(g, len2hi);
    /* the composition f(g) has length (len(f)-1)*(len(g)-1) + 1 */
    fmpz_poly_init2(h, (len1hi-1) * (len2hi-1) + 1);
    for (len1 = len1lo, j = 0; len1 <= len1hi; len1 += len1h, j++)
    {
        slong s[nalgs]; /* accumulated cpu time per algorithm for one cell */
        for (len2 = len2lo, i = 0; len2 <= len2hi; len2 += len2h, i++)
        {
            int c, n, reps = 0;
            for (c = 0; c < nalgs; c++)
                s[c] = WORD(0);
            for (n = 0; n < ncases; n++)
            {
                timeit_t t[nalgs];
                int l, loops = 1;
                /*
                   Construct random polynomials f and g
                 */
                {
                    slong k;
                    for (k = 0; k < len1; k++)
                        fmpz_randbits(f->coeffs + k, state, bits);
                    /* force a non-zero leading coefficient so that f
                       really has length len1 */
                    if ((f->coeffs)[len1-1] == WORD(0))
                        fmpz_randtest_not_zero(f->coeffs + (len1 - 1), state, bits);
                    f->length = len1;
                }
                {
                    slong k;
                    for (k = 0; k < len2; k++)
                        fmpz_randbits(g->coeffs + k, state, bits);
                    if ((g->coeffs)[len2-1] == WORD(0))
                        fmpz_randtest_not_zero(g->coeffs + (len2 - 1), state, bits);
                    g->length = len2;
                }
                /* time each algorithm over [loops] repetitions */
              loop:
                timeit_start(t[0]);
                for (l = 0; l < loops; l++)
                    fmpz_poly_compose_horner(h, f, g);
                timeit_stop(t[0]);
                timeit_start(t[1]);
                for (l = 0; l < loops; l++)
                    fmpz_poly_compose_divconquer(h, f, g);
                timeit_stop(t[1]);
                /* if any run was too fast for the timer's resolution,
                   scale up the repetition count and retry */
                for (c = 0; c < nalgs; c++)
                    if (t[c]->cpu <= cpumin)
                    {
                        loops *= 10;
                        goto loop;
                    }
                for (c = 0; c < nalgs; c++)
                    s[c] += t[c]->cpu;
                reps += loops;
            }
            for (c = 0; c < nalgs; c++)
                T[i][j][c] = s[c] / (double) reps;
            if (s[0] <= s[1])
                X[i][j] = 0;
            else
                X[i][j] = 1;
        }
        /* NOTE(review): s was reset for every len2 iteration, so the
           time reported here covers only the last cell of this row,
           not the whole row. */
        flint_printf("len1 = %d, time = %wdms\n", len1, s[0] + s[1]), fflush(stdout);
    }
    fmpz_poly_clear(f);
    fmpz_poly_clear(g);
    fmpz_poly_clear(h);
    /*
       Print 2-D ASCII image of the winning algorithms
     */
    for (i = 0; i < rows; i++)
    {
        for (j = 0; j < cols; j++)
            flint_printf("%d", X[i][j]);
        flint_printf("\n");
    }
    flint_randclear(state);
}
| /*
Copyright (C) 2010 Sebastian Pancratz
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
michelson_v1_entrypoints.mli | open Protocol
(** Returns [Some type] if the script has an entrypoint of type [type]. None if it does not exist. *)
val script_entrypoint_type :
#Protocol_client_context.rpc_context ->
chain:Chain_services.chain ->
block:Block_services.block ->
Alpha_context.Script.expr ->
entrypoint:Alpha_context.Entrypoint.t ->
Alpha_context.Script.expr option tzresult Lwt.t
(** Returns [Some type] if the contract has an entrypoint of type [type]. None if it does not exist. *)
val contract_entrypoint_type :
#Protocol_client_context.rpc_context ->
chain:Chain_services.chain ->
block:Block_services.block ->
contract:Contract_hash.t ->
entrypoint:Alpha_context.Entrypoint.t ->
normalize_types:bool ->
Alpha_context.Script.expr option tzresult Lwt.t
val print_entrypoint_type :
#Client_context.printer ->
?on_errors:(error list -> unit tzresult Lwt.t) ->
emacs:bool ->
?contract:Contract_hash.t ->
?script_name:string ->
entrypoint:Alpha_context.Entrypoint.t ->
Alpha_context.Script.expr option tzresult ->
unit tzresult Lwt.t
(** List paths of unreachable parameters.
Only useful to test the stitching, as no such parameter should be
allowed in originated contracts. *)
val list_contract_unreachables :
#Protocol_client_context.rpc_context ->
chain:Chain_services.chain ->
block:Block_services.block ->
contract:Contract_hash.t ->
Michelson_v1_primitives.prim list list tzresult Lwt.t
val list_unreachables :
#Protocol_client_context.rpc_context ->
chain:Chain_services.chain ->
block:Block_services.block ->
Alpha_context.Script.expr ->
Michelson_v1_primitives.prim list list tzresult Lwt.t
val print_unreachables :
#Client_context.printer ->
?on_errors:(error list -> unit tzresult Lwt.t) ->
emacs:bool ->
?contract:Contract_hash.t ->
?script_name:string ->
Michelson_v1_primitives.prim list list tzresult ->
unit tzresult Lwt.t
(** List the contract entrypoints with their types.
    If there is no explicit default, the type of the default entrypoint
    will still be given. *)
val list_contract_entrypoints :
#Protocol_client_context.rpc_context ->
chain:Chain_services.chain ->
block:Block_services.block ->
contract:Contract_hash.t ->
normalize_types:bool ->
(string * Alpha_context.Script.expr) list tzresult Lwt.t
(** List the script entrypoints with their types. *)
val list_entrypoints :
#Protocol_client_context.rpc_context ->
chain:Chain_services.chain ->
block:Block_services.block ->
Alpha_context.Script.expr ->
(string * Alpha_context.Script.expr) list tzresult Lwt.t
(** Print the contract entrypoints with their types. *)
val print_entrypoints_list :
#Client_context.printer ->
?on_errors:(error list -> unit tzresult Lwt.t) ->
emacs:bool ->
?contract:Contract_hash.t ->
?script_name:string ->
(string * Alpha_context.Script.expr) list tzresult ->
unit tzresult Lwt.t
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2019 Nomadic Labs <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
test_printf.mli | (*_ This signature is deliberately empty. *)
| (*_ This signature is deliberately empty. *)
|
db.mli |
(** A single cached build result, as stored in the database. *)
type entry = {
  job_id : string; (* Identifier of the job that produced this result. *)
  build : int64; (* Build number (increases for rebuilds). *)
  value : string; (* The value [key] was set to — see [record]. *)
  outcome : string Current.or_error; (* The build's result, or the error it failed with. *)
  ready : float; (* When the job was ready to start; presumably epoch seconds, converted from the [Unix.tm] given to [record] — confirm. *)
  running : float option; (* When the job started running, if it did. *)
  finished : float; (* When the job stopped running. *)
  rebuild : bool; (* If [true], then a rebuild was requested. *)
}
val record :
  op:string ->
  key:string ->
  value:string ->
  job_id:string ->
  ready:Unix.tm ->
  running:Unix.tm option ->
  finished:Unix.tm ->
  build:int64 ->
  string Current.or_error ->
  unit
(** [record ~op ~key ~value ~job_id ~ready ~running ~finished ~build outcome] records that [key] is now set to [value],
    producing [outcome]. This replaces any previous entry.
    @param ready When the job was ready to start (i.e. enqueued).
    @param running When the job started running.
    @param finished When the job stopped running (i.e. now). *)
val lookup : op:string -> string -> entry option
(** [lookup ~op key] returns the most recently stored result for [op] and [key], if any. *)
val drop_all : string -> unit
(** [drop_all op] drops all cached entries for [op]. *)
val invalidate : op:string -> string -> unit
(** [invalidate ~op key] removes any existing entry for [op, key]. *)
val lookup_job_id : string -> (string * string) option
(** [lookup_job_id x] is the (op, key) of job [x], if known. *)
(** These functions are described in the {!Current_cache} public API. *)
val init : unit -> unit
val query : ?op:string -> ?ok:bool -> ?rebuild:bool -> ?job_prefix:string -> unit -> entry list
val history : limit:int -> op:string -> string -> entry list
val ops : unit -> string list
| |
deleteAssociation.ml |
open Types
open Aws
(* Request/response/error types for the SSM DeleteAssociation call. *)
type input = DeleteAssociationRequest.t
type output = unit
type error = Errors_internal.t
let service = "ssm"
let signature_version = Request.V4
(* Build the HTTP request: a POST to the regional endpoint with the API
   version, the action name and the rendered request appended to the
   query string (empty parameters dropped). *)
let to_http service region req =
  let uri =
    Uri.add_query_params
      (Uri.of_string
         (Aws.Util.of_option_exn (Endpoints.url_of service region)))
      (List.append
         [("Version", ["2014-11-06"]); ("Action", ["DeleteAssociation"])]
         (Util.drop_empty
            (Uri.query_of_encoded
               (Query.render (DeleteAssociationRequest.to_query req))))) in
  (`POST, uri, [])
(* DeleteAssociation has no meaningful response payload, so the body is
   ignored; the binding is named [_body] to silence the unused-variable
   warning. *)
let of_http _body = `Ok ()
(* Map an error string and HTTP status [code] to a typed error.
   Returns [Some e] only when the string names a recognised error whose
   declared HTTP code (if any) matches [code]. *)
let parse_error code err =
  let known =
    [Errors_internal.TooManyUpdates;
     Errors_internal.InvalidInstanceId;
     Errors_internal.InvalidDocument;
     Errors_internal.InternalServerError;
     Errors_internal.AssociationDoesNotExist]
    @ Errors_internal.common
  in
  match Errors_internal.of_string err with
  | None -> None
  | Some e ->
      let code_matches =
        match Errors_internal.to_http_code e with
        | Some c -> c = code
        | None -> true
      in
      if List.mem e known && code_matches then Some e else None
helpers_services.ml | open Alpha_context
open Misc.Syntax
(* Raised when an operation's raw bytes cannot be decoded with this
   protocol's encoding. *)
type error += Cannot_parse_operation (* `Branch *)

let () =
  register_error_kind
    `Branch
    ~id:"operation.cannot_parse"
    ~title:"Cannot parse operation"
    ~description:"The operation is ill-formed or for another protocol version"
    ~pp:(fun ppf () -> Format.fprintf ppf "The operation cannot be parsed")
    Data_encoding.unit
    (function Cannot_parse_operation -> Some () | _ -> None)
    (fun () -> Cannot_parse_operation)
(* Decode the protocol-specific part of a raw operation; fails with
   [Cannot_parse_operation] when the bytes do not match this protocol's
   encoding. *)
let parse_operation (op : Operation.raw) =
  let decoded =
    Data_encoding.Binary.of_bytes Operation.protocol_data_encoding op.proto
  in
  match decoded with
  | None -> error Cannot_parse_operation
  | Some protocol_data -> ok {shell = op.shell; protocol_data}

let path = RPC_path.(open_root / "helpers")
module Scripts = struct
  (* Service declarations (paths, input/output encodings) for the
     "helpers/scripts" RPCs; the handlers live in [register] below. *)
  module S = struct
    open Data_encoding

    let path = RPC_path.(path / "scripts")

    (* Shared input shape of run_code and trace_code. *)
    let run_code_input_encoding =
      obj9
        (req "script" Script.expr_encoding)
        (req "storage" Script.expr_encoding)
        (req "input" Script.expr_encoding)
        (req "amount" Tez.encoding)
        (req "chain_id" Chain_id.encoding)
        (opt "source" Contract.encoding)
        (opt "payer" Contract.encoding)
        (opt "gas" Gas.Arith.z_integral_encoding)
        (dft "entrypoint" string "default")

    (* One trace entry per executed instruction: location, remaining gas
       and the annotated stack at that point. *)
    let trace_encoding =
      def "scripted.trace" @@ list
      @@ obj3
           (req "location" Script.location_encoding)
           (req "gas" Gas.encoding)
           (req
              "stack"
              (list
                 (obj2 (req "item" Script.expr_encoding) (opt "annot" string))))

    let run_code =
      RPC_service.post_service
        ~description:"Run a piece of code in the current context"
        ~query:RPC_query.empty
        ~input:run_code_input_encoding
        ~output:
          (obj3
             (req "storage" Script.expr_encoding)
             (req "operations" (list Operation.internal_operation_encoding))
             (opt "big_map_diff" Contract.big_map_diff_encoding))
        RPC_path.(path / "run_code")

    let trace_code =
      RPC_service.post_service
        ~description:
          "Run a piece of code in the current context, keeping a trace"
        ~query:RPC_query.empty
        ~input:run_code_input_encoding
        ~output:
          (obj4
             (req "storage" Script.expr_encoding)
             (req "operations" (list Operation.internal_operation_encoding))
             (req "trace" trace_encoding)
             (opt "big_map_diff" Contract.big_map_diff_encoding))
        RPC_path.(path / "trace_code")

    let typecheck_code =
      RPC_service.post_service
        ~description:"Typecheck a piece of code in the current context"
        ~query:RPC_query.empty
        ~input:
          (obj2
             (req "program" Script.expr_encoding)
             (opt "gas" Gas.Arith.z_integral_encoding))
        ~output:
          (obj2
             (req "type_map" Script_tc_errors_registration.type_map_enc)
             (req "gas" Gas.encoding))
        RPC_path.(path / "typecheck_code")

    let typecheck_data =
      RPC_service.post_service
        ~description:
          "Check that some data expression is well formed and of a given type \
           in the current context"
        ~query:RPC_query.empty
        ~input:
          (obj3
             (req "data" Script.expr_encoding)
             (req "type" Script.expr_encoding)
             (opt "gas" Gas.Arith.z_integral_encoding))
        ~output:(obj1 (req "gas" Gas.encoding))
        RPC_path.(path / "typecheck_data")

    let pack_data =
      RPC_service.post_service
        ~description:
          "Computes the serialized version of some data expression using the \
           same algorithm as script instruction PACK"
        ~input:
          (obj3
             (req "data" Script.expr_encoding)
             (req "type" Script.expr_encoding)
             (opt "gas" Gas.Arith.z_integral_encoding))
        ~output:(obj2 (req "packed" bytes) (req "gas" Gas.encoding))
        ~query:RPC_query.empty
        RPC_path.(path / "pack_data")

    let run_operation =
      RPC_service.post_service
        ~description:"Run an operation without signature checks"
        ~query:RPC_query.empty
        ~input:
          (obj2
             (req "operation" Operation.encoding)
             (req "chain_id" Chain_id.encoding))
        ~output:Apply_results.operation_data_and_metadata_encoding
        RPC_path.(path / "run_operation")

    let entrypoint_type =
      RPC_service.post_service
        ~description:"Return the type of the given entrypoint"
        ~query:RPC_query.empty
        ~input:
          (obj2
             (req "script" Script.expr_encoding)
             (dft "entrypoint" string "default"))
        ~output:(obj1 (req "entrypoint_type" Script.expr_encoding))
        RPC_path.(path / "entrypoint")

    let list_entrypoints =
      RPC_service.post_service
        ~description:"Return the list of entrypoints of the given script"
        ~query:RPC_query.empty
        ~input:(obj1 (req "script" Script.expr_encoding))
        ~output:
          (obj2
             (dft
                "unreachable"
                (Data_encoding.list
                   (obj1
                      (req
                         "path"
                         (Data_encoding.list
                            Michelson_v1_primitives.prim_encoding))))
                [])
             (req "entrypoints" (assoc Script.expr_encoding)))
        RPC_path.(path / "entrypoints")
  end
  (* Install the handlers for all the services declared in [S]. *)
  let register () =
    let open Services_registration in
    (* Originate a throw-away contract holding [script], funded with a
       fixed 4,000,000 tez, to serve as "self" for run_code/trace_code. *)
    let originate_dummy_contract ctxt script =
      let ctxt = Contract.init_origination_nonce ctxt Operation_hash.zero in
      Lwt.return (Contract.fresh_contract_from_current_nonce ctxt)
      >>=? fun (ctxt, dummy_contract) ->
      let balance =
        match Tez.of_mutez 4_000_000_000_000L with
        | Some balance ->
            balance
        | None ->
            assert false
      in
      Contract.originate
        ctxt
        dummy_contract
        ~balance
        ~delegate:None
        ~script:(script, None)
      >>=? fun ctxt -> return (ctxt, dummy_contract)
    in
    (* Execute a script against a dummy contract; missing source/payer
       default to each other or to the dummy contract, and missing gas
       defaults to the per-operation hard limit. *)
    register0
      S.run_code
      (fun ctxt
           ()
           ( code,
             storage,
             parameter,
             amount,
             chain_id,
             source,
             payer,
             gas,
             entrypoint )
           ->
      let storage = Script.lazy_expr storage in
      let code = Script.lazy_expr code in
      originate_dummy_contract ctxt {storage; code}
      >>=? fun (ctxt, dummy_contract) ->
      let (source, payer) =
        match (source, payer) with
        | (Some source, Some payer) ->
            (source, payer)
        | (Some source, None) ->
            (source, source)
        | (None, Some payer) ->
            (payer, payer)
        | (None, None) ->
            (dummy_contract, dummy_contract)
      in
      let gas =
        match gas with
        | Some gas ->
            gas
        | None ->
            Constants.hard_gas_limit_per_operation ctxt
      in
      let ctxt = Gas.set_limit ctxt gas in
      let step_constants =
        let open Script_interpreter in
        {source; payer; self = dummy_contract; amount; chain_id}
      in
      Script_interpreter.execute
        ctxt
        Readable
        step_constants
        ~script:{storage; code}
        ~entrypoint
        ~parameter
      >|=? fun {Script_interpreter.storage; operations; big_map_diff; _} ->
      (storage, operations, big_map_diff)) ;
    (* Same as run_code, but returns the execution trace as well. *)
    register0
      S.trace_code
      (fun ctxt
           ()
           ( code,
             storage,
             parameter,
             amount,
             chain_id,
             source,
             payer,
             gas,
             entrypoint )
           ->
      let storage = Script.lazy_expr storage in
      let code = Script.lazy_expr code in
      originate_dummy_contract ctxt {storage; code}
      >>=? fun (ctxt, dummy_contract) ->
      let (source, payer) =
        match (source, payer) with
        | (Some source, Some payer) ->
            (source, payer)
        | (Some source, None) ->
            (source, source)
        | (None, Some payer) ->
            (payer, payer)
        | (None, None) ->
            (dummy_contract, dummy_contract)
      in
      let gas =
        match gas with
        | Some gas ->
            gas
        | None ->
            Constants.hard_gas_limit_per_operation ctxt
      in
      let ctxt = Gas.set_limit ctxt gas in
      let step_constants =
        let open Script_interpreter in
        {source; payer; self = dummy_contract; amount; chain_id}
      in
      Script_interpreter.trace
        ctxt
        Readable
        step_constants
        ~script:{storage; code}
        ~entrypoint
        ~parameter
      >|=? fun ( {Script_interpreter.storage; operations; big_map_diff; _},
                 trace ) ->
      (storage, operations, trace, big_map_diff)) ;
    (* Typecheck a script; without an explicit gas bound, gas is
       unlimited. *)
    register0 S.typecheck_code (fun ctxt () (expr, maybe_gas) ->
        let ctxt =
          match maybe_gas with
          | None ->
              Gas.set_unlimited ctxt
          | Some gas ->
              Gas.set_limit ctxt gas
        in
        Script_ir_translator.typecheck_code ctxt expr
        >|=? fun (res, ctxt) -> (res, Gas.level ctxt)) ;
    register0 S.typecheck_data (fun ctxt () (data, ty, maybe_gas) ->
        let ctxt =
          match maybe_gas with
          | None ->
              Gas.set_unlimited ctxt
          | Some gas ->
              Gas.set_limit ctxt gas
        in
        Script_ir_translator.typecheck_data ctxt (data, ty)
        >|=? fun ctxt -> Gas.level ctxt) ;
    (* Serialize a value of a packable type, as the PACK instruction
       would. *)
    register0 S.pack_data (fun ctxt () (expr, typ, maybe_gas) ->
        let open Script_ir_translator in
        let ctxt =
          match maybe_gas with
          | None ->
              Gas.set_unlimited ctxt
          | Some gas ->
              Gas.set_limit ctxt gas
        in
        parse_packable_ty ctxt ~legacy:true (Micheline.root typ)
        >>?= fun (Ex_ty typ, ctxt) ->
        parse_data ctxt ~legacy:true typ (Micheline.root expr)
        >>=? fun (data, ctxt) ->
        Script_ir_translator.pack_data ctxt typ data
        >|=? fun (bytes, ctxt) -> (bytes, Gas.level ctxt)) ;
    register0
      S.run_operation
      (fun ctxt
           ()
           ({shell; protocol_data = Operation_data protocol_data}, chain_id)
           ->
      (* this code is a duplicate of Apply without signature check *)
      let partial_precheck_manager_contents (type kind) ctxt
          (op : kind Kind.manager contents) : context tzresult Lwt.t =
        let (Manager_operation
              {source; fee; counter; operation; gas_limit; storage_limit}) =
          op
        in
        Gas.check_limit ctxt gas_limit
        >>?= fun () ->
        let ctxt = Gas.set_limit ctxt gas_limit in
        Fees.check_storage_limit ctxt storage_limit
        >>?= fun () ->
        Contract.must_be_allocated ctxt (Contract.implicit_contract source)
        >>=? fun () ->
        Contract.check_counter_increment ctxt source counter
        >>=? fun () ->
        ( match operation with
        | Reveal pk ->
            Contract.reveal_manager_key ctxt source pk
        | Transaction {parameters; _} ->
            (* Here the data comes already deserialized, so we need to fake the deserialization to mimic apply *)
            let arg_bytes =
              Data_encoding.Binary.to_bytes_exn
                Script.lazy_expr_encoding
                parameters
            in
            let arg =
              match
                Data_encoding.Binary.of_bytes
                  Script.lazy_expr_encoding
                  arg_bytes
              with
              | Some arg ->
                  arg
              | None ->
                  assert false
            in
            (* Fail quickly if not enough gas for minimal deserialization cost *)
            Lwt.return
            @@ record_trace Apply.Gas_quota_exceeded_init_deserialize
            @@ ( Gas.check_enough ctxt (Script.minimal_deserialize_cost arg)
               >>? fun () ->
               (* Fail if not enough gas for complete deserialization cost *)
               Script.force_decode_in_context ctxt arg
               >|? fun (_arg, ctxt) -> ctxt )
        | Origination {script; _} ->
            (* Here the data comes already deserialized, so we need to fake the deserialization to mimic apply *)
            let script_bytes =
              Data_encoding.Binary.to_bytes_exn Script.encoding script
            in
            let script =
              match
                Data_encoding.Binary.of_bytes Script.encoding script_bytes
              with
              | Some script ->
                  script
              | None ->
                  assert false
            in
            (* Fail quickly if not enough gas for minimal deserialization cost *)
            Lwt.return
            @@ record_trace Apply.Gas_quota_exceeded_init_deserialize
            @@ ( Gas.consume
                   ctxt
                   (Script.minimal_deserialize_cost script.code)
               >>? fun ctxt ->
               Gas.check_enough
                 ctxt
                 (Script.minimal_deserialize_cost script.storage)
               >>? fun () ->
               (* Fail if not enough gas for complete deserialization cost *)
               Script.force_decode_in_context ctxt script.code
               >>? fun (_code, ctxt) ->
               Script.force_decode_in_context ctxt script.storage
               >|? fun (_storage, ctxt) -> ctxt )
        | _ ->
            return ctxt )
        >>=? fun ctxt ->
        Contract.get_manager_key ctxt source
        >>=? fun _public_key ->
        (* signature check unplugged from here *)
        Contract.increment_counter ctxt source
        >>=? fun ctxt ->
        Contract.spend ctxt (Contract.implicit_contract source) fee
      in
      let rec partial_precheck_manager_contents_list :
          type kind.
          Alpha_context.t ->
          kind Kind.manager contents_list ->
          context tzresult Lwt.t =
       fun ctxt contents_list ->
        match contents_list with
        | Single (Manager_operation _ as op) ->
            partial_precheck_manager_contents ctxt op
        | Cons ((Manager_operation _ as op), rest) ->
            partial_precheck_manager_contents ctxt op
            >>=? fun ctxt -> partial_precheck_manager_contents_list ctxt rest
      in
      let ret contents =
        ( Operation_data protocol_data,
          Apply_results.Operation_metadata {contents} )
      in
      let operation : _ operation = {shell; protocol_data} in
      let hash = Operation.hash {shell; protocol_data} in
      let ctxt = Contract.init_origination_nonce ctxt hash in
      let baker = Signature.Public_key_hash.zero in
      match protocol_data.contents with
      | Single (Manager_operation _) as op ->
          partial_precheck_manager_contents_list ctxt op
          >>=? fun ctxt ->
          Apply.apply_manager_contents_list ctxt Optimized baker chain_id op
          >|= fun (_ctxt, result) -> ok @@ ret result
      | Cons (Manager_operation _, _) as op ->
          partial_precheck_manager_contents_list ctxt op
          >>=? fun ctxt ->
          Apply.apply_manager_contents_list ctxt Optimized baker chain_id op
          >|= fun (_ctxt, result) -> ok @@ ret result
      | _ ->
          Apply.apply_contents_list
            ctxt
            chain_id
            Optimized
            shell.branch
            baker
            operation
            operation.protocol_data.contents
          >|=? fun (_ctxt, result) -> ret result) ;
    (* Resolve one entrypoint of a script and unparse its type. *)
    register0 S.entrypoint_type (fun ctxt () (expr, entrypoint) ->
        let ctxt = Gas.set_unlimited ctxt in
        let legacy = false in
        let open Script_ir_translator in
        Lwt.return
          ( parse_toplevel ~legacy expr
          >>? (fun (arg_type, _, _, root_name) ->
                parse_parameter_ty ctxt ~legacy arg_type
                >>? fun (Ex_ty arg_type, _) ->
                Script_ir_translator.find_entrypoint
                  ~root_name
                  arg_type
                  entrypoint)
          >>? fun (_f, Ex_ty ty) ->
          unparse_ty ctxt ty
          >|? fun (ty_node, _) -> Micheline.strip_locations ty_node )) ;
    (* Enumerate all entrypoints of a script, plus the unreachable
       parameter paths. *)
    register0 S.list_entrypoints (fun ctxt () expr ->
        let ctxt = Gas.set_unlimited ctxt in
        let legacy = false in
        let open Script_ir_translator in
        Lwt.return
          ( parse_toplevel ~legacy expr
          >>? fun (arg_type, _, _, root_name) ->
          parse_parameter_ty ctxt ~legacy arg_type
          >>? fun (Ex_ty arg_type, _) ->
          Script_ir_translator.list_entrypoints ~root_name arg_type ctxt
          >|? fun (unreachable_entrypoint, map) ->
          ( unreachable_entrypoint,
            Entrypoints_map.fold
              (fun entry (_, ty) acc ->
                (entry, Micheline.strip_locations ty) :: acc)
              map
              [] ) ))
  (* Client-side convenience wrappers around the services declared in
     [S]. *)
  let run_code ctxt block code
      (storage, input, amount, chain_id, source, payer, gas, entrypoint) =
    RPC_context.make_call0
      S.run_code
      ctxt
      block
      ()
      (code, storage, input, amount, chain_id, source, payer, gas, entrypoint)

  let trace_code ctxt block code
      (storage, input, amount, chain_id, source, payer, gas, entrypoint) =
    RPC_context.make_call0
      S.trace_code
      ctxt
      block
      ()
      (code, storage, input, amount, chain_id, source, payer, gas, entrypoint)

  let typecheck_code ctxt block =
    RPC_context.make_call0 S.typecheck_code ctxt block ()

  let typecheck_data ctxt block =
    RPC_context.make_call0 S.typecheck_data ctxt block ()

  let pack_data ctxt block = RPC_context.make_call0 S.pack_data ctxt block ()

  let run_operation ctxt block =
    RPC_context.make_call0 S.run_operation ctxt block ()

  let entrypoint_type ctxt block =
    RPC_context.make_call0 S.entrypoint_type ctxt block ()

  let list_entrypoints ctxt block =
    RPC_context.make_call0 S.list_entrypoints ctxt block ()
end
module Forge = struct
module S = struct
open Data_encoding
let path = RPC_path.(path / "forge")
let operations =
RPC_service.post_service
~description:"Forge an operation"
~query:RPC_query.empty
~input:Operation.unsigned_encoding
~output:bytes
RPC_path.(path / "operations")
let empty_proof_of_work_nonce =
MBytes.of_string
(String.make Constants_repr.proof_of_work_nonce_size '\000')
let protocol_data =
RPC_service.post_service
~description:"Forge the protocol-specific part of a block header"
~query:RPC_query.empty
~input:
(obj3
(req "priority" uint16)
(opt "nonce_hash" Nonce_hash.encoding)
(dft
"proof_of_work_nonce"
(Fixed.bytes Alpha_context.Constants.proof_of_work_nonce_size)
empty_proof_of_work_nonce))
~output:(obj1 (req "protocol_data" bytes))
RPC_path.(path / "protocol_data")
end
let register () =
let open Services_registration in
register0_noctxt S.operations (fun () (shell, proto) ->
return
(Data_encoding.Binary.to_bytes_exn
Operation.unsigned_encoding
(shell, proto))) ;
register0_noctxt
S.protocol_data
(fun () (priority, seed_nonce_hash, proof_of_work_nonce) ->
return
(Data_encoding.Binary.to_bytes_exn
Block_header.contents_encoding
{priority; seed_nonce_hash; proof_of_work_nonce}))
module Manager = struct
  (* Forge a batch of manager operations.  If the manager key of
     [source] is not yet revealed and [sourcePubKey] is provided, a
     [Reveal] operation is prepended so the whole batch stays valid. *)
  let operations ctxt block ~branch ~source ?sourcePubKey ~counter ~fee
      ~gas_limit ~storage_limit operations =
    Contract_services.manager_key ctxt block source
    >>= function
    | Error _ as e ->
        Lwt.return e
    | Ok revealed ->
        (* Wrap each operation with the shared manager fields. *)
        let ops =
          List.map
            (fun (Manager operation) ->
              Contents
                (Manager_operation
                   {
                     source;
                     counter;
                     operation;
                     fee;
                     gas_limit;
                     storage_limit;
                   }))
            operations
        in
        (* Prepend a reveal only when the key is unrevealed and the
           caller supplied a public key. *)
        let ops =
          match (sourcePubKey, revealed) with
          | (None, _) | (_, Some _) ->
              ops
          | (Some pk, None) ->
              let operation = Reveal pk in
              Contents
                (Manager_operation
                   {
                     source;
                     counter;
                     operation;
                     fee;
                     gas_limit;
                     storage_limit;
                   })
              :: ops
        in
        RPC_context.make_call0
          S.operations
          ctxt
          block
          ()
          ({branch}, Operation.of_list ops)

  (* Forge a bare key revelation, with zero gas and storage limits. *)
  let reveal ctxt block ~branch ~source ~sourcePubKey ~counter ~fee () =
    operations
      ctxt
      block
      ~branch
      ~source
      ~sourcePubKey
      ~counter
      ~fee
      ~gas_limit:Gas.Arith.zero
      ~storage_limit:Z.zero
      []

  (* Forge a transaction; [parameters] defaults to the unit parameter
     and [entrypoint] to "default". *)
  let transaction ctxt block ~branch ~source ?sourcePubKey ~counter ~amount
      ~destination ?(entrypoint = "default") ?parameters ~gas_limit
      ~storage_limit ~fee () =
    let parameters =
      Option.unopt_map
        ~f:Script.lazy_expr
        ~default:Script.unit_parameter
        parameters
    in
    operations
      ctxt
      block
      ~branch
      ~source
      ?sourcePubKey
      ~counter
      ~fee
      ~gas_limit
      ~storage_limit
      [Manager (Transaction {amount; parameters; destination; entrypoint})]

  (* Forge an origination of [script] credited with [balance]. *)
  let origination ctxt block ~branch ~source ?sourcePubKey ~counter ~balance
      ?delegatePubKey ~script ~gas_limit ~storage_limit ~fee () =
    operations
      ctxt
      block
      ~branch
      ~source
      ?sourcePubKey
      ~counter
      ~fee
      ~gas_limit
      ~storage_limit
      [ Manager
          (Origination
             {
               delegate = delegatePubKey;
               script;
               credit = balance;
               preorigination = None;
             }) ]

  (* Forge a delegation, with zero gas and storage limits. *)
  let delegation ctxt block ~branch ~source ?sourcePubKey ~counter ~fee
      delegate =
    operations
      ctxt
      block
      ~branch
      ~source
      ?sourcePubKey
      ~counter
      ~fee
      ~gas_limit:Gas.Arith.zero
      ~storage_limit:Z.zero
      [Manager (Delegation delegate)]
end
(* Forge a single (non-manager) operation for the given [branch] by
   calling the [S.operations] forging service. *)
let operation ctxt block ~branch op =
  let contents = Contents_list (Single op) in
  RPC_context.make_call0 S.operations ctxt block () ({branch}, contents)
(* Forge an endorsement of the block at [level]. *)
let endorsement ctxt b ~branch ~level () =
  operation ctxt b ~branch (Endorsement {level})

(* Forge a submission of [proposals] for voting period [period]. *)
let proposals ctxt b ~branch ~source ~period ~proposals () =
  operation ctxt b ~branch (Proposals {source; period; proposals})

(* Forge a ballot on [proposal] for voting period [period]. *)
let ballot ctxt b ~branch ~source ~period ~proposal ~ballot () =
  operation ctxt b ~branch (Ballot {source; period; proposal; ballot})

(* Forge the revelation of the seed [nonce] committed at [level]. *)
let seed_nonce_revelation ctxt block ~branch ~level ~nonce () =
  operation ctxt block ~branch (Seed_nonce_revelation {level; nonce})

(* Forge double-baking evidence from two conflicting block headers. *)
let double_baking_evidence ctxt block ~branch ~bh1 ~bh2 () =
  operation ctxt block ~branch (Double_baking_evidence {bh1; bh2})

(* Forge double-endorsement evidence from two conflicting endorsements. *)
let double_endorsement_evidence ctxt block ~branch ~op1 ~op2 () =
  operation ctxt block ~branch (Double_endorsement_evidence {op1; op2})
(* All-zero proof-of-work nonce, used as the default when forging a
   block header before the real nonce has been searched for. *)
let empty_proof_of_work_nonce =
  MBytes.of_string
    (String.make Constants_repr.proof_of_work_nonce_size '\000')
(* Client-side call to the protocol-data forging service; the
   proof-of-work nonce defaults to the all-zero placeholder. *)
let protocol_data ctxt block ~priority ?seed_nonce_hash
    ?(proof_of_work_nonce = empty_proof_of_work_nonce) () =
  RPC_context.make_call0
    S.protocol_data
    ctxt
    block
    ()
    (priority, seed_nonce_hash, proof_of_work_nonce)
end
module Parse = struct
  module S = struct
    open Data_encoding

    let path = RPC_path.(path / "parse")

    (* Parse raw operations, optionally checking their signatures. *)
    let operations =
      RPC_service.post_service
        ~description:"Parse operations"
        ~query:RPC_query.empty
        ~input:
          (obj2
             (req "operations" (list (dynamic_size Operation.raw_encoding)))
             (opt "check_signature" bool))
        ~output:(list (dynamic_size Operation.encoding))
        RPC_path.(path / "operations")

    (* Parse a raw block header into its protocol-specific part. *)
    let block =
      RPC_service.post_service
        ~description:"Parse a block"
        ~query:RPC_query.empty
        ~input:Block_header.raw_encoding
        ~output:Block_header.protocol_data_encoding
        RPC_path.(path / "block")
  end

  (* Decode the protocol-specific part of a block header.
     Fails with "Cant_parse_protocol_data" when the bytes do not decode. *)
  let parse_protocol_data protocol_data =
    match
      Data_encoding.Binary.of_bytes
        Block_header.protocol_data_encoding
        protocol_data
    with
    | None ->
        failwith "Cant_parse_protocol_data"
    | Some protocol_data ->
        protocol_data

  let register () =
    let open Services_registration in
    register0 S.operations (fun _ctxt () (operations, check) ->
        map_s
          (fun raw ->
            parse_operation raw
            >>?= fun op ->
            (* Signature verification is not implemented yet: the [check]
               flag is accepted but currently ignored. *)
            ( match check with
            | Some true ->
                return_unit (* FIXME *)
            (* I.check_signature ctxt *)
            (* op.protocol_data.signature op.shell op.protocol_data.contents *)
            | Some false | None ->
                return_unit )
            >|=? fun () -> op)
          operations) ;
    register0_noctxt S.block (fun () raw_block ->
        return @@ parse_protocol_data raw_block.protocol_data)

  (* Client-side wrappers for the two services above. *)
  let operations ctxt block ?check operations =
    RPC_context.make_call0 S.operations ctxt block () (operations, check)

  let block ctxt block shell protocol_data =
    RPC_context.make_call0
      S.block
      ctxt
      block
      ()
      ({shell; protocol_data} : Block_header.raw)
end
module S = struct
  open Data_encoding

  (* Query parameter: offset in blocks relative to the queried block
     (positive = later, negative = earlier); defaults to 0. *)
  type level_query = {offset : int32}

  let level_query : level_query RPC_query.t =
    let open RPC_query in
    query (fun offset -> {offset})
    |+ field "offset" RPC_arg.int32 0l (fun t -> t.offset)
    |> seal

  let current_level =
    RPC_service.get_service
      ~description:
        "Returns the level of the interrogated block, or the one of a block \
         located `offset` blocks after in the chain (or before when \
         negative). For instance, the next block if `offset` is 1."
      ~query:level_query
      ~output:Level.encoding
      RPC_path.(path / "current_level")

  let levels_in_current_cycle =
    RPC_service.get_service
      ~description:"Levels of a cycle"
      ~query:level_query
      ~output:
        (obj2 (req "first" Raw_level.encoding) (req "last" Raw_level.encoding))
      RPC_path.(path / "levels_in_current_cycle")
end
(* Register every helper service of this module plus the level queries. *)
let register () =
  Scripts.register () ;
  Forge.register () ;
  Parse.register () ;
  let open Services_registration in
  register0 S.current_level (fun ctxt q () ->
      let level = Level.current ctxt in
      return (Level.from_raw ctxt ~offset:q.offset level.level)) ;
  register0 S.levels_in_current_cycle (fun ctxt q () ->
      let levels = Level.levels_in_current_cycle ctxt ~offset:q.offset () in
      match levels with
      | [] ->
          (* An empty cycle cannot happen for a valid offset. *)
          raise Not_found
      | _ ->
          (* The head of [levels] is used as the cycle's last level and
             its final element as the first. *)
          let first = List.hd (List.rev levels) in
          let last = List.hd levels in
          return (first.level, last.level))
(* Client-side wrappers; [offset] defaults to 0 (the block itself). *)
let current_level ctxt ?(offset = 0l) block =
  RPC_context.make_call0 S.current_level ctxt block {offset} ()

let levels_in_current_cycle ctxt ?(offset = 0l) block =
  RPC_context.make_call0 S.levels_in_current_cycle ctxt block {offset} ()
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
main.mli | ||
t-zeta_ui_asymp.c |
#include "arb.h"
/* Randomized test of arb_zeta_ui_asymp: for random integers n >= 2 and a
   random working precision, the computed ball must contain the reference
   value zeta(n) produced by MPFR at (prec + 100) bits. */
int main()
{
    slong iter;
    flint_rand_t state;
    flint_printf("zeta_ui_asymp....");
    fflush(stdout);
    flint_randinit(state);
    for (iter = 0; iter < 10000 * arb_test_multiplier(); iter++)
    {
        arb_t r;
        ulong n;
        mpfr_t s;
        slong prec;
        /* prec = 2 + random value below 2^k, for random k < 10 */
        prec = 2 + n_randint(state, 1 << n_randint(state, 10));
        arb_init(r);
        mpfr_init2(s, prec + 100);
        /* n = 2 + random value below 2^k, for random k < 10 */
        n = 2 + n_randint(state, 1 << n_randint(state, 10));
        arb_zeta_ui_asymp(r, n, prec);
        mpfr_zeta_ui(s, n, MPFR_RNDN);
        /* The ball r must contain the (nearly exact) MPFR value. */
        if (!arb_contains_mpfr(r, s))
        {
            flint_printf("FAIL: containment\n\n");
            flint_printf("n = %wu\n\n", n);
            flint_printf("r = "); arb_printd(r, prec / 3.33); flint_printf("\n\n");
            flint_printf("s = "); mpfr_printf("%.275Rf\n", s); flint_printf("\n\n");
            flint_abort();
        }
        arb_clear(r);
        mpfr_clear(s);
    }
    flint_randclear(state);
    flint_cleanup();
    flint_printf("PASS\n");
    return EXIT_SUCCESS;
}
| /*
Copyright (C) 2012 Fredrik Johansson
This file is part of Arb.
Arb is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <http://www.gnu.org/licenses/>.
*/ |
reload.ml | (* Reloading for the PowerPC *)
(* The PowerPC port needs no target-specific reloading rules: delegate
   everything to the generic reloading engine. *)
let fundecl f =
  let reloader = new Reloadgen.reload_generic in
  reloader#fundecl f
| (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
|
conv.mli | (** Encoding + Decoding *)
(** Signature of a type equipped with both a decoder and an encoder. *)
module type S = sig
  type t
  (** The value type being (de)serialized. *)

  val decode : t Decoder.t
  (** Decoder producing a [t]. *)

  val encode : t Encoder.t
  (** Encoder consuming a [t]. *)
end
| (** Encoding + Decoding *)
|
stack.mli | (** runtime stack module *)
(* The head of the list is the top of the stack. *)
type 'a t = 'a Value.t list

(** The empty stack. *)
val empty : 'a t

(** Pretty-printer for a stack. *)
val pp : Format.formatter -> 'a t -> unit

(** pop operations *)

(** [drop s] is [s] without its top element. *)
val drop : 'a t -> 'a t

(** [pop s] is the top element of [s] together with the rest of [s]. *)
val pop : 'a t -> 'a Value.t * 'a t

(** [pop_n s n] splits [s] into the [n] topmost elements and the rest
    -- presumably in that order; confirm against the implementation. *)
val pop_n : 'a t -> int -> 'a t * 'a t

(** [keep s n] reduces [s] to [n] elements -- presumably the topmost
    ones; confirm against the implementation. *)
val keep : 'a t -> int -> 'a t

(** Typed pops: each pops the top value converted to the requested
    representation and returns it with the remaining stack; the
    [pop2_*] variants pop two values at once. *)
val pop_bool : 'a t -> bool * 'a t

val pop_i32 : 'a t -> int32 * 'a t

val pop_i32_to_char : 'a t -> char * 'a t

val pop_i32_to_int : 'a t -> int * 'a t

val pop2_i32 : 'a t -> (int32 * int32) * 'a t

val pop_i64 : 'a t -> int64 * 'a t

val pop2_i64 : 'a t -> (int64 * int64) * 'a t

val pop_f32 : 'a t -> Float32.t * 'a t

val pop2_f32 : 'a t -> (Float32.t * Float32.t) * 'a t

val pop_f64 : 'a t -> Float64.t * 'a t

val pop2_f64 : 'a t -> (Float64.t * Float64.t) * 'a t

(** Reference-value pops. *)
val pop_ref : 'a t -> 'a Value.t * 'a t

val pop_is_null : 'a t -> bool * 'a t

val pop_as_ref : 'a t -> 'a Value.ref_value * 'a t

val pop_as_externref : 'a Value.Extern_ref.ty -> 'b t -> 'a * 'b t

(** push operations *)

(** Each push wraps the given raw value and puts it on top of the
    stack. *)
val push : 'a t -> 'a Value.t -> 'a t

val push_bool : 'a t -> bool -> 'a t

val push_i32 : 'a t -> int32 -> 'a t

val push_i32_of_int : 'a t -> int -> 'a t

val push_i64 : 'a t -> int64 -> 'a t

val push_i64_of_int : 'a t -> int -> 'a t

val push_f32 : 'a t -> Float32.t -> 'a t

val push_f64 : 'a t -> Float64.t -> 'a t

val push_as_externref : 'a t -> 'b Value.Extern_ref.ty -> 'b -> 'a t
| (** runtime stack module *)
|
more.mli | (* Tail-recursive lists *)
(** Re-export of the standard [List] interface.
    NOTE(review): presumably the implementation provides tail-recursive
    versions of the standard functions -- confirm in the .ml file. *)
module List :
  sig
    include (module type of List)
  end

(* Utilities *)
module Util :
  sig
    (** [from m n] builds the integer range from [m] to [n] --
        presumably inclusive; confirm against the implementation. *)
    val from : int -> int -> int list

    (** [take l n] is a prefix of [l] of length [n]. *)
    val take : 'a list -> int -> 'a list

    (** [drop l n] is [l] without its first [n] elements. *)
    val drop : 'a list -> int -> 'a list
  end
| (* Tail-recursive lists *)
module List : |
user_list.mli |
open! Core_kernel

(** One entry of a user list: a user together with relationship
    metadata.  NOTE(review): semantics inferred from the accessor
    types -- confirm against the JSON producer. *)
module Item : sig
  module Id : Identifiable

  type t [@@deriving sexp]

  include Json_object.S_with_fields with type t := t

  (** Name of the listed user. *)
  val username : t -> Username.t

  (** Id of the listed user. *)
  val user_id : t -> Thing.User.Id.t

  (** Id of the relationship record itself. *)
  val relationship_id : t -> Id.t

  (** Timestamp of the relationship -- presumably its creation time;
      confirm. *)
  val since : t -> Time_ns.t
end

(** A user list is simply a list of items. *)
type t = Item.t list [@@deriving sexp]
| |
michelson_v1_parser.ml | open Protocol
open Tezos_micheline
open Micheline_parser
open Micheline
(* Result of parsing and macro-expanding a Michelson source: the
   original text, the unexpanded and expanded ASTs, and the location
   tables mapping expanded locations back to unexpanded ones (and
   vice versa). *)
type parsed = {
  source : string;
  unexpanded : string canonical;
  expanded : Michelson_v1_primitives.prim canonical;
  expansion_table : (int * (Micheline_parser.location * int list)) list;
  unexpansion_table : (int * int) list;
}

(* Structural comparison suffices: all fields are immutable data. *)
let compare_parsed = Stdlib.compare
(* Unexpanded toplevel expression should be a sequence *)
(* Expand macros in [ast] and build a [parsed], threading the [errors]
   already produced by lexing/parsing.  The expansion table associates
   each unexpanded location with its source location and the list of
   expanded locations it produced. *)
let expand_all source ast errors =
  let unexpanded, loc_table = extract_locations ast in
  let expanded, expansion_errors =
    Michelson_v1_macros.expand_rec (root unexpanded)
  in
  let expanded, unexpansion_table = extract_locations expanded in
  let expansion_table =
    (* Sort expanded->unexpanded pairs by unexpanded location, then
       group consecutive pairs sharing the same unexpanded location. *)
    let sorted =
      List.sort (fun (_, a) (_, b) -> Stdlib.compare a b) unexpansion_table
    in
    let grouped =
      let rec group = function
        | acc, [] -> acc
        | [], (u, e) :: r -> group ([(e, [u])], r)
        | ((pe, us) :: racc as acc), (u, e) :: r ->
            if e = pe then group ((e, u :: us) :: racc, r)
            else group ((e, [u]) :: acc, r)
      in
      group ([], sorted)
    in
    (* Zip each grouped entry with its source location; both lists are
       sorted by unexpanded location, so the keys must line up. *)
    match
      List.map2
        ~when_different_lengths:()
        (fun (l, ploc) (l', elocs) ->
          assert (l = l') ;
          (l, (ploc, elocs)))
        (List.sort Stdlib.compare loc_table)
        (List.sort Stdlib.compare grouped)
    with
    | Ok v -> v
    | Error () -> invalid_arg "Michelson_v1_parser.expand_all"
  in
  match
    Environment.wrap_error (Michelson_v1_primitives.prims_of_strings expanded)
  with
  | Ok expanded ->
      ( {source; unexpanded; expanded; expansion_table; unexpansion_table},
        errors @ expansion_errors )
  | Error errs ->
      (* Primitive resolution failed: keep an empty expanded AST so the
         caller still gets the tables, and report the errors. *)
      ( {
          source;
          unexpanded;
          expanded = Micheline.strip_locations (Seq ((), []));
          expansion_table;
          unexpansion_table;
        },
        errors @ expansion_errors @ errs )
(* Tokenize and parse a complete toplevel source, wrap the resulting
   nodes in one covering sequence, then expand macros while
   accumulating lexing and parsing errors. *)
let parse_toplevel ?check source =
  let toks, lex_errs = Micheline_parser.tokenize source in
  let nodes, parse_errs = Micheline_parser.parse_toplevel ?check toks in
  let wrapped =
    Seq ({start = min_point nodes; stop = max_point nodes}, nodes)
  in
  expand_all source wrapped (lex_errs @ parse_errs)
(* Tokenize and parse a single Michelson expression, then expand
   macros while accumulating lexing and parsing errors. *)
let parse_expression ?check source =
  let toks, lex_errs = Micheline_parser.tokenize source in
  let node, parse_errs = Micheline_parser.parse_expression ?check toks in
  expand_all source node (lex_errs @ parse_errs)
let expand_all ~source ~original = expand_all source original []
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
lu_def.h | #pragma once
#include <algorithm>
#include <set>
#include <string>
#include <utility>
#include <vector>
#include "util/vector.h"
#include "util/debug.h"
#include "math/lp/lu.h"
namespace lp {
// Render the top-left mr x nc corner of m as an aligned text table on out.
// NOTE(review): the template parameter M is unused here; the matrix type is
// fixed to square_sparse_matrix<T, X> -- confirm whether M can be dropped.
template <typename T, typename X, typename M> // print the nr x nc submatrix at the top left corner
void print_submatrix(square_sparse_matrix<T, X> & m, unsigned mr, unsigned nc, std::ostream & out) {
    vector<vector<std::string>> A;
    vector<unsigned> widths;
    // Stringify every visible cell.
    for (unsigned i = 0; i < m.row_count() && i < mr ; i++) {
        A.push_back(vector<std::string>());
        for (unsigned j = 0; j < m.column_count() && j < nc; j++) {
            A[i].push_back(T_to_string(static_cast<T>(m(i, j))));
        }
    }
    // One width per printed column, sized to the widest entry.
    for (unsigned j = 0; j < m.column_count() && j < nc; j++) {
        widths.push_back(get_width_of_column(j, A));
    }
    print_matrix_with_widths(A, widths, out);
}
template<typename M>
void print_matrix(M &m, std::ostream & out) {
vector<vector<std::string>> A;
vector<unsigned> widths;
for (unsigned i = 0; i < m.row_count(); i++) {
A.push_back(vector<std::string>());
for (unsigned j = 0; j < m.column_count(); j++) {
A[i].push_back(T_to_string(m[i][j]));
}
}
for (unsigned j = 0; j < m.column_count(); j++) {
widths.push_back(get_width_of_column(j, A));
}
print_matrix_with_widths(A, widths, out);
}
// Copy constructor; the Z3DEBUG-only fields cache the dimension and the
// reciprocal of the pivot value for the dense accessor below.
template <typename T, typename X>
one_elem_on_diag<T, X>::one_elem_on_diag(const one_elem_on_diag & o) {
    m_i = o.m_i;
    m_val = o.m_val;
#ifdef Z3DEBUG
    // NOTE(review): m_n is copied from o.m_m, not o.m_n -- fine for square
    // matrices, but confirm this is intended.
    m_m = m_n = o.m_m;
    m_one_over_val = numeric_traits<T>::one() / o.m_val;
#endif
}
#ifdef Z3DEBUG
// Dense view (debug only): identity everywhere, except entry (m_i, m_i)
// which holds 1/m_val.
template <typename T, typename X>
T one_elem_on_diag<T, X>::get_elem(unsigned i, unsigned j) const {
    if (i == j){
        if (j == m_i) {
            return m_one_over_val;
        }
        return numeric_traits<T>::one();
    }
    return numeric_traits<T>::zero();
}
#endif
// Apply this matrix from the left to w: only w[m_i] changes (divided by
// m_val).  With imprecise arithmetic, a result below the drop tolerance is
// zeroed and removed from w's index.
template <typename T, typename X>
void one_elem_on_diag<T, X>::apply_from_left_to_T(indexed_vector<T> & w, lp_settings & settings) {
    T & t = w[m_i];
    if (numeric_traits<T>::is_zero(t)) {
        return;
    }
    t /= m_val;
    if (numeric_traits<T>::precise()) return;
    if (settings.abs_val_is_smaller_than_drop_tolerance(t)) {
        w.erase_from_index(m_i);
        t = numeric_traits<T>::zero();
    }
}
// This class supports updates of the columns of B, and solves systems Bx=b,and yB=c
// Using Suhl-Suhl method described in the dissertation of Achim Koberstein, Chapter 5
// Construct and immediately factorize the basis columns of A selected by
// [basis].
template <typename M>
lu<M>::lu(const M& A,
          vector<unsigned>& basis,
          lp_settings & settings):
    m_status(LU_status::OK),
    m_dim(A.row_count()),
    m_A(A),
    m_Q(m_dim),
    m_R(m_dim),
    m_r_wave(m_dim),
    m_U(A, basis), // create the square matrix that eventually will be factorized
    m_settings(settings),
    m_failure(false),
    m_row_eta_work_vector(A.row_count()),
    m_refactor_counter(0) {
    lp_assert(!(numeric_traits<T>::precise() && settings.use_tableau()));
#ifdef Z3DEBUG
    debug_test_of_basis(A, basis);
#endif
    ++m_settings.stats().m_num_factorizations;
    create_initial_factorization();
#ifdef Z3DEBUG
    // lp_assert(check_correctness());
#endif
}
// Variant for a square A: factorize the whole matrix.
template <typename M>
lu<M>::lu(const M& A,
          lp_settings & settings):
    m_status(LU_status::OK),
    m_dim(A.row_count()),
    m_A(A),
    m_Q(m_dim),
    m_R(m_dim),
    m_r_wave(m_dim),
    m_U(A), // create the square matrix that eventually will be factorized
    m_settings(settings),
    m_failure(false),
    m_row_eta_work_vector(A.row_count()),
    m_refactor_counter(0) {
    lp_assert(A.row_count() == A.column_count());
    create_initial_factorization();
#ifdef Z3DEBUG
    lp_assert(is_correct());
#endif
}
// Debug invariant: every basis entry is a valid column index of A and all
// entries are pairwise distinct (as many distinct columns as rows).
template <typename M>
void lu<M>::debug_test_of_basis( M const & A, vector<unsigned> & basis) {
    std::set<unsigned> seen;
    const unsigned rows = A.row_count();
    for (unsigned i = 0; i < rows; i++) {
        lp_assert(basis[i] < A.column_count());
        seen.insert(basis[i]);
    }
    lp_assert(seen.size() == rows);
}
// Solve B*y = y in place for an indexed vector.  Not implemented: always
// asserts.
template <typename M>
void lu<M>::solve_By(indexed_vector<X> & y) {
    lp_assert(false); // not implemented
    // init_vector_y(y);
    // solve_By_when_y_is_ready(y);
}
// Solve B*y = y in place: apply the eta list and Q, then finish with U and R.
template <typename M>
void lu<M>::solve_By(vector<X> & y) {
    init_vector_y(y);
    solve_By_when_y_is_ready_for_X(y);
}
// Final stage of solve_By: back-substitute through U and undo the column
// permutation R.  With imprecise arithmetic, U is solved with refinement and
// entries below the drop tolerance are flushed to zero.
template <typename M>
void lu<M>::solve_By_when_y_is_ready_for_X(vector<X> & y) {
    if (numeric_traits<T>::precise()) {
        m_U.solve_U_y(y);
        m_R.apply_reverse_from_left_to_X(y); // see 24.3 from Chvatal
        return;
    }
    m_U.double_solve_U_y(y);
    m_R.apply_reverse_from_left_to_X(y); // see 24.3 from Chvatal
    unsigned i = m_dim;
    while (i--) {
        if (is_zero(y[i])) continue;
        if (m_settings.abs_val_is_smaller_than_drop_tolerance(y[i])){
            y[i] = zero_of_type<X>();
        }
    }
}
// Same as above for T-valued vectors; additionally records the indices of
// the nonzero result entries in [index].
template <typename M>
void lu<M>::solve_By_when_y_is_ready_for_T(vector<T> & y, vector<unsigned> & index) {
    if (numeric_traits<T>::precise()) {
        m_U.solve_U_y(y);
        m_R.apply_reverse_from_left_to_T(y); // see 24.3 from Chvatal
        unsigned j = m_dim;
        while (j--) {
            if (!is_zero(y[j]))
                index.push_back(j);
        }
        return;
    }
    m_U.double_solve_U_y(y);
    m_R.apply_reverse_from_left_to_T(y); // see 24.3 from Chvatal
    unsigned i = m_dim;
    while (i--) {
        if (is_zero(y[i])) continue;
        if (m_settings.abs_val_is_smaller_than_drop_tolerance(y[i])){
            y[i] = zero_of_type<T>();
        } else {
            index.push_back(i);
        }
    }
}
// Sparse variant: keeps the indexed representation of y throughout.
template <typename M>
void lu<M>::solve_By_for_T_indexed_only(indexed_vector<T> & y, const lp_settings & settings) {
    if (numeric_traits<T>::precise()) {
        vector<unsigned> active_rows;
        m_U.solve_U_y_indexed_only(y, settings, active_rows);
        m_R.apply_reverse_from_left(y); // see 24.3 from Chvatal
        return;
    }
    m_U.double_solve_U_y(y, m_settings);
    m_R.apply_reverse_from_left(y); // see 24.3 from Chvatal
}
// Dump m_A in a simple line-oriented text format (offline debugging aid).
template <typename M>
void lu<M>::print_matrix_compact(std::ostream & f) {
    f << "matrix_start" << std::endl;
    f << "nrows " << m_A.row_count() << std::endl;
    f << "ncolumns " << m_A.column_count() << std::endl;
    const unsigned n_rows = m_A.row_count();
    for (unsigned r = 0; r < n_rows; r++) {
        f << "row " << r << std::endl;
        for (auto & cell : m_A.m_rows[r]) {
            f << "column " << cell.m_j << " value " << cell.m_value << std::endl;
        }
        f << "row_end" << std::endl;
    }
    f << "matrix_end" << std::endl;
}
// Dump the matrix, the basis and the vector w to /tmp/lu for offline
// inspection (debugging aid).
template <typename M>
void lu< M>::print(indexed_vector<T> & w, const vector<unsigned>& basis) {
    std::string dump_file_name("/tmp/lu");
    remove(dump_file_name.c_str());
    std::ofstream f(dump_file_name);
    if (!f.is_open()) {
        LP_OUT(m_settings, "cannot open file " << dump_file_name << std::endl);
        return;
    }
    LP_OUT(m_settings, "writing lu dump to " << dump_file_name << std::endl);
    print_matrix_compact(f);
    print_vector(basis, f);
    print_indexed_vector(w, f);
    f.close();
}
// Solve B*d = a (column a_column of A); w is a scratch vector that receives
// the transformed column.  Picks the sparse or dense solve depending on how
// sparse w turned out.
template <typename M>
void lu< M>::solve_Bd(unsigned a_column, indexed_vector<T> & d, indexed_vector<T> & w) {
    init_vector_w(a_column, w);
    if (w.m_index.size() * ratio_of_index_size_to_all_size<T>() < d.m_data.size()) { // this const might need some tuning
        d = w;
        solve_By_for_T_indexed_only(d, m_settings);
    } else {
        d.m_data = w.m_data;
        d.m_index.clear();
        solve_By_when_y_is_ready_for_T(d.m_data, d.m_index);
    }
}
// Same as solve_Bd but reuses d itself as the scratch vector and always
// takes the sparse path.
template <typename M>
void lu< M>::solve_Bd_faster(unsigned a_column, indexed_vector<T> & d) { // puts the a_column into d
    init_vector_w(a_column, d);
    solve_By_for_T_indexed_only(d, m_settings);
}
// Solve y*B = y in place: undo R, solve against U, undo Q, then apply the
// tail matrices from the right in reverse order.
template <typename M>
void lu< M>::solve_yB(vector<T>& y) {
    // first solve yU = cb*R(-1)
    m_R.apply_reverse_from_right_to_T(y); // got y = cb*R(-1)
    m_U.solve_y_U(y); // got y*U=cb*R(-1)
    m_Q.apply_reverse_from_right_to_T(y); //
    for (auto e = m_tail.rbegin(); e != m_tail.rend(); ++e) {
#ifdef Z3DEBUG
        (*e)->set_number_of_columns(m_dim);
#endif
        (*e)->apply_from_right(y);
    }
}
// Indexed variant of solve_yB; checks the index invariant after each step.
template <typename M>
void lu< M>::solve_yB_indexed(indexed_vector<T>& y) {
    lp_assert(y.is_OK());
    // first solve yU = cb*R(-1)
    m_R.apply_reverse_from_right_to_T(y); // got y = cb*R(-1)
    lp_assert(y.is_OK());
    m_U.solve_y_U_indexed(y, m_settings); // got y*U=cb*R(-1)
    lp_assert(y.is_OK());
    m_Q.apply_reverse_from_right_to_T(y);
    lp_assert(y.is_OK());
    for (auto e = m_tail.rbegin(); e != m_tail.rend(); ++e) {
#ifdef Z3DEBUG
        (*e)->set_number_of_columns(m_dim);
#endif
        (*e)->apply_from_right(y);
        lp_assert(y.is_OK());
    }
}
// y += yc, componentwise (both vectors are assumed the same size).
template <typename M>
void lu< M>::add_delta_to_solution(const vector<T>& yc, vector<T>& y){
    const unsigned n = static_cast<unsigned>(y.size());
    for (unsigned i = 0; i < n; ++i)
        y[i] += yc[i];
}
// Add the correction stored in m_y_copy into y, maintaining y's sparse
// index.  m_ii is used as a 0/1 mask collecting every position touched by
// either vector; entries that become negligible are zeroed.
template <typename M>
void lu< M>::add_delta_to_solution_indexed(indexed_vector<T>& y) {
    // the delta sits in m_y_copy, put result into y
    lp_assert(y.is_OK());
    lp_assert(m_y_copy.is_OK());
    m_ii.clear();
    m_ii.resize(y.data_size());
    // Mark positions already present in y.
    for (unsigned i : y.m_index)
        m_ii.set_value(1, i);
    // Add the delta and mark any new positions it introduces.
    for (unsigned i : m_y_copy.m_index) {
        y.m_data[i] += m_y_copy[i];
        if (m_ii[i] == 0)
            m_ii.set_value(1, i);
    }
    lp_assert(m_ii.is_OK());
    // Rebuild y's index, dropping entries that are now negligible.
    y.m_index.clear();
    for (unsigned i : m_ii.m_index) {
        T & v = y.m_data[i];
        if (!lp_settings::is_eps_small_general(v, 1e-14))
            y.m_index.push_back(i);
        else if (!numeric_traits<T>::is_zero(v))
            v = zero_of_type<T>();
    }
    lp_assert(y.is_OK());
}
// Compute the residual of a yB solve: yc -= y * B, where B consists of the
// columns of A selected by m_basis.  On return yc holds the error.
template <typename M>
void lu< M>::find_error_of_yB(vector<T>& yc, const vector<T>& y, const vector<unsigned>& m_basis) {
    unsigned i = m_dim;
    while (i--) {
        yc[i] -= m_A.dot_product_with_column(y, m_basis[i]);
    }
}
// Sparse residual computation: subtract y * B from m_y_copy in place,
// walking only the rows where y is nonzero.  heading maps a column of A to
// its position in the basis (negative = non-basic).  m_ii is reused as a
// 0/1 mask of touched positions so m_y_copy's index can be rebuilt.
template <typename M>
void lu< M>::find_error_of_yB_indexed(const indexed_vector<T>& y, const vector<int>& heading, const lp_settings& settings) {
#if 0 == 1
    // it is a non efficient version
    indexed_vector<T> yc = m_y_copy;
    yc.m_index.clear();
    lp_assert(!numeric_traits<T>::precise());
    {
        vector<unsigned> d_basis(y.m_data.size());
        for (unsigned j = 0; j < heading.size(); j++) {
            if (heading[j] >= 0) {
                d_basis[heading[j]] = j;
            }
        }
        unsigned i = m_dim;
        while (i--) {
            T & v = yc.m_data[i] -= m_A.dot_product_with_column(y.m_data, d_basis[i]);
            if (settings.abs_val_is_smaller_than_drop_tolerance(v))
                v = zero_of_type<T>();
            else
                yc.m_index.push_back(i);
        }
    }
#endif
    lp_assert(m_ii.is_OK());
    m_ii.clear();
    m_ii.resize(y.data_size());
    lp_assert(m_y_copy.is_OK());
    // put the error into m_y_copy
    for (auto k : y.m_index) {
        auto & row = m_A.m_rows[k];
        const T & y_k = y.m_data[k];
        for (auto & c : row) {
            unsigned j = c.var();
            int hj = heading[j];
            if (hj < 0) continue;  // skip non-basic columns
            if (m_ii.m_data[hj] == 0)
                m_ii.set_value(1, hj);
            m_y_copy.m_data[hj] -= c.coeff() * y_k;
        }
    }
    // add the index of m_y_copy to m_ii
    for (unsigned i : m_y_copy.m_index) {
        if (m_ii.m_data[i] == 0)
            m_ii.set_value(1, i);
    }
    // there is no guarantee that m_y_copy is OK here, but its index
    // is contained in m_ii index
    m_y_copy.m_index.clear();
    // setup the index of m_y_copy
    for (auto k : m_ii.m_index) {
        T& v = m_y_copy.m_data[k];
        if (settings.abs_val_is_smaller_than_drop_tolerance(v))
            v = zero_of_type<T>();
        else {
            m_y_copy.set_value(v, k);
        }
    }
    lp_assert(m_y_copy.is_OK());
}
// solves y*B = y
// y is the input
// With precise arithmetic no refinement is needed; otherwise one step of
// iterative refinement is performed (solve, compute residual, solve the
// residual, add the correction).  The sparse path is used while y stays
// sparse enough relative to the column count.
template <typename M>
void lu< M>::solve_yB_with_error_check_indexed(indexed_vector<T> & y, const vector<int>& heading, const vector<unsigned> & basis, const lp_settings & settings) {
    if (numeric_traits<T>::precise()) {
        if (y.m_index.size() * ratio_of_index_size_to_all_size<T>() * 3 < m_A.column_count()) {
            solve_yB_indexed(y);
        } else {
            solve_yB(y.m_data);
            y.restore_index_and_clean_from_data();
        }
        return;
    }
    lp_assert(m_y_copy.is_OK());
    lp_assert(y.is_OK());
    if (y.m_index.size() * ratio_of_index_size_to_all_size<T>() < m_A.column_count()) {
        m_y_copy = y;
        solve_yB_indexed(y);
        lp_assert(y.is_OK());
        if (y.m_index.size() * ratio_of_index_size_to_all_size<T>() >= m_A.column_count()) {
            // The solution densified: finish with the dense refinement.
            find_error_of_yB(m_y_copy.m_data, y.m_data, basis);
            solve_yB(m_y_copy.m_data);
            add_delta_to_solution(m_y_copy.m_data, y.m_data);
            y.restore_index_and_clean_from_data();
            m_y_copy.clear_all();
        } else {
            find_error_of_yB_indexed(y, heading, settings); // this works with m_y_copy
            solve_yB_indexed(m_y_copy);
            add_delta_to_solution_indexed(y);
        }
        lp_assert(m_y_copy.is_OK());
    } else {
        solve_yB_with_error_check(y.m_data, basis);
        y.restore_index_and_clean_from_data();
    }
}
// solves y*B = y
// y is the input
// Dense version: solve, compute the residual against the basis columns,
// solve the residual and add the correction (one refinement step).
template <typename M>
void lu< M>::solve_yB_with_error_check(vector<T> & y, const vector<unsigned>& basis) {
    if (numeric_traits<T>::precise()) {
        solve_yB(y);
        return;
    }
    auto & yc = m_y_copy.m_data;
    yc =y; // copy y aside
    solve_yB(y);
    find_error_of_yB(yc, y, basis);
    solve_yB(yc);
    add_delta_to_solution(yc, y);
    m_y_copy.clear_all();
}
// Conjugate U by r_wave: multiply by r_wave on the right and by its
// reverse on the left.
template <typename M>
void lu< M>::apply_Q_R_to_U(permutation_matrix<T, X> & r_wave) {
    m_U.multiply_from_right(r_wave);
    m_U.multiply_from_left_with_reverse(r_wave);
}
// Solving yB = cb to find the entering variable,
// where cb is the cost vector projected to B.
// The result is stored in cb.
// solving Bd = a ( to find the column d of B^{-1} A_N corresponding to the entering
// variable
// Destructor: the matrices accumulated in m_tail are heap-allocated and
// owned by this object.
template <typename M>
lu< M>::~lu(){
    for (auto t : m_tail) {
        delete t;
    }
}
// First stage of solve_By: apply the tail matrices, then the reverse of Q.
template <typename M>
void lu< M>::init_vector_y(vector<X> & y) {
    apply_lp_list_to_y(y);
    m_Q.apply_reverse_from_left_to_X(y);
}
// Apply the tail matrices and the reverse of Q to the sparse vector w.
template <typename M>
void lu< M>::perform_transformations_on_w(indexed_vector<T>& w) {
    apply_lp_list_to_w(w);
    m_Q.apply_reverse_from_left(w);
    // TBD does not compile: lp_assert(numeric_traits<T>::precise() || check_vector_for_small_values(w, m_settings));
}
// see Chvatal 24.3
// Load column [entering] of A into w and run the standard transformations.
template <typename M>
void lu< M>::init_vector_w(unsigned entering, indexed_vector<T> & w) {
    w.clear();
    m_A.copy_column_to_indexed_vector(entering, w); // w = a, the column
    perform_transformations_on_w(w);
}
// Apply every tail matrix to the sparse vector w, in order.
template <typename M>
void lu< M>::apply_lp_list_to_w(indexed_vector<T> & w) {
    for (unsigned i = 0; i < m_tail.size(); i++) {
        m_tail[i]->apply_from_left_to_T(w, m_settings);
        // TBD does not compile: lp_assert(check_vector_for_small_values(w, m_settings));
    }
}
// Apply every tail matrix to the dense vector y, in order.
template <typename M>
void lu< M>::apply_lp_list_to_y(vector<X>& y) {
    for (unsigned i = 0; i < m_tail.size(); i++) {
        m_tail[i]->apply_from_left(y, m_settings);
    }
}
// Swap rows j and k of U, recording the change in the row permutation Q.
template <typename M>
void lu< M>::swap_rows(int j, int k) {
    if (j == k)
        return;
    m_Q.transpose_from_left(j, k);
    m_U.swap_rows(j, k);
}
// Swap columns j and pivot_column of U, recording the change in the
// column permutation R.
template <typename M>
void lu< M>::swap_columns(int j, int pivot_column) {
    if (j != pivot_column) {
        m_R.transpose_from_right(j, pivot_column);
        m_U.swap_columns(j, pivot_column);
    }
}
// Eliminate below the diagonal in [row] using an eta matrix; the eta matrix
// is conjugated by Q and appended to the tail.  Returns false on failure.
template <typename M>
bool lu< M>::pivot_the_row(int row) {
    eta_matrix<T, X> * eta_matrix = get_eta_matrix_for_pivot(row);
    if (get_status() != LU_status::OK) {
        return false;
    }
    if (eta_matrix == nullptr) {
        // Nothing to eliminate: just shrink the active submatrix.
        m_U.shorten_active_matrix(row, nullptr);
        return true;
    }
    if (!m_U.pivot_with_eta(row, eta_matrix, m_settings))
        return false;
    eta_matrix->conjugate_by_permutation(m_Q);
    push_matrix_to_tail(eta_matrix);
    return true;
}
// we're processing the column j now
// Build the eta matrix eliminating column j of U; flags the factorization
// as degenerated when U cannot produce one.
template <typename M>
eta_matrix<typename M::coefftype, typename M::argtype> * lu< M>::get_eta_matrix_for_pivot(unsigned j) {
    eta_matrix<T, X> *ret;
    if(!m_U.fill_eta_matrix(j, &ret)) {
        set_status(LU_status::Degenerated);
    }
    return ret;
}
// we're processing the column j now
// Variant operating on an explicit copy of U; ignores failure.
template <typename M>
eta_matrix<typename M::coefftype, typename M::argtype> * lu<M>::get_eta_matrix_for_pivot(unsigned j, square_sparse_matrix<T, X>& copy_of_U) {
    eta_matrix<T, X> *ret;
    copy_of_U.fill_eta_matrix(j, &ret);
    return ret;
}
// see page 407 of Chvatal
// Replace the column of U corresponding to [leaving_column] (mapped through
// the reverse of R) with w; returns the replaced column index.
template <typename M>
unsigned lu<M>::transform_U_to_V_by_replacing_column(indexed_vector<T> & w,
                                                     unsigned leaving_column) {
    unsigned column_to_replace = m_R.apply_reverse(leaving_column);
    m_U.replace_column(column_to_replace, w, m_settings);
    return column_to_replace;
}
#ifdef Z3DEBUG
// Debug check: materialize column [entering] of A densely and run it
// through the eta list, mirroring init_vector_w.
// Uses std::vector for exception-safe cleanup: the original raw
// new[]/delete[] pair leaked the buffer if a check threw.
template <typename M>
void lu<M>::check_vector_w(unsigned entering) {
    std::vector<T> w(m_dim);
    m_A.copy_column_to_vector(entering, w.data());
    check_apply_lp_lists_to_w(w.data());
}
// Debug helper: size the matrix to m_dim x m_dim and apply it to the dense
// buffer w; a null matrix is skipped.
template <typename M>
void lu<M>::check_apply_matrix_to_vector(matrix<T, X> *lp, T *w) {
    if (lp != nullptr) {
        lp -> set_number_of_rows(m_dim);
        lp -> set_number_of_columns(m_dim);
        apply_to_vector(*lp, w);
    }
}
// Debug helper: apply every tail matrix and then the reverse of Q to the
// dense buffer w.
template <typename M>
void lu<M>::check_apply_lp_lists_to_w(T * w) {
    for (unsigned i = 0; i < m_tail.size(); i++) {
        check_apply_matrix_to_vector(m_tail[i], w);
    }
    permutation_matrix<T, X> qr = m_Q.get_reverse();
    apply_to_vector(qr, w);
    for (int i = m_dim - 1; i >= 0; i--) {
        // NOTE(review): this assertion compares w[i] with itself, so it is
        // vacuously true; it was presumably meant to compare against an
        // independently computed reference vector -- confirm the intent.
        lp_assert(abs(w[i] - w[i]) < 0.0000001);
    }
}
#endif
// One factorization step for column j: find a pivot (partial pivoting),
// move it onto the diagonal via row/column swaps, then eliminate below it.
// Sets m_failure when no acceptable pivot exists or elimination fails.
template <typename M>
void lu<M>::process_column(int j) {
    unsigned pi, pj;
    bool success = m_U.get_pivot_for_column(pi, pj, m_settings.c_partial_pivoting, j);
    if (!success) {
        // LP_OUT(m_settings, "get_pivot returned false: cannot find the pivot for column " << j << std::endl);
        m_failure = true;
        return;
    }
    if (static_cast<int>(pi) == -1) {
        // LP_OUT(m_settings, "cannot find the pivot for column " << j << std::endl);
        m_failure = true;
        return;
    }
    swap_columns(j, pj);
    swap_rows(j, pi);
    if (!pivot_the_row(j)) {
        // LP_OUT(m_settings, "pivot_the_row(" << j << ") failed" << std::endl);
        m_failure = true;
    }
}
// Debug check (Z3DEBUG only): the product of the tail matrices with the
// basis matrix B must equal Q*U*R.  Always true in release builds.
template <typename M>
bool lu<M>::is_correct(const vector<unsigned>& basis) {
#ifdef Z3DEBUG
    if (get_status() != LU_status::OK) {
        return false;
    }
    dense_matrix<T, X> left_side = get_left_side(basis);
    dense_matrix<T, X> right_side = get_right_side();
    return left_side == right_side;
#else
    return true;
#endif
}
// Same as is_correct(basis), but against the whole (square) matrix A.
template <typename M>
bool lu<M>::is_correct() {
#ifdef Z3DEBUG
    if (get_status() != LU_status::OK) {
        return false;
    }
    dense_matrix<T, X> left_side = get_left_side();
    dense_matrix<T, X> right_side = get_right_side();
    return left_side == right_side;
#else
    return true;
#endif
}
#ifdef Z3DEBUG
// Debug: dense product of all tail matrices (applied left to right onto the
// identity permutation).
template <typename M>
dense_matrix<typename M::coefftype, typename M::argtype> lu<M>::tail_product() {
    lp_assert(tail_size() > 0);
    dense_matrix<T, X> left_side = permutation_matrix<T, X>(m_dim);
    for (unsigned i = 0; i < tail_size(); i++) {
        matrix<T, X>* lp = get_lp_matrix(i);
        lp->set_number_of_rows(m_dim);
        lp->set_number_of_columns(m_dim);
        left_side = ((*lp) * left_side);
    }
    return left_side;
}
// Debug: dense product of the tail matrices with the basis matrix B.
template <typename M>
dense_matrix<typename M::coefftype, typename M::argtype> lu<M>::get_left_side(const vector<unsigned>& basis) {
    dense_matrix<T, X> left_side = get_B(*this, basis);
    for (unsigned i = 0; i < tail_size(); i++) {
        matrix<T, X>* lp = get_lp_matrix(i);
        lp->set_number_of_rows(m_dim);
        lp->set_number_of_columns(m_dim);
        left_side = ((*lp) * left_side);
    }
    return left_side;
}
template <typename M>
dense_matrix<typename M::coefftype, typename M::argtype> lu<M>::get_left_side() {
    // Accumulate tail_{k-1} * ... * tail_0 * B for the stored basis.
    dense_matrix<T, X> acc = get_B(*this);
    for (unsigned k = 0; k < tail_size(); ++k) {
        matrix<T, X>* t = get_lp_matrix(k);
        // Expand the compressed tail matrix to full size before multiplying.
        t->set_number_of_rows(m_dim);
        t->set_number_of_columns(m_dim);
        acc = ((*t) * acc);
    }
    return acc;
}
template <typename M>
dense_matrix<typename M::coefftype, typename M::argtype> lu<M>::get_right_side() {
    // Right-hand side of the factorization invariant: Q * (U * R).
    return Q() * (U() * R());
}
#endif
// needed for debugging purposes
template <typename M>
void lu<M>::copy_w(T *buffer, indexed_vector<T> & w) {
    // Take a dense snapshot of w into the caller-provided buffer
    // (buffer must hold at least m_dim entries).
    for (unsigned i = 0; i < m_dim; ++i)
        buffer[i] = w[i];
}
// needed for debugging purposes
template <typename M>
void lu<M>::restore_w(T *buffer, indexed_vector<T> & w) {
    // Write a snapshot previously taken with copy_w back into w.
    for (unsigned i = 0; i < m_dim; ++i)
        w[i] = buffer[i];
}
template <typename M>
bool lu<M>::all_columns_and_rows_are_active() {
    // Debug invariant: every row and every column of U must still be active.
    // The asserts fire on violation; the return value is for use inside
    // lp_assert(...) at call sites.
    for (unsigned i = 0; i < m_dim; ++i) {
        lp_assert(m_U.col_is_active(i));
        lp_assert(m_U.row_is_active(i));
    }
    return true;
}
template <typename M>
bool lu<M>::too_dense(unsigned j) const {
    // Number of rows still to be factorized once column j is done.
    unsigned remaining = m_dim - j;
    // Tiny trailing blocks never justify switching to dense mode; otherwise
    // switch when the active-element count crosses the density threshold
    // relative to the remaining square block.
    return remaining >= 5 &&
           remaining * remaining * m_settings.density_threshold <= m_U.get_n_of_active_elems();
}
template <typename M>
void lu<M>::pivot_in_dense_mode(unsigned i) {
    // Ask the dense submatrix for a pivot column in row i.
    int piv_col = m_dense_LU->find_pivot_column_in_row(i);
    if (piv_col == -1) {
        m_failure = true;
        return;
    }
    if (static_cast<unsigned>(piv_col) != i) {
        // Keep the sparse wrapper and the dense copy in sync when swapping.
        swap_columns(i, piv_col);
        m_dense_LU->swap_columns(i, piv_col);
    }
    m_dense_LU->pivot(i, m_settings);
}
// Builds the initial LU factorization of the basis matrix.
// Processes columns one by one with sparse partial pivoting; if the
// remaining block becomes dense enough (see too_dense), switches to a
// dense square submatrix for the rest. On any pivot failure the status
// is set to LU_status::Degenerated and the routine aborts.
template <typename M>
void lu<M>::create_initial_factorization(){
    m_U.prepare_for_factorization();
    unsigned j;
    // Sparse phase: eliminate columns until done or the tail is too dense.
    for (j = 0; j < m_dim; j++) {
        process_column(j);
        if (m_failure) {
            set_status(LU_status::Degenerated);
            return;
        }
        if (too_dense(j)) {
            break;
        }
    }
    if (j == m_dim) {
        // All columns were handled sparsely; nothing more to do.
        // TBD does not compile: lp_assert(m_U.is_upper_triangular_and_maximums_are_set_correctly_in_rows(m_settings));
        // lp_assert(is_correct());
        // lp_assert(m_U.is_upper_triangular_and_maximums_are_set_correctly_in_rows(m_settings));
        return;
    }
    // Dense phase: column j was processed sparsely above, so the dense
    // submatrix starts at j + 1.
    j++;
    m_dense_LU = new square_dense_submatrix<T, X>(&m_U, j);
    for (; j < m_dim; j++) {
        pivot_in_dense_mode(j);
        if (m_failure) {
            set_status(LU_status::Degenerated);
            return;
        }
    }
    // Fold the dense result back into U and record the dense L factor
    // (conjugated by Q) in the tail.
    m_dense_LU->update_parent_matrix(m_settings);
    lp_assert(m_dense_LU->is_L_matrix());
    m_dense_LU->conjugate_by_permutation(m_Q);
    push_matrix_to_tail(m_dense_LU);
    m_refactor_counter = 0;
    // lp_assert(is_correct());
    // lp_assert(m_U.is_upper_triangular_and_maximums_are_set_correctly_in_rows(m_settings));
}
// Builds the cyclic column permutation ("r wave") for the bump
// [bump_start, bump_end] and conjugates U with it: the replaced column is
// sent to the end of the bump and every other bump column shifts one
// position left. Sets the status to Degenerated on an inverted bump.
template <typename M>
void lu<M>::calculate_r_wave_and_update_U(unsigned bump_start, unsigned bump_end, permutation_matrix<T, X> & r_wave) {
    if (bump_start > bump_end) {
        // An inverted bump means the update cannot proceed.
        set_status(LU_status::Degenerated);
        return;
    }
    if (bump_start == bump_end) {
        // A single-column bump needs no shift; r_wave stays the identity.
        return;
    }
    r_wave[bump_start] = bump_end; // sending the offensive column to the end of the bump
    for ( unsigned i = bump_start + 1 ; i <= bump_end; i++ ) {
        r_wave[i] = i - 1;
    }
    // Apply the permutation to the columns and its inverse to the rows,
    // so U is conjugated rather than merely permuted.
    m_U.multiply_from_right(r_wave);
    m_U.multiply_from_left_with_reverse(r_wave);
}
// Copies the last row of the bump into m_row_eta_work_vector. Entries in
// columns strictly inside the bump are negated: they become the right-hand
// side of the triangular system solved by pivot_and_solve_the_system.
// Entries at or beyond the bump keep their sign.
template <typename M>
void lu<M>::scan_last_row_to_work_vector(unsigned lowest_row_of_the_bump) {
    vector<indexed_value<T>> & last_row_vec = m_U.get_row_values(m_U.adjust_row(lowest_row_of_the_bump));
    for (auto & iv : last_row_vec) {
        if (is_zero(iv.m_value)) continue;
        lp_assert(!m_settings.abs_val_is_smaller_than_drop_tolerance(iv.m_value));
        // Translate the stored index back into an adjusted column number.
        unsigned adjusted_col = m_U.adjust_column_inverse(iv.m_index);
        if (adjusted_col < lowest_row_of_the_bump) {
            m_row_eta_work_vector.set_value(-iv.m_value, adjusted_col);
        } else {
            m_row_eta_work_vector.set_value(iv.m_value, adjusted_col); // preparing to calculate the real value in the matrix
        }
    }
}
// Solves the triangular system whose right-hand side was placed in
// m_row_eta_work_vector by scan_last_row_to_work_vector, while at the same
// time pivoting the last bump row. Values whose magnitude falls below the
// drop tolerance are removed from the sparse work vector.
template <typename M>
void lu<M>::pivot_and_solve_the_system(unsigned replaced_column, unsigned lowest_row_of_the_bump) {
    // we have the system right side at m_row_eta_work_vector now
    // solve the system column wise
    for (unsigned j = replaced_column; j < lowest_row_of_the_bump; j++) {
        T v = m_row_eta_work_vector[j];
        if (numeric_traits<T>::is_zero(v)) continue; // this column does not contribute to the solution
        unsigned aj = m_U.adjust_row(j);
        vector<indexed_value<T>> & row = m_U.get_row_values(aj);
        for (auto & iv : row) {
            unsigned col = m_U.adjust_column_inverse(iv.m_index);
            // Rows of U are upper triangular in adjusted coordinates.
            lp_assert(col >= j || numeric_traits<T>::is_zero(iv.m_value));
            if (col == j) continue;
            if (numeric_traits<T>::is_zero(iv.m_value)) {
                continue;
            }
            // the -v is for solving the system ( to zero the last row), and +v is for pivoting
            T delta = col < lowest_row_of_the_bump? -v * iv.m_value: v * iv.m_value;
            lp_assert(numeric_traits<T>::is_zero(delta) == false);
            // m_row_eta_work_vector.add_value_at_index_with_drop_tolerance(col, delta);
            if (numeric_traits<T>::is_zero(m_row_eta_work_vector[col])) {
                // Creating a brand-new entry: only store it when it survives
                // the drop tolerance.
                if (!m_settings.abs_val_is_smaller_than_drop_tolerance(delta)){
                    m_row_eta_work_vector.set_value(delta, col);
                }
            } else {
                T t = (m_row_eta_work_vector[col] += delta);
                if (m_settings.abs_val_is_smaller_than_drop_tolerance(t)){
                    // The entry cancelled (numerically): zero it and drop its
                    // index from the sparse index list.
                    m_row_eta_work_vector[col] = numeric_traits<T>::zero();
                    auto it = std::find(m_row_eta_work_vector.m_index.begin(), m_row_eta_work_vector.m_index.end(), col);
                    if (it != m_row_eta_work_vector.m_index.end())
                        m_row_eta_work_vector.m_index.erase(it);
                }
            }
        }
    }
}
// see Achim Koberstein's thesis page 58, but here we solve the system and pivot to the last
// row at the same time.
// Returns a freshly allocated row-eta matrix describing the update of the
// last bump row (the caller takes ownership), or nullptr when the bump is a
// single row or the diagonal check fails (in which case the status is set
// to Degenerated). pivot_elem_for_checking is used, in imprecise arithmetic
// only, to verify that the computed diagonal element has not drifted.
template <typename M>
row_eta_matrix<typename M::coefftype, typename M::argtype> *lu<M>::get_row_eta_matrix_and_set_row_vector(unsigned replaced_column, unsigned lowest_row_of_the_bump, const T & pivot_elem_for_checking) {
    if (replaced_column == lowest_row_of_the_bump) return nullptr;
    scan_last_row_to_work_vector(lowest_row_of_the_bump);
    pivot_and_solve_the_system(replaced_column, lowest_row_of_the_bump);
    // With floating-point arithmetic, compare the computed diagonal against
    // the expected pivot, scaled to a relative tolerance.
    if (numeric_traits<T>::precise() == false && !is_zero(pivot_elem_for_checking)) {
        T denom = std::max(T(1), abs(pivot_elem_for_checking));
        if (
            !m_settings.abs_val_is_smaller_than_pivot_tolerance((m_row_eta_work_vector[lowest_row_of_the_bump] - pivot_elem_for_checking) / denom)) {
            set_status(LU_status::Degenerated);
            // LP_OUT(m_settings, "diagonal element is off" << std::endl);
            return nullptr;
        }
    }
#ifdef Z3DEBUG
    auto ret = new row_eta_matrix<typename M::coefftype, typename M::argtype>(replaced_column, lowest_row_of_the_bump, m_dim);
#else
    auto ret = new row_eta_matrix<typename M::coefftype, typename M::argtype>(replaced_column, lowest_row_of_the_bump);
#endif
    // Move the entries left of the bump end into the eta matrix, zeroing
    // them in the work vector as we go.
    for (auto j : m_row_eta_work_vector.m_index) {
        if (j < lowest_row_of_the_bump) {
            auto & v = m_row_eta_work_vector[j];
            if (!is_zero(v)) {
                if (!m_settings.abs_val_is_smaller_than_drop_tolerance(v)){
                    ret->push_back(j, v);
                }
                v = numeric_traits<T>::zero();
            }
        }
    } // now the lowest_row_of_the_bump contains the rest of the row to the right of the bump with correct values
    return ret;
}
// Forrest-Tomlin style update: replaces one column of U by the spike w and
// restores triangularity. The steps are strictly ordered: column
// replacement, bump permutation (r wave), row-eta extraction, permutation
// bookkeeping on Q and R, and finally the bump's L-wave / P-wave factors.
template <typename M>
void lu<M>::replace_column(T pivot_elem_for_checking, indexed_vector<T> & w, unsigned leaving_column_of_U){
    m_refactor_counter++;
    unsigned replaced_column =  transform_U_to_V_by_replacing_column( w, leaving_column_of_U);
    unsigned lowest_row_of_the_bump = m_U.lowest_row_in_column(replaced_column);
    m_r_wave.init(m_dim);
    calculate_r_wave_and_update_U(replaced_column, lowest_row_of_the_bump, m_r_wave);
    auto row_eta = get_row_eta_matrix_and_set_row_vector(replaced_column, lowest_row_of_the_bump, pivot_elem_for_checking);
    if (get_status() == LU_status::Degenerated) {
        // The update failed; leave the work vector clean for the next attempt.
        m_row_eta_work_vector.clear_all();
        return;
    }
    // Fold the r wave into the global permutations.
    m_Q.multiply_by_permutation_from_right(m_r_wave);
    m_R.multiply_by_permutation_reverse_from_left(m_r_wave);
    if (row_eta != nullptr) {
        // The tail owns the eta matrix from here on.
        row_eta->conjugate_by_permutation(m_Q);
        push_matrix_to_tail(row_eta);
    }
    calculate_Lwave_Pwave_for_bump(replaced_column, lowest_row_of_the_bump);
    // lp_assert(m_U.is_upper_triangular_and_maximums_are_set_correctly_in_rows(m_settings));
    // lp_assert(w.is_OK() && m_row_eta_work_vector.is_OK());
}
// Finishes the bump update: writes the solved last row back into U (when the
// bump has more than one row), validates the resulting diagonal element
// against the pivot tolerance, and emits the final scaling factor for the
// last row. Sets the status to Degenerated on a too-small diagonal.
template <typename M>
void lu<M>::calculate_Lwave_Pwave_for_bump(unsigned replaced_column, unsigned lowest_row_of_the_bump){
    T diagonal_elem;
    if (replaced_column < lowest_row_of_the_bump) {
        // Multi-row bump: the diagonal comes from the work vector, which also
        // holds the rest of the row and is flushed into U here.
        diagonal_elem = m_row_eta_work_vector[lowest_row_of_the_bump];
        // lp_assert(m_row_eta_work_vector.is_OK());
        m_U.set_row_from_work_vector_and_clean_work_vector_not_adjusted(m_U.adjust_row(lowest_row_of_the_bump), m_row_eta_work_vector, m_settings);
    } else {
        diagonal_elem = m_U(lowest_row_of_the_bump, lowest_row_of_the_bump); // todo - get it more efficiently
    }
    if (m_settings.abs_val_is_smaller_than_pivot_tolerance(diagonal_elem)) {
        // A near-zero pivot makes the updated factorization unusable.
        set_status(LU_status::Degenerated);
        return;
    }
    calculate_Lwave_Pwave_for_last_row(lowest_row_of_the_bump, diagonal_elem);
    // lp_assert(m_U.is_upper_triangular_and_maximums_are_set_correctly_in_rows(m_settings));
}
// Records a one-element diagonal factor for the last bump row and rescales
// that row of U so its diagonal becomes one. NOTE(review): the factor is
// pushed to the tail before the division and conjugated afterwards — this
// ordering appears intentional; preserve it.
template <typename M>
void lu<M>::calculate_Lwave_Pwave_for_last_row(unsigned lowest_row_of_the_bump, T diagonal_element) {
    auto l = new one_elem_on_diag<T, X>(lowest_row_of_the_bump, diagonal_element);
#ifdef Z3DEBUG
    l->set_number_of_columns(m_dim);
#endif
    push_matrix_to_tail(l);
    m_U.divide_row_by_constant(lowest_row_of_the_bump, diagonal_element, m_settings);
    l->conjugate_by_permutation(m_Q);
}
// Replaces any previous factorization with a fresh LU of the current basis.
// Callers should inspect factorization->get_status() afterwards: the
// constructor may leave the object in a degenerated state.
template <typename M>
void init_factorization(lu<M>* & factorization, M & m_A, vector<unsigned> & m_basis, lp_settings &m_settings) {
    // `delete` on a null pointer is a no-op, so no guard is needed.
    delete factorization;
    factorization = new lu<M>(m_A, m_basis, m_settings);
    // if (factorization->get_status() != LU_status::OK)
    //     LP_OUT(m_settings, "failing in init_factorization" << std::endl);
}
#ifdef Z3DEBUG
template <typename M>
dense_matrix<typename M::coefftype, typename M::argtype> get_B(lu<M>& f, const vector<unsigned>& basis) {
    // Materialize the basis matrix entrywise via f.B_(i, j, basis).
    lp_assert(basis.size() == f.dimension());
    lp_assert(basis.size() == f.m_U.dimension());
    const unsigned dim = f.dimension();
    dense_matrix<typename M::coefftype, typename M::argtype> B(dim, dim);
    for (unsigned row = 0; row < dim; row++) {
        for (unsigned col = 0; col < dim; col++) {
            B.set_elem(row, col, f.B_(row, col, basis));
        }
    }
    return B;
}
template <typename M>
dense_matrix<typename M::coefftype, typename M::argtype> get_B(lu<M>& f) {
    // Copy the underlying matrix m_A entrywise into a dense matrix.
    const unsigned dim = f.dimension();
    dense_matrix<typename M::coefftype, typename M::argtype> B(dim, dim);
    for (unsigned row = 0; row < dim; row++) {
        for (unsigned col = 0; col < dim; col++) {
            B.set_elem(row, col, f.m_A[row][col]);
        }
    }
    return B;
}
#endif
}
| /*++
Copyright (c) 2017 Microsoft Corporation
Module Name:
<name>
Abstract:
<abstract>
Author:
Lev Nachmanson (levnach)
Revision History:
--*/ |
list.ml |
(* ['a t] is a bare OCaml list. *)
type 'a t = 'a list
(* [pure x] is the singleton list [[x]]. *)
let pure x = [ x ]
(* Foldable instance derived from the canonical right fold on lists. *)
module Foldable = Preface_make.Foldable.Via_fold_right (struct
  type nonrec 'a t = 'a t
  let fold_right f x acc = Stdlib.List.fold_right f x acc
end)
(* Functor instance: [map] is [Stdlib.List.map]. *)
module Functor = Preface_make.Functor.Via_map (struct
  type nonrec 'a t = 'a t
  let map = Stdlib.List.map
end)
(* Alternative instance. [apply fs xs] applies every function of [fs] to
   every element of [xs], keeping function-major order; [neutral] is the
   empty list and [combine] is concatenation. *)
module Alternative = Preface_make.Alternative.Via_pure_and_apply (struct
  type nonrec 'a t = 'a t
  let pure = pure
  let apply fs xs =
    Stdlib.List.concat (Stdlib.List.map (fun f -> Stdlib.List.map f xs) fs)
  ;;
  let neutral = []
  let combine l r = l @ r
end)
(* Traversable over an arbitrary applicative. The result list is built in
   reverse with [cons] and flipped once at the end, so only one [rev] is
   paid per traversal. *)
module Applicative_traversable (A : Preface_specs.APPLICATIVE) =
  Preface_make.Traversable.Over_applicative
    (A)
    (struct
      type 'a t = 'a A.t
      type 'a iter = 'a list
      let traverse f =
        let open A.Infix in
        let rec traverse acc = function
          (* Input exhausted: reverse the accumulated result inside A. *)
          | [] -> Stdlib.List.rev <$> acc
          | x :: xs -> traverse (A.lift2 Stdlib.List.cons (f x) acc) xs
        in
        traverse (A.pure [])
      ;;
    end)
(* Applicative for lists, bundled with its traversable structure. *)
module Applicative =
  Preface_make.Traversable.Join_with_applicative
    (Alternative)
    (Applicative_traversable)
(* MonadPlus for lists: [bind] is a tail-recursive concat-map. *)
module Monad_plus = Preface_make.Monad_plus.Via_bind (struct
  type nonrec 'a t = 'a t
  let return = pure
  (* Implementation from OCaml 4.10.0 *)
  let bind f =
    let rec aux_bind acc = function
      | [] -> Stdlib.List.rev acc
      | x :: tail ->
        let xs = f x in
        aux_bind (Stdlib.List.rev_append xs acc) tail
    in
    aux_bind []
  ;;
  let neutral = []
  let combine l r = l @ r
end)
(* Traversable over an arbitrary monad; same accumulator scheme as the
   applicative version. *)
module Monad_traversable (M : Preface_specs.MONAD) =
  Preface_make.Traversable.Over_monad
    (M)
    (struct
      type 'a t = 'a M.t
      type 'a iter = 'a list
      let traverse f =
        let open M.Infix in
        let rec traverse acc = function
          | [] -> acc >|= Stdlib.List.rev
          | x :: xs -> traverse (M.lift2 Stdlib.List.cons (f x) acc) xs
        in
        traverse (M.return [])
      ;;
    end)
(* Monad for lists, bundled with its traversable structure. *)
module Monad =
  Preface_make.Traversable.Join_with_monad (Monad_plus) (Monad_traversable)
(* Selective derived from the monad's [select]. *)
module Selective =
  Preface_make.Selective.Over_applicative_via_select
    (Applicative)
    (Preface_make.Selective.Select_from_monad (Monad))
(* Invariant functor obtained from the covariant one. *)
module Invariant = Preface_make.Invariant.From_functor (Functor)
(* Lists of [T.t] form a monoid under concatenation. *)
module Monoid (T : Preface_specs.Types.T0) =
  Preface_make.Monoid.Via_combine_and_neutral (struct
    type nonrec t = T.t t
    let combine l r = l @ r
    let neutral = []
  end)
(* [equal f a b] compares two lists pairwise with [f]; lists of different
   lengths are never equal. *)
let equal f a b =
  let rec go l r =
    match (l, r) with
    | [], [] -> true
    | x :: l', y :: r' -> f x y && go l' r'
    | _, _ -> false
  in
  go a b
;;
(* [pp pp_elt ppf lst] prints [lst] as "[e1; e2; ...]" inside a box,
   rendering each element with [pp_elt]. *)
let pp pp_elt ppf lst =
  let sep out () = Format.fprintf out ";@ " in
  Format.fprintf ppf "@[[%a]@]" (Format.pp_print_list ~pp_sep:sep pp_elt) lst
;;
| |
perm.h |
#ifndef PERM_H
#define PERM_H
#ifdef PERM_INLINES_C
#define PERM_INLINE FLINT_DLL
#else
#define PERM_INLINE static __inline__
#endif
#undef ulong
#define ulong ulongxx /* interferes with system includes */
#include <stdlib.h>
#include <stdio.h>
#undef ulong
#include <gmp.h>
#define ulong mp_limb_t
#include "flint.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Memory management *********************************************************/
/* Allocate a permutation of length n, initialised to the identity.
   Aborts the process if the allocation fails. */
PERM_INLINE slong * _perm_init(slong n)
{
    slong *vec = (slong *) flint_malloc(n * sizeof(slong));
    slong i;

    if (vec == NULL)
    {
        flint_printf("ERROR (_perm_init).\n\n");
        flint_abort();
    }

    for (i = 0; i < n; i++)
        vec[i] = i;

    return vec;
}
/* Free a permutation previously allocated with _perm_init. */
PERM_INLINE void _perm_clear(slong * vec)
{
    flint_free(vec);
}
/* Assignment ****************************************************************/
/* Return 1 if vec1 and vec2 agree on their first n entries, else 0. */
PERM_INLINE slong _perm_equal(const slong *vec1, const slong *vec2, slong n)
{
    slong i = 0;

    while (i < n)
    {
        if (vec1[i] != vec2[i])
            return 0;
        i++;
    }

    return 1;
}
/* Copy the first n entries of vec into res. */
PERM_INLINE void _perm_set(slong *res, const slong *vec, slong n)
{
    slong i = 0;

    while (i < n)
    {
        res[i] = vec[i];
        i++;
    }
}
/* Set vec to the identity permutation of length n. */
PERM_INLINE void _perm_set_one(slong *vec, slong n)
{
    slong i = 0;

    while (i < n)
    {
        vec[i] = i;
        i++;
    }
}
/* Set res to the inverse of vec, i.e. res[vec[i]] = i for all i.
   The aliased case res == vec is handled with a temporary copy. */
PERM_INLINE void
_perm_inv(slong *res, const slong *vec, slong n)
{
    slong i;

    if (res != vec)
    {
        for (i = 0; i < n; i++)
            res[vec[i]] = i;
        return;
    }

    {
        slong *copy = (slong *) flint_malloc(n * sizeof(slong));

        if (copy == NULL)
        {
            flint_printf("ERROR (_perm_inv).\n\n");
            flint_abort();
        }

        for (i = 0; i < n; i++)
            copy[i] = vec[i];
        for (i = 0; i < n; i++)
            res[copy[i]] = i;

        flint_free(copy);
    }
}
/* Composition ***************************************************************/
PERM_INLINE void
_perm_compose(slong *res, const slong *vec1, const slong *vec2, slong n)
{
slong i;
if (res == vec1)
{
slong *t = (slong *) flint_malloc(n * sizeof(slong));
for (i = 0; i < n; i++)
t[i] = vec1[i];
for (i = 0; i < n; i++)
res[i] = t[vec2[i]];
flint_free(t);
}
else
{
for (i = 0; i < n; i++)
res[i] = vec1[vec2[i]];
}
}
/* Randomisation *************************************************************/
FLINT_DLL int _perm_randtest(slong * vec, slong n, flint_rand_t state);
/* Parity ********************************************************************/
FLINT_DLL int _perm_parity(const slong * vec, slong n);
/* Input and output **********************************************************/
/* Print len followed by the len entries of vec, space separated.
   Always returns 1. */
PERM_INLINE int _long_vec_print(const slong * vec, slong len)
{
    slong i;

    flint_printf("%wd", len);
    /* An extra separator goes between the length and the first entry. */
    if (len > 0)
        flint_printf(" ");
    for (i = 0; i < len; i++)
        flint_printf(" %wd", vec[i]);

    return 1;
}
/* Print the permutation vec of length n in the same format as
   _long_vec_print. Always returns 1. */
PERM_INLINE int _perm_print(const slong * vec, slong n)
{
    slong i;

    flint_printf("%wd", n);
    /* An extra separator goes between the length and the first entry. */
    if (n > 0)
        flint_printf(" ");
    for (i = 0; i < n; i++)
        flint_printf(" %wd", vec[i]);

    return 1;
}
#ifdef __cplusplus
}
#endif
#endif
| /*
Copyright (C) 2011 Sebastian Pancratz
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
memtrace.mli | (** If the MEMTRACE environment variable is set, begin tracing to the file
it specifies, and continue tracing until the process exits.
The context is an arbitrary string, which is logged in the trace.
It may be useful to identify trace files.
The sampling_rate is the proportion of allocated words that should be
sampled. Values larger than about 1e-4 will have some performance impact.
The sampling rate can also be specified with the MEMTRACE_RATE environment
variable. If both means are used, the env var takes precedence.
May raise Unix.Unix_error if the specified file cannot be opened, or
Invalid_argument if the MEMTRACE_RATE parameter is ill-formed. *)
val trace_if_requested : ?context:string -> ?sampling_rate:float -> unit -> unit
(** Handle on a trace session started manually with [start_tracing];
    pass it to [stop_tracing] to end the session. *)
type tracer
(** Manually start tracing to [filename] at the given [sampling_rate];
    [context] is recorded in the trace when provided. *)
val start_tracing :
  context:string option ->
  sampling_rate:float ->
  filename:string ->
  tracer
(** Manually stop a trace session started with [start_tracing]. *)
val stop_tracing : tracer -> unit
(* Sampling rate used when none is specified explicitly. *)
val default_sampling_rate : float
(** Use the Trace module to read and write trace files *)
module Trace = Trace
(** Use Memprof_tracer in conjunction with Trace.Writer for more manual
    control over trace collection *)
module Memprof_tracer = Memprof_tracer
(** Use External to track non-GC-heap allocations in a Memtrace trace *)
module External : sig
  (* Identifies one sampled external allocation until it is freed. *)
  type token [@@immediate]
  (** [alloc ~bytes] reports an allocation of a given number of bytes.
      If tracing is enabled, a small fraction of the calls to this function
      will return [Some tok], where [tok] should be passed to [free] when
      the object is freed.
      This function is very fast in the common case where it returns [None] *)
  val alloc : bytes:int -> token option
  (** [free tok] reports that the allocation which produced [tok] was freed. *)
  val free : token -> unit
end
(** (For testing) *)
module Geometric_sampler = Geometric_sampler
| (** If the MEMTRACE environment variable is set, begin tracing to the file
it specifies, and continue tracing until the process exits.
The context is an arbitrary string, which is logged in the trace.
It may be useful to identify trace files.
The sampling_rate is the proportion of allocated words that should be
sampled. Values larger than about 1e-4 will have some performance impact.
The sampling rate can also be specified with the MEMTRACE_RATE environment
variable. If both means are used, the env var takes precedence.
May raise Unix.Unix_error if the specified file cannot be opened, or
Invalid_argument if the MEMTRACE_RATE parameter is ill-formed. *) |
include_stanza.mli |
open Import
type context
val in_file : Path.Source.t -> context
val load_sexps :
context:context -> Loc.t * string -> (Dune_lang.Ast.t list * context) Memo.t
| |
with_2.ml |
let x =
try y with
| A -> _
| B -> _
let x = try y with
| A -> _
| B -> _
let x =
try y with
A -> _
| B -> _
let x = try y with
A -> _
| B -> _
let _ =
let x =
try y with
| A -> _
| B -> _
in
let x = try y with
| A -> _
| B -> _
in
let x =
try y with
A -> _
| B -> _
in
let x = try y with
A -> _
| B -> _
| |
ppx_have.ml |
(* When set (via --gen-all), emit dummy bindings even for externals whose
   feature is unavailable, instead of dropping them. *)
let all = ref false
(* Maps each external's name to whether its guarding feature was present. *)
let funcs = Hashtbl.create 16
(* Command-line flags registered with the ppx driver. *)
let args_spec =
  [
    ("--gen-all", Arg.Set all, "generate values from all [%%have ...] sections");
  ]
module ExtUnixConfig = Config
open Ppxlib
(* [check ~loc name] is the availability of feature [name]; fails at [loc]
   if the feature was never registered. *)
let check ~loc name =
  match ExtUnixConfig.feature name with
  | None -> Location.raise_errorf ~loc "Unregistered feature %s" name
  | Some have -> have
(* Located identifier helper. *)
let ident x = Ocaml_common.Location.mknoloc (lident x)
(* Evaluating conditions: a [%%have ...] payload is a disjunction of
   conjunctions of feature atoms. A bare constructor is one atom; a tuple
   groups several. *)
(* One feature name, written as a bare constructor (e.g. [FOO]). *)
let atom_of_expr ~loc expr =
  match expr.pexp_desc with
  | Pexp_construct ({ txt = Longident.Lident x; _ }, None) -> x
  | _ -> Location.raise_errorf ~loc "have: atom_of_expr"
(* A conjunction: either a single atom or a tuple of atoms. *)
let conj_of_expr ~loc expr =
  match expr.pexp_desc with
  | Pexp_construct _ -> [ atom_of_expr ~loc expr ]
  | Pexp_tuple args -> List.map (atom_of_expr ~loc) args
  | _ -> Location.raise_errorf ~loc "have: conj_of_expr"
(* A disjunction: either a single conjunction or a tuple of them. *)
let disj_of_expr ~loc expr =
  match expr.pexp_desc with
  | Pexp_construct _ -> [ [ atom_of_expr ~loc expr ] ]
  | Pexp_tuple args -> List.map (conj_of_expr ~loc) args
  | _ -> Location.raise_errorf ~loc "have: disj_of_expr"
(* Evaluate the payload: true iff some conjunction has all its features. *)
let eval_cond ~loc cond =
  match cond.pstr_desc with
  | Pstr_eval (expr, _attributes) ->
    List.exists (List.for_all (check ~loc)) (disj_of_expr ~loc expr)
  | _ -> Location.raise_errorf ~loc "have: eval_cond"
(* have rule *)
(* [invalid_external ~loc item] rewrites an [external] declaration into a
   regular [let] of the same name and arity whose body raises
   [Not_available "<name>"]. Used under --gen-all for unavailable
   features, so client code still typechecks. *)
let invalid_external ~loc =
  let open Ast_builder.Default in
  (* Wrap [body] in one fun per arrow of [typ], ignoring every argument;
     optional arguments are received as ['a option]. *)
  let rec make_dummy_f ~loc body typ =
    match typ.ptyp_desc with
    | Ptyp_arrow (l, arg, ret) ->
      let arg =
        match l with Optional _ -> [%type: [%t arg] option] | _ -> arg
      in
      let e = make_dummy_f ~loc body ret in
      pexp_fun ~loc l None [%pat? (_ : [%t arg])] e
    | _ -> [%expr ([%e body] : [%t typ])]
  in
  (* The innermost body: raise Not_available with the external's name. *)
  let raise_not_available ~loc x =
    let e = pexp_constant ~loc (Pconst_string (x, loc, None)) in
    [%expr raise (Not_available [%e e])]
  in
  let externals_of =
    object
      inherit Ast_traverse.map as super
      method! structure_item x =
        match x.pstr_desc with
        | Pstr_primitive p ->
          let body = raise_not_available ~loc p.pval_name.txt in
          let expr = make_dummy_f ~loc body p.pval_type in
          let pat = ppat_var ~loc p.pval_name in
          let vb = value_binding ~loc ~pat ~expr in
          (* Keep the attributes of the original external on the binding. *)
          let vb =
            { vb with pvb_attributes = p.pval_attributes @ vb.pvb_attributes }
          in
          pstr_value ~loc Nonrecursive [ vb ]
        | _ -> super#structure_item x
    end
  in
  externals_of#structure_item
(* [record_external have item] registers every [external] found in [item]
   in the [funcs] table, tagged with the availability flag [have]. *)
let record_external have =
  let externals_of =
    object
      inherit Ast_traverse.iter as super
      method! structure_item x =
        match x.pstr_desc with
        | Pstr_primitive p -> Hashtbl.replace funcs p.pval_name.txt have
        | _ -> super#structure_item x
    end
  in
  externals_of#structure_item
(* Expander for [%%have COND] sections: keep the items when the condition
   holds; otherwise either replace externals with raising stubs
   (--gen-all) or drop the section entirely. *)
let have_expand ~ctxt cond items =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let have = eval_cond ~loc cond in
  (* Remember availability of every external, for show_me_the_money. *)
  List.iter (record_external have) items;
  match (have, !all) with
  | true, _ -> items
  | false, true -> List.map (invalid_external ~loc) items
  | false, false -> []
let have_extension =
  Extension.V3.declare_inline "have" Extension.Context.structure_item
    Ast_pattern.(pstr (__ ^:: __))
    have_expand
let have_rule = Context_free.Rule.extension have_extension
(* show_me_the_money rule *)
(* Expander for [%%show_me_the_money]: under --gen-all, emits a
   [have : string -> bool option] function reporting, per external name,
   whether it is available; otherwise expands to nothing. *)
let show_me_the_money_expand ~ctxt doc =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let open Ast_builder.Default in
  (* One case per recorded external, plus a catch-all returning None. *)
  let make_have () =
    Hashtbl.fold
      (fun func have acc ->
        let lhs = ppat_constant ~loc (Pconst_string (func, loc, None)) in
        let e = pexp_construct ~loc (ident (string_of_bool have)) None in
        case ~lhs ~guard:None ~rhs:[%expr Some [%e e]] :: acc)
      funcs
      [ case ~lhs:[%pat? _] ~guard:None ~rhs:[%expr None] ]
  in
  if !all then
    let expr = pexp_function ~loc (make_have ()) in
    let pat = ppat_var ~loc (Ocaml_common.Location.mknoloc "have") in
    let vb = value_binding ~loc ~pat ~expr in
    (* Attach the documentation attribute from the payload. *)
    let vb = { vb with pvb_attributes = doc :: vb.pvb_attributes } in
    [ pstr_value ~loc Nonrecursive [ vb ] ]
  else []
let show_me_the_money_extension =
  Extension.V3.declare_inline "show_me_the_money"
    Extension.Context.structure_item
    Ast_pattern.(pstr (pstr_attribute __ ^:: nil))
    show_me_the_money_expand
let show_me_the_money_rule =
  Context_free.Rule.extension show_me_the_money_extension
(* Register the command-line flag and both rewrite rules with the driver. *)
let () =
  List.iter (fun (key, spec, doc) -> Driver.add_arg key spec ~doc) args_spec;
  let rules = [ have_rule; show_me_the_money_rule ] in
  Driver.register_transformation ~rules "ppx_have"
| |
toposort.ml |
exception CycleFound

type order =
  [ `Dependency
  | `Reverse
  ]

(* Depth-first search used by [toposort]. A node already on the current
   [path] means a cycle; a node already in the accumulator is skipped.
   Results are accumulated with [::], i.e. in reverse dependency order.
   Adapted from:
   https://stackoverflow.com/questions/4653914/topological-sort-in-ocaml *)
let dfs ~equal graph ~edges visited start_node =
  let mem node nodes = List.exists (equal node) nodes in
  let rec explore path seen node =
    if mem node path then raise CycleFound
    else if mem node seen then seen
    else
      let seen =
        List.fold_left (explore (node :: path)) seen (edges graph node)
      in
      node :: seen
  in
  explore [] visited start_node

(* [toposort ?order ~equal ~edges graph] sorts the nodes of [graph] so
   that, by default, every node appears after its dependencies. *)
let toposort ?(order = `Dependency) ~equal ~edges graph =
  let reversed =
    List.fold_left (fun seen node -> dfs ~equal ~edges graph seen node) [] graph
  in
  (* [dfs] sorts in reverse by default (easier with [::]). *)
  match order with `Dependency -> List.rev reversed | `Reverse -> reversed
| (*----------------------------------------------------------------------------
* Copyright (c) 2020, António Nuno Monteiro
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*---------------------------------------------------------------------------*) |
dune |
; Silence warning 69 (unused mutable field) in dev builds.
(env
 (dev
  (flags
   (:standard -w -69))))
; Base utilities shared by the other mm libraries.
(library
 (name mm_base)
 (public_name mm.base)
 (libraries unix)
 (modules ringbuffer iO)
 (synopsis
  "High-level APIs to create and manipulate multimedia streams -- base modules"))
; Audio support, with C stubs that depend on the generated config.h.
(library
 (name mm_audio)
 (public_name mm.audio)
 (libraries mm.base)
 (modules audio)
 (foreign_stubs
  (extra_deps config.h)
  (language c)
  (names audio_c))
 (synopsis
  "High-level APIs to create and manipulate multimedia streams -- audio module"))
; Image support (pixel formats, canvases) with C stubs.
(library
 (name mm_image)
 (public_name mm.image)
 (libraries unix)
 (modules
  imageBase
  imageBitmap
  imageBGRA
  imageRGBA32
  imageYUV420
  imageGeneric
  imageCanvas
  image)
 (foreign_stubs
  (extra_deps config.h)
  (language c)
  (names image_data image_pixel image_rgb image_yuv420))
 (synopsis
  "High-level APIs to create and manipulate multimedia streams -- image module"))
; Video support, built on the audio and image libraries.
(library
 (name mm_video)
 (public_name mm.video)
 (libraries mm.base mm.image mm.audio)
 (modules video)
 (synopsis
  "High-level APIs to create and manipulate multimedia streams -- video module"))
; MIDI support and a simple synthesizer.
(library
 (name mm_midi)
 (public_name mm.midi)
 (libraries mm.base mm.audio)
 (modules MIDI synth)
 (synopsis
  "High-level APIs to create and manipulate multimedia streams -- midi module"))
; Umbrella library re-exporting the sub-libraries.
(library
 (name mm)
 (public_name mm)
 (libraries mm.audio mm.image mm.video mm.midi)
 (modules mm)
 (synopsis "High-level APIs to create and manipulate multimedia streams"))
; config.h is produced by the discover program at build time.
(rule
 (targets config.h)
 (action
  (run ./config/discover.exe)))
; mm.ml is generated elsewhere; fallback mode means this echo action only
; runs when the file is missing, which should not happen.
(rule
 (targets mm.ml)
 (mode fallback)
 (deps config.print)
 (action
  (echo "this should not happen")))
; config.print is produced by the setup program.
(rule
 (targets config.print)
 (action
  (run ./config/setup.exe)))
| |
metapp_api.ml |
(* A unary type constructor. *)
module type UnaryS = sig
  type 'a t
end
(* A unary type constructor together with a way to build a fresh value. *)
module type UnaryMakeS = sig
  type 'a t
  val make : unit -> 'a t
end
(* Record with one ['a x] field for every kind of AST node in which a
   metapoint can occur. Instantiating ['a x] yields a per-node-kind
   collector. *)
module type MetapointsS = sig
  type 'a x
  type t = {
      expr : Ppxlib.expression x;
      pat : Ppxlib.pattern x;
      typ : Ppxlib.core_type x;
      class_type : Ppxlib.class_type x;
      class_type_field : Ppxlib.class_type_field x;
      class_expr : Ppxlib.class_expr x;
      class_field : Ppxlib.class_field x;
      module_type : Ppxlib.module_type x;
      module_expr : Ppxlib.module_expr x;
      signature_item : Ppxlib.signature_item x;
      structure_item : Ppxlib.structure_item x;
    }
end
(* Record with one ['a x] field for every kind of AST node that can be
   quoted. Note the field set differs from [MetapointsS]: whole
   signatures/structures can be quoted, but class/module nodes cannot. *)
module type QuotationsS = sig
  type 'a x
  type t = {
      expr : Ppxlib.expression x;
      pat : Ppxlib.pattern x;
      typ : Ppxlib.core_type x;
      signature : Ppxlib.signature x;
      signature_item : Ppxlib.signature_item x;
      structure : Ppxlib.structure x;
      structure_item : Ppxlib.structure_item x;
    }
end
(* An AST kind usable as a metapoint: extensible, plus an accessor functor
   selecting this kind's field in any [MetapointsS] record. *)
module type MetapointS = sig
  include Metapp_preutils.ExtensibleS
  module MetapointAccessor (Collector : MetapointsS) : sig
    val get : Collector.t -> t Collector.x
    val set : t Collector.x -> Collector.t -> Collector.t
  end
end
(* An AST kind usable as a quotation: visitable, buildable from an
   extension payload, plus the analogous accessor functor for
   [QuotationsS] records. *)
module type QuotationS = sig
  include Metapp_preutils.VisitableS
  val of_payload : Ppxlib.payload -> t
  module QuotationAccessor (Collector : QuotationsS) : sig
    val get : Collector.t -> t Collector.x
    val set : t Collector.x -> Collector.t -> Collector.t
  end
end
(* One module per AST kind, each extending the corresponding
   [Metapp_preutils] helper with the accessor functor(s) for its field in
   the collector records. Kinds present in both [MetapointsS] and
   [QuotationsS] (Exp, Pat, Typ, Sigi, Stri) provide both accessors;
   class/module kinds are metapoints only; Sig and Str are quotations
   only. *)
module Exp = struct
  include Metapp_preutils.Exp
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.expr
    let set (x : t Collector.x) (c : Collector.t) =
      { c with expr = x }
  end
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.expr
    let set (x : t Collector.x) (c : Collector.t) =
      { c with expr = x }
  end
end
module Pat = struct
  include Metapp_preutils.Pat
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.pat
    let set (x : t Collector.x) (c : Collector.t) =
      { c with pat = x }
  end
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.pat
    let set (x : t Collector.x) (c : Collector.t) =
      { c with pat = x }
  end
end
module Typ = struct
  include Metapp_preutils.Typ
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.typ
    let set (x : t Collector.x) (c : Collector.t) =
      { c with typ = x }
  end
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.typ
    let set (x : t Collector.x) (c : Collector.t) =
      { c with typ = x }
  end
end
(* Class types: metapoint only. *)
module Cty = struct
  include Metapp_preutils.Cty
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.class_type
    let set (x : t Collector.x) (c : Collector.t) =
      { c with class_type = x }
  end
end
module Ctf = struct
  include Metapp_preutils.Ctf
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.class_type_field
    let set (x : t Collector.x) (c : Collector.t) =
      { c with class_type_field = x }
  end
end
module Cl = struct
  include Metapp_preutils.Cl
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.class_expr
    let set (x : t Collector.x) (c : Collector.t) =
      { c with class_expr = x }
  end
end
module Cf = struct
  include Metapp_preutils.Cf
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.class_field
    let set (x : t Collector.x) (c : Collector.t) =
      { c with class_field = x }
  end
end
module Mty = struct
  include Metapp_preutils.Mty
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.module_type
    let set (x : t Collector.x) (c : Collector.t) =
      { c with module_type = x }
  end
end
module Mod = struct
  include Metapp_preutils.Mod
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.module_expr
    let set (x : t Collector.x) (c : Collector.t) =
      { c with module_expr = x }
  end
end
(* Whole signatures: quotation only. *)
module Sig = struct
  include Metapp_preutils.Sig
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.signature
    let set (x : t Collector.x) (c : Collector.t) =
      { c with signature = x }
  end
end
module Sigi = struct
  include Metapp_preutils.Sigi
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.signature_item
    let set (x : t Collector.x) (c : Collector.t) =
      { c with signature_item = x }
  end
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.signature_item
    let set (x : t Collector.x) (c : Collector.t) =
      { c with signature_item = x }
  end
end
(* Whole structures: quotation only. *)
module Str = struct
  include Metapp_preutils.Str
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.structure
    let set (x : t Collector.x) (c : Collector.t) =
      { c with structure = x }
  end
end
module Stri = struct
  include Metapp_preutils.Stri
  module MetapointAccessor (Collector : MetapointsS) = struct
    let get (c : Collector.t) : t Collector.x = c.structure_item
    let set (x : t Collector.x) (c : Collector.t) =
      { c with structure_item = x }
  end
  module QuotationAccessor (Collector : QuotationsS) = struct
    let get (c : Collector.t) : t Collector.x = c.structure_item
    let set (x : t Collector.x) (c : Collector.t) =
      { c with structure_item = x }
  end
end
(* [MetapointsMakeS]: a metapoint collector signature extended with a [Make]
   functor that builds a fresh collector from a per-field generator [X]. *)
module type MetapointsMakeS = sig
  include MetapointsS
  module Make (X : UnaryMakeS with type 'a t = 'a x) : sig
    val make : unit -> t
  end
end
(* [MetapointsWithMakeS]: as above, but with [make] exposed directly. *)
module type MetapointsWithMakeS = sig
  include MetapointsS
  val make : unit -> t
end
(* Quotation counterparts of the two signatures above. *)
module type QuotationsMakeS = sig
  include QuotationsS
  module Make (X : UnaryMakeS with type 'a t = 'a x) : sig
    val make : unit -> t
  end
end
module type QuotationsWithMakeS = sig
  include QuotationsS
  val make : unit -> t
end
(* Builds the record of metapoints instantiated at ['a X.t].  The recursive
   module [Sub] is a trick to obtain a concrete module matching
   [MetapointsS with type 'a x = 'a X.t] (in particular its record type [t])
   without spelling the record declaration out again. *)
module Metapoints (X : UnaryS)
    : MetapointsMakeS with type 'a x = 'a X.t = struct
  module rec Sub : sig
    include MetapointsS with type 'a x = 'a X.t
  end = struct
    include Sub
  end
  include Sub
  (* Builds a collector by calling [X.make] once per syntactic category. *)
  module Make (X : UnaryMakeS with type 'a t = 'a X.t) = struct
    let make () = {
      expr = X.make ();
      pat = X.make ();
      typ = X.make ();
      class_type = X.make ();
      class_type_field = X.make ();
      class_expr = X.make ();
      class_field = X.make ();
      module_type = X.make ();
      module_expr = X.make ();
      signature_item = X.make ();
      structure_item = X.make ();
    }
  end
end
(* Same construction for quotation collectors (fewer categories, plus whole
   signatures and structures). *)
module Quotations (X : UnaryS)
    : QuotationsMakeS with type 'a x = 'a X.t = struct
  module rec Sub : sig
    include QuotationsS with type 'a x = 'a X.t
  end = struct
    include Sub
  end
  include Sub
  module Make (X : UnaryMakeS with type 'a t = 'a X.t) = struct
    let make () = {
      expr = X.make ();
      pat = X.make ();
      typ = X.make ();
      signature = X.make ();
      signature_item = X.make ();
      structure = X.make ();
      structure_item = X.make ();
    }
  end
end
(* Maps each metapoint category to a string equal to its field name. *)
module MetapointName = Metapoints (struct type 'a t = string end)
let metapoint_name : MetapointName.t =
  {
    expr = "expr";
    pat = "pat";
    typ = "typ";
    class_type = "class_type";
    class_type_field = "class_type_field";
    class_expr = "class_expr";
    class_field = "class_field";
    module_type = "module_type";
    module_expr = "module_expr";
    signature_item = "signature_item";
    structure_item = "structure_item";
  }
(* Likewise for quotation categories. *)
module QuotationName = Quotations (struct type 'a t = string end)
let quotation_name : QuotationName.t =
  {
    expr = "expr";
    pat = "pat";
    typ = "typ";
    signature = "signature";
    signature_item = "signature_item";
    structure = "structure";
    structure_item = "structure_item";
  }
(* Convenience functors bundling the collector construction with its [make]
   function. *)
module MetapointsWithMake (X : UnaryMakeS) : MetapointsWithMakeS
with type 'a x = 'a X.t = struct
  include Metapoints (X)
  include Make (X)
end
module QuotationsWithMake (X : UnaryMakeS) : QuotationsWithMakeS
with type 'a x = 'a X.t = struct
  include Quotations (X)
  include Make (X)
end
(* A polymorphic function from ['a x] to ['a y], packaged as a module so it
   can be passed to the map functors below. *)
module type Map = sig
  type 'a x
  type 'a y
  val map : 'a x -> 'a y
end
(* Applies [M.map] to every field of a metapoint collector. *)
module MetapointMap (X : MetapointsS) (Y : MetapointsS)
    (M : Map with type 'a x = 'a X.x and type 'a y = 'a Y.x) = struct
  let map (x : X.t) : Y.t = {
    expr = M.map x.expr;
    pat = M.map x.pat;
    typ = M.map x.typ;
    class_type = M.map x.class_type;
    class_type_field = M.map x.class_type_field;
    class_expr = M.map x.class_expr;
    class_field = M.map x.class_field;
    module_type = M.map x.module_type;
    module_expr = M.map x.module_expr;
    signature_item = M.map x.signature_item;
    structure_item = M.map x.structure_item;
  }
end
(* Applies [M.map] to every field of a quotation collector. *)
module QuotationMap (X : QuotationsS) (Y : QuotationsS)
    (M : Map with type 'a x = 'a X.x and type 'a y = 'a Y.x) = struct
  let map (x : X.t) : Y.t = {
    expr = M.map x.expr;
    pat = M.map x.pat;
    typ = M.map x.typ;
    signature = M.map x.signature;
    signature_item = M.map x.signature_item;
    structure = M.map x.structure;
    structure_item = M.map x.structure_item;
  }
end
(* Collector instantiations over arrays:
   - [OptionArrayMetapoints]: per category, an array of optional values
     (presumably one slot per metapoint occurrence — usage is outside this
     chunk);
   - [MetapointsLocation]: per category, an array of source locations. *)
module OptionArray = struct
  type 'a t = 'a option array
end
module OptionArrayMetapoints = Metapoints (OptionArray)
module LocationArray = struct
  type _ t = Location.t array
end
module MetapointsLocation = Metapoints (LocationArray)
(* A quotation paired with everything needed to fill it: the collected
   metapoint values, their locations, and nested sub-quotations.  The module
   recursion exists because sub-quotations are themselves quotations. *)
module rec ArrayQuotation : sig
  type context = {
    metapoints : OptionArrayMetapoints.t;
    loc : MetapointsLocation.t;
    subquotations : ArrayQuotations.t;
  }
  type 'a quotation = {
    context : context;
    fill : unit -> 'a;
  }
  type 'a t = (unit -> 'a quotation) array
end = struct
  include ArrayQuotation
end
and ArrayQuotations : QuotationsS with type 'a x = 'a ArrayQuotation.t =
  Quotations (ArrayQuotation)
type context = ArrayQuotation.context
(* The context of the quotation currently being expanded, if any. *)
let top_context : context option ref =
  ref None
| |
web_bearer_token.ml |
(* Key under which the bearer token is stored in the request context;
   [sexp_of_string] is the serializer used when the context is displayed. *)
let key : string Opium.Context.key =
  Opium.Context.Key.create ("token", Sexplib.Std.(sexp_of_string))
;;
(* Returns the token previously stored by [set]; raises if absent. *)
let find req = Opium.Context.find_exn key req.Opium.Request.env
(* As [find], but returns [None] when no token was stored. *)
let find_opt req = Opium.Context.find key req.Opium.Request.env
(* Returns [req] with [token] stored in its context under [key]. *)
let set token req =
  let env = req.Opium.Request.env in
  let env = Opium.Context.add key token env in
  { req with env }
;;
(* Middleware extracting a bearer token from the "authorization" header.
   When the header has the shape "<scheme> <token>" and the scheme is
   "Bearer" — compared case-insensitively, since HTTP authentication scheme
   names are case-insensitive (RFC 7235, RFC 6750) — the token is stored in
   the request context where [find]/[find_opt] can retrieve it.  Requests
   without a well-formed bearer header are passed through unchanged. *)
let middleware =
  let filter handler req =
    match Opium.Request.header "authorization" req with
    | Some authorization ->
      (match String.split_on_char ' ' authorization with
      | [ scheme; token ] when String.lowercase_ascii scheme = "bearer" ->
        (* Covers "Bearer", "bearer", and any other casing; the previous
           version special-cased only those two spellings. *)
        handler (set token req)
      | _ -> handler req)
    | None -> handler req
  in
  Rock.Middleware.create ~name:"bearer" ~filter
;;
| |
py.ml |
(* Library entry point: re-exports [Py_base] and exposes the wrapper and
   type submodules under stable names. *)
include Py_base
module PyWrap = Py_wrap
module PyType = Py_type
(* Converts an [Object.t] into [t] by delegating to [PyType.of_object]. *)
let of_object : Object.t -> t = PyType.of_object
| |
dl_boogie_proof.h |
/*++
Copyright (c) 2015 Microsoft Corporation
--*/
#pragma once
/**
output :: derivation model
derivation :: "(" "derivation" step* ")"
step :: "(" "step" step-name fact rule-name subst labels premises ")"
step-name :: identifier
rule-name :: identifier
fact :: "(" predicate theory-term* ")"
subst :: "(" "subst" assignment* ")"
assignment :: "(" "=" variable theory-term ")"
labels :: "(" "labels" label* ")"
premises :: "(" "ref" step-name* ")"
model :: "(" "model" smtlib2-model ")"
In each step the "fact" is derivable by hyper-resolution from the named
premises and the named rule, under the given substitution for the
universally quantified variables in the rule. The premises of each
step must have occurred previously in the step sequence. The last fact
is a nullary placeholder predicate representing satisfaction of the query
(its name is arbitrary).
The labels list consists of all the positively labeled sub-formulas whose
truth is used in the proof, and all the negatively labeled formulas whose
negation is used. A theory-term is a ground term using only interpreted
constants of the background theories.
The smtlib2-model gives an interpretation of the uninterpreted constants
in the background theory under which the derivation is valid. Currently
it is a quoted string in the old z3 model format, for compatibility with
Boogie, however, this should be changed to the new model format (using
define-fun) when Boogie supports this.
*/
#include "ast/ast.h"
#include "model/model.h"
namespace datalog {

    // Pretty-prints a Datalog/fixedpoint derivation (and an optional model)
    // in the s-expression grammar documented at the top of this file, for
    // consumption by Boogie.
    class boogie_proof {
        typedef vector<std::pair<symbol,expr*> > subst; // variable -> term assignments
        typedef svector<symbol> labels;                 // labels attached to a step
        typedef unsigned_vector refs;                   // indices of premise steps
        // One hyper-resolution step of the derivation (see grammar above).
        struct step {
            symbol m_rule_name;
            expr* m_fact;    // derived fact
            subst m_subst;   // substitution for the rule's quantified variables
            labels m_labels;
            refs m_refs;     // previously emitted steps this one resolves against
        };
        ast_manager& m;
        proof_ref m_proof;   // proof to render; see set_proof
        model_ref m_model;   // optional model; see set_model
        // Printers, one per syntactic category of the output grammar.
        void pp_proof(std::ostream& out);
        void pp_steps(std::ostream& out, vector<step>& steps);
        void pp_step(std::ostream& out, unsigned i, step& s);
        void pp_fact(std::ostream& out, expr* fact);
        void pp_subst(std::ostream& out, subst& s);
        void pp_assignment(std::ostream& out, symbol const& v, expr* t);
        void pp_labels(std::ostream& out, labels& lbls);
        void pp_premises(std::ostream& out, refs& refs);
        // Extractors walking the proof term.
        void get_subst(proof* p, subst& sub);
        void get_rule_name(proof* p, symbol&);
        void get_labels(proof* p, labels&);
    public:
        boogie_proof(ast_manager& m): m(m), m_proof(m), m_model(nullptr) {}
        void set_proof(proof* p);
        void set_model(model* m);
        // Writes the derivation (and model, when present) to out.
        void pp(std::ostream& out);
    };
}
| |
external_lib_name.mli |
open Import
(** Represents a valid external lib name *)
type t
include Stringlike_intf.S with type t := t
val equal : t -> t -> bool
val compare : t -> t -> Ordering.t
(* Derives an OCaml module name from the library name. *)
val to_module_name : t -> Module_name.t
(* Normalises a library name; the exact transformation is defined by the
   implementation and not visible from this interface. *)
val clean : t -> t
| |
spawn.mli | (** Mini spawn library *)
(** Note: on Unix, spawn uses vfork by default. It has been tested, but if you
believe this is causing a problem in your application, you can change this
default at runtime by setting the environment variable [SPAWN_USE_FORK]. *)
module Working_dir : sig
  type t =
    | Path of string (** Path in the filesystem *)
    | Fd of Unix.file_descr
    (** File descriptor pointing to a directory. Not supported on Windows. *)
    | Inherit (** Inherit the working directory of the current process *)
end
module Unix_backend : sig
  (** System call to use on Unix. *)
  type t =
    | Fork
    | Vfork
  (** [Fork] if the [SPAWN_USE_FORK] environment variable is set, [Vfork]
      otherwise. *)
  val default : t
end
module Env : sig
  (** Representation of an environment *)
  type t
  (* The ["KEY=VALUE"] shape is documented below but not enforced by the
     type of the argument. *)
  (** Create an environment from a list of strings of the form ["KEY=VALUE"]. *)
  val of_list : string list -> t
end
(** Process group IDs *)
module Pgid : sig
  (** Representation of the second parameter to [setpgid]. If a value of this
      type is provided to [spawn], the child will immediately set its pgid
      accordingly. *)
  type t
  (** Sets the child's pgid to the same as its process id. Equivalent to calling
      [setpgid(0, 0)]. *)
  val new_process_group : t
  (** Raises [Invalid_arg] if the value is not strictly positive. *)
  val of_pid : int -> t
end
(** Spawn a sub-command and return its PID. This function is low-level and
should be used to build higher-level APIs.
In case of errors, it raises [Unix.Unix_error].
{b Binary}
[prog] is not searched in [PATH]. It is up to the caller to do the path
resolution before calling this function. Note that there is no special
treatment of executable text files without a proper #!. The execvp function
from the C library calls [/bin/sh] in this case to imitate the behaviors of
a shell but this function doesn't.
Note that when [prog] is a relative filename, it is interpreted as a path
relative to the working directory specified by the [cwd] argument. On
Windows, this differs from what the underlying [CreateProcess] function
does.
{b Command line arguments}
[argv] is the full command line. The first element should be the program
name and subsequent elements the command line arguments. Note that the head
of [argv] doesn't necessarily have to be equal to [prog]. For instance it
might be [foo] while [prog] might be [/usr/bin/foo].
{b Environment}
[env] represents the environment in which the sub-process is executed. If
not specified, the environment from the process calling this function is
used.
{b Working directory}
[cwd] describes what the current working directory of the sub-process should
be. It defaults to [Inherit]. It is an error to pass [Fd _] on Windows.
{b Standard input/outputs}
[stdin], [stdout] and [stderr] are the file descriptors used as standard
input, output and error output of the sub-process. When not specified, they
default to the ones from the calling process.
{b Process groups}
If [setpgid] is provided, the child will immediately call [setpgid(0,pid)],
where [pid] is a [pid_t] determined from the [Pgid.t] given (see that
module). This parameter has no effect on Windows platforms.
{b Signals}
On Unix, the sub-process will have all its signals unblocked.
{b Implementation}
[unix_backend] describes what backend to use on Unix. If set to [Default],
[vfork] is used unless the environment variable [SPAWN_USE_FORK] is set. On
Windows, [CreateProcess] is used. *)
val spawn :
?env:Env.t
-> ?cwd:Working_dir.t (* default: [Inherit] *)
-> prog:string
-> argv:string list
-> ?stdin:Unix.file_descr
-> ?stdout:Unix.file_descr
-> ?stderr:Unix.file_descr
-> ?unix_backend:Unix_backend.t (* default: [Unix_backend.default] *)
-> ?setpgid:Pgid.t
-> unit
-> int
(**/**)
(* Create a pipe with [O_CLOEXEC] sets for both fds. This is the same as
creating a pipe and setting [O_CLOEXEC] manually on both ends, with the
difference that there is no race condition between [spawn] and [safe_pipe].
I.e. if a thread calls [safe_pipe] and another calls [spawn], it is
guaranteed that the sub-process doesn't have the pipe without [O_CLOEXEC] set
on one or the two file descriptors. The latter situation is problematic as
one often reads a pipe until it is closed, however if some random process
keeps a handle of it because it inherited it from its parent by mistake, the
pipe will never be closed.
It is implemented using the [pipe2] system calls, except on OSX where [pipe2]
is not available. On OSX, both [safe_pipe] and [spawn] lock the same mutex to
prevent race conditions. *)
val safe_pipe : unit -> Unix.file_descr * Unix.file_descr
| (** Mini spawn library *)
|
cset.mli |
(* Character sets, represented as sorted list of intervals *)
(* A code point; sets store inclusive integer ranges of these. *)
type c = int
type t
(* [iter t ~f] calls [f] once per interval of [t] — presumably with its two
   endpoints; confirm against the implementation. *)
val iter : t -> f:(c -> c -> unit) -> unit
(* Usual set operations. *)
val union : t -> t -> t
val inter : t -> t -> t
val diff : t -> t -> t
(* Shifts the set by the given amount (exact semantics defined by the
   implementation). *)
val offset : int -> t -> t
val empty : t
(* One-element set. *)
val single : c -> t
(* [seq lo hi] — presumably the inclusive range from [lo] to [hi]. *)
val seq : c -> c -> t
val add : c -> t -> t
val mem : c -> t -> bool
(* Abstract hash value; see also [hash_rec] for a plain [int] hash. *)
type hash
val hash : t -> hash
val pp : Format.formatter -> t -> unit
(* [Some c] when the set denotes exactly one character — TODO confirm. *)
val one_char : t -> c option
val fold_right : t -> init:'acc -> f:(c * c -> 'acc -> 'acc) -> 'acc
val hash_rec : t -> int
module CSetMap : Map.S with type key = int * t
(* Presumably the set of all characters. *)
val cany : t
(* One-element set built from a [char]. *)
val csingle : char -> t
val is_empty : t -> bool
(* Used by the automaton construction; semantics defined by the
   implementation. *)
val prepend : t -> 'a list -> (t * 'a list) list -> (t * 'a list) list
(* Returns some element of the set (presumably requires non-emptiness). *)
val pick : t -> c
| (*
RE - A regular expression library
Copyright (C) 2001 Jerome Vouillon
email: [email protected]
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, with
linking exception; either version 2.1 of the License, or (at
your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*) |
eq.ml | (** {i Eq} plugin: receive another value as inherited attribute and test for equality.
Very similar to {!Compare} plugin.
For type declaration [type ('a,'b,...) typ = ...] it will create a transformation
function with type
[('a -> 'a -> bool) ->
('b -> 'b -> bool) -> ... -> ('a,'b,...) typ -> bool ]
Inherited attribute' is the same as argument, synthetized attribute is {!GT.comparison}.
*)
open GTCommon
(* Name under which this plugin is registered. *)
let trait_name = "eq"
module Make(AstHelpers : GTHELPERS_sig.S) = struct
  let trait_name = trait_name
  (* [eq] is [compare] specialised to a boolean result: the whole traversal
     is inherited from the Compare plugin and only the differing pieces are
     overridden below. *)
  module C = Compare.Make(AstHelpers)
  open AstHelpers
  class g initial_args tdecls = object(self: 'self)
    inherit C.g initial_args tdecls
    method! trait_name = trait_name
    (* The synthesized attribute is [bool] (instead of [GT.comparison]). *)
    method! syn_of_param ~loc s = Typ.sprintf ~loc "bool"
    method! syn_of_main ~loc ?in_class tdecl = self#syn_of_param ~loc "dummy"
    (* Values built from different constructors are unequal: generate
       [let _ = other in false] (the dummy binding presumably keeps the
       other value used in the generated code). *)
    method! on_different_constructors ~loc is_poly other_name cname arg_typs =
      Exp.let_ ~loc [Pat.any ~loc, Exp.ident ~loc other_name]
        (Exp.false_ ~loc)
    (* Per-component results are combined with (&&), starting from [true]. *)
    method! chain_exprs ~loc e1 e2 =
      Exp.app_list ~loc (Exp.ident ~loc "&&") [ e1; e2 ]
    method! chain_init ~loc = Exp.true_ ~loc
  end
  let create = (new g :> C.P.plugin_constructor)
end
(* Makes the plugin available to the expander under [trait_name]. *)
let register () =
  Expander.register_plugin trait_name (module Make: Plugin_intf.MAKE)
let () = register ()
| (** {i Eq} plugin: receive another value as inherited attribute and test for equality.
Very similar to {!Compare} plugin.
For type declaration [type ('a,'b,...) typ = ...] it will create a transformation
function with type
[('a -> 'a -> bool) ->
('b -> 'b -> bool) -> ... -> ('a,'b,...) typ -> bool ]
Inherited attribute' is the same as argument, synthetized attribute is {!GT.comparison}.
*) |
SubAST_generic.ml | open AST_generic
module PI = Parse_info
module H = AST_generic_helpers
module V = Visitor_AST
(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)
(* Various helper functions to extract subparts of AST elements.
*
*)
(* When true, deep statement matching also descends into definitions (see
   [substmts_of_stmt]) and into lambdas nested in expressions. *)
let go_really_deeper_stmt = ref true
(*****************************************************************************)
(* Sub-expressions and sub-statements *)
(*****************************************************************************)
(* Keeps the [E e] payloads of [xs].  Note: folding left while consing means
   the result is in reverse order relative to [xs]; callers do not appear to
   rely on the order. *)
let subexprs_of_any_list xs =
  xs
  |> List.fold_left
       (fun x -> function
         | E e -> e :: x
         | _ -> x)
       []
(* used for really deep statement matching *)
(* Direct sub-expressions of a statement kind.  Arms are grouped by how many
   expressions the construct carries: exactly one, optional, several, none. *)
let subexprs_of_stmt_kind = function
  (* 1 *)
  | ExprStmt (e, _)
  | DoWhile (_, _, e)
  | DefStmt (_, VarDef { vinit = Some e; _ })
  | DefStmt (_, FieldDefColon { vinit = Some e; _ })
  | For (_, ForEach (_, _, e), _)
  | Continue (_, LDynamic e, _)
  | Break (_, LDynamic e, _)
  | Throw (_, e, _) ->
      [ e ]
  | While (_, cond, _)
  | If (_, cond, _, _) ->
      [ H.cond_to_expr cond ]
  (* opt *)
  | Switch (_, condopt, _) -> (
      match condopt with
      | None -> []
      | Some cond -> [ H.cond_to_expr cond ])
  | Return (_, eopt, _) -> Option.to_list eopt
  (* n *)
  | For (_, MultiForEach es, _) ->
      es
      |> List.filter_map (function
           | FE (_, _, e) -> Some [ e ]
           | FECond ((_, _, e1), _, e2) -> Some [ e1; e2 ]
           | FEllipsis _ -> None)
      |> List.concat
  | For (_, ForClassic (xs, eopt1, eopt2), _) ->
      (xs
      |> Common.map_filter (function
           | ForInitExpr e -> Some e
           | ForInitVar (_, vdef) -> vdef.vinit))
      @ Option.to_list eopt1 @ Option.to_list eopt2
  | Assert (_, (_, args, _), _) ->
      args
      |> Common.map_filter (function
           | Arg e -> Some e
           | _ -> None)
  | For (_, ForIn (_, es), _) -> es
  | OtherStmt (_op, xs) -> subexprs_of_any_list xs
  | OtherStmtWithStmt (_, xs, _) -> subexprs_of_any_list xs
  (* 0 *)
  | DirectiveStmt _
  | Block _
  | For (_, ForEllipsis _, _)
  | Continue _
  | Break _
  | Label _
  | Goto _
  | Try _
  | DisjStmt _
  | DefStmt _
  | WithUsingResource _ ->
      []
let subexprs_of_stmt st = subexprs_of_stmt_kind st.s
(* Expressions carried by call arguments; type arguments and other
   non-expression arguments are dropped. *)
let subexprs_of_args args =
  args |> PI.unbracket
  |> Common.map_filter (function
       | Arg e
       | ArgKwd (_, e)
       | ArgKwdOptional (_, e) ->
           Some e
       | ArgType _
       | OtherArg _ ->
           None)
(* used for deep expression matching *)
(* Direct sub-expressions of [e].  With [with_symbolic_propagation] set, an
   identifier whose svalue was symbolically propagated yields the propagated
   expression instead of being treated as a leaf. *)
let subexprs_of_expr with_symbolic_propagation e =
  match e.e with
  | N (Id (_, { id_svalue = { contents = Some (Sym e1) }; _ }))
    when with_symbolic_propagation ->
      [ e1 ]
  (* leaves *)
  | L _
  | N _
  | IdSpecial _
  | Ellipsis _
  | TypedMetavar _ ->
      []
  | DotAccess (e, _, _)
  | Await (_, e)
  | Cast (_, _, e)
  | Ref (_, e)
  | DeRef (_, e)
  | DeepEllipsis (_, e, _)
  | DotAccessEllipsis (e, _) ->
      [ e ]
  | Assign (e1, _, e2)
  | AssignOp (e1, _, e2)
  | ArrayAccess (e1, (_, e2, _))
  (* not sure we always want to return 'e1' here *) ->
      [ e1; e2 ]
  | Conditional (e1, e2, e3) -> [ e1; e2; e3 ]
  | Seq xs -> xs
  | Record (_, flds, _) ->
      flds |> Common2.map_flatten (function F st -> subexprs_of_stmt st)
  | Container (_, xs) -> PI.unbracket xs
  | Comprehension (_, (_, (e, xs), _)) ->
      e
      :: (xs
         |> Common.map (function
              | CompFor (_, _pat, _, e) -> e
              | CompIf (_, e) -> e))
  | New (_, _t, args) -> subexprs_of_args args
  | Call (e, args) ->
      (* not sure we want to return 'e' here *)
      e :: subexprs_of_args args
  | SliceAccess (e1, e2) ->
      e1
      :: (e2 |> PI.unbracket
         |> (fun (a, b, c) -> [ a; b; c ])
         |> Common.map Option.to_list |> List.flatten)
  | Yield (_, eopt, _) -> Option.to_list eopt
  | StmtExpr st -> subexprs_of_stmt st
  | OtherExpr (_, anys) ->
      (* in theory we should go deeper in any *)
      subexprs_of_any_list anys
  | RawExpr x -> Raw_tree.anys x |> subexprs_of_any_list
  | Alias (_, e1) -> [ e1 ]
  | Lambda def -> subexprs_of_stmt (H.funcbody_to_stmt def.fbody)
  (* TODO? or call recursively on e? *)
  | ParenExpr (_, e, _) -> [ e ]
  | Xml { xml_attrs; xml_body; _ } ->
      List.filter_map
        (function
          | XmlAttr (_, _, e)
          | XmlAttrExpr (_, e, _) ->
              Some e
          | _ -> None)
        xml_attrs
      @ List.filter_map
          (function
            | XmlExpr (_, Some e, _) -> Some e
            | XmlXml xml -> Some (Xml xml |> AST_generic.e)
            | _ -> None)
          xml_body
  | RegexpTemplate ((_l, e, _r), _opt) -> [ e ]
  (* currently skipped over but could recurse *)
  | Constructor _
  | AnonClass _
  | LetPattern _ ->
      []
  | DisjExpr _ -> raise Common.Impossible
[@@profiling]
(* Need this wrapper because [@@profiling] has the side-effect of removing labels. *)
let subexprs_of_expr ?(symbolic_propagation = false) e =
  subexprs_of_expr symbolic_propagation e
(* This is similar to subexprs_of_expr, but used for the
* *implicit* deep expression matching. Here we should not go as deep.
*
* For example, we should allow patterns like 'foo();'
* to also match code like 'x = foo();' or even 'print(foo());'
* but not necessarily any expressions like 'bar() || foo();'.
* See tests/ts/deep_exprtmt.ts for more examples.
*)
(* As [subexprs_of_expr], but for *implicit* deep expression matching, where
   we deliberately do not go as deep (see the comment above). *)
let subexprs_of_expr_implicit with_symbolic_propagation e =
  match e.e with
  | N (Id (_, { id_svalue = { contents = Some (Sym e1) }; _ }))
    when with_symbolic_propagation ->
      [ e1 ]
  (* cases where we extract a subexpr *)
  | Assign (_e1, _, e2)
  | AssignOp (_e1, _, e2) ->
      [ e2 ]
  (* TODO? special case for Bash and Dockerfile to prevent
   * 'RUN b' to also match 'RUN a && b' (but still allowing
   * 'RUN a' to match 'RUN a && b'?)
   * | Call ({ e = IdSpecial (Op And, _); _}, (_, Arg e1::_, _)) -> [e1]
   *)
  | Call (e, args) ->
      (* TODO: ugly we add 'e' also here for cases like
       * bar().then(stuff) which is parsed as (bar().then)(stuff)
       * and we need to go in left part.
       *)
      e :: subexprs_of_args args
  | Cast (_, _, e)
  | ParenExpr (_, e, _)
  | Await (_, e) ->
      [ e ]
  | Yield (_, eopt, _) -> Option.to_list eopt
  | StmtExpr st -> subexprs_of_stmt st
  (* TODO: ugly, but we have pattern like 'db.find(...)' that we
   * also want to match code like 'db.find().then(stuff).
   *)
  | DotAccess (e, _, _) -> [ e ]
  (* TODO: ugly but in semgrep-rules/python/.../flush.yaml there is
   * '$F.name' that is matching cmd = [stuff, fout.name, otherstuff].
   * They should rewrite the rule and use '... <... $F.name ...>' there.
   *)
  | Container (_, xs) -> PI.unbracket xs
  (* TODO: ugly but in semgrep-rules/terraform/.../missing-athena...yaml
   * we look for '{ ... encryption_configuration {...} ...}' and
   * the encryption_configuration can actually be nested deeper.
   * They should rewrite the rule.
   *)
  | Record (_, flds, _) ->
      flds |> Common2.map_flatten (function F st -> subexprs_of_stmt st)
  (* cases where we should not extract a subexpr *)
  | L _
  | N _
  | IdSpecial _
  | Ellipsis _ ->
      []
  | Ref (_, _e)
  | DeRef (_, _e) ->
      []
  | Conditional (_e1, _e2, _e3) -> []
  | Seq _xs -> []
  | ArrayAccess (_e1, (_, _e2, _)) -> []
  | SliceAccess (_e1, _e2) -> []
  | Comprehension (_, (_, (_e, _xs), _)) -> []
  | New (_, _t, _args) -> []
  | OtherExpr (_, _anys) -> []
  | RawExpr _ -> []
  | Alias (_, _e1) -> []
  | Xml _xmlbody -> []
  | Constructor _ -> []
  | RegexpTemplate _ -> []
  | AnonClass _def -> []
  | Lambda _def -> []
  | LetPattern _ -> []
  (* these constructors are pattern-only and cannot occur here *)
  | TypedMetavar _
  | DeepEllipsis _
  | DotAccessEllipsis _
  | DisjExpr _ ->
      raise Common.Impossible
[@@profiling]
(* Need this wrapper because [@@profiling] has the side-effect of removing labels. *)
let subexprs_of_expr_implicit ?(symbolic_propagation = false) e =
  subexprs_of_expr_implicit symbolic_propagation e
(* used for deep statement matching *)
(* Direct sub-statements of [st]; arms grouped by arity (0, 1, 2, n).
   Definitions are only descended into when [go_really_deeper_stmt] is
   set. *)
let substmts_of_stmt st =
  match st.s with
  (* 0 *)
  | DirectiveStmt _
  | ExprStmt _
  | Return _
  | Continue _
  | Break _
  | Goto _
  | Throw _
  | Assert _
  | OtherStmt _ ->
      []
  (* 1 *)
  | While (_, _, st)
  | DoWhile (_, st, _)
  | For (_, _, st)
  | Label (_, st)
  | OtherStmtWithStmt (_, _, st) ->
      [ st ]
  (* 2 *)
  | If (_, _, st1, st2) -> st1 :: Option.to_list st2
  | WithUsingResource (_, st1, st2) -> st1 @ [ st2 ]
  (* n *)
  | Block (_, xs, _) -> xs
  | Switch (_, _, xs) ->
      xs
      |> Common.map (function
           | CasesAndBody (_, st) -> [ st ]
           | CaseEllipsis _ -> [])
      |> List.flatten
  | Try (_, st, xs, opt) -> (
      [ st ]
      @ (xs |> Common.map Common2.thd3)
      @
      match opt with
      | None -> []
      | Some (_, st) -> [ st ])
  | DisjStmt _ -> raise Common.Impossible
  (* this may slow down things quite a bit *)
  | DefStmt (_ent, def) -> (
      if not !go_really_deeper_stmt then []
      else
        match def with
        | VarDef _
        | FieldDefColon _
        | EnumEntryDef _
        | TypeDef _
        | MacroDef _
        | Signature _
        | UseOuterDecl _
        (* recurse? *)
        | ModuleDef _
        | OtherDef _ ->
            []
        (* this will add lots of substatements *)
        | FuncDef def -> [ H.funcbody_to_stmt def.fbody ]
        | ClassDef def ->
            def.cbody |> PI.unbracket |> Common.map (function F st -> st))
(*****************************************************************************)
(* Visitors *)
(*****************************************************************************)
(* TODO: move in pfff at some point *)
(* Runs the visitor built by [mk_hooks] over [any]; the hooks push collected
   values onto the shared ref, and the results are returned in push order. *)
let do_visit_with_ref mk_hooks any =
  let res = ref [] in
  let hooks = mk_hooks res in
  let vout = V.mk_visitor hooks in
  vout any;
  List.rev !res
(* Lambda definitions nested inside [e].  Note that the hook does not call
   the continuation [k] on a [Lambda], so lambdas nested inside the body of
   a found lambda are not collected. *)
let lambdas_in_expr e =
  do_visit_with_ref
    (fun aref ->
      {
        V.default_visitor with
        V.kexpr =
          (fun (k, _) e ->
            match e.e with
            | Lambda def -> Common.push def aref
            | _ -> k e);
      })
    (E e)
[@@profiling]
(* opti: using memoization speed things up a bit too
 * (but again, this is still slow when called many many times).
 * todo? note that this is not the optimal memoization we can do because
 * using Hashtbl where the key is a full expression can be slow (hashing
 * huge expressions still takes some time). It would be better to
 * return a unique identifier to each expression to remove the hashing cost.
 *)
let hmemo = Hashtbl.create 101
(* Memoized variant of [lambdas_in_expr], keyed by the whole expression. *)
let lambdas_in_expr_memo a =
  Common.memoized hmemo a (fun () -> lambdas_in_expr a)
[@@profiling]
(*****************************************************************************)
(* Really substmts_of_stmts *)
(*****************************************************************************)
(* Flattens [xs] together with all their (recursive) substatements into a
   single list.  Returns [None] when no statement had any substatement,
   otherwise [Some (stmts, last)] where [last] is the last statement of the
   flattened list (returned for the caching optimization, see below). *)
let flatten_substmts_of_stmts xs =
  (* opti: using a ref, List.iter, and Common.push instead of a mix of
   * List.map, List.flatten and @ below speed things up
   * (but it is still slow when called many many times)
   *)
  let res = ref [] in
  let changed = ref false in
  let rec aux x =
    (* return the current statement first, and add substmts *)
    Common.push x res;
    (* this can be really slow because lambdas_in_expr() below can be called
     * a zillion times on big files (see tests/PERF/) if we do the
     * matching naively in m_stmts_deep.
     *)
    (if !go_really_deeper_stmt then
     let es = subexprs_of_stmt x in
     (* getting deeply nested lambdas stmts *)
     let lambdas = es |> Common.map lambdas_in_expr_memo |> List.flatten in
     lambdas
     |> Common.map (fun def -> H.funcbody_to_stmt def.fbody)
     |> List.iter aux);
    let xs = substmts_of_stmt x in
    match xs with
    | [] -> ()
    | xs ->
        changed := true;
        xs |> List.iter aux
  in
  xs |> List.iter aux;
  if !changed then
    match !res with
    | [] -> None
    | last :: _ ->
        (* Return the last element of the list as a pair.
           This is used as part of the caching optimization. *)
        Some (List.rev !res, last)
  else None
[@@profiling]
| (* Yoann Padioleau
*
* Copyright (C) 2019-2022 r2c
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation, with the
* special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
* LICENSE for more details.
*) |
set_coeff_fq_nmod_fmpz.c |
#include "fq_nmod_mpoly.h"
/* Set the coefficient of the monomial with exponent vector exp (a flat array
   of ctx->minfo->nvars fmpz exponents) in A to c.  Three cases:
     - monomial absent, c nonzero: insert a new term, shifting the later
       terms up by one slot;
     - monomial present, c zero: delete the term, shifting later terms down;
     - monomial present, c nonzero: overwrite the coefficient in place. */
void _fq_nmod_mpoly_set_coeff_fq_nmod_fmpz(
    fq_nmod_mpoly_t A,
    const fq_nmod_t c,
    const fmpz * exp,
    const fq_nmod_mpoly_ctx_t ctx)
{
    slong d = fq_nmod_ctx_degree(ctx->fqctx);
    flint_bitcnt_t exp_bits;
    slong i, N, index;
    ulong * cmpmask;
    ulong * packed_exp;
    int exists;
    TMP_INIT;
    TMP_START;
    /* ensure A's exponent field width can represent exp */
    exp_bits = mpoly_exp_bits_required_ffmpz(exp, ctx->minfo);
    exp_bits = mpoly_fix_bits(exp_bits, ctx->minfo);
    fq_nmod_mpoly_fit_length_fit_bits(A, A->length, exp_bits, ctx);
    N = mpoly_words_per_exp(A->bits, ctx->minfo);
    cmpmask = (ulong*) TMP_ALLOC(N*sizeof(ulong));
    mpoly_get_cmpmask(cmpmask, N, A->bits, ctx->minfo);
    /* pack exp into A's internal monomial representation and look it up */
    packed_exp = (ulong *) TMP_ALLOC(N*sizeof(ulong));
    mpoly_set_monomial_ffmpz(packed_exp, exp, A->bits, ctx->minfo);
    exists = mpoly_monomial_exists(&index, A->exps,
                                  packed_exp, A->length, N, cmpmask);
    if (!exists)
    {
        if (!fq_nmod_is_zero(c, ctx->fqctx))
        {
            /* make new term only if coeff is nonzero*/
            fq_nmod_mpoly_fit_length(A, A->length + 1, ctx);
            /* shift coefficients and exponents at index..length-1 up by one */
            for (i = A->length; i >= index + 1; i--)
            {
                _n_fq_set(A->coeffs + d*i, A->coeffs + d*(i - 1), d);
                mpoly_monomial_set(A->exps + N*i, A->exps + N*(i - 1), N);
            }
            n_fq_set_fq_nmod(A->coeffs + d*index, c, ctx->fqctx);
            mpoly_monomial_set(A->exps + N*index, packed_exp, N);
            _fq_nmod_mpoly_set_length(A, A->length + 1, ctx);
        }
    }
    else if (fq_nmod_is_zero(c, ctx->fqctx)) /* zero coeff, remove term */
    {
        /* shift everything after index down by one */
        for (i = index; i < A->length - 1; i++)
        {
            _n_fq_set(A->coeffs + d*i, A->coeffs + d*(i + 1), d);
            mpoly_monomial_set(A->exps + N*i, A->exps + N*(i + 1), N);
        }
        _fq_nmod_mpoly_set_length(A, A->length - 1, ctx);
    }
    else /* term with that monomial exists, coeff is nonzero */
    {
        n_fq_set_fq_nmod(A->coeffs + d*index, c, ctx->fqctx);
    }
    TMP_END;
}
/* Public wrapper around _fq_nmod_mpoly_set_coeff_fq_nmod_fmpz: accepts the
   exponent vector as an array of fmpz pointers, copies it into a flat
   temporary fmpz array, and delegates. */
void fq_nmod_mpoly_set_coeff_fq_nmod_fmpz(
    fq_nmod_mpoly_t A,
    const fq_nmod_t c,
    fmpz * const * exp,
    const fq_nmod_mpoly_ctx_t ctx)
{
    slong j;
    slong nvars = ctx->minfo->nvars;
    fmpz * exp_copy;
    TMP_INIT;
    TMP_START;
    /* flat working copy of the exponents */
    exp_copy = (fmpz *) TMP_ALLOC(nvars*sizeof(fmpz));
    for (j = 0; j < nvars; j++)
    {
        fmpz_init(exp_copy + j);
        fmpz_set(exp_copy + j, exp[j]);
    }
    _fq_nmod_mpoly_set_coeff_fq_nmod_fmpz(A, c, exp_copy, ctx);
    /* release the fmpz temporaries before the TMP block is freed */
    for (j = 0; j < nvars; j++)
        fmpz_clear(exp_copy + j);
    TMP_END;
}
| /*
Copyright (C) 2019 Daniel Schultz
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/ |
verbose_newlines.ml | (** Reproduction for https://github.com/mirage/alcotest/issues/225, testing the
interaction between `--verbose` and newlines in the test stdout. *)
let () =
  Alcotest.run ~verbose:true __FILE__
    [
      ( "alpha",
        [
          (* Baseline: stdout without a trailing newline. *)
          Alcotest.test_case "0 newlines" `Quick (fun () ->
              Format.printf "Print inside alpha");
        ] );
      ( "beta",
        (* Progressive result reporting is broken for this test, since the
           carriage return happens on the wrong line. *)
        [
          Alcotest.test_case "1 newline" `Quick (fun () ->
              Format.printf "Print inside beta\n");
        ] );
      ( "gamma",
        [
          (* Reporting is also broken here. Even worse, some of the test std.out
             is clipped by the eventual result of the test. *)
          Alcotest.test_case "1 newline + long line" `Quick (fun () ->
              Format.printf
                "Print inside gamma\n\
                 Lorem ipsum dolor sit amet, consectetur adipiscing elit, \
                 nullam malesuada dictum tortor in venenatis.");
        ] );
      ( "delta",
        [
          (* As gamma, but the long text comes from the check message rather
             than the test's own stdout. *)
          Alcotest.test_case "1 newline + long check" `Quick (fun () ->
              Format.printf "Print inside delta\n";
              Alcotest.(check unit)
                "Lorem ipsum dolor sit amet, consectetur adipiscing elit, \
                 nullam malesuada dictum tortor in venenatis."
                () ());
        ] );
    ]
| (** Reproduction for https://github.com/mirage/alcotest/issues/225, testing the
interaction between `--verbose` and newlines in the test stdout. *) |
michelson_v1_emacs.mli | open Protocol
open Alpha_context
(* Pretty-printers and error reporters — presumably producing the output
   format expected by the Emacs mode (see file name). *)
(* Prints a Michelson expression. *)
val print_expr : Format.formatter -> Script.expr -> unit
(* Prints a parsed script's type map alongside the script. *)
val print_type_map :
  Format.formatter ->
  Michelson_v1_parser.parsed * Script_tc_errors.type_map ->
  unit
(* Reports the given errors in the context of the parsed script. *)
val report_errors :
  Format.formatter ->
  Michelson_v1_parser.parsed * Error_monad.error list ->
  unit
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
dune |
(test
(name test_client)
(package charrua-client)
(libraries cstruct-unix alcotest charrua-client charrua-server tcpip.unix
mirage-random-test))
| |
test_sampling_data.ml | open Tezos_benchmark
(* Input parameter parsing *)
(* [verbose] is [true] iff "-v" occurs among the command-line arguments.
   If no argument is given, print a usage message and abort; otherwise seed
   the global PRNG with the first argument as a side effect before scanning
   for "-v".

   NOTE: the closing parenthesis on the [List.exists (( = ) "-v")] line ends
   the parenthesized sequence early, so [(Array.to_list Sys.argv)] is the
   argument applied to the partially applied [List.exists]; the whole
   application forms the [else] branch. *)
let verbose =
  if Array.length Sys.argv < 2 then (
    Format.eprintf "Executable expects random seed on input\n%!" ;
    exit 1)
  else
    (Random.init (int_of_string Sys.argv.(1)) ;
    List.exists (( = ) "-v"))
      (Array.to_list Sys.argv)
(* ------------------------------------------------------------------------- *)
(* MCMC instantiation *)
(* Fixed-seed PRNG state shared by the samplers below, so runs are
   reproducible. *)
let state = Random.State.make [|42; 987897; 54120|]

(* Crypto key samplers drawing from a finite pool of 16 keys using the
   default signature algorithm. *)
module Crypto_samplers = Crypto_samplers.Make_finite_key_pool (struct
  let algo = `Default
  let size = 16
end)

(* Samplers for base Michelson values; ints, strings and bytes are all drawn
   with sizes in [4, 32]. *)
module Michelson_base_samplers = Michelson_samplers_base.Make (struct
  let parameters =
    let size = {Base_samplers.min = 4; max = 32} in
    {
      Michelson_samplers_base.int_size = size;
      string_size = size;
      bytes_size = size;
    }
end)

(* MCMC-based sampler for well-typed Michelson data, targeting terms of
   size 500; tracing is enabled when "-v" was passed on the command line. *)
module Data =
  Michelson_mcmc_samplers.Make_data_sampler
    (Michelson_base_samplers)
    (Crypto_samplers)
    (struct
      let rng_state = state
      let target_size = 500
      let verbosity = if verbose then `Trace else `Silent
    end)
(* Time the burn-in phase of the MCMC sampler (1400 steps) and report it. *)
let start = Unix.gettimeofday ()
let generator = Data.generator ~burn_in:(200 * 7) state
let stop = Unix.gettimeofday ()
let () = Format.printf "Burn in time: %f seconds@." (stop -. start)

(* Draw 1001 samples; when verbose, print each sampled term and its type
   to stderr. *)
let _ =
  for _i = 0 to 1000 do
    let Michelson_mcmc_samplers.{term = michelson; typ} = generator state in
    if verbose then (
      Format.eprintf "result:@." ;
      Format.eprintf "type: %a@." Test_helpers.print_script_expr typ ;
      Format.eprintf "%a@." Test_helpers.print_script_expr michelson)
  done
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2021 Nomadic Labs, <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
lexO.mli | (* Replace -O... by -O0 *)
val tr : string -> string
| (****************************************************************************)
(* the diy toolsuite *)
(* *)
(* Jade Alglave, University College London, UK. *)
(* Luc Maranget, INRIA Paris-Rocquencourt, France. *)
(* *)
(* Copyright 2019-present Institut National de Recherche en Informatique et *)
(* en Automatique and the authors. All rights reserved. *)
(* *)
(* This software is governed by the CeCILL-B license under French law and *)
(* abiding by the rules of distribution of free software. You can use, *)
(* modify and/ or redistribute the software under the terms of the CeCILL-B *)
(* license as circulated by CEA, CNRS and INRIA at the following URL *)
(* "http://www.cecill.info". We also give a copy in LICENSE.txt. *)
(****************************************************************************)
|
scheduling.mli | (* Instruction scheduling *)
val fundecl: Linearize.fundecl -> Linearize.fundecl
| (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
|
validate.ml |
(* A validation that is either a valid ['a] or a non-empty list of
   collected exceptions. *)
type 'a t = ('a, exn Nonempty_list.t) Validation.t

(* Constructors re-exported from [Validation]. *)
let pure = Validation.pure
let valid = Validation.valid
let invalid = Validation.invalid

(* [error err] is the invalid value carrying the single exception [err]. *)
let error err = invalid (Nonempty_list.create err)

(* Semigroup on the error channel: concatenation of non-empty exception
   lists, used to accumulate errors. *)
module Exn_list = Preface_make.Semigroup.From_alt (Nonempty_list.Alt) (Exn)

(* Standard structures instantiated over the exception-list error type. *)
module Functor = Validation.Functor (Exn_list)
module Invariant = Preface_make.Invariant.From_functor (Functor)
module Alt = Validation.Alt (Exn_list)
module Applicative = Validation.Applicative (Exn_list)
module Selective = Validation.Selective (Exn_list)
module Monad = Validation.Monad (Exn_list)
module Foldable = Validation.Foldable (Exn_list)

(* Case analysis, re-exported from [Validation]. *)
let case = Validation.case

(* [to_result v] converts to the standard [result] type, keeping the
   exception list on the [Error] side. *)
let to_result = function
  | Validation.Valid x -> Ok x
  | Validation.Invalid x -> Error x
;;

(* Equality and pretty-printing, parameterized by those of the valid
   component. *)
let equal f = Validation.equal f (Nonempty_list.equal Exn.equal)
let pp f = Validation.pp f (Nonempty_list.pp Exn.pp)
Hacl_Impl_Blake2_Constants.h |
#ifndef __internal_Hacl_Impl_Blake2_Constants_H
#define __internal_Hacl_Impl_Blake2_Constants_H
#if defined(__cplusplus)
extern "C" {
#endif
#include <string.h>
#include "krml/internal/types.h"
#include "krml/lowstar_endianness.h"
#include "krml/internal/target.h"
/* BLAKE2 message-word permutation schedule (sigma): ten rounds of sixteen
   indices each, flattened into one array (see RFC 7693, section 2.7). */
static const
uint32_t
Hacl_Impl_Blake2_Constants_sigmaTable[160U] =
  {
    (uint32_t)0U, (uint32_t)1U, (uint32_t)2U, (uint32_t)3U, (uint32_t)4U, (uint32_t)5U,
    (uint32_t)6U, (uint32_t)7U, (uint32_t)8U, (uint32_t)9U, (uint32_t)10U, (uint32_t)11U,
    (uint32_t)12U, (uint32_t)13U, (uint32_t)14U, (uint32_t)15U, (uint32_t)14U, (uint32_t)10U,
    (uint32_t)4U, (uint32_t)8U, (uint32_t)9U, (uint32_t)15U, (uint32_t)13U, (uint32_t)6U,
    (uint32_t)1U, (uint32_t)12U, (uint32_t)0U, (uint32_t)2U, (uint32_t)11U, (uint32_t)7U,
    (uint32_t)5U, (uint32_t)3U, (uint32_t)11U, (uint32_t)8U, (uint32_t)12U, (uint32_t)0U,
    (uint32_t)5U, (uint32_t)2U, (uint32_t)15U, (uint32_t)13U, (uint32_t)10U, (uint32_t)14U,
    (uint32_t)3U, (uint32_t)6U, (uint32_t)7U, (uint32_t)1U, (uint32_t)9U, (uint32_t)4U,
    (uint32_t)7U, (uint32_t)9U, (uint32_t)3U, (uint32_t)1U, (uint32_t)13U, (uint32_t)12U,
    (uint32_t)11U, (uint32_t)14U, (uint32_t)2U, (uint32_t)6U, (uint32_t)5U, (uint32_t)10U,
    (uint32_t)4U, (uint32_t)0U, (uint32_t)15U, (uint32_t)8U, (uint32_t)9U, (uint32_t)0U,
    (uint32_t)5U, (uint32_t)7U, (uint32_t)2U, (uint32_t)4U, (uint32_t)10U, (uint32_t)15U,
    (uint32_t)14U, (uint32_t)1U, (uint32_t)11U, (uint32_t)12U, (uint32_t)6U, (uint32_t)8U,
    (uint32_t)3U, (uint32_t)13U, (uint32_t)2U, (uint32_t)12U, (uint32_t)6U, (uint32_t)10U,
    (uint32_t)0U, (uint32_t)11U, (uint32_t)8U, (uint32_t)3U, (uint32_t)4U, (uint32_t)13U,
    (uint32_t)7U, (uint32_t)5U, (uint32_t)15U, (uint32_t)14U, (uint32_t)1U, (uint32_t)9U,
    (uint32_t)12U, (uint32_t)5U, (uint32_t)1U, (uint32_t)15U, (uint32_t)14U, (uint32_t)13U,
    (uint32_t)4U, (uint32_t)10U, (uint32_t)0U, (uint32_t)7U, (uint32_t)6U, (uint32_t)3U,
    (uint32_t)9U, (uint32_t)2U, (uint32_t)8U, (uint32_t)11U, (uint32_t)13U, (uint32_t)11U,
    (uint32_t)7U, (uint32_t)14U, (uint32_t)12U, (uint32_t)1U, (uint32_t)3U, (uint32_t)9U,
    (uint32_t)5U, (uint32_t)0U, (uint32_t)15U, (uint32_t)4U, (uint32_t)8U, (uint32_t)6U,
    (uint32_t)2U, (uint32_t)10U, (uint32_t)6U, (uint32_t)15U, (uint32_t)14U, (uint32_t)9U,
    (uint32_t)11U, (uint32_t)3U, (uint32_t)0U, (uint32_t)8U, (uint32_t)12U, (uint32_t)2U,
    (uint32_t)13U, (uint32_t)7U, (uint32_t)1U, (uint32_t)4U, (uint32_t)10U, (uint32_t)5U,
    (uint32_t)10U, (uint32_t)2U, (uint32_t)8U, (uint32_t)4U, (uint32_t)7U, (uint32_t)6U,
    (uint32_t)1U, (uint32_t)5U, (uint32_t)15U, (uint32_t)11U, (uint32_t)9U, (uint32_t)14U,
    (uint32_t)3U, (uint32_t)12U, (uint32_t)13U
  };
/* BLAKE2s initialization vector (32-bit words; identical to the SHA-256 IV,
   per RFC 7693). */
static const
uint32_t
Hacl_Impl_Blake2_Constants_ivTable_S[8U] =
  {
    (uint32_t)0x6A09E667U, (uint32_t)0xBB67AE85U, (uint32_t)0x3C6EF372U, (uint32_t)0xA54FF53AU,
    (uint32_t)0x510E527FU, (uint32_t)0x9B05688CU, (uint32_t)0x1F83D9ABU, (uint32_t)0x5BE0CD19U
  };
/* BLAKE2b initialization vector (64-bit words; identical to the SHA-512 IV,
   per RFC 7693). */
static const
uint64_t
Hacl_Impl_Blake2_Constants_ivTable_B[8U] =
  {
    (uint64_t)0x6A09E667F3BCC908U, (uint64_t)0xBB67AE8584CAA73BU, (uint64_t)0x3C6EF372FE94F82BU,
    (uint64_t)0xA54FF53A5F1D36F1U, (uint64_t)0x510E527FADE682D1U, (uint64_t)0x9B05688C2B3E6C1FU,
    (uint64_t)0x1F83D9ABFB41BD6BU, (uint64_t)0x5BE0CD19137E2179U
  };
#if defined(__cplusplus)
}
#endif
#define __internal_Hacl_Impl_Blake2_Constants_H_DEFINED
#endif
| /* MIT License
*
* Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation
* Copyright (c) 2022-2023 HACL* Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/ |
e_gitcommit.h |
#define ECOMMITID "ab0ade87bb070b853105f79bf59e8fc27f915b4f"
| |
dune |
; Build the test generator executable.
(executable
(name test)
(modules test)
(libraries functoria.test fmt mirage))
; Generate main.ml by running the test executable.
(rule
(targets main.ml)
(action
(run ./test.exe)))
; On `dune runtest`, check the generated main.ml against the committed
; expected output.
(rule
(alias runtest)
(package mirage)
(action
(progn
(diff main.ml.expected main.ml))))
| |
display.ml | (* ------------------------------------------------------------------------- *)
(* Smart display of inference result.
We're only able to plot simple cases automatically: models with one or
two dependent variables + constants. Above this, we bail out.
We plot each Inference.problem in two ways:
1. The "empirical" plot consists in plotting the vector of sizes vs the
execution time. Doing this requires access to the raw data (i.e. before
applying the model). Since we can't plot in 4d, we limit these plots to
set of data where there are at most two size inputs, e.g. a binary
instruction such as Add is ok, or a sequence of two unary instructions,
etc.
2. The "validator" plot consists in plotting the predicted execution
time as a function of the basis vs the empirical execution time.
For 2. we make some effort for simplifiying the model, i.e. we get rid
of the constant base "functions".
*)
open Pyplot
(* A [raw_row] is consists in a list of named values called a workload
(corresponding to time measurement of events) together with the
corresponding measured execution time. Hence, if [n] is the length
of the workload, there are [n+1] columns. *)
type raw_row = {workload : (string * float) list; qty : float}

(* Materialise each (sparse vector, measured time) pair as a [raw_row],
   turning the vector into an association list of named sizes. *)
let convert_workload_data : (Sparse_vec.String.t * float) list -> raw_row list =
 fun workload_data ->
  let row_of_sample (vec, measured) =
    {workload = Sparse_vec.String.to_list vec; qty = measured}
  in
  List.map row_of_sample workload_data
(* Colour/marker pair for the [i]-th scatter plot on a page; at most three
   superposed plots are supported. *)
let style i =
  if i = 0 then Plot.(Blue, Dot)
  else if i = 1 then Plot.(Red, Square)
  else if i = 2 then Plot.(Green, Triangle)
  else Stdlib.failwith "Display.style: style overflow"
(* [scatterplot_2d title (name, input) outputs] builds a 2d scatter plot of
   the single size column [input] (x axis, labelled [name]) against each
   column of [outputs] (y axis, labelled "timing"), one style per output. *)
let scatterplot_2d title (name, input) outputs =
  let data =
    List.mapi
      (fun i output -> (Plot.Dim2Scatter {xs = input; ys = output}, style i))
      outputs
  in
  let plot : Plot.dim2 Plot.scatter =
    let open Plot in
    Scatter {data; axes = Dim2Axes {xaxis = name; yaxis = "timing"}; title}
  in
  `Dim2 (Plot.scatter plot)
(* [scatterplot_3d title (name_x, input_x) (name_y, input_y) outputs] builds
   a 3d scatter plot of the two size columns (x and y axes) against each
   column of [outputs] (z axis, labelled "timing"), one style per output. *)
let scatterplot_3d title (name_x, input_x) (name_y, input_y) outputs =
  let data =
    List.mapi
      (fun i output ->
        let sty = style i in
        (Plot.Dim3Scatter {xs = input_x; ys = input_y; zs = output}, sty))
      outputs
  in
  let plot : Plot.dim3 Plot.scatter =
    Scatter
      {
        data;
        axes = Dim3Axes {xaxis = name_x; yaxis = name_y; zaxis = "timing"};
        title;
      }
  in
  `Dim3 (Plot.scatter plot)
(* Scatter plot/s/ of the input vectors specified by [input_columns]
against the [outputs]. This will superpose [List.length outputs]
scatter plots on the same page. *)
(* [plot_scatter title input_columns outputs] dispatches on the number of
   size columns: one column -> a single 2d plot; two -> a single 3d plot;
   more -> one 3d plot per 2-element subset of the columns.  Empty input
   is an error. *)
let plot_scatter title input_columns outputs =
  match input_columns with
  | [] ->
      let msg =
        Format.asprintf "Display.plot_scatter (%s): empty scatter data" title
      in
      Error msg
  | [column] ->
      let plot = scatterplot_2d title column outputs in
      Ok [plot]
  | [column1; column2] ->
      let plot = scatterplot_3d title column1 column2 outputs in
      Ok [plot]
  | _ ->
      (* More than two dimensions: plot every pair of columns separately. *)
      let subsets = Benchmark_helpers.enumerate_subsets 2 input_columns in
      let plots =
        List.map
          (function
            | [((dim1, _) as col1); ((dim2, _) as col2)] ->
                let title = Format.asprintf "%s\n(%s, %s)" title dim1 dim2 in
                scatterplot_3d title col1 col2 outputs
            | _ -> assert false)
          subsets
      in
      Ok plots
(* Extract size vs timing information from the [workload_data], e.g.
if workload_data = (Add_int_int 10 20, 2879) :: (Add_int_int 3 2, 768) :: []
then outputs named column vectors:
[ ("Add_int_int1", [| 10 ; 20 |]) ; ("Add_int_int2", [| 3 ; 2 |]) ]
together with timings:
[| 2879 ; 768 |] *)
(* [empirical_data workload_data] converts raw (size vector, timing) samples
   into named size column matrices plus a timing column matrix.

   All samples must mention exactly the same variables in the same order;
   otherwise [Error] is returned.  Fix: the error message previously read
   "consistenly" (typo). *)
let empirical_data (workload_data : (Sparse_vec.String.t * float) list) =
  let samples = convert_workload_data workload_data in
  (* Extract name of variables and check well-formedness *)
  let variables =
    List.map (fun {workload; _} -> List.map fst workload) samples
  in
  let variables = List.sort_uniq Stdlib.compare variables in
  match variables with
  | [] | _ :: _ :: _ ->
      (* Zero or several distinct variable lists: inconsistent naming. *)
      let msg =
        Format.asprintf
          "Display.empirical_data: variables not named consistently@."
      in
      Error msg
  | [vars] ->
      let rows = List.length samples in
      let input_dims = List.length vars in
      (* One single-column matrix per input dimension. *)
      let columns =
        Array.init input_dims (fun _ -> Matrix.create ~lines:rows ~cols:1)
      in
      let timings = Matrix.create ~lines:rows ~cols:1 in
      List.iteri
        (fun i {workload; qty} ->
          assert (Compare.List_length_with.(workload = input_dims)) ;
          List.iteri
            (fun input_dim (_, size) -> Matrix.set columns.(input_dim) i 0 size)
            workload ;
          Matrix.set timings i 0 qty)
        samples ;
      let columns = Array.to_list columns in
      let named_columns =
        List.combine ~when_different_lengths:() vars columns
        |> (* [columns = Array.to_list (Array.init (List.length vars))] *)
        WithExceptions.Result.get_ok ~loc:__LOC__
      in
      Ok (named_columns, timings)
(* [column_is_constant m] is [true] iff every entry of the single-column
   matrix [m] equals its first entry (exact float equality, as before).
   Improvement: stops at the first differing entry instead of always
   scanning the whole column; also avoids shadowing [Stdlib.fst]. *)
let column_is_constant (m : Matrix.t) =
  let (rows, cols) = Matrix.shape m in
  assert (cols = 1) ;
  let first = Matrix.get m 0 0 in
  let rec scan i = i >= rows || (Matrix.get m i 0 = first && scan (i + 1)) in
  scan 1
(* Prune the dimensions of the input matrix which are constant. *)
(* Prune the dimensions of the input matrix which are constant, returning
   the remaining named columns together with the output (timing) matrix.
   Degenerate problems are not expected here. *)
let prune_problem problem : (Free_variable.t * Matrix.t) list * Matrix.t =
  match problem with
  | Inference.Degenerate _ -> assert false
  | Inference.Non_degenerate {input; output; nmap; _} ->
      let (_, cols) = Matrix.shape input in
      (* Name each column of the input matrix via the problem's name map. *)
      let named_columns =
        List.init ~when_negative_length:() cols (fun c ->
            let name = Inference.NMap.nth_exn nmap c in
            let col = Matrix.column input c in
            (name, col))
        |> (* column count cannot be negative *)
        WithExceptions.Result.get_ok ~loc:__LOC__
      in
      let columns =
        List.filter (fun (_, col) -> not (column_is_constant col)) named_columns
      in
      (columns, output)
(* [plot_stacked row col plots] places each axis of [plots] in column [col]
   of the page, stacking them vertically starting at [row]. *)
let rec plot_stacked :
    int ->
    int ->
    [> `Dim2 of (unit, Pyplot.Plot.dim2) Pyplot.Plot.Axis.t
    | `Dim3 of (unit, Pyplot.Plot.dim3) Pyplot.Plot.Axis.t ]
    list ->
    unit Plot.t =
 fun row col plots ->
  match plots with
  | [] -> Plot.return ()
  | `Dim2 ax :: tl ->
      let open Plot in
      let* () = subplot_2d ~row ~col ax in
      plot_stacked (row + 1) col tl
  | `Dim3 ax :: tl ->
      let open Plot in
      let* () = subplot_3d ~row ~col ax in
      plot_stacked (row + 1) col tl
(* [validator problem solution] plots, for the pruned (non-constant) basis
   columns, the measured timings against the model's predicted timings
   (input matrix times fitted weights).  Returns the number of stacked rows
   and a column-parameterized plot, or an error for degenerate problems. *)
let validator (problem : Inference.problem) (solution : Inference.solution) =
  match problem with
  | Inference.Degenerate _ -> Error "Display.validator: degenerate plot"
  | Inference.Non_degenerate {input; _} ->
      let {Inference.weights; _} = solution in
      let predicted = Matrix.numpy_mul input weights in
      let (columns, timings) = prune_problem problem in
      (* Render free-variable names for axis labels. *)
      let columns =
        List.map
          (fun (c, m) -> (Format.asprintf "%a" Free_variable.pp c, m))
          columns
      in
      Result.bind
        (plot_scatter "Validation (chosen basis)" columns [timings; predicted])
        (fun plots ->
          let nrows = List.length plots in
          let plot ~col = plot_stacked 0 col plots in
          Result.ok (nrows, plot))
(* [empirical workload_data] builds the raw size-vs-timing scatter plot(s);
   returns the number of stacked rows and a column-parameterized plot. *)
let empirical (workload_data : (Sparse_vec.String.t * float) list) :
    (int * (col:int -> unit Plot.t), string) result =
  let open Result_syntax in
  let* (columns, timings) = empirical_data workload_data in
  let* plots = plot_scatter "Empirical" columns [timings] in
  let nrows = List.length plots in
  Ok (nrows, fun ~col -> plot_stacked 0 col plots)
(* [eval_mset mset eval] evaluates a sparse linear combination of free
   variables: the sum of [eval var *. coeff] over its entries. *)
let eval_mset (mset : Free_variable.Sparse_vec.t)
    (eval : Free_variable.t -> float) =
  Free_variable.Sparse_vec.fold
    (fun var coeff acc -> acc +. (eval var *. coeff))
    mset
    0.0

(* [eval_affine aff eval] evaluates an affine form: its linear combination
   under [eval] plus its constant term. *)
let eval_affine (aff : Costlang.affine) (eval : Free_variable.t -> float) =
  eval_mset aff.linear_comb eval +. aff.const
(* [validator_empirical workload_data problem solution] plots the raw
   measured timings against the predicted timings, where each prediction is
   obtained by evaluating the problem's affine lines under the solution's
   free-variable valuation (degenerate problems carry their predictions
   directly).  Returns the number of stacked rows and a
   column-parameterized plot. *)
let validator_empirical workload_data (problem : Inference.problem)
    (solution : Inference.solution) :
    (int * (col:int -> unit Plot.t), string) result =
  let {Inference.mapping; _} = solution in
  (* Look up a free variable's fitted value; missing names are a bug. *)
  let valuation name =
    WithExceptions.Option.get ~loc:__LOC__
    @@ List.assoc ~equal:Free_variable.equal name mapping
  in
  let predicted =
    match problem with
    | Inference.Degenerate {predicted; _} -> predicted
    | Inference.Non_degenerate {lines; _} ->
        let predicted_list =
          List.map
            (fun ols_line ->
              let (Inference.Full (affine, _)) = ols_line in
              eval_affine affine valuation)
            lines
        in
        let array = Array.of_list predicted_list in
        Matrix.init ~lines:(Array.length array) ~cols:1 ~f:(fun l _ ->
            array.(l))
  in
  Result.bind (empirical_data workload_data) @@ fun (columns, timings) ->
  Result.bind (plot_scatter "Validation (raw)" columns [timings; predicted])
  @@ fun plots ->
  let nrows = List.length plots in
  let plot ~col = plot_stacked 0 col plots in
  Result.ok (nrows, plot)
(* Where the produced figure goes: saved to a file (default name when
   [file] is [None]), shown interactively, or both. *)
type plot_target =
  | Save of {file : string option}
  | Show
  | ShowAndSave of {file : string option}
(* [perform_plot ~measure ~model_name ~problem ~solution ~plot_target]
   assembles up to three plot groups — empirical data, basis validation,
   and raw validation — side by side (one column each; failures of any
   group are silently skipped), then renders to [plot_target].  Returns
   [true] iff something was plotted. *)
let perform_plot ~measure ~model_name ~problem ~solution ~plot_target =
  Pyinit.pyinit () ;
  let (Measure.Measurement ((module Bench), measurement)) = measure in
  (* Default output file name: <bench>_<model>_<kind>.pdf *)
  let filename kind =
    Format.asprintf "%s_%s_%s.pdf" Bench.name model_name kind
  in
  let main_plot_opt =
    let workload_data =
      List.map
        (fun {Measure.workload; qty} ->
          (Bench.workload_to_vector workload, qty))
        measurement.workload_data
    in
    (* [current_col] assigns each successful plot group the next free
       column; [max_rows] tracks the tallest group. *)
    let current_col = ref 0 in
    let max_rows = ref 0 in
    let with_col f =
      let col = !current_col in
      incr current_col ;
      f col
    in
    let empirical =
      Result.fold
        (empirical workload_data)
        ~ok:(fun (rows, plot) ->
          max_rows := max !max_rows rows ;
          with_col (fun col -> plot ~col))
        ~error:(fun _err -> Plot.return ())
    in
    let validator =
      Result.fold
        (validator problem solution)
        ~ok:(fun (rows, plot) ->
          max_rows := max !max_rows rows ;
          with_col (fun col -> plot ~col))
        ~error:(fun _err -> Plot.return ())
    in
    let validator_emp =
      Result.fold
        (validator_empirical workload_data problem solution)
        ~ok:(fun (rows, plot) ->
          max_rows := max !max_rows rows ;
          with_col (fun col -> plot ~col))
        ~error:(fun _err -> Plot.return ())
    in
    if !current_col = 0 then (* Nothing to plot. *)
      None
    else
      let plot =
        let open Plot in
        let* () = empirical in
        let* () = validator in
        validator_emp
      in
      Some (!max_rows, !current_col, plot)
  in
  match main_plot_opt with
  | None -> false
  | Some (nrows, ncols, plot) ->
      Plot.run
        ~nrows
        ~ncols
        (let open Plot in
        let* () = plot in
        let* () = suptitle ~title:Bench.name ~fontsize:None in
        (* Render according to the requested target. *)
        match plot_target with
        | Save {file} -> (
            match file with
            | None ->
                savefig ~filename:(filename "collected") ~dpi:3000 ~quality:95
            | Some filename -> savefig ~filename ~dpi:3000 ~quality:95)
        | Show ->
            Plot.(
              let* () = show () in
              return ())
        | ShowAndSave {file} -> (
            match file with
            | None ->
                Plot.(
                  let* () = show () in
                  savefig ~filename:(filename "collected") ~dpi:3000 ~quality:95)
            | Some filename ->
                Plot.(
                  let* () = show () in
                  savefig ~filename ~dpi:3000 ~quality:95))) ;
      true
| (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <[email protected]> *)
(* Copyright (c) 2020 Nomadic Labs. <[email protected]> *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
|
Subsets and Splits