language | filename | code |
---|---|---|
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_to_refactor.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val of_candidate :
source_text:Full_fidelity_source_text.t ->
path:Relative_path.t ->
Extract_method_types.candidate ->
Code_action_types.Refactor.t |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_traverse.ml | open Hh_prelude
module T = Extract_method_types
let rec calc_placeholder_name taken_names n =
let placeholder = "method" ^ string_of_int n in
if String.Set.mem taken_names placeholder then
calc_placeholder_name taken_names (n + 1)
else
placeholder
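(* Illustrative sketch (not part of the original source): with "method0" and
   "method1" already taken, the first free placeholder name is "method2". *)
let _placeholder_name_example : string =
  calc_placeholder_name (String.Set.of_list ["method0"; "method1"]) 0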
(** tracks what variables are defined *)
module Scopes = struct
type t = String.Set.t list
let empty = []
let enter t = String.Set.empty :: t
let exit t = List.tl t |> Option.value ~default:empty
let set_defined t var_name =
match t with
| hd :: tl -> String.Set.add hd var_name :: tl
| [] -> [String.Set.singleton var_name]
let is_defined t var_name =
List.exists t ~f:(fun vars -> String.Set.mem vars var_name)
end
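(* Illustrative sketch (not part of the original source): [Scopes] behaves like
   a stack of sets of defined variable names; a name is "defined" if any scope
   on the stack contains it. *)
let _scopes_example : bool =
  let scopes = Scopes.(set_defined (enter empty) "$x") in
  Scopes.is_defined scopes "$x" && not (Scopes.is_defined scopes "$y")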
(**
We have 3 regions:
- pre-selection
- selection
- post-selection
[Region.t] tracks sundry information we collect from a region
*)
module Region = struct
type t = {
referenced: T.var_tys;
(** variables referenced from the region, along with their types *)
defined: Scopes.t; (** what variables are defined in the region *)
iterator_kind: T.iterator_kind option;
is_async: bool;
has_return: bool;
(** When a region contains a `return`, that affects whether/how we can extract a method.
As of this writing, we do not provide the refactor if the region containing the selection has a `return`.
*)
}
let empty =
{
referenced = String.Map.empty;
defined = Scopes.empty;
iterator_kind = None;
is_async = false;
has_return = false;
}
let free { referenced; defined; _ } =
referenced
|> String.Map.filter_keys ~f:(fun key ->
(Fn.non @@ Scopes.is_defined defined) key)
let used_from ~(defined_in : t) ~(referenced_from : t) =
referenced_from
|> free
|> String.Map.filter_keys ~f:(Scopes.is_defined defined_in.defined)
end
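(* Illustrative sketch (not part of the original source): a variable that is
   referenced but not defined within a region is "free"; free variables of the
   selection become parameter candidates for the extracted method. Here "$m" is
   defined locally, so only "$n" remains free. *)
let _region_free_example : T.var_tys =
  let region =
    Region.
      {
        empty with
        referenced =
          String.Map.of_alist_exn [("$n", T.Ty "int"); ("$m", T.Ty "string")];
        defined = Scopes.set_defined Scopes.empty "$m";
      }
  in
  Region.free region (* keys: ["$n"] *)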
let plus_candidate (a : T.candidate option) (b : T.candidate option) =
match (a, b) with
| (Some a, Some b) ->
let use_first ~key:_ v1 _v2 = v1 in
Some
T.
{
(* For these fields, just take [b]'s value, since [a] and [b]
are always the same anyway *)
method_pos = b.method_pos;
method_is_static = b.method_is_static;
placeholder_name = b.placeholder_name;
selection_kind = b.selection_kind;
(* We grow `pos` to fill the selection
and learn more about whether to make the extracted function async, an iterator, etc. *)
pos = Pos.merge a.pos b.pos;
is_async = a.is_async || b.is_async;
(* Note: if `(a, b)` is `(Some it_kind_a, Some it_kind_b)` then it doesn't matter whether
we use a or b assuming the user was consistent in yielding values or yielding key=>value pairs.
If the user is inconsistent, then hopefully they don't mind that we go with `b`.
*)
iterator_kind = Option.first_some b.iterator_kind a.iterator_kind;
(* `params` are calculated based on variables that are defined in the
pre-selection region and used in the selection region.
If the same variable has two different types then something weird is happening. Just use b's type.
*)
params = String.Map.merge_skewed a.params b.params ~combine:use_first;
(* `return` is calculated based on variables that are defined in the
selection region and used in the post-selection region.
If the same variable has two different types, then something weird is happening. Just use b's type.
*)
return = String.Map.merge_skewed a.return b.return ~combine:use_first;
}
| _ -> Option.first_some a b
let positions_visitor
(selection : Pos.t) ~method_pos ~method_is_static ~method_names =
(* These refs are used to accumulate context top->down left->right *)
(*
The region before the user's selection. This is important for finding
the types of variables that are free in the selection, which is used in calculating
the parameter types for the extracted method.
*)
let pre_selection_region = ref Region.empty in
(*
The selection region is used for finding which variables
are free in the selection, which is used in calculating parameters for the extracted method.
The selection region is also used for finding the types
of variables that are used in the post-selection region, which is used for calculating
the return of the extracted method.
*)
let selection_region = ref Region.empty in
(*
See [selection region]
*)
let post_selection_region = ref Region.empty in
(* positions that overlap the selection, used for ensuring that a selection is valid.
See [ensure_selection_common_root] *)
let expr_positions_overlapping_selection = ref [] in
(* Count of statements in the selection. Used to calculate [selection_kind] *)
let stmts_in_selection_count = ref 0 in
(* Whether we are currently in an lvalue. This affects whether a variable is being used or defined. *)
let in_lvalue = ref false in
let with_in_lvalue f =
let orig_in_lvalue = !in_lvalue in
in_lvalue := true;
let res = f () in
in_lvalue := orig_in_lvalue;
res
in
let ensure_selection_common_root : T.candidate option -> T.candidate option =
(* filter out invalid selection like this:
(1 + 2) + 3
^-----^ selection
and this:
$x = 1 + 2; $y = 3;
^--------------^ selection
We do not offer refactorings for invalid selections. *)
Option.filter ~f:(fun candidate ->
List.for_all !expr_positions_overlapping_selection ~f:(fun p ->
Pos.(contains candidate.T.pos p || contains p candidate.T.pos)))
in
let placeholder_name = calc_placeholder_name method_names 0 in
let current_region pos : Region.t ref =
if Pos.start_offset selection > Pos.start_offset pos then
pre_selection_region
else if Pos.contains selection pos then
selection_region
else
post_selection_region
in
let make acc pos ty_string =
if (not !in_lvalue) && Pos.contains selection pos then
if !selection_region.Region.has_return then
None
else
let params = Region.free !selection_region in
let return = String.Map.empty (* adjusted after the selection *) in
let selection_kind =
if !stmts_in_selection_count = 0 then
T.SelectionKindExpression ty_string
else
T.SelectionKindStatement
in
plus_candidate acc
@@ Some
T.
{
pos;
method_pos;
method_is_static;
placeholder_name;
selection_kind;
params;
return;
is_async = !selection_region.Region.is_async;
iterator_kind = !selection_region.Region.iterator_kind;
}
else
acc
in
object (self)
inherit [T.candidate option] Tast_visitor.reduce as super
method zero = None
method plus = plus_candidate
method! on_method_ env meth =
super#on_method_ env meth
|> ensure_selection_common_root
|> Option.map ~f:(fun acc ->
T.
{
acc with
return =
Region.used_from
~defined_in:!selection_region
~referenced_from:!post_selection_region;
})
method! on_as_expr env as_expr =
with_in_lvalue (fun () -> super#on_as_expr env as_expr)
method! on_stmt env ((pos, stmt_) as stmt) =
if Pos.contains selection pos then incr stmts_in_selection_count;
let region = current_region pos in
(match stmt_ with
| Aast.Awaitall (tmp_var_block_pairs, _) ->
tmp_var_block_pairs
|> List.iter ~f:(function
| (Some (_, tmp_lid), _) ->
let name = Local_id.get_name tmp_lid in
region :=
Region.
{
!region with
defined = Scopes.set_defined !region.defined name;
}
| _ -> ())
| Aast.Foreach (_, as_, _) ->
let (iterator_kind, is_async) =
Aast.(
match as_ with
| As_v _ -> (Some T.Iterator, false)
| Await_as_v _ -> (Some T.Iterator, true)
| As_kv _ -> (Some T.KeyedIterator, false)
| Await_as_kv _ -> (Some T.KeyedIterator, true))
in
region := Region.{ !region with iterator_kind; is_async }
| Aast.Return _ -> region := Region.{ !region with has_return = true }
| _ -> ());
let acc = super#on_stmt env stmt in
let ty = Typing_make_type.void Typing_reason.Rnone in
make acc pos (T.ty_string_of_ty env ty)
method! on_fun_ env fun_ =
let open Aast_defs in
let region = current_region fun_.f_span in
let add_param { param_name; _ } =
region :=
Region.
{
!region with
defined = Scopes.set_defined !region.defined param_name;
}
in
fun_.f_params |> List.iter ~f:add_param;
super#on_fun_ env fun_
method! on_expr env expr =
let (ty, pos, expr_) = expr in
let ty_string = T.ty_string_of_ty env ty in
if Pos.overlaps selection pos then
expr_positions_overlapping_selection :=
pos :: !expr_positions_overlapping_selection;
let region = current_region pos in
let acc =
(* mutates refs *)
match expr_ with
| Aast.Lfun (fun_, _)
| Aast.Efun Aast.{ ef_fun = fun_; _ } ->
(region :=
Region.{ !region with defined = Scopes.enter !region.defined });
let acc =
match Aast_defs.(fun_.f_body.fb_ast) with
| [(_, Aast.Return (Some e))] ->
(* `() ==> 3 + 3` has a `return` in the tast, which we remove because "extract method" isn't
safe for return statements in general. But it's fine to offer the refactor in this case: we can extract the `3 + 3`.
*)
super#on_expr env e
| _ -> super#on_expr_ env expr_
in
(region :=
Region.{ !region with defined = Scopes.exit !region.defined });
acc
| Aast.(Binop { bop = Ast_defs.Eq _; lhs; rhs }) ->
let rhs = self#on_expr env rhs in
self#plus rhs (with_in_lvalue (fun () -> self#on_expr env lhs))
| Aast.Lvar (_, lid) when !in_lvalue ->
let (_ : T.candidate option) = super#on_expr env expr in
(region :=
Region.
{
!region with
defined =
Scopes.set_defined !region.defined (Local_id.get_name lid);
});
None
| Aast.Lvar (_, lid) ->
let name = Local_id.get_name lid in
Region.(
if not @@ Scopes.is_defined !region.defined name then
region :=
{
!region with
referenced =
String.Map.set ~key:name ~data:ty_string !region.referenced;
});
super#on_expr env expr
| Aast.Yield af ->
let iterator_kind =
Aast.(
match af with
| AFvalue _ -> Some T.Iterator
| AFkvalue _ -> Some T.KeyedIterator)
in
(region := Region.{ !region with iterator_kind });
super#on_expr env expr
| Aast.Await _ ->
(region := Region.{ !region with is_async = true });
super#on_expr env expr
| _ -> super#on_expr env expr
in
make acc pos ty_string
end
(**
- Avoid traversing methods outside the selection
- Pass information [positions_visitor] needs
*)
let top_visitor ~(selection : Pos.t) =
object
inherit [_] Tast_visitor.reduce
method zero = None
method plus = Option.first_some
method! on_class_ env class_ =
let open Aast in
if Pos.contains class_.c_span selection then
let methods = class_.c_methods in
let method_names =
methods
|> List.map ~f:(fun { m_name; _ } -> snd m_name)
|> String.Set.of_list
in
methods
|> List.find ~f:(fun { m_span; _ } -> Pos.contains m_span selection)
|> Option.bind ~f:(fun meth ->
let visitor =
positions_visitor
selection
~method_pos:meth.Aast.m_span
~method_is_static:meth.Aast.m_static
~method_names
in
visitor#on_method_ env meth)
else
None
end
let find_candidate ~selection ~entry ctx =
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
(top_visitor ~selection)#go
ctx
tast.Tast_with_dynamic.under_normal_assumptions |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_traverse.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find_candidate :
selection:Pos.t ->
entry:Provider_context.entry ->
Provider_context.t ->
Extract_method_types.candidate option |
hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_plugins/dune | (* -*- tuareg -*- *)
let library_entry name suffix =
Printf.sprintf
"(library
(name %s)
(wrapped false)
(modules)
(libraries %s_%s))" name name suffix
let fb_entry name =
library_entry name "fb"
let stubs_entry name =
library_entry name "stubs"
let entry is_fb name =
if is_fb then
fb_entry name
else
stubs_entry name
let () =
(* test presence of "facebook" subfolder *)
let current_dir = Sys.getcwd () in
let fb_dir = Filename.concat current_dir "facebook" in
(* locate facebook/dune *)
let fb_dune = Filename.concat fb_dir "dune" in
let is_fb = Sys.file_exists fb_dune in
let extract_method_plugins = entry is_fb "extract_method_plugins" in
Jbuild_plugin.V1.send extract_method_plugins |
|
hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_types/dune | (library
(name extract_method_types)
(libraries
code_action_types
lsp
provider_context
parser
tast_env
utils_core)) |
|
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_types/extract_method_types.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type iterator_kind =
| Iterator
| KeyedIterator
type ty_string = Ty of string
type var_tys = ty_string String.Map.t
type selection_kind =
| SelectionKindExpression of ty_string
| SelectionKindStatement
let ty_string_of_ty env locl_ty =
if Typing_defs.is_denotable locl_ty then
let env = Tast_env.tast_env_as_typing_env env in
Ty (Typing_print.full env locl_ty)
else
Ty "_"
type candidate = {
pos: Pos.t;
placeholder_name: string;
selection_kind: selection_kind;
params: var_tys;
return: var_tys;
iterator_kind: iterator_kind option;
is_async: bool;
method_pos: Pos.t;
method_is_static: bool;
} |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_types/extract_method_types.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type iterator_kind =
| Iterator
| KeyedIterator
type ty_string = Ty of string
val ty_string_of_ty : Tast_env.t -> Typing_defs.locl_ty -> ty_string
type selection_kind =
| SelectionKindExpression of ty_string
(**
Examples of expression selections:
$x = ( 100 + 200 ) + 300
^-^
^---^
^-----------^
^--------------------------^
*As of this writing*, when the selection corresponds to an expression, we:
- replace the original expression with a call expression,
but don't assign to a variable.
- calculate the return type of the extracted method based on the type of
the expression we are replacing.
*)
| SelectionKindStatement
(**
Examples of statement selections:
if (cond()) { $x0 = true; $x1 = true; $z = 5; }
^---------^
^------------^
^-------------------------^
^-------------------------------------------------^
*As of this writing*, when the selection corresponds to a statement, we:
- Add a method call. If the method is not an iterator then we assign to a variable.
Example: `$x = $this->placeholder()`. If the method is an iterator, we do something
more interesting, see test cases.
- Calculate the return type based on what variables are reassigned in the selection region
and used after the selection region. For example, if the selection includes `$x = 3; $y = 5;`
and both `$x` and `$y` are used later, then the return type is `(int, int)` and the call site
is something like `list($x, $y) = $this->placeholder();`
*)
type var_tys = ty_string String.Map.t
type candidate = {
pos: Pos.t;
placeholder_name: string;
selection_kind: selection_kind;
params: var_tys;
(** The parameters for the method we extract, along with their types. *)
return: var_tys;
(** The returns for the method we extract, along with their types.
We simulate multiple returns using tuple types and `list()`
*)
iterator_kind: iterator_kind option;
(** Describes the method we are extracting. For example, if the selected code
contains `yield 3` then the [iterator_kind] will be `Some Iterator` and if the selected
code does not yield anything then the [iterator_kind] is `None` *)
is_async: bool;
(** Describes the method we are extracting. For example, if the selected code contains
`await` then `is_async` will be `true`. *)
method_pos: Pos.t;
(** Describes the method containing the user's selection.
Used to calculate indentation for the code we generate.
*)
method_is_static: bool;
(** Describes the method containing the user's selection.
If the method we are extracting *from* is static then the method we extract will be static. *)
} |
hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/dune | (library
(name inline_method)
(wrapped true)
(libraries
annotated_ast
ast
code_action_types
full_fidelity
lsp
pos
provider_context
server_command_types
tast_env
tast_provider
utils_core)) |
|
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let find ~entry ~(range : Lsp.range) ctx =
let source_text = Ast_provider.compute_source_text ~entry in
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let path = entry.Provider_context.path in
let cursor = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
Inline_method_find_candidate.find_candidate ~cursor ~entry ctx
|> Option.map ~f:(Inline_method_to_refactor.to_refactor ~path ~source_text)
|> Option.to_list |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_find_candidate.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module T = Inline_method_types
let method_info_create (m : Tast.method_) : T.method_info =
let open Aast_defs in
let is_private =
match m.m_visibility with
| Private -> true
| Public
| Protected
| Internal ->
false
in
let block = m.m_body.fb_ast in
let last_stmt_is_return =
List.last block
|> Option.map ~f:(function
| (_, Return _) -> true
| _ -> false)
|> Option.value ~default:false
in
let is_normal_param p =
(not p.param_is_variadic)
&& Option.is_none p.param_expr
&& List.is_empty p.param_user_attributes
&&
match p.param_callconv with
| Ast_defs.Pinout _ -> false
| Ast_defs.Pnormal -> true
in
let has_void_return =
match snd m.Aast.m_ret with
| Some Aast_defs.(_, Hprim Tvoid) -> true
| None
| Some _ ->
false
in
let all_params_are_normal = m.m_params |> List.for_all ~f:is_normal_param in
T.
{
method_pos = m.m_span;
block;
param_names = m.m_params |> List.map ~f:(fun p -> p.param_name);
(* only the following two are computed from the body *)
var_names = [];
return_cnt = 0;
has_void_return;
is_private;
last_stmt_is_return;
all_params_are_normal;
}
let find_candidate ~(cursor : Pos.t) ~entry ctx : T.candidate option =
let method_use_counts : int String.Map.t ref = ref String.Map.empty in
let method_infos : T.method_info String.Map.t ref = ref String.Map.empty in
let target_class : string option ref = ref None in
let current_method : string option ref = ref None in
let stmt_pos = ref Pos.none in
let call_info : T.call_info option ref = ref None in
let method_info_update method_name ~f =
method_infos :=
String.Map.update !method_infos method_name ~f:(fun v ->
(* method_infos map is guaranteed to have all method names in it
since we traverse methods after adding them to the map *)
f @@ Option.value_exn v)
in
let visitor =
object (self)
inherit Tast_visitor.iter as super
method! on_stmt env ((pos, _) as stmt) =
if Option.is_some !current_method then stmt_pos := pos;
super#on_stmt env stmt
method! on_Lvar env lvar =
match !current_method with
| Some current_method ->
let var = Local_id.get_name @@ snd lvar in
method_info_update current_method ~f:(fun method_info ->
T.{ method_info with var_names = var :: method_info.var_names });
super#on_Lvar env lvar
| None -> ()
method! on_Return env return =
match !current_method with
| Some current_method ->
method_info_update current_method ~f:(fun method_info ->
T.{ method_info with return_cnt = method_info.return_cnt + 1 });
super#on_Return env return
| None -> ()
method! on_expr env expr =
let open Aast_defs in
match !current_method with
| Some current_method -> begin
super#on_expr env expr;
let (_, expr_pos, expr_) = expr in
let on_call ~call_id_pos ~callee_name ~param_kind_arg_pairs =
method_use_counts :=
String.Map.update
!method_use_counts
callee_name
~f:(fun count_opt -> 1 + Option.value count_opt ~default:0);
if Pos.contains call_id_pos cursor then
let call_arg_positions =
List.map param_kind_arg_pairs ~f:(fun (_, (_, arg_pos, _)) ->
arg_pos)
in
call_info :=
Some
T.
{
callee_name;
call_stmt_pos = !stmt_pos;
caller_name = current_method;
call_pos = expr_pos;
call_arg_positions;
}
in
match expr_ with
| Call
{
func =
( _,
call_id_pos,
Class_const ((_, _, (CIself | CIstatic)), (_, callee_name))
);
args = param_kind_arg_pairs;
_;
} -> begin
on_call ~call_id_pos ~callee_name ~param_kind_arg_pairs
end
| Call
{
func =
( _,
_,
Obj_get
((_, _, This), (_, _, Id (call_id_pos, callee_name)), _, _)
);
args = param_kind_arg_pairs;
_;
} ->
on_call ~call_id_pos ~callee_name ~param_kind_arg_pairs
| _ -> ()
end
| None -> ()
method! on_class_ env class_ =
let open Aast_defs in
if Pos.contains class_.c_span cursor then begin
target_class := Some (snd class_.c_name);
let method_infos_res =
class_.c_methods
|> List.map ~f:(fun m -> (snd m.m_name, method_info_create m))
|> String.Map.of_alist
in
match method_infos_res with
| `Ok method_infos_ ->
method_infos := method_infos_;
class_.c_methods
|> List.iter ~f:(fun m ->
current_method := Some (snd m.m_name);
self#on_method_ env m)
| `Duplicate_key _ -> ()
end
method! on_fun_ _ _ = ()
end
in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
visitor#go ctx tast.Tast_with_dynamic.under_normal_assumptions;
let open Option.Let_syntax in
let* call = !call_info in
let* (T.
{
is_private;
all_params_are_normal;
last_stmt_is_return;
return_cnt;
_;
} as callee) =
String.Map.find !method_infos call.T.callee_name
in
let* caller = String.Map.find !method_infos call.T.caller_name in
let is_inlineable =
let has_ok_returns =
return_cnt = 0 || (return_cnt = 1 && last_stmt_is_return)
in
has_ok_returns && is_private && all_params_are_normal
in
let* called_count = String.Map.find !method_use_counts call.T.callee_name in
if called_count = 1 && is_inlineable then
Some T.{ call; callee; caller }
else
None |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_find_candidate.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find_candidate :
cursor:Pos.t ->
entry:Provider_context.entry ->
Provider_context.t ->
Inline_method_types.candidate option |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_rename.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type t = {
used_vars: String.Set.t;
old_to_new: string String.Map.t;
}
let create ~used_vars = { used_vars; old_to_new = String.Map.empty }
let rename ({ used_vars; old_to_new } as t) old_var : t * string =
let rec next_var (s : string) : string =
if String.Set.mem used_vars s then
next_var (s ^ "_")
else
s
in
match String.Map.find old_to_new old_var with
| Some new_var -> (t, new_var)
| None ->
let new_var = next_var old_var in
let t =
{
used_vars = String.Set.add used_vars new_var;
old_to_new = String.Map.update old_to_new old_var ~f:(Fn.const new_var);
}
in
(t, new_var)
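(* Illustrative sketch (not part of the original source): with "$a" and "$b"
   already in use, renaming "$b" yields the fresh name "$b_", and renaming the
   same variable again returns the same name (rename is idempotent). *)
let _rename_example : string =
  let t = create ~used_vars:(String.Set.of_list ["$a"; "$b"]) in
  let (t, first) = rename t "$b" in
  let (_t, again) = rename t "$b" in
  assert (String.equal first again);
  first (* "$b_" *)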
let rename_all t old_vars =
let fold old_var (t, new_vars) =
let (t, new_var) = rename t old_var in
(t, new_var :: new_vars)
in
List.fold_right old_vars ~init:(t, []) ~f:fold |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_rename.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
(**
* Enables creating fresh variables that resemble existing variables,
* for use in generated code in the refactor.
*
* Given `used_vars` containing "$a" and "$b",
* `rename_all ["$b"; "$c"]` is `["$b_"; "$c"]`
*)
type t
val create : used_vars:String.Set.t -> t
(** idempotent, generates a fresh name not in `used_vars` *)
val rename : t -> string -> t * string
(** idempotent, preserves order *)
val rename_all : t -> string list -> t * string list |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_rewrite_block.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Syn = Full_fidelity_positioned_syntax
module PositionedTree = Full_fidelity_syntax_tree.WithSyntax (Syn)
let apply_patches_to_string old_content (patches : ServerRenameTypes.patch list)
: string =
let buf = Buffer.create (String.length old_content) in
let patch_list =
List.sort ~compare:ServerRenameTypes.compare_result patches
in
ServerRenameTypes.write_patches_to_buffer buf old_content patch_list;
Buffer.contents buf
(** Given the source text of a block, apply `f` to that text wrapped so that it forms
a valid Hack program (with `<?hh` and a function header).
Then strip the wrapping back off.
*)
let with_wrapped_source_text ~block_source_text ~(f : string -> 'a * string) :
'a * string =
let unwrap (wrapped_source_text : string) : string =
wrapped_source_text
|> String.split_lines
|> (fun lines -> List.drop lines 2) (* drop "<?hh" and "function foo ..." *)
|> List.drop_last_exn
(* we wrapped s.t. the string is guaranteed to have a last line *)
|> String.concat ~sep:"\n"
in
let wrapped =
Printf.sprintf "<?hh\nfunction foo(): void {\n%s\n}" block_source_text
in
let (res, rewritten_block_wrapped) = f wrapped in
let rewritten_block = unwrap rewritten_block_wrapped in
(res, rewritten_block)
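(* Illustrative sketch (not part of the original source): wrapping a block and
   applying the identity function round-trips the text, because [unwrap] drops
   exactly the lines the wrapper added ("<?hh", the function header, and the
   closing brace). *)
let _wrap_roundtrip_example : string =
  snd
    (with_wrapped_source_text ~block_source_text:"$x = 1;" ~f:(fun s -> ((), s)))
(* = "$x = 1;" *)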
let tree_of_text path source_text =
Full_fidelity_source_text.make path source_text |> PositionedTree.make
(**
Re the `start_indent` param:
We remember `start_indent` so we can restore it after formatting.
In the following, `start_indent` is `2`:
```
if (1 < 2) {
// start
$x = 3;
//end
}
```
*)
let format_block path block_source_text ~start_indent_amount =
let add_indent text =
let indent = String.make start_indent_amount ' ' in
let inner_indent =
(* adjusted because the autoformatter already indents once, due to our
trick of wrapping the code-to-format in a function *)
String.make (start_indent_amount - Format_env.(default.indent_width)) ' '
in
text
|> String.split_lines
|> List.map ~f:(Printf.sprintf "%s%s" inner_indent)
|> String.concat ~sep:"\n"
|> fun text -> Printf.sprintf "%s\n%s" text indent
in
with_wrapped_source_text ~block_source_text ~f:(fun wrapped_source_text ->
((), Libhackfmt.format_tree @@ tree_of_text path wrapped_source_text))
|> snd
|> add_indent
let rewrite_block r path block_source_text ~return_var_raw_name :
Inline_method_rename.t * string =
with_wrapped_source_text ~block_source_text ~f:(fun wrapped_source_text ->
let module Ff_rewriter = Full_fidelity_rewriter.WithSyntax (Syn) in
let fold node ((r, patches) as acc) =
let acc =
match Syn.syntax node with
| Syn.VariableExpression { variable_expression } ->
let var = Syn.text variable_expression in
(match Syn.position_exclusive path node with
| Some pos ->
let (r, var) = Inline_method_rename.rename r var in
let patch =
ServerRenameTypes.Replace
ServerRenameTypes.{ pos = Pos.to_absolute pos; text = var }
in
(r, patch :: patches)
| None -> acc)
| Syn.ReturnStatement { return_keyword; _ } ->
(match Syn.position_exclusive path return_keyword with
| Some pos ->
let (r, return_var) =
Inline_method_rename.rename r return_var_raw_name
in
let text = Printf.sprintf "%s = " return_var in
let patch =
ServerRenameTypes.Replace
ServerRenameTypes.{ pos = Pos.to_absolute pos; text }
in
(r, patch :: patches)
| None -> acc)
| _ -> acc
in
(acc, Ff_rewriter.Result.Keep)
in
let root = tree_of_text path wrapped_source_text |> PositionedTree.root in
let (r, patches) =
fst @@ Ff_rewriter.aggregating_rewrite_post fold root (r, [])
in
let rewritten = apply_patches_to_string wrapped_source_text patches in
(r, rewritten)) |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_rewrite_block.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
* Apply libhackfmt to a string corresponding to a block (one or multiple statements).
* `start_indent_amount` is the amount the beginning of the text is *already* indented
* *)
val format_block :
Relative_path.t -> string -> start_indent_amount:int -> string
(** rename variables and replace `return` with a variable assignment *)
val rewrite_block :
Inline_method_rename.t ->
Relative_path.t ->
string ->
return_var_raw_name:string ->
Inline_method_rename.t * string |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_to_refactor.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module T = Inline_method_types
(** We convert a `return` in the inlined method to a variable assignment.
* This is the name for the variable, modulo renaming for hygiene *)
let return_var_raw_name = "res"
let pos_of_block block =
let open Option.Let_syntax in
let* (hd_pos, _) = List.hd block in
let* (last_pos, _) = List.last block in
let pos = Pos.merge hd_pos last_pos in
Option.some_if (Pos.length pos > 0) pos
let to_lsp_range pos =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal pos
(** Pair arguments and parameters *)
let calc_pre_assignments_text ~source_text ~call_arg_positions ~param_names :
string =
List.Monad_infix.(
call_arg_positions
>>| Full_fidelity_source_text.sub_of_pos source_text
|> List.zip_exn param_names
>>| Tuple2.uncurry @@ Format.sprintf "%s = %s;"
|> String.concat ~sep:" ")
let calc_body_text r path ~source_text T.{ callee; _ } :
Inline_method_rename.t * string =
let block_source_text =
Option.(
pos_of_block callee.T.block
>>| Full_fidelity_source_text.sub_of_pos source_text
|> value ~default:"")
in
Inline_method_rewrite_block.rewrite_block
r
path
block_source_text
~return_var_raw_name
let strip_leading_spaces ~source_text pos : Pos.t =
let strip_length =
pos
|> Pos.set_col_start 0
|> Full_fidelity_source_text.sub_of_pos source_text
|> String.take_while ~f:(Char.equal ' ')
|> String.length
in
let col_start = snd @@ Pos.line_column pos in
pos |> Pos.set_col_start (col_start - strip_length)
(** Add the inlined method contents before the call site and
* calculate a variable name for the `return` of the inlined method.
* We use the return variable when replacing the call site.
* *)
let edit_inline_and_return_var_of_candidate
path ~source_text (T.{ caller; callee; call } as candidate) :
Lsp.TextEdit.t * string =
let used_vars = String.Set.of_list caller.T.var_names in
let r = Inline_method_rename.create ~used_vars in
let (r, param_names) =
Inline_method_rename.rename_all r callee.T.param_names
in
let assignments_before_body =
calc_pre_assignments_text
~source_text
~param_names
~call_arg_positions:call.T.call_arg_positions
in
let (r, body) = calc_body_text r path ~source_text candidate in
(* Gives the correct `return_var` because `Inline_method_rename.rename` is idempotent *)
let (_r, return_var) = Inline_method_rename.rename r return_var_raw_name in
let range =
let pre_call_pos = Pos.shrink_to_start call.T.call_stmt_pos in
to_lsp_range pre_call_pos
in
let start_indent_amount = snd @@ Pos.line_column call.T.call_stmt_pos in
let text =
assignments_before_body ^ body
|> Inline_method_rewrite_block.format_block ~start_indent_amount path
|> String.lstrip
in
({ Lsp.TextEdit.range; newText = text }, return_var)
(** Replace the call with the return variable (`return_var`); if the callee returns void,
remove the call instead, adjusting the trailing semicolon and leading whitespace. *)
let edit_replace_call_of_candidate
~source_text ~(return_var : string) T.{ call; callee; _ } =
if callee.T.has_void_return then
let strip_trailing_semicolon p =
let semicolon_pos = Pos.shrink_to_end p in
if
String.equal
";"
(Full_fidelity_source_text.sub_of_pos
source_text
~length:1
semicolon_pos)
then
Pos.advance_one p
else
p
in
let range =
call.T.call_pos
|> strip_trailing_semicolon
|> strip_leading_spaces ~source_text
|> to_lsp_range
in
Lsp.{ TextEdit.range; newText = "" }
else
let range = to_lsp_range call.T.call_pos in
{ Lsp.TextEdit.range; newText = return_var }
(** Remove the inlined method *)
let edit_remove_method_of_candidate ~source_text candidate : Lsp.TextEdit.t =
let text = "" in
let range =
T.(candidate.callee.method_pos)
|> strip_leading_spaces ~source_text
|> to_lsp_range
in
{ Lsp.TextEdit.range; newText = text }
let edit_of_candidate ~source_text ~path candidate : Lsp.WorkspaceEdit.t =
let edit_remove_method =
edit_remove_method_of_candidate ~source_text candidate
in
let (edit_inline, return_var) =
edit_inline_and_return_var_of_candidate path ~source_text candidate
in
let edit_replace_call =
edit_replace_call_of_candidate ~source_text ~return_var candidate
in
let changes =
SMap.singleton
(Relative_path.to_absolute path)
[edit_remove_method; edit_inline; edit_replace_call]
in
Lsp.WorkspaceEdit.{ changes }
let to_refactor ~source_text ~path candidate =
let edit = lazy (edit_of_candidate ~source_text ~path candidate) in
Code_action_types.Refactor.{ title = "Inline method"; edit } |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_to_refactor.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val to_refactor :
source_text:Full_fidelity_source_text.t ->
path:Relative_path.t ->
Inline_method_types.candidate ->
Code_action_types.Refactor.t |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_types.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type method_info = {
method_pos: Pos.t;
block: Tast.block;
param_names: string list;
var_names: string list;
has_void_return: bool;
is_private: bool;
return_cnt: int;
last_stmt_is_return: bool;
all_params_are_normal: bool;
}
type call_info = {
callee_name: string;
caller_name: string;
call_stmt_pos: Pos.t;
call_pos: Pos.t;
call_arg_positions: Pos.t list;
}
type candidate = {
call: call_info;
callee: method_info;
caller: method_info;
} |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_method/inline_method_types.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type method_info = {
method_pos: Pos.t;
block: Tast.block;
param_names: string list;
var_names: string list;
has_void_return: bool;
is_private: bool;
return_cnt: int;
last_stmt_is_return: bool;
all_params_are_normal: bool;
(** As of this writing, we have
* no special logic to handle varargs and inout.
* We don't offer refactorings in such cases *)
}
type call_info = {
callee_name: string;
caller_name: string;
call_stmt_pos: Pos.t; (** Helpful for formatting *)
call_pos: Pos.t;
call_arg_positions: Pos.t list;
}
type candidate = {
call: call_info;
(** Information about the location of the call
* to the method to be inlined *)
callee: method_info; (** The method to inline *)
caller: method_info; (** the method we are inlining the callee into *)
} |
hhvm/hphp/hack/src/shape_analysis/dune | (library
(name shape_analysis)
(wrapped false)
(flags
(:standard -linkall))
(modules
shape_analysis
shape_analysis_codemod
shape_analysis_env
shape_analysis_logic
shape_analysis_options
shape_analysis_pretty_printer
shape_analysis_files
shape_analysis_solver
shape_analysis_types
shape_analysis_walker
shape_analysis_hips
shape_analysis_files
wipe_type_reason
)
(libraries
core_kernel
provider_context
aast_names_utils
relative_path
tast_env
typing_defs
typing_env_types
hips
sys_utils)
(preprocess
(pps visitors.ppx ppx_deriving.std))) |
|
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
open Shape_analysis_pretty_printer
module T = Tast
module Solver = Shape_analysis_solver
module Walker = Shape_analysis_walker
module Codemod = Shape_analysis_codemod
module SAF = Shape_analysis_files
module JSON = Hh_json
module Inter_shape = Hips_solver.Inter (Shape_analysis_hips.Intra_shape)
exception Shape_analysis_exn = Shape_analysis_exn
let simplify env constraints =
Solver.deduce constraints |> Solver.produce_results env
let strip_decoration_of_lists
((intra_dec_list, inter_dec_list) : decorated_constraints) :
any_constraint list =
(DecoratedConstraintSet.elements intra_dec_list
|> List.map ~f:(fun { constraint_; _ } -> HT.Intra constraint_))
@ (DecoratedInterConstraintSet.elements inter_dec_list
|> List.map ~f:(fun { constraint_; _ } -> HT.Inter constraint_))
let process_errors_out (constraints, errors) =
if not (List.is_empty errors) then Printf.eprintf "\nErrors:\n";
let print_error err = Printf.eprintf "%s\n" (Error.show err) in
List.iter ~f:print_error errors;
constraints
let intra_constraints_of_any_constraints =
List.filter_map ~f:(function
| HT.Intra intra_constr -> Some intra_constr
| HT.Inter _ -> None)
let strip_decorations { constraint_; _ } = constraint_
let any_constraints_of_decorated_constraints decorated_constraints =
let intra_constraints =
fst decorated_constraints
|> DecoratedConstraintSet.elements
|> List.map ~f:strip_decorations
|> List.map ~f:(fun c -> HT.Intra c)
in
let inter_constraints =
snd decorated_constraints
|> DecoratedInterConstraintSet.elements
|> List.map ~f:strip_decorations
|> List.map ~f:(fun c -> HT.Inter c)
in
List.append intra_constraints inter_constraints
let analyse (constraints : any_constraint list SMap.t) ~verbose :
any_constraint list SMap.t =
constraints |> Inter_shape.analyse ~verbose |> function
| Inter_shape.Convergent constr_map -> constr_map
| Inter_shape.Divergent constr_map -> constr_map
let shape_results_using_hips_internal ~verbose tenv entries =
entries
|> List.map ~f:(fun ConstraintEntry.{ id; constraints; _ } ->
(id, constraints))
|> SMap.of_list
|> analyse ~verbose
|> SMap.map intra_constraints_of_any_constraints
|> SMap.map (simplify tenv)
let shape_results_no_hips tenv entries =
let simplify ConstraintEntry.{ id; constraints; _ } =
let shape_results =
constraints |> intra_constraints_of_any_constraints |> simplify tenv
in
(id, shape_results)
in
entries |> List.map ~f:simplify |> SMap.of_list
let read_constraints ~constraints_dir : ConstraintEntry.t list =
let read_one constraints_file =
SAF.read_entries_by_source_file ~constraints_file |> Sequence.hd_exn
in
Sys.readdir constraints_dir
|> Array.to_list
|> List.filter ~f:(String.is_suffix ~suffix:SAF.constraints_file_extension)
|> List.hd_exn
|> Filename.concat constraints_dir
|> read_one
let do_ (options : options) (ctx : Provider_context.t) (tast : T.program) =
let { command; mode; verbosity } = options in
let verbose = verbosity > 0 in
let empty_typing_env = Tast_env.tast_env_as_typing_env (Tast_env.empty ctx) in
let source_file = Relative_path.create Relative_path.Dummy "dummy.php" in
let dump_marshalled_constraints ~constraints_dir =
Sys_utils.mkdir_p ~skip_mocking:true constraints_dir;
Walker.program mode ctx tast
|> SMap.iter (fun id (decorated_constraints, errors) ->
let constraints = strip_decoration_of_lists decorated_constraints in
let error_count = List.length errors in
let constraint_entry =
ConstraintEntry.{ source_file; id; constraints; error_count }
in
SAF.write_constraints ~constraints_dir ~worker:0 constraint_entry;
SAF.flush ())
in
let solve_marshalled_constraints ~constraints_dir =
let print_callable_summary (id : string) (results : shape_result list) :
unit =
Format.printf "Summary after closing and simplifying for %s:\n" id;
List.iter results ~f:(fun result ->
Format.printf "%s\n" (show_shape_result empty_typing_env result))
in
read_constraints ~constraints_dir
|> shape_results_using_hips_internal empty_typing_env ~verbose
|> SMap.iter print_callable_summary
in
let fresh_constraints_dir () =
(* enables test parallelization *)
let pid = Unix.getpid () in
let secs = int_of_float @@ Unix.time () in
Format.sprintf "/tmp/shape_analysis_constraints-%d-%d" secs pid
in
let get_constraint_entries () =
(* we persist and unpersist entries to exercise persistence code in tests and share logic *)
let constraints_dir = fresh_constraints_dir () in
Sys_utils.rm_dir_tree ~skip_mocking:true constraints_dir;
dump_marshalled_constraints ~constraints_dir;
read_constraints ~constraints_dir
in
match command with
| DumpConstraints ->
let print_function_constraints
(id : string)
((intra_constraints, inter_constraints) : decorated_constraints) : unit
=
Format.printf "Constraints for %s:\n" id;
DecoratedConstraintSet.elements intra_constraints
|> List.sort ~compare:(fun c1 c2 -> Pos.compare c1.hack_pos c2.hack_pos)
|> List.iter ~f:(fun constr ->
Format.printf
"%s\n"
(show_decorated_constraint ~verbosity empty_typing_env constr));
DecoratedInterConstraintSet.elements inter_constraints
|> List.sort ~compare:(fun c1 c2 -> Pos.compare c1.hack_pos c2.hack_pos)
|> List.iter ~f:(fun constr ->
Format.printf
"%s\n"
(show_decorated_inter_constraint
~verbosity
empty_typing_env
constr));
Format.printf "\n"
in
Walker.program mode ctx tast
|> SMap.map process_errors_out
|> SMap.iter print_function_constraints
| CloseConstraints ->
let print_function_constraints
(id : string) (any_constraints_list : any_constraint list) : unit =
Format.printf "Constraints after closing for %s:\n" id;
List.map
~f:(function
| HT.Intra intra_constr ->
show_constraint empty_typing_env intra_constr
| HT.Inter inter_constr ->
show_inter_constraint empty_typing_env inter_constr)
any_constraints_list
|> List.iter ~f:(Format.printf "%s\n");
Format.printf "\n"
in
get_constraint_entries ()
|> List.map ~f:(fun ConstraintEntry.{ id; constraints; _ } ->
(id, constraints))
|> SMap.of_list
|> analyse ~verbose
|> SMap.iter print_function_constraints
| DumpDerivedConstraints ->
let print_function_constraints
(id : string) (intra_constraints : constraint_ list) : unit =
Format.printf "Derived constraints for %s:\n" id;
intra_constraints
|> Solver.deduce
|> List.map ~f:(show_constraint empty_typing_env)
|> List.iter ~f:(Format.printf "%s\n");
Format.printf "\n"
in
let process_entry ConstraintEntry.{ id; constraints; _ } =
intra_constraints_of_any_constraints constraints
|> print_function_constraints id
in
get_constraint_entries () |> List.iter ~f:process_entry
| SimplifyConstraints ->
let print_callable_summary (id : string) (results : shape_result list) :
unit =
Format.printf "Summary for %s:\n" id;
List.iter results ~f:(fun result ->
Format.printf "%s\n" (show_shape_result empty_typing_env result))
in
let process_callable id constraints =
simplify empty_typing_env constraints |> print_callable_summary id
in
Walker.program mode ctx tast
|> SMap.map (fun rs ->
process_errors_out rs
|> fst
|> DecoratedConstraintSet.elements
|> List.map ~f:strip_decorations)
|> SMap.iter process_callable
| Codemod ->
get_constraint_entries ()
|> Codemod.codemods_of_entries
empty_typing_env
~solve:shape_results_no_hips
~atomic:true
|> JSON.array_ Fn.id
|> Format.printf "%a" JSON.pp_json
| DumpMarshalledConstraints { constraints_dir } ->
dump_marshalled_constraints ~constraints_dir
| SolveConstraints ->
let constraints_dir = fresh_constraints_dir () in
Sys_utils.rm_dir_tree ~skip_mocking:true constraints_dir;
dump_marshalled_constraints ~constraints_dir;
solve_marshalled_constraints ~constraints_dir
let callable = Walker.callable
let show_shape_result = show_shape_result
let is_shape_like_dict = function
| Shape_like_dict _ -> true
| _ -> false
let shape_results_using_hips : solve_entries =
shape_results_using_hips_internal ~verbose:false |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** A program analysis to find shape like dicts and the static keys used in
these dicts. *)
open Shape_analysis_types
exception Shape_analysis_exn of Error.t
val is_shape_like_dict : shape_result -> bool
val simplify : Typing_env_types.env -> constraint_ list -> shape_result list
val callable :
mode ->
A.id_ ->
Tast_env.t ->
Tast.fun_param list ->
return:Tast.type_hint ->
Tast.func_body ->
decorated_constraints * Error.t list
val do_ : options -> Provider_context.t -> Tast.program -> unit
val show_shape_result : Typing_env_types.env -> shape_result -> string
val shape_results_using_hips : solve_entries
val shape_results_no_hips : solve_entries
val any_constraints_of_decorated_constraints :
decorated_constraints ->
(constraint_, inter_constraint_) HT.any_constraint_ list |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_codemod.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
module JSON = Hh_json
module Hashtbl = Stdlib.Hashtbl
let of_pos pos =
let (line, scol, ecol) = Pos.info_pos pos in
JSON.JSON_Object
[
("path", JSON.string_ (Pos.to_absolute pos |> Pos.filename));
("line", JSON.int_ line);
("start", JSON.int_ scol);
("end", JSON.int_ ecol);
]
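(* Illustrative sketch (not part of the original source): positions are encoded
   as JSON objects with "path", "line", "start", and "end" keys; e.g. applying
   [of_pos] to the dummy position [Pos.none]. *)
let _of_pos_example : JSON.json = of_pos Pos.none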
let of_marker env pos fields kind =
let shape_ty = Typing_make_type.closed_shape Typing_reason.Rnone fields in
JSON.JSON_Object
[
("pos", of_pos pos);
("kind", JSON.string_ (Codemod.show_kind kind));
("type", JSON.string_ (Typing_print.full env shape_ty));
]
let codemod_kind_of_marker_kind = function
| Allocation -> Some Codemod.Allocation
| Parameter
| Return
| Constant ->
Some Codemod.Hint
| Debug -> None
let group_of_results ~error_count env results =
let directives =
List.filter_map
~f:(function
| Shape_like_dict (pos, kind, fields) ->
Option.map
~f:(of_marker env pos fields)
(codemod_kind_of_marker_kind kind)
| Dynamically_accessed_dict _ -> None)
results
|> JSON.array_ (fun x -> x)
in
JSON.JSON_Object
[("directives", directives); ("error_count", JSON.int_ error_count)]
let to_singletons = List.map ~f:(fun l -> [l])
let codemods_of_entries env ~(solve : solve_entries) ~atomic constraint_entries
: Hh_json.json list =
let errors = Hashtbl.create (List.length constraint_entries) in
let () =
let add_errors ConstraintEntry.{ id; error_count; _ } =
let _ = Hashtbl.add errors id error_count in
()
in
constraint_entries |> List.iter ~f:add_errors
in
let add_errors_back id shape_result =
let error_count = Option.value ~default:0 @@ Hashtbl.find_opt errors id in
(error_count, shape_result)
in
let groups_of_results (error_count, shape_result) =
let shape_results =
if not atomic then
to_singletons shape_result
else
[shape_result]
in
List.map shape_results ~f:(group_of_results ~error_count env)
in
solve env constraint_entries
|> SMap.mapi add_errors_back
|> SMap.values
|> List.bind ~f:groups_of_results |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_codemod.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Shape_analysis_types
val group_of_results :
error_count:int -> Typing_env_types.env -> shape_result list -> Hh_json.json
val codemods_of_entries :
Typing_env_types.env ->
solve:solve_entries ->
atomic:bool (* TODO(T138659101): remove this option *) ->
ConstraintEntry.t list ->
Hh_json.json list |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_env.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
module LMap = Local_id.Map
module Cont = Typing_continuations
let var_counter : int ref = ref 0
let fresh_var () : entity_ =
var_counter := !var_counter + 1;
Variable !var_counter
let union_continuation_at_lid ~pos ~origin (entity1 : entity) (entity2 : entity)
: DecoratedConstraintSet.t * entity =
let decorate constraint_ = { hack_pos = pos; origin; constraint_ } in
match (entity1, entity2) with
| (Some left, Some right) ->
let join = fresh_var () in
let constraints =
List.map ~f:decorate [Subsets (left, join); Subsets (right, join)]
|> DecoratedConstraintSet.of_list
in
(constraints, Some join)
| (entity, None)
| (None, entity) ->
(DecoratedConstraintSet.empty, entity)
let union_continuation
~pos ~origin (constraints : DecoratedConstraintSet.t) cont1 cont2 =
let union_continuation_at_lid constraints _lid entity1_opt entity2_opt :
DecoratedConstraintSet.t * entity option =
match (entity1_opt, entity2_opt) with
| (Some entity1, Some entity2) ->
let (new_constraints, entity) =
union_continuation_at_lid ~pos ~origin entity1 entity2
in
let constraints =
DecoratedConstraintSet.union new_constraints constraints
in
(constraints, Some entity)
| (Some entity, None)
| (None, Some entity) ->
(constraints, Some entity)
| (None, None) -> (constraints, None)
in
let (constraints, cont) =
LMap.merge_env constraints cont1 cont2 ~combine:union_continuation_at_lid
in
(constraints, cont)
module LEnv = struct
type t = lenv
let init bindings : t = Cont.Map.add Cont.Next bindings Cont.Map.empty
let get_local_in_continuation lenv cont lid : entity =
let open Option.Monad_infix in
lenv |> Cont.Map.find_opt cont >>= LMap.find_opt lid |> Option.join
let get_local lenv : LMap.key -> entity =
get_local_in_continuation lenv Cont.Next
let set_local_in_continuation lenv cont lid entity : t =
let update_cont = function
| None -> None
| Some lenv_per_cont -> Some (LMap.add lid entity lenv_per_cont)
in
Cont.Map.update cont update_cont lenv
let set_local lenv lid entity : t =
set_local_in_continuation lenv Cont.Next lid entity
let drop_cont lenv cont : t = Cont.Map.remove cont lenv
let drop_conts lenv conts : t = List.fold ~f:drop_cont ~init:lenv conts
let replace_cont lenv cont_key cont_opt : t =
match cont_opt with
| None -> drop_cont lenv cont_key
| Some cont -> Cont.Map.add cont_key cont lenv
let restore_cont_from lenv ~from cont_key : t =
let ctxopt = Cont.Map.find_opt cont_key from in
replace_cont lenv cont_key ctxopt
let restore_conts_from lenv ~from conts : t =
List.fold ~f:(restore_cont_from ~from) ~init:lenv conts
let union ~pos ~origin (lenv1 : t) (lenv2 : t) : DecoratedConstraintSet.t * t
=
let combine constraints _ cont1 cont2 =
let (constraints, cont) =
union_continuation ~pos ~origin constraints cont1 cont2
in
(constraints, Some cont)
in
Cont.Map.union_env DecoratedConstraintSet.empty lenv1 lenv2 ~combine
let refresh (lenv : t) : constraint_ list * t =
let refresh_local constraints _ = function
| Some entity_ ->
let var = fresh_var () in
(Subsets (entity_, var) :: constraints, Some var)
| None -> (constraints, None)
in
let refresh_cont constraints _ cont =
LMap.map_env refresh_local constraints cont
in
Cont.Map.map_env refresh_cont [] lenv
end
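(* Illustrative sketch (not part of the original source), assuming
   [Local_id.make_unscoped] is available for constructing a local id: locals
   are tracked per continuation, and a binding set in the [Next] continuation
   is read back by [get_local]. *)
let _lenv_example : entity =
  let lid = Local_id.make_unscoped "$x" in
  let lenv = LEnv.set_local (LEnv.init LMap.empty) lid (Some (Variable 0)) in
  LEnv.get_local lenv lid (* = Some (Variable 0) *)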
let init mode tast_env constraints inter_constraints bindings ~return =
{
constraints;
inter_constraints;
lenv = LEnv.init bindings;
return;
tast_env;
errors = [];
mode;
}
let add_constraint env constraint_ =
{
env with
constraints = DecoratedConstraintSet.add constraint_ env.constraints;
}
let add_inter_constraint env inter_constraint_ =
{
env with
inter_constraints =
DecoratedInterConstraintSet.add inter_constraint_ env.inter_constraints;
}
let add_error env err = { env with errors = err :: env.errors }
let reset_constraints env =
{ env with constraints = DecoratedConstraintSet.empty }
let get_local env = LEnv.get_local env.lenv
let set_local env lid entity =
let lenv = LEnv.set_local env.lenv lid entity in
{ env with lenv }
let union ~pos ~origin (parent_env : env) (env1 : env) (env2 : env) : env =
let (points_to_constraints, lenv) =
LEnv.union ~pos ~origin env1.lenv env2.lenv
in
let constraints =
let open DecoratedConstraintSet in
union points_to_constraints
@@ union env1.constraints
@@ union env2.constraints
@@ parent_env.constraints
in
{ parent_env with lenv; constraints }
let drop_cont env cont =
let lenv = LEnv.drop_cont env.lenv cont in
{ env with lenv }
let drop_conts env conts =
let lenv = LEnv.drop_conts env.lenv conts in
{ env with lenv }
let replace_cont env cont_key cont_opt =
let lenv = LEnv.replace_cont env.lenv cont_key cont_opt in
{ env with lenv }
let restore_conts_from env ~from conts : env =
let lenv = LEnv.restore_conts_from env.lenv ~from conts in
{ env with lenv }
let stash_and_do env conts f : env =
let parent_locals = env.lenv in
let env = drop_conts env conts in
let env = f env in
restore_conts_from env ~from:parent_locals conts
let union_cont_opt
~pos ~origin (constraints : DecoratedConstraintSet.t) cont_opt1 cont_opt2 =
match (cont_opt1, cont_opt2) with
| (None, opt)
| (opt, None) ->
(constraints, opt)
| (Some cont1, Some cont2) ->
let (constraints, cont) =
union_continuation ~pos ~origin constraints cont1 cont2
in
(constraints, Some cont)
(* Union a list of continuations *)
let union_conts ~pos ~origin (env : env) lenv cont_keys =
let union_two (constraints, cont_opt1) cont_key =
let cont_opt2 = Cont.Map.find_opt cont_key lenv in
union_cont_opt ~pos ~origin constraints cont_opt1 cont_opt2
in
let (constraints, cont_opt) =
List.fold cont_keys ~f:union_two ~init:(env.constraints, None)
in
let env = { env with constraints } in
(env, cont_opt)
(* Union a list of source continuations and store the result in a
* destination continuation. *)
let union_conts_and_update ~pos ~origin (env : env) ~from_conts ~to_cont =
let lenv = env.lenv in
let (env, unioned_cont) = union_conts ~pos ~origin env lenv from_conts in
replace_cont env to_cont unioned_cont
let update_next_from_conts ~pos ~origin (env : env) from_conts =
union_conts_and_update ~pos ~origin env ~from_conts ~to_cont:Cont.Next
let save_and_merge_next_in_cont ~pos ~origin (env : env) to_cont =
let from_conts = [Cont.Next; to_cont] in
union_conts_and_update ~pos ~origin env ~from_conts ~to_cont
let move_and_merge_next_in_cont ~pos ~origin (env : env) cont_key =
let env = save_and_merge_next_in_cont ~pos ~origin env cont_key in
drop_cont env Cont.Next
let loop_continuation
~pos ~origin cont_key ~env_before_iteration ~env_after_iteration =
let decorate constraint_ = { hack_pos = pos; origin; constraint_ } in
let cont_before_iteration_opt =
Cont.Map.find_opt cont_key env_before_iteration.lenv
in
let cont_after_iteration_opt =
Cont.Map.find_opt cont_key env_after_iteration.lenv
in
let new_constraints =
let combine constraints _key entity_before_opt entity_after_opt =
let new_constraints =
match (entity_before_opt, entity_after_opt) with
| (Some (Some entity_before), Some (Some entity_after)) ->
DecoratedConstraintSet.singleton
@@ decorate
@@ Subsets (entity_after, entity_before)
| _ -> DecoratedConstraintSet.empty
in
let constraints =
DecoratedConstraintSet.union new_constraints constraints
in
(constraints, None)
in
match (cont_before_iteration_opt, cont_after_iteration_opt) with
| (Some cont_before_iteration, Some cont_after_iteration) ->
fst
@@ LMap.merge_env
DecoratedConstraintSet.empty
cont_before_iteration
cont_after_iteration
~combine
| _ -> DecoratedConstraintSet.empty
in
let constraints =
DecoratedConstraintSet.union new_constraints env_after_iteration.constraints
in
{ env_after_iteration with constraints }
let refresh ~pos ~origin (env : env) : env =
let (redirection_constraints, lenv) = LEnv.refresh env.lenv in
let decorate constraint_ = { hack_pos = pos; origin; constraint_ } in
let redirection_constraints =
List.map ~f:decorate redirection_constraints
|> DecoratedConstraintSet.of_list
in
let constraints =
DecoratedConstraintSet.union redirection_constraints env.constraints
in
{ env with lenv; constraints } |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_env.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Shape_analysis_types
module LMap = Local_id.Map
(** Generates a fresh variable entity *)
val fresh_var : unit -> entity_
(** Initialise shape analysis environment *)
val init :
mode ->
Tast_env.env ->
DecoratedConstraintSet.t ->
DecoratedInterConstraintSet.t ->
entity LMap.t ->
return:entity ->
env
(** Record a shape analysis intra-procedural constraint *)
val add_constraint : env -> constraint_ decorated -> env
(** Record a shape analysis inter-procedural constraint *)
val add_inter_constraint : env -> inter_constraint_ decorated -> env
(** Stashes a critical error to report later in aggregate *)
val add_error : env -> Error.t -> env
(** Ignore all existing constraints. The intention of this is to prevent
unnecessary duplication of constraints when multiple environments need to
be merged. *)
val reset_constraints : env -> env
(** Find an entity that a local variable points to *)
val get_local : env -> Local_id.t -> entity
(** Set an entity to a local variable *)
val set_local : env -> Local_id.t -> entity -> env
(** The first environment is the parent environment. The others are combined.
This is useful in branching code. *)
val union : pos:Pos.t -> origin:int -> env -> env -> env -> env
val stash_and_do : env -> Typing_continuations.t list -> (env -> env) -> env
val update_next_from_conts :
pos:Pos.t -> origin:int -> env -> Typing_continuations.t list -> env
val drop_cont : env -> Typing_continuations.t -> env
val restore_conts_from : env -> from:lenv -> Typing_continuations.t list -> env
val move_and_merge_next_in_cont :
pos:Pos.t -> origin:int -> env -> Typing_continuations.t -> env
val save_and_merge_next_in_cont :
pos:Pos.t -> origin:int -> env -> Typing_continuations.t -> env
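(** Record the constraints implied by a loop back edge: for each local bound in
the given continuation both before and after an iteration of the loop body,
the entity after the iteration is constrained to be a subset of the entity
before it. *)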
val loop_continuation :
pos:Pos.t ->
origin:int ->
Typing_continuations.t ->
env_before_iteration:env ->
env_after_iteration:env ->
env
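(** Give locals fresh entities, adding the redirection constraints produced by
the underlying [LEnv.refresh] that link the old entities to their
replacements. *)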
val refresh : pos:Pos.t -> origin:int -> env -> env |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_files.ml | open Hh_prelude
module T = Shape_analysis_types
type state = {
entry_out_channel: Out_channel.t;
worker: int;
}
let constraints_file_extension = "dmpc"
let state_opt = ref None
let constraints_filename ~constraints_dir ~worker =
Filename.concat
constraints_dir
(Format.sprintf "%d.%s" worker constraints_file_extension)
let get_state ~constraints_dir ~worker =
let create () =
let s =
let entry_out_channel =
Out_channel.create ~append:true
@@ constraints_filename ~constraints_dir ~worker
in
{ entry_out_channel; worker }
in
state_opt := Some s;
s
in
match !state_opt with
| Some s when s.worker = worker -> s
| Some s ->
Out_channel.close s.entry_out_channel;
create ()
| None -> create ()
let persist channel (entry : T.ConstraintEntry.t) =
(* note: We don't marshal closures because we want to be able to unmarshal from a distinct executable. *)
let flags = [] in
try Marshal.to_channel channel entry flags with
| Invalid_argument msg ->
let msg =
Format.sprintf
"%s. This error is likely caused by trying to Marshal a closure, which is likely caused by forgetting to strip `Typing_reason.t_`s from `locl_ty`s"
msg
in
raise @@ Invalid_argument msg
let write_constraints ~constraints_dir ~worker entry : unit =
let { entry_out_channel; _ } = get_state ~constraints_dir ~worker in
persist entry_out_channel entry
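(* Entries are written with [Marshal.to_channel], so on disk each entry is a
fixed-size Marshal header followed by a variable-size payload. [next_entry]
first reads the header to learn the payload size, then reads and unmarshals
the payload itself. *)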
let next_entry channel buf : T.ConstraintEntry.t option =
Buffer.reset buf;
match In_channel.input_buffer channel buf ~len:Marshal.header_size with
| None -> None
| Some () ->
let data_size = Marshal.data_size (Buffer.contents_bytes buf) 0 in
Option.value_exn @@ In_channel.input_buffer channel buf ~len:data_size;
let entry : T.ConstraintEntry.t =
Marshal.from_bytes (Buffer.contents_bytes buf) 0
in
Buffer.reset buf;
Some entry
let read_entries_by_grain ~constraints_file ~same_grain :
T.ConstraintEntry.t list Sequence.t =
let channel = In_channel.create constraints_file in
let capacity_guess = Int.pow 2 19 (* based on logging Marshal.total_size *) in
let buf = Buffer.create capacity_guess in
let rec read (prev_entry_opt, entries) =
match next_entry channel buf with
| None ->
if List.is_empty entries then
None
else
Some (entries, (prev_entry_opt, []))
| Some entry ->
let is_same_grain =
match prev_entry_opt with
| None -> true
| Some prev -> same_grain prev entry
in
if is_same_grain then
read @@ (Some entry, entry :: entries)
else
Some (entries, (Some entry, [entry]))
in
Sequence.unfold ~init:(None, []) ~f:read
let read_entries_by_source_file ~constraints_file =
let same_grain entry1 entry2 =
T.ConstraintEntry.(
Relative_path.equal entry1.source_file entry2.source_file)
in
read_entries_by_grain ~constraints_file ~same_grain
let read_entries_by_callable ~constraints_file =
let same_grain _ _ = false in
read_entries_by_grain ~constraints_file ~same_grain
let flush () =
Option.iter !state_opt ~f:(fun { entry_out_channel; _ } ->
Out_channel.flush entry_out_channel) |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_files.mli | open Hh_prelude
module T = Shape_analysis_types
val constraints_file_extension : string
(** write constraints to `constraints_dir`. *)
val write_constraints :
constraints_dir:string -> worker:int -> T.ConstraintEntry.t -> unit
(** Flush the channel used to write constraints.
Only needed if you need to read constraints right away. *)
val flush : unit -> unit
(**
Ephemeral sequence where each step is a list of constraint entries that came
from the same source file.
*)
val read_entries_by_source_file :
constraints_file:Base.string -> T.ConstraintEntry.t list Sequence.t
(**
Ephemeral sequence where each step is a list of constraint entries that came
from the same callable
*)
val read_entries_by_callable :
constraints_file:Base.string -> T.ConstraintEntry.t list Sequence.t |
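(* A minimal consumption sketch (the constraints file path is illustrative and
assumes constraints were previously written and flushed):
let print_entry_counts () =
read_entries_by_source_file ~constraints_file:"/tmp/shapes/0.dmpc"
|> Sequence.iter ~f:(fun entries ->
List.iter entries ~f:(fun (entry : T.ConstraintEntry.t) ->
Printf.printf
"%s: %d constraints\n"
entry.T.ConstraintEntry.id
(List.length entry.T.ConstraintEntry.constraints)))
*)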
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_hips.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module ST = Shape_analysis_types
module SS = Shape_analysis_solver
module HT = Hips_types
module PP = Shape_analysis_pretty_printer
let empty_typing_env_for_debugging =
Tast_env.tast_env_as_typing_env
(Tast_env.empty
@@ Provider_context.empty_for_debugging
~popt:ParserOptions.default
~tcopt:TypecheckerOptions.default
~deps_mode:(Typing_deps_mode.InMemoryMode None))
module Intra_shape :
HT.Intra
with type intra_entity = ST.entity_
and type intra_constraint = ST.constraint_
and type inter_constraint = ST.inter_constraint_
and type any_constraint = ST.any_constraint = struct
type intra_entity = ST.entity_
type intra_constraint = ST.constraint_
type inter_constraint = ST.inter_constraint_
type any_constraint = ST.any_constraint
let debug_any_constraint = function
| HT.Intra intra_constr ->
PP.show_constraint empty_typing_env_for_debugging intra_constr
| HT.Inter inter_constr ->
PP.show_inter_constraint empty_typing_env_for_debugging inter_constr
let is_same_entity = ST.equal_entity_
let embed_entity = SS.embed_entity
let max_iteration = 15
let compare_any_constraint = ST.compare_any_constraint
let equiv = SS.equiv
let widen = SS.widen
let substitute_inter_intra_forwards = SS.substitute_inter_intra_forwards
let substitute_inter_intra_backwards = SS.substitute_inter_intra_backwards
let deduce = SS.deduce
let subsets = SS.subsets
end |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_hips.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module HT = Hips_types
module ST = Shape_analysis_types
module Intra_shape :
HT.Intra
with type intra_entity = ST.entity_
and type intra_constraint = ST.constraint_
and type inter_constraint = ST.inter_constraint_
and type any_constraint = ST.any_constraint |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_logic.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module T = Typing_defs
open Shape_analysis_types
let singleton key sft_ty sft_optional =
T.TShapeMap.singleton key T.{ sft_ty; sft_optional }
let ( <> ) ~env sk1 sk2 =
let merge_shape_key_map _key ty_opt ty_opt' =
match (ty_opt, ty_opt') with
| (Some sft, Some sft') ->
let (_env, sft_ty) = Typing_union.union env sft.T.sft_ty sft'.T.sft_ty in
let sft_optional = sft.T.sft_optional && sft'.T.sft_optional in
Some T.{ sft_ty; sft_optional }
| (None, (Some _ as ty_opt))
| ((Some _ as ty_opt), None) ->
ty_opt
| (None, None) -> None
in
T.TShapeMap.merge merge_shape_key_map sk1 sk2 |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_logic.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module T = Typing_defs
open Shape_analysis_types
(** Create a singleton shape key, e.g., shape('a' => int) *)
val singleton : T.TShapeField.t -> T.locl_ty -> bool -> shape_keys
(** Merge shape keys disjunctively, e.g.,
shape('a' => int, 'b' => string)
<>
shape(?'a' => string, 'c' => mixed)
=
shape(?'a' => arraykey, 'b' => string, 'c' => mixed)
*)
val ( <> ) : env:Typing_env_types.env -> shape_keys -> shape_keys -> shape_keys |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_options.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
let parse_mode str =
let parse_command ~constraints_dir = function
| "dump" -> Some DumpConstraints
| "dump-marshalled" -> begin
match constraints_dir with
| Some constraints_dir ->
Some (DumpMarshalledConstraints { constraints_dir })
| None ->
failwith
"expected 'dump-marshalled:$MODE:$CONSTRAINTS_DIR' but constraints_dir was not privided"
end
| "dump-derived" -> Some DumpDerivedConstraints
| "simplify" -> Some SimplifyConstraints
| "codemod" -> Some Codemod
| "solve" -> Some SolveConstraints
| "close" -> Some CloseConstraints
| _ -> None
in
let parse_mode = function
| "local" -> Some Local
| "global" -> Some Global
| _ -> None
in
let components = String.split str ~on:':' in
let open Option.Monad_infix in
match components with
| [command; mode; constraints_dir] ->
parse_command command ~constraints_dir:(Some constraints_dir)
>>= fun command ->
parse_mode mode >>= fun mode -> Some (command, mode)
| [command; mode] ->
parse_command command ~constraints_dir:None >>= fun command ->
parse_mode mode >>= fun mode -> Some (command, mode)
| _ -> None
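(* For reference, a few accepted inputs (per the parsing above):
parse_mode "dump:local"
= Some (DumpConstraints, Local)
parse_mode "solve:global"
= Some (SolveConstraints, Global)
parse_mode "dump-marshalled:global:/tmp/constraints"
= Some (DumpMarshalledConstraints { constraints_dir = "/tmp/constraints" }, Global)
The directory "/tmp/constraints" is only an example value. *)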
let mk ~command ~mode ~verbosity = { command; mode; verbosity } |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_options.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Shape_analysis_types
val parse_mode : string -> (command * mode) option
val mk : command:command -> mode:mode -> verbosity:int -> options |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_pretty_printer.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
module HT = Hips_types
type 'constraint_ show_constraint_ =
Typing_env_types.env -> 'constraint_ -> string
let mk_shape field_map =
Typing_make_type.closed_shape Typing_reason.Rnone field_map
let show_entity = function
| Literal pos -> Format.asprintf "%a" Pos.pp pos
| Variable var -> Format.sprintf "?%d" var
| Inter ent -> HT.show_entity ent
let show_ty env = Typing_print.full env
let show_variety = function
| Has -> "!"
| Needs -> "?"
let show_constraint env =
let show_ty = show_ty env in
function
| Marks (kind, pos) ->
Format.asprintf "%s at %a" (show_marker_kind kind) Pos.pp pos
| Static_key (variety, certainty, entity, key, ty) ->
let sft_optional =
match certainty with
| Maybe -> true
| Definite -> false
in
let field_map = T.TShapeMap.singleton key T.{ sft_ty = ty; sft_optional } in
let shape = mk_shape field_map in
Format.asprintf
"%s SK %s : %s"
(show_variety variety)
(show_entity entity)
(show_ty shape)
| Has_dynamic_key entity -> "DK " ^ show_entity entity ^ " : dyn"
| Subsets (sub, sup) -> show_entity sub ^ " ⊆ " ^ show_entity sup
let show_inter_constraint _ = function
| HT.ArgLike (((_, f_id), arg_idx), ent) ->
Format.asprintf
"ArgLike(%s, %s, %s)"
f_id
(HT.show_param_like_index arg_idx)
(show_entity ent)
| HT.Constant const ->
Format.asprintf "Constant %s" (HT.show_const_entity const)
| HT.ConstantInitial inst ->
Format.asprintf "Constant initialization at %s" (show_entity inst)
| HT.ConstantIdentifier ident ->
Format.asprintf
"ConstantIdentifier %s"
(HT.show_constant_identifier_entity ident)
| HT.ParamLike param ->
Format.asprintf "ParamLike %s" (HT.show_param_like_entity param)
| HT.ClassExtends ident ->
Format.asprintf "Extends %s " (HT.show_class_identifier_entity ident)
let show_decorated_constraint_general
~verbosity
env
~show_constr
({ hack_pos; origin; constraint_ } : 'constraint_ decorated) =
let line = Pos.line hack_pos in
let constraint_ = show_constr env constraint_ in
if verbosity > 0 then
Format.asprintf "%4d: %4d: %s" line origin constraint_
else
Format.asprintf "%4d: %s" line constraint_
let show_decorated_constraint =
show_decorated_constraint_general ~show_constr:show_constraint
let show_decorated_inter_constraint =
show_decorated_constraint_general ~show_constr:show_inter_constraint
let show_shape_result env = function
| Shape_like_dict (pos, kind, keys_and_types) ->
let show_ty = show_ty env in
let shape = mk_shape keys_and_types in
Format.asprintf
"%s [%s]:\n %s"
(Format.asprintf "%a" Pos.pp pos)
(show_marker_kind kind)
(show_ty shape)
| Dynamically_accessed_dict entity ->
Format.asprintf "%s : dynamic" (show_entity entity) |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_pretty_printer.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Shape_analysis_types
type 'constraint_ show_constraint_ =
Typing_env_types.env -> 'constraint_ -> string
val show_constraint : constraint_ show_constraint_
val show_inter_constraint : inter_constraint_ show_constraint_
val show_decorated_constraint_general :
verbosity:int ->
Typing_env_types.env ->
show_constr:'constraint_ show_constraint_ ->
'constraint_ decorated ->
string
val show_decorated_constraint :
verbosity:int -> Typing_env_types.env -> constraint_ decorated -> string
val show_decorated_inter_constraint :
verbosity:int -> Typing_env_types.env -> inter_constraint_ decorated -> string
val show_shape_result : Typing_env_types.env -> shape_result -> string |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_solver.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
module T = Typing_defs
module Logic = Shape_analysis_logic
module HT = Hips_types
module StaticAccess = struct
type ty = entity_ * T.TShapeField.t * Typing_defs.locl_ty
let compare =
Tuple.T3.compare
~cmp1:compare_entity_
~cmp2:T.TShapeField.compare
~cmp3:Typing_defs.compare_locl_ty
module Set = struct
module S = Caml.Set.Make (struct
type t = ty
let compare = compare
end)
include S
include CommonSet (S)
end
end
type constraints = {
markers: (marker_kind * Pos.t) list;
definitely_has_static_accesses: StaticAccess.Set.t;
maybe_has_static_accesses: StaticAccess.Set.t;
definitely_needs_static_accesses: StaticAccess.Set.t;
maybe_needs_static_accesses: StaticAccess.Set.t;
dynamic_accesses: EntitySet.t;
subsets: (entity_ * entity_) list;
}
let constraints_init =
{
markers = [];
definitely_has_static_accesses = StaticAccess.Set.empty;
maybe_has_static_accesses = StaticAccess.Set.empty;
definitely_needs_static_accesses = StaticAccess.Set.empty;
maybe_needs_static_accesses = StaticAccess.Set.empty;
dynamic_accesses = EntitySet.empty;
subsets = [];
}
let disassemble constraints =
let partition_constraint constraints = function
| Marks (kind, entity) ->
{ constraints with markers = (kind, entity) :: constraints.markers }
| Static_key (Has, Definite, entity, key, ty) ->
{
constraints with
definitely_has_static_accesses =
StaticAccess.Set.add
(entity, key, ty)
constraints.definitely_has_static_accesses;
}
| Static_key (Has, Maybe, entity, key, ty) ->
{
constraints with
maybe_has_static_accesses =
StaticAccess.Set.add
(entity, key, ty)
constraints.maybe_has_static_accesses;
}
| Static_key (Needs, Definite, entity, key, ty) ->
{
constraints with
definitely_needs_static_accesses =
StaticAccess.Set.add
(entity, key, ty)
constraints.definitely_needs_static_accesses;
}
| Static_key (Needs, Maybe, entity, key, ty) ->
{
constraints with
maybe_needs_static_accesses =
StaticAccess.Set.add
(entity, key, ty)
constraints.maybe_needs_static_accesses;
}
| Has_dynamic_key entity ->
{
constraints with
dynamic_accesses = EntitySet.add entity constraints.dynamic_accesses;
}
| Subsets (sub, sup) ->
{ constraints with subsets = (sub, sup) :: constraints.subsets }
in
List.fold ~init:constraints_init ~f:partition_constraint constraints
let assemble
{
markers;
definitely_has_static_accesses;
maybe_has_static_accesses;
definitely_needs_static_accesses;
maybe_needs_static_accesses;
dynamic_accesses;
subsets;
} =
List.map ~f:(fun (kind, entity) -> Marks (kind, entity)) markers
@ List.map
~f:(fun (entity, key, ty) -> Static_key (Has, Definite, entity, key, ty))
(StaticAccess.Set.elements definitely_has_static_accesses)
@ List.map
~f:(fun (entity, key, ty) -> Static_key (Has, Maybe, entity, key, ty))
(StaticAccess.Set.elements maybe_has_static_accesses)
@ List.map
~f:(fun (entity, key, ty) ->
Static_key (Needs, Definite, entity, key, ty))
(StaticAccess.Set.elements definitely_needs_static_accesses)
@ List.map
~f:(fun (entity, key, ty) -> Static_key (Needs, Maybe, entity, key, ty))
(StaticAccess.Set.elements maybe_needs_static_accesses)
@ List.map
~f:(fun entity -> Has_dynamic_key entity)
(EntitySet.elements dynamic_accesses)
@ List.map ~f:(fun (sub, sup) -> Subsets (sub, sup)) subsets
type adjacencies = {
backwards: EntitySet.t;
forwards: EntitySet.t;
}
let mk_adjacency_table subsets =
let adjacency_table = Hashtbl.Poly.create () in
let add_forwards (e1, e2) =
Hashtbl.Poly.change adjacency_table e1 ~f:(function
| None ->
Some
{ forwards = EntitySet.singleton e2; backwards = EntitySet.empty }
| Some adjacency ->
Some { adjacency with forwards = EntitySet.add e2 adjacency.forwards })
in
let add_backwards (e1, e2) =
Hashtbl.Poly.change adjacency_table e2 ~f:(function
| None ->
Some
{ backwards = EntitySet.singleton e1; forwards = EntitySet.empty }
| Some adjacency ->
Some
{ adjacency with backwards = EntitySet.add e1 adjacency.backwards })
in
let add edge =
add_forwards edge;
add_backwards edge
in
List.iter ~f:add subsets;
adjacency_table
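(* For illustration, with entities A, B, C and subset edges [(A, B); (B, C)],
the resulting table is:
A -> { backwards = {}; forwards = {B} }
B -> { backwards = {A}; forwards = {C} }
C -> { backwards = {B}; forwards = {} } *)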
(*
Propagates `Static_key` constraints conjunctively through the dataflow graph.
The propagation only happens if all incident edges have the relevant static
key, e.g.,
('a', int) -------\ ('a', int) -------\
('b', string) \ ('b', string) \
\ \
('a', float) -------- . ====> ('a', float) -------- ('a', int)
/ / ('a', float)
/ /
('a', int) -------/ ('a', int) -------/
We use universal quantification over incident edges which in general is not
monotonic (hence not necessarily convergent in a fixpoint setting), however,
the newly introduced edges in a growing adjacency map due to fixpoint
computation cannot change the incident edges of any vertices. So, there is no
way of invalidating a definite derived has static key constraint once it is
established.
TODO(T129093160): The monotonicity argument above is not strictly true due to
parameters with default values.
The implementation is semi-naïve, i.e., at each iteration it only considers
what newly generated facts can affect.
static_key(has, definite, E, K, Ty) :- static_key_base(has, definite, E, K, Ty)
static_key(has, definite, E, K, Ty) :-
common_definite_predecessor_key(E, K),
subsets(E', E),
static_key(has, definite, E', K, Ty).
common_definite_predecessor_key(E, K) :-
forall subsets(E', E),
static_key(has, definite, E', K, _).
*)
let derive_definitely_has_static_accesses adjacency_table static_accesses =
let open StaticAccess.Set in
(* All successors to the entities in recently generated facts can potentially
obtain a new static key constraint. *)
let find_candidates delta =
let accum (e, _, _) =
let nexts =
Hashtbl.Poly.find adjacency_table e
|> Option.value_map ~default:EntitySet.empty ~f:(fun a -> a.forwards)
in
EntitySet.union nexts
in
fold accum delta EntitySet.empty
in
let indexed_acc = Hashtbl.Poly.create () in
let rec close_upwards ~delta ~acc =
if is_empty delta then
acc
else
let update_indexed_acc (e, k, ty) =
Hashtbl.Poly.update indexed_acc e ~f:(function
| Some keys_tys -> (k, ty) :: keys_tys
| None -> [(k, ty)])
in
StaticAccess.Set.iter update_indexed_acc delta;
(* The following collects the intersection of all keys that occur in
predecessors of an entity along with possible types of those keys. *)
let common_predecessor_keys e =
let predecessors =
Hashtbl.find adjacency_table e
|> Option.value_map ~default:EntitySet.empty ~f:(fun a -> a.backwards)
in
let collect_common_keys e common_keys =
let keys_tys =
Option.value ~default:[] @@ Hashtbl.find indexed_acc e
in
let collect_keys m (k, ty) =
let add_ty = function
| None -> Some [ty]
| Some tys -> Some (ty :: tys)
in
T.TShapeMap.update k add_ty m
in
let keys =
List.fold ~f:collect_keys keys_tys ~init:T.TShapeMap.empty
in
(* Here `None` represents the universe set *)
match common_keys with
| None -> Some keys
| Some keys' ->
let combine _ tys_opt tys'_opt =
match (tys_opt, tys'_opt) with
| (Some tys, Some tys') -> Some (tys @ tys')
| _ -> None
in
Some (T.TShapeMap.merge combine keys keys')
in
EntitySet.fold collect_common_keys predecessors None
|> Option.value ~default:T.TShapeMap.empty
in
(* Propagate all common keys forward *)
let propagate e acc =
let common_predecessor_keys = common_predecessor_keys e in
T.TShapeMap.fold
(fun k tys ->
union (List.map ~f:(fun ty -> (e, k, ty)) tys |> of_list))
common_predecessor_keys
acc
in
let candidates = find_candidates delta in
let delta = EntitySet.fold propagate candidates StaticAccess.Set.empty in
let delta = diff delta acc in
let acc = union delta acc in
close_upwards ~delta ~acc
in
close_upwards ~delta:static_accesses ~acc:static_accesses
(*
Propagates `Static_key` constraints forward (or backward depending on
variety) through the dataflow graph.
The implementation is semi-naïve, i.e., at each iteration it only considers
newly generated facts.
static_key(has, maybe, E, Key, Ty) :- static_key_base(has, _, E, Key, Ty).
static_key(has, maybe, F, Key, Ty) :- static_key(has, maybe, E, Key, Ty), subsets(E,F).
The following is the dual of above with graph edges flipped during
propagation. Here, unlike the forward case, we propagate both the
`Definitive` and `Maybe` variants.
static_key(needs, Certainty, E, Key, Ty) :- static_key_base(needs, Certainty, E, Key, Ty).
static_key(needs, Certainty, F, Key, Ty) :- static_key(needs, Certainty, E, Key, Ty), subsets(F,E).
*)
let derive_disjunctive_static_accesses adjacency_table direction static_accesses
=
let open StaticAccess.Set in
let rec close_upwards ~delta ~acc =
if is_empty delta then
acc
else
let propagate (e, k, ty) =
Hashtbl.find adjacency_table e
|> Option.value_map ~default:empty ~f:(fun adjacency ->
let adjacency =
match direction with
| `Forwards -> adjacency.forwards
| `Backwards -> adjacency.backwards
| `ForwardsAndBackwards ->
EntitySet.union adjacency.forwards adjacency.backwards
in
EntitySet.fold (fun e -> add (e, k, ty)) adjacency empty)
in
let delta = unions_map ~f:propagate delta in
let delta = diff delta acc in
let acc = union delta acc in
close_upwards ~delta ~acc
in
close_upwards ~delta:static_accesses ~acc:static_accesses
(*
Close dynamic key access in both directions to later invalidate all results
that touch it.
has_dynamic_key(E) :- has_dynamic_key_base(E).
has_dynamic_key(F) :- has_dynamic_key(E), (subsets(E,F); subsets(F,E)).
*)
let derive_dynamic_accesses adjacency_table dynamic_accesses =
let open EntitySet in
let rec close_upwards_and_downwards ~delta ~acc =
if is_empty delta then
acc
else
let propagate e =
match Hashtbl.find adjacency_table e with
| Some adjacency -> union adjacency.forwards adjacency.backwards
| None -> empty
in
let delta = unions_map ~f:propagate delta in
let delta = diff delta acc in
let acc = union delta acc in
close_upwards_and_downwards ~delta ~acc
in
close_upwards_and_downwards ~delta:dynamic_accesses ~acc:dynamic_accesses
(* The following program roughly summarises the solver in Datalog.
Variables with single letters E and F and their variants with primes all
range over entities.
Comma (,) is used for conjunction and semi-colon (;) is used for disjunction.
Comma has higher precedence (binds tighter) than semi-colon.
p :- q1, ..., qn
means if q1 to qn holds, so does p.
*)
let deduce (constraints : constraint_ list) : constraint_ list =
let {
markers;
definitely_has_static_accesses;
maybe_has_static_accesses;
definitely_needs_static_accesses;
maybe_needs_static_accesses;
dynamic_accesses;
subsets;
} =
disassemble constraints
in
let adjacency_table = mk_adjacency_table subsets in
(* Close upwards *)
let maybe_has_static_accesses =
derive_disjunctive_static_accesses
adjacency_table
`Forwards
(StaticAccess.Set.union
maybe_has_static_accesses
definitely_has_static_accesses)
in
(* Close upwards *)
let definitely_has_static_accesses =
derive_definitely_has_static_accesses
adjacency_table
definitely_has_static_accesses
in
(* Close downwards *)
let definitely_needs_static_accesses =
derive_disjunctive_static_accesses
adjacency_table
`Backwards (* TODO(T136668856): consider `ForwardsAndBackwards here *)
definitely_needs_static_accesses
in
(* Close downwards *)
let maybe_needs_static_accesses =
derive_disjunctive_static_accesses
adjacency_table
`ForwardsAndBackwards
maybe_needs_static_accesses
in
(* Close upwards and downwards *)
let dynamic_accesses =
derive_dynamic_accesses adjacency_table dynamic_accesses
in
assemble
{
markers;
definitely_has_static_accesses;
maybe_has_static_accesses;
definitely_needs_static_accesses;
maybe_needs_static_accesses;
dynamic_accesses;
subsets;
}
(*
static_shape_result(E) :- marks(E), not has_dynamic_key(E).
static_shape_result_key(allocation | return | debug,E,Key,Ty) :-
static_key(has,_,E,Key,Ty).
static_shape_result_key(parameter,E,Key,Ty) :-
static_key(needs,_,E,Key,Ty).
static_shape_result_key_optional(allocation | return | debug,E,Key) :-
static_key(has, _, E, Key, _),
not static_key(has, definite, E, Key, _).
static_shape_result_key_optional(parameter,E,Key) :-
static_key(needs, maybe, E, Key, _).
dynamic_shape_result(E) :- marks(E), has_dynamic_key(E).
*)
let produce_results
(env : Typing_env_types.env) (constraints : constraint_ list) :
shape_result list =
let {
markers;
definitely_has_static_accesses;
maybe_has_static_accesses;
definitely_needs_static_accesses;
maybe_needs_static_accesses;
dynamic_accesses;
_;
} =
disassemble constraints
in
(* Start collecting shape results starting with empty shapes of candidates *)
let static_shape_results : (marker_kind * shape_keys) Pos.Map.t =
markers
|> List.fold
~f:(fun map (kind, pos) ->
Pos.Map.add pos (kind, T.TShapeMap.empty) map)
~init:Pos.Map.empty
in
(* Invalidate candidates that are observed to experience dynamic access *)
let dynamic_poss =
let add entity acc =
match entity with
| Literal pos
| Inter (HT.ParamLike ((pos, _), _)) ->
Pos.Set.add pos acc
| Variable _
| Inter (HT.Constant _)
| Inter (HT.ConstantIdentifier _) ->
acc
in
EntitySet.fold add dynamic_accesses Pos.Set.empty
in
let static_shape_results : (marker_kind * shape_keys) Pos.Map.t =
static_shape_results
|> Pos.Map.filter (fun pos _ -> not @@ Pos.Set.mem pos dynamic_poss)
in
let (forward_static_shape_results, backward_static_shape_results) =
Pos.Map.partition
(fun _ (kind, _) ->
match kind with
| Parameter -> false
| Debug
| Return
| Allocation
| Constant ->
true)
static_shape_results
in
(* Add known keys *)
let add_known_keys ~is_optional static_accesses static_shape_results :
(marker_kind * shape_keys) Pos.Map.t =
let update_entity _ key ty = function
| None -> None
| Some (kind, shape_keys') ->
Some (kind, Logic.(singleton key ty is_optional <> shape_keys') ~env)
in
let add_entity (entity, key, ty) pos_map =
match entity with
| Literal pos
| Inter (HT.ParamLike ((pos, _), _))
| Inter (HT.Constant (pos, _))
| Inter (HT.ConstantIdentifier { HT.ident_pos = pos; _ }) ->
Pos.Map.update pos (update_entity entity key ty) pos_map
| Variable _ -> pos_map
in
StaticAccess.Set.fold add_entity static_accesses static_shape_results
in
let forward_static_shape_results =
let add_known_keys = add_known_keys in
forward_static_shape_results
|> add_known_keys maybe_has_static_accesses ~is_optional:true
|> add_known_keys maybe_needs_static_accesses ~is_optional:true
|> add_known_keys definitely_has_static_accesses ~is_optional:false
(* TODO(T136668856): consider add_known_keys definitely_needs_static_accesses here *)
in
let backward_static_shape_results =
let add_known_keys = add_known_keys in
backward_static_shape_results
|> add_known_keys maybe_needs_static_accesses ~is_optional:true
|> add_known_keys definitely_needs_static_accesses ~is_optional:false
in
let static_shape_results =
Pos.Map.union forward_static_shape_results backward_static_shape_results
in
(* Convert to individual statically accessed dict results *)
let static_shape_results : shape_result list =
static_shape_results
|> Pos.Map.bindings
|> List.map ~f:(fun (pos, (marker_kind, keys_and_types)) ->
Shape_like_dict (pos, marker_kind, keys_and_types))
in
let dynamic_shape_results =
dynamic_accesses
|> EntitySet.filter (function
| Variable _ -> false
| _ -> true)
|> EntitySet.elements
|> List.map ~f:(fun entity_ -> Dynamically_accessed_dict entity_)
in
static_shape_results @ dynamic_shape_results
let embed_entity (ent : HT.entity) : entity_ = Inter ent
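(* A sketch of how substitution behaves (entity and key names are illustrative).
Given the inter-procedural constraint ArgLike ((f, Index 0), e), which records
that entity e is passed as the first argument of f, forwards substitution
rewrites an intra-procedural fact about e, e.g.
Static_key (Has, Definite, e, 'a', int)
into a requirement on the callee's parameter entity:
Static_key (Needs, Maybe, Inter (ParamLike (f, Index 0)), 'a', int)
Backwards substitution goes the other way: facts about the parameter entity
become facts about the argument entity, with variety and certainty kept. *)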
let substitute_inter_intra
~forwards (inter_constr : inter_constraint_) (intra_constr : constraint_) :
constraint_ option =
let replace intra_ent_2 =
match inter_constr with
| HT.ArgLike (param_ent, intra_ent_1)
when forwards && equal_entity_ intra_ent_1 intra_ent_2 ->
Some (embed_entity (HT.ParamLike param_ent))
| HT.ArgLike (param_ent, intra_ent_1)
when (not forwards)
&& equal_entity_ (Inter (HT.ParamLike param_ent)) intra_ent_2 ->
Some intra_ent_1
| HT.ConstantInitial ent when equal_entity_ ent intra_ent_2 -> Some ent
| HT.ConstantIdentifier ident_ent ->
let ent = embed_entity (HT.ConstantIdentifier ident_ent) in
if equal_entity_ ent intra_ent_2 then
Some ent
else
None
| HT.Constant const_ent ->
let ent = embed_entity (HT.Constant const_ent) in
if equal_entity_ ent intra_ent_2 then
Some ent
else
None
| _ -> None
in
match intra_constr with
| Marks _
| Subsets (_, _) ->
None
| Static_key (variety, certainty, intra_ent_2, key, ty) ->
let (variety, certainty) =
if forwards then
(Needs, Maybe)
else
(variety, certainty)
in
Option.map
~f:(fun x -> Static_key (variety, certainty, x, key, ty))
(replace intra_ent_2)
| Has_dynamic_key intra_ent_2 ->
Option.map ~f:(fun x -> Has_dynamic_key x) (replace intra_ent_2)
let substitute_inter_intra_backwards = substitute_inter_intra ~forwards:false
let substitute_inter_intra_forwards inter_constraint any_constraint =
substitute_inter_intra ~forwards:true inter_constraint any_constraint
let equiv
(any_constr_list_1 : any_constraint list)
(any_constr_list_2 : any_constraint list) : bool =
let only_intra_constr any_constr =
let only_inter_ent (intra_constr : constraint_) :
entity_ -> constraint_ option = function
| Inter _ -> Some intra_constr
| _ -> None
in
match any_constr with
| HT.Intra intra_constr ->
(match intra_constr with
| Marks _ -> Some intra_constr
| Static_key (_, _, ent, _, _) -> only_inter_ent intra_constr ent
| Has_dynamic_key ent -> only_inter_ent intra_constr ent
| _ -> None)
| HT.Inter _ -> None
in
ConstraintSet.equal
(ConstraintSet.of_list
(List.filter_map ~f:only_intra_constr any_constr_list_1))
(ConstraintSet.of_list
(List.filter_map ~f:only_intra_constr any_constr_list_2))
(* TODO(T140419180): could infer more shapes by not assuming
`Has_dynamic_key` when an entity flows into a `mixed` param. *)
let widen = List.map ~f:(fun ent -> Has_dynamic_key ent)
let subsets (ent1 : entity_) (ent2 : entity_) : constraint_ =
Subsets (ent1, ent2) |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_solver.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Shape_analysis_types
module HT = Hips_types
val deduce : constraint_ list -> constraint_ list
val produce_results :
Typing_env_types.env -> constraint_ list -> shape_result list
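(* Typical intra-procedural use (a sketch): constraints collected by the walker
are first closed with [deduce], and the closure is then handed to
[produce_results], i.e., roughly [produce_results env (deduce constraints)]. *)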
val embed_entity : HT.entity -> entity_
(** Backwards substitutes the intra-procedural constraint in the second argument
with respect to the inter-procedural constraint in the first argument *)
val substitute_inter_intra_backwards :
inter_constraint_ -> constraint_ -> constraint_ option
(** Forwards substitutes the intra-procedural constraint in the second argument
with respect to the inter-procedural constraint in the first argument *)
val substitute_inter_intra_forwards :
inter_constraint_ -> constraint_ -> constraint_ option
val equiv : any_constraint list -> any_constraint list -> bool
val widen : entity_ list -> constraint_ list
val subsets : entity_ -> entity_ -> constraint_ |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_types.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module A = Ast_defs
module T = Typing_defs
module LMap = Local_id.Map
module KMap = Typing_continuations.Map
module HT = Hips_types
module Error = struct
type t = string [@@deriving show]
let mk str = str
end
exception Shape_analysis_exn of Error.t
module CommonSet (S : Caml.Set.S) = struct
let unions_map ~f set =
S.fold (fun elt acc -> S.union (f elt) acc) set S.empty
end
type potential_targets = {
expressions_to_modify: Pos.t list;
hints_to_modify: Pos.t list;
}
type command =
| DumpConstraints
| DumpMarshalledConstraints of { constraints_dir: string }
| DumpDerivedConstraints
| SimplifyConstraints
| Codemod
| SolveConstraints
| CloseConstraints [@deriving eq]
(* TODO(T138659530): remove when using global everywhere is unblocked *)
type mode =
| Local
| Global
type options = {
command: command;
mode: mode;
verbosity: int;
}
type entity_ =
| Literal of Pos.t
| Variable of int
| Inter of HT.entity
[@@deriving eq, ord]
type entity = entity_ option
type shape_keys = T.locl_phase T.shape_field_type T.TShapeMap.t
type marker_kind =
| Allocation
| Parameter
| Return
| Debug
| Constant
[@@deriving ord, show { with_path = false }]
module Codemod = struct
type kind =
| Allocation
| Hint
[@@deriving show { with_path = false }]
end
type certainty =
| Definite
| Maybe
[@@deriving ord, show { with_path = false }]
type variety =
| Has
| Needs
[@@deriving ord, show { with_path = false }]
type constraint_ =
| Marks of marker_kind * Pos.t
| Static_key of variety * certainty * entity_ * T.TShapeField.t * T.locl_ty
| Has_dynamic_key of entity_
| Subsets of entity_ * entity_
[@@deriving ord]
type inter_constraint_ = entity_ HT.inter_constraint_ [@@deriving ord]
type shape_result =
| Shape_like_dict of Pos.t * marker_kind * shape_keys
| Dynamically_accessed_dict of entity_
type lenv = entity LMap.t KMap.t
type 'constraint_ decorated = {
hack_pos: Pos.t;
origin: int;
constraint_: 'constraint_;
}
[@@deriving ord]
module DecoratedConstraintSet = Caml.Set.Make (struct
type t = constraint_ decorated
let compare = compare_decorated compare_constraint_
end)
module DecoratedInterConstraintSet = Caml.Set.Make (struct
type t = inter_constraint_ decorated
let compare = compare_decorated (HT.compare_inter_constraint_ compare_entity_)
end)
type decorated_constraints =
DecoratedConstraintSet.t * DecoratedInterConstraintSet.t
type env = {
constraints: DecoratedConstraintSet.t;
inter_constraints: DecoratedInterConstraintSet.t;
lenv: lenv;
return: entity;
tast_env: Tast_env.t;
errors: Error.t list;
mode: mode;
}
module PointsToSet = Caml.Set.Make (struct
type t = entity_ * entity_
let compare (a, b) (c, d) =
match compare_entity_ a c with
| 0 -> compare_entity_ b d
| x -> x
end)
module EntityMap = Caml.Map.Make (struct
type t = entity_
let compare = compare_entity_
end)
module EntitySet = struct
module S = Caml.Set.Make (struct
type t = entity_
let compare = compare_entity_
end)
include S
include CommonSet (S)
end
module ConstraintSet = Caml.Set.Make (struct
type t = constraint_
let compare = compare_constraint_
end)
type analysis_result = {
results: shape_result list;
error_count: int;
}
type log = {
location: string;
result: (analysis_result, Error.t) Either.t;
}
type any_constraint = (constraint_, inter_constraint_) HT.any_constraint_
[@@deriving ord]
module ConstraintEntry = struct
type t = {
source_file: Relative_path.t;
id: string;
constraints: any_constraint list;
error_count: int;
}
end
type solve_entries =
Typing_env_types.env -> ConstraintEntry.t list -> shape_result list SMap.t |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_types.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module A = Ast_defs
module T = Typing_defs
module LMap = Local_id.Map
module KMap = Typing_continuations.Map
module HT = Hips_types
(** Useful extension to sets *)
module CommonSet (S : Caml.Set.S) : sig
val unions_map : f:(S.elt -> S.t) -> S.t -> S.t
end
module Error : sig
type t [@@deriving show]
val mk : string -> t
end
(** A generic exception for all shape analysis specific failures *)
exception Shape_analysis_exn of Error.t
(** Container to collect potential dicts that can be addressed with the shape
analysis. *)
type potential_targets = {
expressions_to_modify: Pos.t list;
hints_to_modify: Pos.t list;
}
type command =
| DumpConstraints (** Dump constraints generated by analysing the program *)
| DumpMarshalledConstraints of { constraints_dir: string }
(** Dump constraints to disk *)
| DumpDerivedConstraints
(** Dump constraints deduced by the solver: used for testing and for development of shape_analysis_solve_marshalled. *)
| SimplifyConstraints
(** Partially solve key constraints within functions and methods and
report back summaries about which `dict`s might be `shape`s and which
functions/methods they depend on. *)
| Codemod
(** Same as simplify constraints, but outputs JSON that represents
instructions to codemod source code. *)
| SolveConstraints
(** Inter-procedurally solve the key constraints and report back `dict`s
that can be `shape`s along with the `shape` keys.
N.b. only makes sense to use it in `Global` mode. *)
| CloseConstraints
(** Close and dump constraints generated by analysing the program *)
type mode =
| Local
(** Aggressively invalidate results that escape their local definition *)
| Global (** Do not invalidate results that escape their definition *)
type options = {
command: command;
(** See the `command` type documentation for various functionalities. *)
mode: mode; (** Controls result invalidation based on scope. *)
verbosity: int;
(** Controls how much debug information to output. 0 means no extra debug
information. Particularly, tests use 0. *)
}
type entity_ =
| Literal of Pos.t
| Variable of int
| Inter of HT.entity
[@@deriving eq, ord]
type entity = entity_ option
type shape_keys = T.locl_phase T.shape_field_type T.TShapeMap.t
type marker_kind =
| Allocation (** A dict allocation such as `dict[]` or `dict['a' => 42]` *)
| Parameter
(** A dict parameter to a function or method such as `function
f(dict<string,int> $d): void {...}` *)
| Return
(** A dict return of a function or method such as `function
f(): dict<string,int> {...}` *)
| Debug
(** A dict argument to a function or method such as `$d = dict[]; f($d)`
*)
| Constant (** A dict constant such as `const dict<string, mixed> DICT` *)
[@@deriving ord, show]
module Codemod : sig
type kind =
| Allocation (** Codemod `dict[...]` syntax to `shape(...)` syntax. *)
| Hint
(** Codemod to change the `dict<key,ty>` hint to `shape(...)` hint. *)
[@@deriving show { with_path = false }]
end
(** When embedded in a constraint, it conveys the degree of certainty for that
constraint. This is useful in determining if a field is optional. *)
type certainty =
| Definite
| Maybe
[@@deriving show]
(** Indicates whether the constraint provides information or requires it. *)
type variety =
| Has
| Needs
[@@deriving show]
type constraint_ =
| Marks of marker_kind * Pos.t (** Marks a point of interest *)
| Static_key of variety * certainty * entity_ * T.TShapeMap.key * T.locl_ty
(** Records a static field that is available along with the Hack type of
that key *)
| Has_dynamic_key of entity_
(** Records that an entity is accessed with a dynamic key *)
| Subsets of entity_ * entity_
(** Records that the keys of the first entity are all present in
the second. *)
(** Inter-procedural constraint, e.g., `ArgLike((f, 0), p)`, which models entity
`p` flowing into the first parameter of a call f(p, _, ...); see `Hips_types`
for the full set of constructors. *)
type inter_constraint_ = entity_ HT.inter_constraint_
type shape_result =
| Shape_like_dict of Pos.t * marker_kind * shape_keys
(** A dict that acts like a shape along with its keys, types the keys
point to, and the keys are optional. The marker kind distinguishes
for what we are reporting a result. *)
| Dynamically_accessed_dict of entity_
(** A dict that is accessed or used dynamically. This is important
in inter-procedural setting where a locally static dict calls a
function where the parameter is accessed dynamically. In that case,
the original result on static access should be invalidated. *)
(** Local variable environment. Its values are `entity`, i.e., `entity_
option`, so that we can avoid pattern matching in constraint extraction. *)
type lenv = entity LMap.t KMap.t
(** Dressing on top of constraints that are solely used to help debug constraints *)
type 'constraint_ decorated = {
hack_pos: Pos.t; (** Hack source code position that led to the constraint *)
origin: int;
(** The origin of the constraint from Shape_analysis_walker.ml *)
constraint_: 'constraint_; (** The constraint proper *)
}
module DecoratedConstraintSet : Caml.Set.S with type elt = constraint_ decorated
module DecoratedInterConstraintSet :
Caml.Set.S with type elt = inter_constraint_ decorated
(** Tuple of sets of decorated intra- and inter-procedural constraints *)
type decorated_constraints =
DecoratedConstraintSet.t * DecoratedInterConstraintSet.t
type env = {
constraints: DecoratedConstraintSet.t;
(** Append-only set of intra-procedural constraints *)
inter_constraints: DecoratedInterConstraintSet.t;
(** Append-only set of inter-procedural constraints *)
lenv: lenv; (** Local variable information *)
return: entity; (** Entity for the return of a callable *)
tast_env: Tast_env.env;
(** TAST env associated with the definition being analysed *)
errors: Error.t list;
(** An append only stash of errors. If this list is non-empty, the
analysis was performed in a best effort fashion. *)
mode: mode;
(** Controls the invalidation of results that escape their scope *)
}
module PointsToSet : Caml.Set.S with type elt = entity_ * entity_
module EntityMap : Caml.Map.S with type key = entity_
module EntitySet : sig
include Caml.Set.S with type elt = entity_
val unions_map : f:(elt -> t) -> t -> t
end
module ConstraintSet : Caml.Set.S with type elt = constraint_
(** A list of shape results paired with how many analysis errors were
encountered while obtaining that result. The error count is intended as a
rough measure of confidence in the result. The results within the same list
are connected and shouldn't be applied independently. *)
type analysis_result = {
results: shape_result list;
error_count: int;
}
(** Either an analysis result or an error. In either case, the event is paired
with an identifier for its origin. *)
type log = {
location: string;
result: (analysis_result, Error.t) Either.t;
}
type any_constraint = (constraint_, inter_constraint_) HT.any_constraint_
val compare_any_constraint : any_constraint -> any_constraint -> int
module ConstraintEntry : sig
type t = {
source_file: Relative_path.t;
id: string;
constraints: any_constraint list;
error_count: int;
}
end
type solve_entries =
Typing_env_types.env -> ConstraintEntry.t list -> shape_result list SMap.t |
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_walker.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Shape_analysis_types
module Cont = Typing_continuations
module A = Aast
module T = Tast
module SN = Naming_special_names
module Env = Shape_analysis_env
module Utils = Aast_names_utils
module HT = Hips_types
let join ~pos ~origin (env : env) (left : entity_) (right : entity_) :
env * entity_ =
let join = Env.fresh_var () in
let decorate constraint_ = { hack_pos = pos; origin; constraint_ } in
let add_constraint env c = Env.add_constraint env @@ decorate c in
let constraints = [Subsets (left, join); Subsets (right, join)] in
let env = List.fold ~f:add_constraint ~init:env constraints in
(env, join)
(* In local mode, we deliberately poison inter-procedural entities to prevent
the solver from yielding results that need access to other entities (e.g.,
other functions, class definitions, constants, etc.).
Currently, local mode is enabled only when the shape analysis logger is
enabled.
*)
let when_local_mode mode ~default f =
match mode with
| Local -> f ()
| Global -> default
let dynamic_when_local ~origin pos env entity_ =
when_local_mode env.mode ~default:env @@ fun () ->
let constraint_ =
{ hack_pos = pos; origin; constraint_ = Has_dynamic_key entity_ }
in
Env.add_constraint env constraint_
let dynamic_always ~origin pos env entity_ =
let constraint_ =
{ hack_pos = pos; origin; constraint_ = Has_dynamic_key entity_ }
in
Env.add_constraint env constraint_
let failwithpos pos msg =
raise
@@ Shape_analysis_exn (Error.mk @@ Format.asprintf "%a: %s" Pos.pp pos msg)
let not_yet_supported (env : env) pos msg =
let msg = Error.mk @@ Format.asprintf "%a: Unsupported %s" Pos.pp pos msg in
Env.add_error env msg
let failwith = failwithpos Pos.none
let pos_of_hint hint_opt =
match hint_opt with
| Some (pos, _hint) -> pos
| None -> failwith "parameter hint is missing"
let might_be_dict tast_env ty =
let open Typing_make_type in
let open Typing_reason in
let mixed = mixed Rnone in
let dict_top = dict Rnone mixed mixed in
let awaitable_dict_top = awaitable Rnone dict_top in
let nothing = nothing Rnone in
let dict_bottom = dict Rnone nothing nothing in
let awaitable_dict_bottom = awaitable Rnone dict_bottom in
let typing_env = Tast_env.tast_env_as_typing_env tast_env in
let is_type_disjoint = Typing_subtype.is_type_disjoint typing_env in
not
@@ (is_type_disjoint ty dict_top
&& is_type_disjoint ty dict_bottom
&& is_type_disjoint ty awaitable_dict_top
&& is_type_disjoint ty awaitable_dict_bottom)
let is_dict tast_env ty =
let open Typing_make_type in
let open Typing_reason in
let mixed = mixed Rnone in
let dict_top = dict Rnone mixed mixed in
let awaitable_dict_top = awaitable Rnone dict_top in
let is_sub_type = Tast_env.is_sub_type tast_env in
is_sub_type ty dict_top || is_sub_type ty awaitable_dict_top
let is_cow tast_env ty =
let open Typing_make_type in
let open Typing_reason in
let mixed = mixed Rnone in
let dict_bottom = dict Rnone mixed mixed in
let vec_bottom = vec Rnone mixed in
let keyset_bottom = keyset Rnone mixed in
let cow_ty =
Typing_make_type.union
Typing_reason.Rnone
[dict_bottom; vec_bottom; keyset_bottom]
in
Tast_env.is_sub_type tast_env ty cow_ty
let disjoint_from_traversable tast_env ty =
let open Typing_make_type in
let open Typing_reason in
let mixed = mixed Rnone in
let traversable_top = traversable Rnone mixed in
let typing_env = Tast_env.tast_env_as_typing_env tast_env in
Typing_subtype.is_type_disjoint typing_env traversable_top ty
let any_shape_can_flow tast_env ty =
let open Typing_make_type in
let open Typing_reason in
let shape_top = open_shape Rnone Typing_defs.TShapeMap.empty in
Tast_env.is_sub_type tast_env shape_top ty
let class_name_of_class_id pos tenv class_id =
let open Aast in
match class_id with
| CIparent -> Tast_env.get_parent_id tenv
| CIself -> Tast_env.get_self_id tenv
| CIstatic -> Tast_env.get_self_id tenv
| CIexpr (_, _, Lvar (_, _)) ->
(* TODO(T135268910): handle `classname` / `new $c` *)
None
| CIexpr (_, _, e) ->
failwithpos pos
@@ Printf.sprintf
"Unexpected class name expression: %s"
(Aast_names_utils.expr_name e)
| CI (_, id) -> Some id
let to_marshallable_ty = Wipe_type_reason.wipe
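(* [add_key_constraint] records what we learn from a keyed access such as
$d['a'] or $d[C::KEY] when the base may be a dict: literal string keys and
class constants yield [Static_key] constraints with the given variety and
certainty, while any other key expression marks the entity with
[Has_dynamic_key]. Bases that are not dicts produce no constraint. *)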
let add_key_constraint
~(pos : Pos.t)
~(origin : int)
~certainty
~variety
~base_ty
(((_, _, key), ty) : T.expr * Typing_defs.locl_ty)
(env : env)
entity : env =
let add_key key =
let ty = ty |> Tast_env.fully_expand env.tast_env |> to_marshallable_ty in
let add_static_key env variety =
let constraint_ = Static_key (variety, certainty, entity, key, ty) in
Env.add_constraint env { hack_pos = pos; origin; constraint_ }
in
List.fold ~f:add_static_key variety ~init:env
in
if is_dict env.tast_env base_ty then
match key with
| A.String str -> add_key (Typing_defs.TSFlit_str (Pos_or_decl.none, str))
| A.Class_const ((_, _, A.CI (_, class_name)), (_, const_name)) ->
add_key
(Typing_defs.TSFclass_const
((Pos_or_decl.none, class_name), (Pos_or_decl.none, const_name)))
| _ ->
let constraint_ = Has_dynamic_key entity in
Env.add_constraint env { hack_pos = pos; origin; constraint_ }
else
env
let redirect ~pos ~origin (env : env) (entity_ : entity_) : env * entity_ =
let var = Env.fresh_var () in
let constraint_ = Subsets (entity_, var) in
let decorated_constraint = { hack_pos = pos; origin; constraint_ } in
let env = Env.add_constraint env decorated_constraint in
(env, var)
let rec assign
(pos : Pos.t)
(origin : int)
(env : env)
((lhs_ty, lhs_pos, lval) : T.expr)
(rhs : entity)
(ty_rhs : Typing_defs.locl_ty) : env =
let decorate origin constraint_ = { hack_pos = pos; origin; constraint_ } in
match lval with
| A.Lvar (_, lid) -> Env.set_local env lid rhs
| A.Array_get ((ty, _, A.Lvar (_, lid)), ix_opt) ->
let entity = Env.get_local env lid in
begin
match entity with
| Some entity_ ->
let (env, entity_) =
if is_cow env.tast_env ty then
(* Handle copy-on-write by creating a variable indirection *)
let (env, entity_) = redirect ~pos ~origin env entity_ in
let env = Env.set_local env lid (Some entity_) in
(env, entity_)
else
(env, entity_)
in
let env =
Option.fold ~init:env ix_opt ~f:(fun env ix ->
add_key_constraint
~pos
~origin
~certainty:Definite
~variety:[Has]
~base_ty:ty
(ix, ty_rhs)
env
entity_)
in
let env =
Option.fold ~init:env rhs ~f:(fun env rhs_entity_ ->
decorate __LINE__ (Subsets (rhs_entity_, entity_))
|> Env.add_constraint env)
in
env
| None ->
(* We might end up here as a result of dead code, such as a dictionary
assignment after an unconditional break in a loop. In this
situation, it is not meaningful to report a candidate. *)
env
end
| A.Class_get (_, _, _)
| A.Obj_get (_, _, _, _) ->
(* Imprecise local handling so that false positives are invalidated *)
let env =
if any_shape_can_flow env.tast_env lhs_ty then
env
else
Option.fold ~init:env ~f:(dynamic_when_local ~origin:__LINE__ pos) rhs
in
not_yet_supported env lhs_pos ("lvalue: " ^ Utils.expr_name lval)
| _ -> not_yet_supported env lhs_pos ("lvalue: " ^ Utils.expr_name lval)
and expr_ (env : env) ((ty, pos, e) : T.expr) : env * entity =
let decorate ~origin constraint_ = { hack_pos = pos; origin; constraint_ } in
let mode = env.mode in
let dynamic_when_local = dynamic_when_local pos in
let dynamic_always = dynamic_always pos in
match e with
| A.Int _
| A.Float _
| A.String _
| A.True
| A.False
| A.Null ->
(env, None)
| A.Tuple values
| A.Varray (_, values)
| A.ValCollection (_, _, values) ->
(* TODO(T131709581): This is an approximation where we identify the
surrounding collection with whatever might be inside. *)
let collection_entity_ = Env.fresh_var () in
let collection_entity = Some collection_entity_ in
let add_value env value =
let (env, value_entity) = expr_ env value in
Option.fold ~init:env value_entity ~f:(fun env value_entity_ ->
let constraint_ =
decorate ~origin:__LINE__
@@ Subsets (value_entity_, collection_entity_)
in
Env.add_constraint env constraint_)
in
let env = List.fold ~init:env ~f:add_value values in
(env, collection_entity)
| A.Darray (_, key_value_pairs)
| A.KeyValCollection ((_, A.Dict), _, key_value_pairs) ->
let entity_ = Literal pos in
let entity = Some entity_ in
let constraint_ = decorate ~origin:__LINE__ @@ Marks (Allocation, pos) in
let env = Env.add_constraint env constraint_ in
let handle_key_value env (key, ((val_ty, _, _) as value)) : env =
let (env, _key_entity) = expr_ env key in
let (env, val_entity) = expr_ env value in
let env =
(* TODO(T131709581): This is an approximation where we identify the
surrounding collection with whatever might be inside. *)
Option.fold ~init:env val_entity ~f:(fun env val_entity_ ->
decorate ~origin:__LINE__ @@ Subsets (val_entity_, entity_)
|> Env.add_constraint env)
in
Option.fold
~init:env
~f:
(add_key_constraint
~pos
~origin:__LINE__
~certainty:Definite
~variety:[Has]
~base_ty:ty
(key, val_ty))
entity
in
let env = List.fold ~init:env ~f:handle_key_value key_value_pairs in
(env, entity)
| A.KeyValCollection (_, _, key_value_pairs) ->
(* TODO(T131709581): This is an approximation where we identify the
surrounding collection with whatever might be inside. *)
let entity_ = Env.fresh_var () in
let entity = Some entity_ in
let handle_key_value env (key, value) : env =
let (env, _key_entity) = expr_ env key in
let (env, val_entity) = expr_ env value in
Option.fold ~init:env val_entity ~f:(fun env val_entity_ ->
decorate ~origin:__LINE__ @@ Subsets (val_entity_, entity_)
|> Env.add_constraint env)
in
let env = List.fold ~init:env ~f:handle_key_value key_value_pairs in
(env, entity)
| A.Array_get (((base_ty, _, _) as base), Some ix) ->
let (env, base_entity) = expr_ env base in
let (env, _entity_ix) = expr_ env ix in
let env =
Option.fold
~init:env
~f:
(add_key_constraint
~pos
~origin:__LINE__
~certainty:Definite
~variety:[Has; Needs]
(*TODO(T136668856): consider only generating a `Needs` constraint here, and propagating `Needs` forward *)
~base_ty
(ix, ty))
base_entity
in
(* TODO(T131709581): Returning the collection is an approximation where we
identify the surrounding collection with whatever might be inside. *)
let entity =
if disjoint_from_traversable env.tast_env ty then
None
else
base_entity
in
(env, entity)
| A.Lvar (_, lid) ->
let entity = Env.get_local env lid in
(env, entity)
| A.(Binop { bop = Ast_defs.Eq None; lhs = e1; rhs = (ty_rhs, _, _) as e2 })
->
let (env, entity_rhs) = expr_ env e2 in
let env = assign pos __LINE__ env e1 entity_rhs ty_rhs in
(env, None)
| A.(Call { func = (_, _, Id (_, idx)); args; _ })
when String.equal idx SN.FB.idx -> begin
(* Currently treating idx expressions with and without default value in the same way.
Essentially following the case for A.Array_get after extracting the right data. *)
match args with
| [(_, ((base_ty, _, _) as base)); (_, ix)]
| [(_, ((base_ty, _, _) as base)); (_, ix); _] ->
let (env, entity_exp) = expr_ env base in
let (env, _entity_ix) = expr_ env ix in
let env =
Option.fold
~init:env
~f:
(add_key_constraint
~pos
~origin:__LINE__
~certainty:Maybe
~variety:[Has; Needs]
~base_ty
(ix, ty))
entity_exp
in
(env, None)
| _ ->
let env =
not_yet_supported env pos ("idx expression: " ^ Utils.expr_name e)
in
(env, None)
end
| A.New (class_id, targs, args, unpacked_arg, _instantiation) ->
    (* What is new object creation but a static method call to the class
       constructor? *)
let func = (ty, pos, A.Class_const (class_id, (pos, "__construct"))) in
let args = List.map ~f:(fun arg -> (Ast_defs.Pnormal, arg)) args in
let call_expr = (ty, pos, A.(Call { func; targs; args; unpacked_arg })) in
expr_ env call_expr
| A.(Call { func = (base_ty, _, lhs) as base; args; unpacked_arg; _ }) ->
let lhs_is_obj_get =
match lhs with
| A.Obj_get _ -> true
| _ -> false
in
let param_tys =
match Typing_defs.get_node base_ty with
| Typing_defs.Tfun ft ->
List.map
~f:(fun param -> param.Typing_defs.fp_type.Typing_defs.et_type)
ft.Typing_defs.ft_params
| _ -> []
in
let handle_arg arg_idx env (param_kind, ((_ty, pos, _exp) as arg)) =
let (env, arg_entity) = expr_ env arg in
let param_ty_opt = List.nth param_tys arg_idx in
let env =
let be_conservative =
Option.value_map
~default:true
param_ty_opt
~f:(Fn.non @@ any_shape_can_flow env.tast_env)
in
if be_conservative then
let fold_env f = Option.fold ~init:env arg_entity ~f in
if lhs_is_obj_get then
(* Because HIPS doesn't know about objects yet (T139375375). Note that this isn't
as conservative as it could be because of function and method pointers *)
fold_env @@ dynamic_always ~origin:__LINE__
else
(* During local mode we cannot know what happens to the entity, so we
conservatively assume there is a dynamic access. *)
fold_env @@ dynamic_when_local ~origin:__LINE__
else
env
in
let (env, arg_entity) =
match param_kind with
| Ast_defs.Pinout _ -> begin
(* When we have an inout parameter, we sever the connection between
what goes into the parameter and what comes out.
Once again in local mode, we do not know what happened to the
dictionary, so we assume it was dynamically accessed. *)
match arg with
| (_, _, A.Lvar (_, lid)) ->
let arg_entity_ = Env.fresh_var () in
let arg_entity = Some arg_entity_ in
let env = Env.set_local env lid arg_entity in
let env = dynamic_when_local ~origin:__LINE__ env arg_entity_ in
(env, arg_entity)
| (_, pos, _) ->
let env = not_yet_supported env pos "inout argument" in
(env, arg_entity)
end
| Ast_defs.Pnormal -> (env, arg_entity)
in
match arg_entity with
| Some arg_entity_ -> begin
match base with
| (_, _, A.Id (_, f_id)) when String.equal f_id SN.Hips.inspect ->
let constraint_ = decorate ~origin:__LINE__ @@ Marks (Debug, pos) in
let env = Env.add_constraint env constraint_ in
let constraint_ =
decorate ~origin:__LINE__ @@ Subsets (arg_entity_, Literal pos)
in
let env = Env.add_constraint env constraint_ in
env
| (_, _, A.Id (_, f_id)) ->
(* TODO: inout parameters need special treatment inter-procedurally *)
let inter_constraint_ =
decorate ~origin:__LINE__
@@ HT.ArgLike (((pos, f_id), HT.Index arg_idx), arg_entity_)
in
Env.add_inter_constraint env inter_constraint_
| _ -> env
end
| None -> env
in
    (* Handle the base of the call *)
let (env, _base_entity) =
match base with
| (_, _, A.Id _) ->
(* Use of identifiers inside function calls is not compositional.
This could be cleaned up... *)
(env, None)
| _ -> expr_ env base
in
(* Handle the vanilla arguments *)
let env = List.foldi ~f:handle_arg ~init:env args in
    (* Handle the unpacked argument (e.g., ...$args) *)
let env =
Option.value_map
~default:env
~f:(fun exp ->
let idx = List.length args + 1 in
handle_arg idx env (Ast_defs.Pnormal, exp))
unpacked_arg
in
(* Handle the return. *)
let return_entity = Env.fresh_var () in
let env =
match base with
(* TODO: handle function calls through variables *)
| (_, _, A.Id (_, f_id)) when not @@ String.equal f_id SN.Hips.inspect ->
let constraint_ =
decorate ~origin:__LINE__
@@ HT.ArgLike (((pos, f_id), HT.Return), return_entity)
in
Env.add_inter_constraint env constraint_
| _ -> env
in
let env =
when_local_mode mode ~default:env @@ fun () ->
let constraint_ =
decorate ~origin:__LINE__ @@ Has_dynamic_key return_entity
in
Env.add_constraint env constraint_
in
(env, Some return_entity)
| A.Await e -> expr_ env e
| A.As (e, _ty, _) -> expr_ env e
| A.Is (e, _ty) ->
(* `is` expressions always evaluate to bools, so we discard the entity. *)
let (env, _) = expr_ env e in
(env, None)
| A.Unop
( Ast_defs.(
( Utild | Unot | Uplus | Uminus | Uincr | Udecr | Upincr | Updecr
| Usilence )),
e1 ) ->
(* Adding support for unary operations *)
let (env, _) = expr_ env e1 in
(env, None)
| A.Eif (cond, then_expr_opt, else_expr) ->
eif ~pos env cond then_expr_opt else_expr
| A.(
Binop
{
bop = Ast_defs.QuestionQuestion;
lhs = nullable_expr;
rhs = else_expr;
}) ->
eif ~pos env nullable_expr None else_expr
| A.(
Binop
{
bop =
Ast_defs.(
( Plus | Minus | Star | Slash | Eqeq | Eqeqeq | Starstar | Diff
| Diff2 | Ampamp | Barbar | Lt | Lte | Gt | Gte | Dot | Amp | Bar
| Ltlt | Gtgt | Percent | Xor | Cmp ));
lhs = e1;
rhs = e2;
}) ->
(* Adding support for binary operations. Currently not covering
"Ast_defs.Eq Some _" *)
let (env, _) = expr_ env e1 in
let (env, _) = expr_ env e2 in
(env, None)
| A.Id name ->
let entity__ =
{
HT.ident_pos = fst name;
HT.class_name_opt = None;
HT.const_name = snd name;
}
in
let entity_ = Inter (HT.ConstantIdentifier entity__) in
let env = dynamic_when_local ~origin:__LINE__ env entity_ in
let constr_ =
{
hack_pos = fst name;
origin = __LINE__;
constraint_ = HT.ConstantIdentifier entity__;
}
in
let env = Env.add_inter_constraint env constr_ in
(env, Some entity_)
| A.Class_const ((_, ident_pos, class_id), (_, const_name)) ->
let class_name_opt =
class_name_of_class_id ident_pos env.tast_env class_id
in
let entity__ = { HT.ident_pos; HT.class_name_opt; HT.const_name } in
let entity_ = Inter (HT.ConstantIdentifier entity__) in
let env = dynamic_when_local ~origin:__LINE__ env entity_ in
let constr_ =
{
hack_pos = ident_pos;
origin = __LINE__;
constraint_ = HT.ConstantIdentifier entity__;
}
in
let env = Env.add_inter_constraint env constr_ in
(env, Some entity_)
| A.Class_get (_, _, _)
| A.Obj_get (_, _, _, _) ->
let env = not_yet_supported env pos ("expression: " ^ Utils.expr_name e) in
(* Imprecise local handling so that false positives are invalidated *)
when_local_mode mode ~default:(env, None) @@ fun () ->
let entity_ = Env.fresh_var () in
let constraint_ = decorate ~origin:__LINE__ @@ Has_dynamic_key entity_ in
let env = Env.add_constraint env constraint_ in
(env, Some entity_)
| _ ->
let env = not_yet_supported env pos ("expression: " ^ Utils.expr_name e) in
(env, None)
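(* Analyse a conditional expression ([cond ? e1 : e2], [cond ?: e2], or the
   [??] null-coalescing form), joining the entities produced by the two
   branches. *)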
and eif ~pos env cond then_expr_opt else_expr =
let (cond_env, cond_entity) = expr_ env cond in
let base_env = Env.reset_constraints cond_env in
let (then_env, then_entity) =
match then_expr_opt with
| Some then_expr ->
let base_env = Env.refresh ~pos ~origin:__LINE__ base_env in
expr_ base_env then_expr
| None -> (cond_env, cond_entity)
in
let (else_env, else_entity) =
let base_env = Env.refresh ~pos ~origin:__LINE__ base_env in
expr_ base_env else_expr
in
let env = Env.union ~pos ~origin:__LINE__ env then_env else_env in
  (* Create a join point entity. It is pretty much Option.merge except that
     that function doesn't allow threading state (`env`) through *)
let (env, entity) =
match (then_entity, else_entity) with
| (Some then_entity_, Some else_entity_) ->
let (env, join) =
join ~pos ~origin:__LINE__ env then_entity_ else_entity_
in
(env, Some join)
| (None, Some _) -> (env, else_entity)
| (_, _) -> (env, then_entity)
in
(env, entity)
let expr (env : env) (e : T.expr) : env = expr_ env e |> fst
let rec switch
~pos
(parent_locals : lenv)
(env : env)
(cases : ('ex, 'en) A.case list)
(dfl : ('ex, 'en) A.default_case option) : env =
let initialize_next_cont env =
let env = Env.restore_conts_from env ~from:parent_locals [Cont.Next] in
let env = Env.refresh ~pos ~origin:__LINE__ env in
let env =
Env.update_next_from_conts
~pos
~origin:__LINE__
env
[Cont.Next; Cont.Fallthrough]
in
Env.drop_cont env Cont.Fallthrough
in
let handle_case env (e, b) =
let env = initialize_next_cont env in
let env = expr env e in
block env b
in
let handle_default_case env dfl =
dfl
|> Option.fold ~init:env ~f:(fun env (_, b) ->
let env = initialize_next_cont env in
block env b)
in
let env = List.fold ~init:env ~f:handle_case cases in
let env = handle_default_case env dfl in
env
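(* Bind the value variable of a [foreach] pattern to the collection entity
   (keys are ignored), identifying elements with the surrounding collection. *)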
and foreach_pattern env collection_ent = function
| A.As_v (_, _, A.Lvar (_, lid))
| A.As_kv (_, (_, _, A.Lvar (_, lid)))
| A.Await_as_v (_, (_, _, A.Lvar (_, lid)))
| A.Await_as_kv (_, _, (_, _, A.Lvar (_, lid))) ->
Env.set_local env lid collection_ent
| _ -> env
and stmt (env : env) ((pos, stmt) : T.stmt) : env =
let decorate ~origin constraint_ = { hack_pos = pos; origin; constraint_ } in
match stmt with
| A.Expr e -> expr env e
| A.Return None -> env
| A.Return (Some e) ->
let (env, entity) = expr_ env e in
begin
match (entity, env.return) with
| (Some entity_, Some return_) ->
let constraint_ = Subsets (entity_, return_) in
let decorated_constraint = decorate ~origin:__LINE__ constraint_ in
let env = Env.add_constraint env decorated_constraint in
env
| _ -> env
end
| A.If (cond, then_bl, else_bl) ->
let parent_env = expr env cond in
let base_env = Env.reset_constraints parent_env in
let then_env =
let base_env = Env.refresh ~pos ~origin:__LINE__ base_env in
block base_env then_bl
in
let else_env =
let base_env = Env.refresh ~pos ~origin:__LINE__ base_env in
block base_env else_bl
in
Env.union ~pos ~origin:__LINE__ parent_env then_env else_env
| A.Switch (cond, cases, dfl) ->
let env = expr env cond in
(* NB: A 'continue' inside a 'switch' block is equivalent to a 'break'.
* See the note in
* http://php.net/manual/en/control-structures.continue.php *)
Env.stash_and_do env [Cont.Continue; Cont.Break] @@ fun env ->
let parent_locals = env.lenv in
let env = switch ~pos parent_locals env cases dfl in
Env.update_next_from_conts
~pos
~origin:__LINE__
env
[Cont.Continue; Cont.Break; Cont.Next]
| A.Fallthrough ->
Env.move_and_merge_next_in_cont ~pos ~origin:__LINE__ env Cont.Fallthrough
| A.Continue ->
Env.move_and_merge_next_in_cont ~pos ~origin:__LINE__ env Cont.Continue
| A.Break ->
Env.move_and_merge_next_in_cont ~pos ~origin:__LINE__ env Cont.Break
| A.While (cond, bl) ->
Env.stash_and_do env [Cont.Continue; Cont.Break] @@ fun env ->
let env =
Env.save_and_merge_next_in_cont ~pos ~origin:__LINE__ env Cont.Continue
in
let env_before_iteration = Env.refresh ~pos ~origin:__LINE__ env in
let env_after_iteration =
let env = expr env_before_iteration cond in
let env = block env bl in
env
in
let env =
Env.loop_continuation
~pos
~origin:__LINE__
Cont.Next
~env_before_iteration
~env_after_iteration
in
let env =
Env.update_next_from_conts
~pos
~origin:__LINE__
env
[Cont.Continue; Cont.Next]
in
let env = expr env cond in
let env =
Env.update_next_from_conts
~pos
~origin:__LINE__
env
[Cont.Break; Cont.Next]
in
env
| A.Foreach (collection_exp, pattern, bl) ->
let (env, collection_ent) = expr_ env collection_exp in
Env.stash_and_do env [Cont.Continue; Cont.Break] @@ fun env ->
let env =
Env.save_and_merge_next_in_cont ~pos ~origin:__LINE__ env Cont.Continue
in
let env_before_iteration = Env.refresh ~pos ~origin:__LINE__ env in
let env_after_iteration =
let env = foreach_pattern env_before_iteration collection_ent pattern in
let env = block env bl in
env
in
let env =
Env.loop_continuation
~pos
~origin:__LINE__
Cont.Next
~env_before_iteration
~env_after_iteration
in
let env =
Env.update_next_from_conts
~pos
~origin:__LINE__
env
[Cont.Continue; Cont.Break; Cont.Next]
in
env
| A.Block statements -> block env statements
| A.Noop
| A.AssertEnv _
| A.Markup _ ->
env
| _ -> not_yet_supported env pos ("statement: " ^ Utils.stmt_name stmt)
and block (env : env) : T.block -> env = List.fold ~init:env ~f:stmt
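(* Generate the entity and the intra-/inter-procedural constraints arising
   from a parameter or return type hint. *)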
let decl_hint mode kind tast_env ((ty, hint) : T.type_hint) :
decorated_constraints * entity =
let hint_pos = pos_of_hint hint in
let entity_ =
match kind with
| `Parameter (id, idx) -> Inter (HT.ParamLike ((hint_pos, id), idx))
| `Return id -> Inter (HT.ParamLike ((hint_pos, id), HT.Return))
in
let decorate ~origin constraint_ =
{ hack_pos = hint_pos; origin; constraint_ }
in
let inter_constraints =
match kind with
| `Parameter (id, idx) ->
DecoratedInterConstraintSet.singleton
@@ decorate ~origin:__LINE__
@@ HT.ParamLike ((hint_pos, id), idx)
| `Return id ->
DecoratedInterConstraintSet.singleton
@@ decorate ~origin:__LINE__
@@ HT.ParamLike ((hint_pos, id), HT.Return)
in
let kind =
match kind with
| `Parameter _ -> Parameter
| `Return _ -> Return
in
let constraints =
if might_be_dict tast_env ty then
DecoratedConstraintSet.singleton
@@ decorate ~origin:__LINE__
@@ Marks (kind, hint_pos)
else
DecoratedConstraintSet.empty
in
let constraints =
when_local_mode mode ~default:constraints @@ fun () ->
let invalidation_constraint =
decorate ~origin:__LINE__ @@ Has_dynamic_key entity_
in
DecoratedConstraintSet.add invalidation_constraint constraints
in
((constraints, inter_constraints), Some entity_)
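(* Initialise entities for the (non-variadic) parameters of a callable,
   accumulating the constraints from their type hints and a map from local
   ids to parameter entities. *)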
let init_params mode id tast_env (params : T.fun_param list) :
decorated_constraints * entity LMap.t =
let add_param
(idx : int)
((intra_constraints, inter_constraints), lmap)
A.{ param_name; param_type_hint; param_is_variadic; _ } =
if param_is_variadic then
      (* TODO(T125878781): Handle variadic parameters *)
((intra_constraints, inter_constraints), lmap)
else
let ((new_intra_constraints, new_inter_constraints), entity) =
decl_hint mode (`Parameter (id, HT.Index idx)) tast_env param_type_hint
in
let param_lid = Local_id.make_unscoped param_name in
let lmap = LMap.add param_lid entity lmap in
let intra_constraints =
DecoratedConstraintSet.union new_intra_constraints intra_constraints
in
let inter_constraints =
DecoratedInterConstraintSet.union
new_inter_constraints
inter_constraints
in
((intra_constraints, inter_constraints), lmap)
in
List.foldi
~f:add_param
~init:
( (DecoratedConstraintSet.empty, DecoratedInterConstraintSet.empty),
LMap.empty )
params
let callable mode id tast_env params ~return body =
(* TODO(T130457262): inout parameters should have the entity of their final
binding flow back into them. *)
let ((param_intra_constraints, param_inter_constraints), param_env) =
init_params mode id tast_env params
in
let ((return_intra_constraints, return_inter_constraints), return) =
decl_hint mode (`Return id) tast_env return
in
let intra_constraints =
DecoratedConstraintSet.union
return_intra_constraints
param_intra_constraints
in
let inter_constraints =
DecoratedInterConstraintSet.union
return_inter_constraints
param_inter_constraints
in
let env =
Env.init mode tast_env intra_constraints inter_constraints param_env ~return
in
let env = block env body.A.fb_ast in
((env.constraints, env.inter_constraints), env.errors)
let marker_constraint_of ~hack_pos (marker_pos : Pos.t) : constraint_ decorated
=
{ hack_pos; origin = __LINE__; constraint_ = Marks (Constant, marker_pos) }
let constant_constraint_of
~hack_pos (constant_pos : Pos.t) (constant_name : string) :
inter_constraint_ decorated =
{
hack_pos;
origin = __LINE__;
constraint_ = HT.Constant (constant_pos, constant_name);
}
let subset_constraint_of
~hack_pos (ent1 : entity_) ~constant_pos (constant_name : string) :
constraint_ decorated =
{
hack_pos;
origin = __LINE__;
constraint_ =
Subsets (ent1, Inter (HT.Constant (constant_pos, constant_name)));
}
let initial_constraint_of ~hack_pos (ent : entity_) :
inter_constraint_ decorated =
{ hack_pos; origin = __LINE__; constraint_ = HT.ConstantInitial ent }
let program mode (ctx : Provider_context.t) (tast : Tast.program) =
let def (def : T.def) : (string * (decorated_constraints * Error.t list)) list
=
let tast_env = Tast_env.def_env ctx def in
match def with
| A.Fun fd ->
let (_, id) = fd.A.fd_name in
let A.{ f_body; f_params; f_ret; _ } = fd.A.fd_fun in
[(id, callable mode id tast_env f_params ~return:f_ret f_body)]
| A.Class A.{ c_kind = Ast_defs.Cenum; _ } ->
(* There is nothing to analyse in an enum definition *)
[]
| A.Class
A.
{
c_methods;
c_name = (_, class_name);
c_consts;
c_extends;
c_kind =
Ast_defs.(Cclass Concrete | Cclass Abstract | Cinterface | Ctrait);
_;
} ->
let handle_method
A.{ m_body; m_name = (_, method_name); m_params; m_ret; _ } =
let id = class_name ^ "::" ^ method_name in
(id, callable mode id tast_env m_params ~return:m_ret m_body)
in
let handle_constant A.{ cc_type; cc_id; cc_kind; _ } =
let id = class_name ^ "::" ^ snd cc_id in
let hint_pos = pos_of_hint cc_type in
let (env, ent) =
let empty_env =
Env.init
mode
tast_env
DecoratedConstraintSet.empty
DecoratedInterConstraintSet.empty
~return:None
LMap.empty
in
match cc_kind with
| A.CCAbstract initial_expr_opt ->
(match initial_expr_opt with
| Some initial_expr -> expr_ empty_env initial_expr
| None -> (empty_env, None))
| A.CCConcrete initial_expr -> expr_ empty_env initial_expr
in
let marker_constraint =
marker_constraint_of ~hack_pos:hint_pos hint_pos
in
let env = Env.add_constraint env marker_constraint in
let constant_constraint =
constant_constraint_of ~hack_pos:(fst cc_id) hint_pos id
in
let env = Env.add_inter_constraint env constant_constraint in
let env =
match ent with
| Some ent_ ->
let subset_constr =
subset_constraint_of
~hack_pos:(fst cc_id)
ent_
~constant_pos:hint_pos
id
in
let initial_constr =
initial_constraint_of ~hack_pos:(fst cc_id) ent_
in
let env = Env.add_constraint env subset_constr in
Env.add_inter_constraint env initial_constr
| None -> env
in
(id, ((env.constraints, env.inter_constraints), env.errors))
in
let handle_extends class_hint =
match class_hint with
| (pos, A.Happly (class_id_of_extends, _)) ->
let extends_constr =
{
hack_pos = pos;
origin = __LINE__;
constraint_ = HT.ClassExtends class_id_of_extends;
}
in
let empty_env =
Env.init
mode
tast_env
DecoratedConstraintSet.empty
DecoratedInterConstraintSet.empty
~return:None
LMap.empty
in
let env = Env.add_inter_constraint empty_env extends_constr in
Some
(class_name, ((env.constraints, env.inter_constraints), env.errors))
| _ -> None
in
List.map ~f:handle_method c_methods
@ List.map ~f:handle_constant c_consts
@ List.filter_map ~f:handle_extends c_extends
| A.Constant A.{ cst_name; cst_value; cst_type; _ } ->
let hint_pos = pos_of_hint cst_type in
let env =
Env.init
mode
tast_env
DecoratedConstraintSet.empty
DecoratedInterConstraintSet.empty
~return:None
LMap.empty
in
let (env, ent) = expr_ env cst_value in
let marker_constraint =
marker_constraint_of ~hack_pos:hint_pos hint_pos
in
let env = Env.add_constraint env marker_constraint in
let constant_constraint =
constant_constraint_of ~hack_pos:(fst cst_name) hint_pos (snd cst_name)
in
let env = Env.add_inter_constraint env constant_constraint in
let env =
match ent with
| Some ent_ ->
let subset_constr =
subset_constraint_of
~hack_pos:(fst cst_name)
ent_
~constant_pos:hint_pos
(snd cst_name)
in
let initial_constr =
initial_constraint_of ~hack_pos:(fst cst_name) ent_
in
let env = Env.add_constraint env subset_constr in
Env.add_inter_constraint env initial_constr
| None -> env
in
[(snd cst_name, ((env.constraints, env.inter_constraints), env.errors))]
| _ -> failwith "A definition is not yet handled"
in
List.concat_map ~f:def tast |> SMap.of_list |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_walker.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Shape_analysis_types
val callable :
mode ->
Ast_defs.id_ ->
Tast_env.t ->
Tast.fun_param list ->
return:Tast.type_hint ->
Tast.func_body ->
decorated_constraints * Error.t list
val program :
mode ->
Provider_context.t ->
Tast.program ->
(decorated_constraints * Error.t list) SMap.t |
OCaml | hhvm/hphp/hack/src/shape_analysis/wipe_type_reason.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Typing_defs
let rec wipe ty =
let wp node = mk (Reason.Rnone, node) in
let wtlist tlist = List.map ~f:wipe tlist in
match get_node ty with
| Toption ty -> wp (Toption (wipe ty))
| Ttuple tlist ->
let wlist = wtlist tlist in
wp (Ttuple wlist)
| Tunion tlist ->
let wlist = wtlist tlist in
wp (Tunion wlist)
| Tintersection tlist ->
let wlist = wtlist tlist in
wp (Tintersection wlist)
| Tgeneric (s, tlist) ->
let wlist = wtlist tlist in
wp (Tgeneric (s, wlist))
| Tvec_or_dict (t1, t2) ->
let w1 = wipe t1 in
let w2 = wipe t2 in
wp (Tvec_or_dict (w1, w2))
| Tnewtype (s, tlist, ty) ->
let wlist = wtlist tlist in
let w = wipe ty in
wp (Tnewtype (s, wlist, w))
| Tdependent (d, ty) ->
let w = wipe ty in
wp (Tdependent (d, w))
| Tclass (p, _, tlist) ->
let wlist = wtlist tlist in
    (* exact contains a reason, so we wipe it out *)
wp (Tclass (p, Typing_defs_core.Exact, wlist))
| Taccess (ty, pos_id) ->
let w = wipe ty in
wp (Taccess (w, pos_id))
| Tshape { s_origin = _; s_unknown_value = kind; s_fields = map } ->
let map =
TShapeMap.map (fun sft -> { sft with sft_ty = wipe sft.sft_ty }) map
in
(* TODO(shapes) should this reset origin? *)
wp
(Tshape
{
s_origin = Missing_origin;
s_unknown_value = kind;
(* TODO(shapes) This should likely wipe the reason of s_unknown_value *)
s_fields = map;
})
| Tfun ft ->
let wt_et et = { et with et_type = wipe et.et_type } in
let wt_fp fp = { fp with fp_type = wt_et fp.fp_type } in
let wt_wc (lb, kind, ub) = (wipe lb, kind, wipe ub) in
let rec wt_tp tp =
let wt_c (kind, ty) = (kind, wipe ty) in
{
tp with
tp_tparams = List.map ~f:wt_tp tp.tp_tparams;
tp_constraints = List.map ~f:wt_c tp.tp_constraints;
}
in
let wt_ip { capability } =
let wt_cap cap =
match cap with
| CapTy ty -> CapTy (wipe ty)
| CapDefaults _ -> cap
in
{ capability = wt_cap capability }
in
let ft =
{
ft with
ft_ret = wt_et ft.ft_ret;
ft_params = List.map ~f:wt_fp ft.ft_params;
ft_where_constraints = List.map ~f:wt_wc ft.ft_where_constraints;
ft_tparams = List.map ~f:wt_tp ft.ft_tparams;
ft_implicit_params = wt_ip ft.ft_implicit_params;
}
in
wp (Tfun ft)
  (* We just wipe out the reason because the types below are not recursive. *)
| Tany _
| Tnonnull
| Tdynamic
| Tprim _
| Tvar _
| Tunapplied_alias _
| Tneg _ ->
Typing_defs_core.(with_reason ty Reason.Rnone) |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/wipe_type_reason.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val wipe : Typing_defs.locl_ty -> Typing_defs.locl_ty |
hhvm/hphp/hack/src/shape_analysis/shape_analysis_scuba/dune | (* -*- tuareg -*- *)
let library_entry name suffix =
Printf.sprintf
"(library
(name %s)
(wrapped false)
(modules)
(libraries %s_%s))" name name suffix
let fb_entry name =
library_entry name "fb"
let stubs_entry name =
library_entry name "stubs"
let entry is_fb name =
if is_fb then
fb_entry name
else
stubs_entry name
let () =
(* test presence of fb subfolder *)
let current_dir = Sys.getcwd () in
(* we are in src/shape_analysis/shape_analysis_scuba, locate src/facebook *)
let src_dir = Filename.dirname @@ Filename.dirname current_dir in
let fb_dir = Filename.concat src_dir "facebook" in
(* locate src/facebook/dune *)
let fb_dune = Filename.concat fb_dir "dune" in
let is_fb = Sys.file_exists fb_dune in
let shape_analysis_scuba = entry is_fb "shape_analysis_scuba" in
Jbuild_plugin.V1.send shape_analysis_scuba |
|
hhvm/hphp/hack/src/shape_analysis/shape_analysis_solve_marshalled/dune | (executable
(name shape_analysis_solve_marshalled)
(flags
(:standard -linkall))
(modules shape_analysis_solve_marshalled)
(modes exe byte_complete)
(libraries
shape_analysis
core_kernel
provider_context
default_injector_config
)
) |
|
OCaml | hhvm/hphp/hack/src/shape_analysis/shape_analysis_solve_marshalled/shape_analysis_solve_marshalled.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
 * As much as possible, this module is just IO and doesn't know anything
 * about the shape of the data. These restrictions are intended to make it
 * easier to adapt for future analyses.
*)
open Hh_prelude
module SA = Shape_analysis
module SAF = Shape_analysis_files
module SAC = Shape_analysis_codemod
let usage_message =
{|
Utility to read shape analysis constraints from a directory and output codemod information
in JSON Lines format in the same directory as the constraints.
USAGE:
  $THIS_EXECUTABLE \
    --constraints-dir [directory containing .dmpc files with shape analysis constraints, for example '/tmp/shape_analysis_constraints'] \
    --source-root [directory that relative paths in constraints are relative to] \
    --grain [The granularity of solving. One of 'callable' (does not use HIPS) or 'source-file' (uses HIPS)] \
    --atomic (Flag. If provided, then group codemod directives. Described in D40008464.)
To observe progress, append the following, where $CONSTRAINTS_DIR matches --constraints-dir:
& while true;
do echo entries written: $(cat $CONSTRAINTS_DIR/*.jsonl | wc -l) &&
sleep 10
done
|}
type grain =
| Callable
| SourceFile
type args = {
constraints_dir: string;
source_root: string;
grain: grain;
atomic: bool;
}
let parse_args () : args =
let constraints_dir = ref "" in
let source_root = ref "" in
let grain_str = ref "" in
let should_show_help = ref false in
let atomic = ref false in
let args_spec =
[
( "--constraints-dir",
Arg.Set_string constraints_dir,
"directory containing constraints" );
( "--source-root",
Arg.Set_string source_root,
"Constraints contain positions, which contain paths that are relative to the source root."
);
( "--grain",
Arg.Set_string grain_str,
"The granularity of constraint solving. One of 'callable' or 'source-file'"
);
( "--atomic",
Arg.Set atomic,
"If provided, then group codemod directives. Described in D40008464." );
("--help", Arg.Set should_show_help, "show help help");
]
in
let exit_bad_args () =
print_endline usage_message;
exit 2
in
let parse_grain = function
| "callable" -> Callable
| "source-file" -> SourceFile
| _ -> exit_bad_args ()
in
Arg.parse args_spec ignore usage_message;
if !should_show_help then exit_bad_args ();
let unref_string s =
match !s with
| "" ->
print_endline usage_message;
exit 2
| s -> s
in
{
constraints_dir = unref_string constraints_dir;
source_root = unref_string source_root;
grain = parse_grain @@ unref_string grain_str;
atomic = !atomic;
}
let print_codemods formatter =
List.iter ~f:(fun json ->
Format.fprintf formatter "%s\n" @@ Hh_json.json_to_string json)
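(** Apply [f] to each item in a forked child process and wait for all
    children to exit; fails if any child terminates abnormally. *)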
let par_iter (items : 'i list) ~(f : 'i -> unit) : unit =
let orig_pid = Unix.getpid () in
let rec loop pids = function
| [] -> pids
| h :: t ->
(match Unix.fork () with
| 0 ->
f h;
pids
| pid -> loop (pid :: pids) t)
in
let pids = loop [] items in
if Unix.getpid () = orig_pid then
List.iter pids ~f:(fun pid ->
match snd @@ Unix.waitpid [] pid with
| Unix.WEXITED 0 -> ()
| Unix.WEXITED c ->
failwith @@ Format.sprintf "subprocess %d exited with code %d\n" pid c
| Unix.WSIGNALED s ->
failwith
@@ Format.sprintf "subprocess %d killed with signal %d\n" pid s
| Unix.WSTOPPED s ->
failwith
@@ Format.sprintf "subprocess %d stopped with signal %d\n" pid s)
else
exit 0
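(* Set the global path prefix used to resolve relative paths in constraints
   and initialise shared memory. *)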
let mutate_global_state source_root =
Relative_path.set_path_prefix Relative_path.Root @@ Path.make source_root;
let num_workers = Sys_utils.nbr_procs in
let (_ : SharedMem.handle) =
SharedMem.init SharedMem.default_config ~num_workers
in
()
(**
It's OK to use an empty typing env because
`Shape_analysis_codemod` only uses the env
for printing, using `Typing_print.full env shape_ty`,
which in turn only uses env.genv.tcopt.tco_type_printer_fuel
*)
let env =
Tast_env.tast_env_as_typing_env
(Tast_env.empty
@@ Provider_context.empty_for_debugging
~popt:ParserOptions.default
~tcopt:TypecheckerOptions.default
~deps_mode:(Typing_deps_mode.InMemoryMode None))
let () =
let { constraints_dir; source_root; grain; atomic } = parse_args () in
mutate_global_state source_root;
let (read_entries, solve) =
match grain with
| SourceFile ->
(SAF.read_entries_by_source_file, SA.shape_results_using_hips)
| Callable -> (SAF.read_entries_by_callable, SA.shape_results_no_hips)
in
let solve_and_write constraints_file =
let out_channel =
constraints_file
|> Filename.chop_extension
|> Format.sprintf "%s.jsonl"
|> Out_channel.create
in
let formatter = Format.formatter_of_out_channel out_channel in
read_entries ~constraints_file
|> Sequence.map ~f:(SAC.codemods_of_entries env ~solve ~atomic)
|> Sequence.iter ~f:(print_codemods formatter);
Out_channel.close out_channel
in
Sys.readdir constraints_dir
|> Array.to_list
|> List.filter ~f:(String.is_suffix ~suffix:SAF.constraints_file_extension)
|> List.map ~f:(Filename.concat constraints_dir)
|> par_iter ~f:solve_and_write;
Format.printf "wrote .jsonl files to %s\n" constraints_dir |
OCaml Interface | hhvm/hphp/hack/src/shape_analysis/shape_analysis_solve_marshalled/shape_analysis_solve_marshalled.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
Executable for solving constraints that have been persisted to disk.
See `usage_message` in the implementation.
*) |
hhvm/hphp/hack/src/shmffi/dune | (data_only_dirs shmffi cargo)
(library
(name shmffi)
(modules)
(c_library_flags -lpthread)
(wrapped false)
(foreign_archives shmffi))
(rule
(targets libshmffi.a)
(deps
(source_tree %{workspace_root}/hack/src))
(locks /cargo)
(action
(run %{workspace_root}/hack/scripts/invoke_cargo.sh shmffi shmffi))) |
|
Rust | hhvm/hphp/hack/src/shmffi/ocaml_blob.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::ptr::NonNull;
use lz4::liblz4;
use shmrs::chashmap::CMapValue;
extern "C" {
fn caml_input_value_from_block(data: *const u8, size: usize) -> usize;
fn caml_alloc_initialized_string(size: usize, data: *const u8) -> usize;
fn caml_output_value_to_malloc(value: usize, flags: usize, ptr: *mut *mut u8, len: *mut usize);
}
/// A struct to make sure we don't mix up fields in `HeapValueHeader` that
/// have the same type.
pub struct HeapValueHeaderFields {
pub buffer_size: usize,
pub uncompressed_size: usize,
pub is_serialized: bool,
pub is_evictable: bool,
}
impl From<HeapValueHeaderFields> for HeapValueHeader {
fn from(fields: HeapValueHeaderFields) -> Self {
HeapValueHeader::new(fields)
}
}
#[derive(Clone, Copy)]
pub struct HeapValueHeader(u64);
impl HeapValueHeader {
pub const RAW_SIZE: usize = std::mem::size_of::<u64>();
pub fn from_raw(raw: [u8; Self::RAW_SIZE]) -> Self {
HeapValueHeader(u64::from_le_bytes(raw))
}
pub fn to_raw(&self) -> [u8; Self::RAW_SIZE] {
self.0.to_le_bytes()
}
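    /// Bit layout of the header (least significant bit first):
    /// - bits 0..=30: buffer size
    /// - bits 31..=61: uncompressed size
    /// - bit 62: is_serialized
    /// - bit 63: is_evictable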
fn new(fields: HeapValueHeaderFields) -> Self {
let buffer_size: u32 = fields.buffer_size.try_into().unwrap();
let uncompressed_size: u32 = fields.uncompressed_size.try_into().unwrap();
        // Make sure the MSBs are 0. We only have 31 bits for the sizes as we need
        // one additional bit for `is_serialized` and one bit to mark a value as
        // evictable or not.
        //
        // Note that we can use the full 64 bits: this header never escapes into
        // the OCaml world in bare form.
assert_eq!(buffer_size & (1 << 31), 0);
assert_eq!(uncompressed_size & (1 << 31), 0);
let mut result: u64 = 0;
result |= buffer_size as u64;
result |= (uncompressed_size as u64) << 31;
result |= (fields.is_serialized as u64) << 62;
result |= (fields.is_evictable as u64) << 63;
Self(result)
}
/// Size of the buffer attached to this value.
pub fn buffer_size(&self) -> usize {
(self.0 & ((1 << 31) - 1)) as usize
}
/// Size if the buffer were uncompressed.
pub fn uncompressed_size(&self) -> usize {
((self.0 >> 31) & ((1 << 31) - 1)) as usize
}
/// Was the buffer serialized, or does it contain a raw OCaml string?
pub fn is_serialized(&self) -> bool {
((self.0 >> 62) & 1) == 1
}
/// Was the buffer compressed?
pub fn is_compressed(&self) -> bool {
self.uncompressed_size() != self.buffer_size()
}
/// Is the value evictable?
pub fn is_evictable(&self) -> bool {
((self.0 >> 63) & 1) == 1
}
}
/// A value stored in shared-memory.
///
/// This is just a pointer to some buffer in shared-memory,
/// together with some metadata.
///
/// Note that it does not implement drop to deallocate the
/// underlying buffer. That would require tracking which
/// shard allocator was originally used to allocate the buffer,
/// as values can freely move between shards. The memory overhead
/// for this is prohibitively expensive.
pub struct HeapValue {
pub header: HeapValueHeader,
pub data: NonNull<u8>,
}
// Safety: The memory behind `data` is owned by this HeapValue, but
// we never write to that memory, only read.
//
// Most importantly, we never violate the aliasing rule: we never
// create two mutable references to the same underlying data.
unsafe impl Send for HeapValue {}
unsafe impl Sync for HeapValue {}
impl HeapValue {
/// Convert the heap value into an OCaml object.
///
/// Safety: this allocates in the OCaml heap, and thus enters the runtime.
/// It may deallocate each and every object you haven't registered as a
/// root. It may even reallocate (i.e. move from the young generation to
    /// the old) values *inside* registered nodes. There's no guarantee that
/// every object reachable from a root won't move!
pub unsafe fn to_ocaml_value(&self) -> usize {
if !self.header.is_serialized() {
caml_alloc_initialized_string(self.header.buffer_size(), self.data.as_ptr())
} else if !self.header.is_compressed() {
caml_input_value_from_block(self.data.as_ptr(), self.header.buffer_size())
} else {
let mut data: Vec<u8> = Vec::with_capacity(self.header.uncompressed_size());
let uncompressed_size = liblz4::LZ4_decompress_safe(
self.data.as_ptr() as *const libc::c_char,
data.as_mut_ptr() as *mut libc::c_char,
self.header.buffer_size().try_into().unwrap(),
self.header.uncompressed_size().try_into().unwrap(),
);
let uncompressed_size: usize = uncompressed_size.try_into().unwrap();
assert!(self.header.uncompressed_size() == uncompressed_size);
// SAFETY: `LZ4_decompress_safe` should have initialized
// `uncompressed_size` bytes; we assert above that
// `uncompressed_size` is equal to the capacity we set
data.set_len(uncompressed_size);
caml_input_value_from_block(data.as_ptr(), data.len())
}
}
pub fn as_slice(&self) -> &[u8] {
let len = self.header.buffer_size();
// Safety: We own the data. The return value cannot outlive `self`.
unsafe { std::slice::from_raw_parts(self.data.as_ptr(), len) }
}
}
impl CMapValue for HeapValue {
fn points_to_evictable_data(&self) -> bool {
self.header.is_evictable()
}
fn points_to_flushable_data(&self) -> bool {
false
}
fn ptr(&self) -> &NonNull<u8> {
&self.data
}
}
/// An OCaml serialized value, in all its forms.
///
/// Each `SerializedValue` is bound by a lifetime 'a because it might reference
/// a borrowed string (which may be on the OCaml heap, or it may be in
/// Rust-managed memory).
pub enum SerializedValue<'a> {
/// A plain uncompressed byte string. Stored in shm as-is (i.e., without
/// marshaling/serialization/compression).
BStr(&'a [u8]),
/// An OCaml serialized (marshaled) value, allocated on the heap via `malloc`.
Serialized(MallocBuf),
/// An OCaml serialized (marshaled) value, which was then compressed using lz4.
Compressed {
data: Vec<u8>,
uncompressed_size: usize,
},
}
/// A byte buffer allocated by `malloc`, via `caml_output_value_to_malloc`.
/// Essentially `Box<[u8]>`, but with a `Drop` impl that invokes `free`.
pub struct MallocBuf {
ptr: *const u8,
len: usize,
}
impl Drop for MallocBuf {
fn drop(&mut self) {
extern "C" {
fn free(data: *const u8);
}
unsafe { free(self.ptr) };
}
}
impl MallocBuf {
fn as_slice(&self) -> &[u8] {
unsafe { std::slice::from_raw_parts(self.ptr, self.len) }
}
}
impl<'a> From<ocamlrep::Value<'a>> for SerializedValue<'a> {
fn from(value: ocamlrep::Value<'a>) -> Self {
        // We are entering the OCaml runtime; is there a risk
        // that `value` (or other values) gets deallocated?
        // I don't think so: caml_output_value_to_malloc shouldn't
        // allocate on the OCaml heap, and thus shouldn't trigger the GC.
if let Some(str) = value.as_byte_string() {
SerializedValue::BStr(str)
} else {
let mut ptr: *mut u8 = std::ptr::null_mut();
let mut len: usize = 0;
unsafe {
caml_output_value_to_malloc(
value.to_bits(),
ocamlrep::Value::int(0).to_bits(),
&mut ptr,
&mut len,
)
};
SerializedValue::Serialized(MallocBuf { ptr, len })
}
}
}
impl<'a> SerializedValue<'a> {
pub fn as_slice(&self) -> &[u8] {
use SerializedValue::*;
match self {
BStr(value) => value,
Serialized(buf) => buf.as_slice(),
Compressed { data, .. } => data,
}
}
pub fn maybe_compress(self) -> Self {
use SerializedValue::*;
match self {
this @ (BStr(..) | Compressed { .. }) => this,
Serialized(buf) => unsafe {
let uncompressed_size: i32 = buf.len.try_into().unwrap();
let max_compression_size = liblz4::LZ4_compressBound(uncompressed_size);
let mut compressed_data =
Vec::with_capacity(max_compression_size.try_into().unwrap());
let compressed_size = liblz4::LZ4_compress_default(
buf.ptr as *const libc::c_char,
compressed_data.as_mut_ptr() as *mut libc::c_char,
uncompressed_size,
max_compression_size,
);
if compressed_size == 0 || compressed_size >= uncompressed_size {
Serialized(buf)
} else {
// SAFETY: `LZ4_compress_default` should have initialized
// `compressed_size` bytes (which should be no more than
// `max_compression_size` bytes, which is our vec's
// capacity).
compressed_data.set_len(compressed_size.try_into().unwrap());
Compressed {
data: compressed_data,
uncompressed_size: buf.len,
}
}
},
}
}
pub fn make_header(&self, is_evictable: bool) -> HeapValueHeader {
let slice = self.as_slice();
use SerializedValue::*;
let header = match self {
BStr(..) => HeapValueHeaderFields {
buffer_size: slice.len(),
uncompressed_size: slice.len(),
is_serialized: false,
is_evictable,
},
Serialized { .. } => HeapValueHeaderFields {
buffer_size: slice.len(),
uncompressed_size: slice.len(),
is_serialized: true,
is_evictable,
},
Compressed {
uncompressed_size, ..
} => HeapValueHeaderFields {
buffer_size: slice.len(),
uncompressed_size: *uncompressed_size,
is_serialized: true,
is_evictable,
},
};
header.into()
}
pub fn to_heap_value_in(&self, is_evictable: bool, buffer: &mut [u8]) -> HeapValue {
let slice = self.as_slice();
buffer.copy_from_slice(slice);
HeapValue {
header: self.make_header(is_evictable),
data: NonNull::from(buffer).cast(),
}
}
pub fn uncompressed_size(&self) -> usize {
use SerializedValue::*;
match self {
BStr(data) => data.len(),
Serialized(buf) => buf.len,
&Compressed {
uncompressed_size, ..
} => uncompressed_size,
}
}
pub fn compressed_size(&self) -> usize {
use SerializedValue::*;
match self {
BStr(data) => data.len(),
Serialized(buf) => buf.len,
Compressed { data, .. } => data.len(),
}
}
}
#[cfg(test)]
mod tests {
use rand::prelude::*;
use super::*;
#[test]
fn test_heap_value_header() {
const NUM_TESTS: usize = 100;
let mut rng = StdRng::from_seed([0; 32]);
for _ in 0..NUM_TESTS {
let buffer_size = (rng.gen::<u32>() & ((1 << 31) - 1)) as usize;
let uncompressed_size = if rng.gen_bool(0.5) {
buffer_size
} else {
(rng.gen::<u32>() & ((1 << 31) - 1)) as usize
};
let is_serialized = rng.gen_bool(0.5);
let is_evictable = rng.gen_bool(0.5);
let header = HeapValueHeaderFields {
buffer_size,
uncompressed_size,
is_serialized,
is_evictable,
};
let header: HeapValueHeader = header.into();
assert_eq!(header.buffer_size(), buffer_size);
assert_eq!(header.uncompressed_size(), uncompressed_size);
assert_eq!(header.is_serialized(), is_serialized);
assert_eq!(header.is_compressed(), buffer_size != uncompressed_size);
assert_eq!(header.is_evictable(), is_evictable);
}
}
#[test]
fn test_to_heap_value() {
fn test_once(x: SerializedValue<'_>) {
let x = x.maybe_compress();
let mut buffer = vec![0_u8; x.as_slice().len()];
let heap_value = x.to_heap_value_in(true, &mut buffer);
assert!(heap_value.header.is_evictable());
assert_eq!(heap_value.as_slice(), x.as_slice());
let heap_value = x.to_heap_value_in(false, &mut buffer);
assert!(!heap_value.header.is_evictable());
assert_eq!(heap_value.as_slice(), x.as_slice());
}
fn malloc_buf(buf: &[u8]) -> MallocBuf {
extern "C" {
fn malloc(size: usize) -> *mut u8;
}
let ptr = unsafe { malloc(buf.len()) };
assert_ne!(ptr, std::ptr::null_mut());
let slice = unsafe { std::slice::from_raw_parts_mut(ptr, buf.len()) };
slice.copy_from_slice(buf);
MallocBuf {
ptr,
len: buf.len(),
}
}
test_once(SerializedValue::BStr(&[
0xdb, 0x7f, 0x13, 0xa6, 0xab, 0x0e, 0x51, 0x74, 0x2b,
]));
let buf = malloc_buf(&[0xdb, 0x7f, 0x13, 0xa6, 0xab, 0x0e, 0x51, 0x74, 0x2b]);
test_once(SerializedValue::Serialized(buf));
let buf = malloc_buf(&vec![95; 1024]);
test_once(SerializedValue::Serialized(buf));
}
} |
Rust | hhvm/hphp/hack/src/shmffi/shmffi.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#![feature(allocator_api)]
use std::alloc::Layout;
use std::convert::TryInto;
use std::io::Write;
use std::ptr::NonNull;
use ocaml_blob::HeapValue;
use ocaml_blob::HeapValueHeader;
use ocaml_blob::SerializedValue;
use ocamlrep::ptr::UnsafeOcamlPtr;
use ocamlrep::Allocator;
use ocamlrep::Value;
use ocamlrep_ocamlpool::catch_unwind;
use once_cell::sync::OnceCell;
use shmrs::chashmap::MINIMUM_EVICTABLE_BYTES_PER_SHARD;
use shmrs::chashmap::NUM_SHARDS;
use shmrs::segment::ShmemTableSegment;
use shmrs::segment::ShmemTableSegmentRef;
pub static SEGMENT: OnceCell<ShmemTableSegmentRef<'static, HeapValue>> = OnceCell::new();
pub fn with<R>(f: impl FnOnce(&ShmemTableSegmentRef<'static, HeapValue>) -> R) -> R {
f(SEGMENT.get().unwrap())
}
#[no_mangle]
pub extern "C" fn shmffi_init(
mmap_address: *mut libc::c_void,
file_size: libc::size_t,
max_evictable_bytes: libc::ssize_t,
) {
// The `max_evictable_bytes` argument to the `shmffi_init` function
// might be negative to indicate that evictability is disabled.
//
// We'll initialize the maps anyways, but with minimum-capacity allocators.
let max_evictable_bytes = std::cmp::max(
(NUM_SHARDS * MINIMUM_EVICTABLE_BYTES_PER_SHARD)
.try_into()
.unwrap(),
max_evictable_bytes,
) as libc::size_t;
catch_unwind(|| {
if SEGMENT
.set(
// Safety:
// - We are the only one initializing!
unsafe {
ShmemTableSegment::initialize(
mmap_address,
file_size,
max_evictable_bytes / NUM_SHARDS,
)
},
)
.is_err()
{
panic!("Unexpected prior value in SEGMENT");
}
0
});
}
#[no_mangle]
pub extern "C" fn shmffi_attach(mmap_address: *mut libc::c_void, file_size: libc::size_t) {
catch_unwind(|| {
if SEGMENT
.set(
// Safety:
// - Should be already initialized by the master process.
unsafe { ShmemTableSegment::attach(mmap_address, file_size) },
)
.is_err()
{
panic!("Unexpected prior value in SEGMENT");
}
0
});
}
#[no_mangle]
pub extern "C" fn shmffi_add(evictable: bool, hash: u64, data: usize) -> usize {
catch_unwind(|| {
let data = unsafe { Value::from_bits(data) };
let serialized = SerializedValue::from(data);
let compressed = serialized.maybe_compress();
let compressed_size = compressed.compressed_size();
let uncompressed_size = compressed.uncompressed_size();
let did_insert = with(|segment| {
segment.table.insert(
hash,
Some(Layout::from_size_align(compressed.as_slice().len(), 1).unwrap()),
evictable,
|buffer| compressed.to_heap_value_in(evictable, buffer),
)
});
// TODO(hverr): We don't have access to "total_size" (which includes
// alignment overhead), remove the third field.
let ret: (isize, isize, isize) = if did_insert {
(
compressed_size as isize,
uncompressed_size as isize,
compressed_size as isize,
)
} else {
(-1, -1, -1)
};
unsafe { ocamlrep_ocamlpool::to_ocaml(&ret) }
})
}
/// Writes the given data directly into the shared memory.
/// `data` should be an OCaml `bytes` object returned by
/// `shmffi_get_raw` or `shmffi_serialize_raw`.
#[no_mangle]
pub extern "C" fn shmffi_add_raw(hash: u64, ocaml_bytes: usize) -> usize {
catch_unwind(|| {
let ocaml_bytes = unsafe { Value::from_bits(ocaml_bytes) };
let ocaml_bytes = ocaml_bytes.as_byte_string().unwrap();
let header_size = HeapValueHeader::RAW_SIZE;
let header = HeapValueHeader::from_raw(ocaml_bytes[0..header_size].try_into().unwrap());
with(|segment| {
segment.table.insert(
hash,
Some(Layout::from_size_align(header.buffer_size(), 1).unwrap()),
header.is_evictable(),
|buffer| {
buffer.copy_from_slice(&ocaml_bytes[header_size..]);
HeapValue {
header,
data: NonNull::from(buffer).cast(),
}
},
);
});
// Returns unit
ocamlrep::Value::int(0).to_bits()
})
}
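/// Looks up the entry with the corresponding hash, deserializes it back into
/// an OCaml value, and returns it wrapped in an OCaml `option`.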
#[no_mangle]
pub extern "C" fn shmffi_get_and_deserialize(hash: u64) -> usize {
catch_unwind(|| {
with(|segment| {
let result = match segment.table.read(&hash).get() {
None => None,
Some(heap_value) => {
// Safety: we are not holding on to unrooted OCaml values.
//
// This value itself is unrooted, but we are not calling into
// the OCaml runtime after this. The option that will be allocated
// later is allocated via ocamlpool, which cannot trigger the GC.
let deserialized_value = unsafe { heap_value.to_ocaml_value() };
// Safety: the value is only used to wrap it in an option.
//
// Because we use ocamlpool below, the GC won't run while this
// value exists.
let deserialized_value = unsafe { UnsafeOcamlPtr::new(deserialized_value) };
Some(deserialized_value)
}
};
// Safety: we don't call into the OCaml runtime, so there's no
// risk of us GC'ing the deserialized value.
unsafe { ocamlrep_ocamlpool::to_ocaml(&result) }
})
})
}
/// Looks up the entry with the corresponding hash and return
/// its raw form as an OCaml `bytes option`, without deserializing.
/// The result is meant to be passed to `shmffi_deserialize_raw`
/// and `shmffi_add_raw`, potentially over the network.
#[no_mangle]
pub extern "C" fn shmffi_get_raw(hash: u64) -> usize {
catch_unwind(|| {
with(|segment| {
let result = segment.table.read(&hash).get().map(|heap_value| {
// Encode header and data
let header_size = HeapValueHeader::RAW_SIZE;
let value_slice = heap_value.as_slice();
let ocaml_bytes_len = header_size + value_slice.len();
// Safety: we assume that the OCaml runtime is suspended by a
// call into this function, and we're not calling into the OCaml
// runtime from any other thread. We are calling into the OCaml
// runtime here, but via ocamlpool, which cannot trigger a GC.
let pool = unsafe { ocamlrep_ocamlpool::Pool::new() };
let mut byte_string = pool.byte_string_with_len(ocaml_bytes_len);
byte_string.write_all(&heap_value.header.to_raw()).unwrap();
byte_string.write_all(value_slice).unwrap();
// Safety: Because we only interact with the runtime via
// ocamlpool below, the GC won't run while this value exists.
unsafe { UnsafeOcamlPtr::new(byte_string.build().to_bits()) }
});
// Safety: we don't call into the OCaml runtime, so there's no
// risk of us GC'ing the deserialized value.
unsafe { ocamlrep_ocamlpool::to_ocaml(&result) }
})
})
}
/// Takes an OCaml `bytes` object as returned by `shmffi_serialize_raw`
/// or `shmffi_get_raw`, and deserialize it back into an OCaml value.
#[no_mangle]
pub extern "C" fn shmffi_deserialize_raw(ocaml_bytes: usize) -> usize {
catch_unwind(|| {
// First we have to copy the OCaml buffer contents to a native buffer,
// because deserializing with to_ocaml_value can cause OCaml GC to activate
let ocaml_bytes = unsafe { Value::from_bits(ocaml_bytes) };
let bytes_copy = ocaml_bytes.as_byte_string().unwrap().to_vec();
unsafe {
// Construct a HeapValue so we can use to_ocaml_value
// Safety: bytes_copy will outlive `heap_value`
let header_size = HeapValueHeader::RAW_SIZE;
let header = HeapValueHeader::from_raw(bytes_copy[0..header_size].try_into().unwrap());
let heap_value = HeapValue {
header,
data: NonNull::from(&bytes_copy[header_size]),
};
// Deserialize into destination type
// Safety: We made sure to copy ocaml_bytes to a native buffer before deserializing
heap_value.to_ocaml_value()
}
})
}
/// Takes an OCaml value and serializes it into a form suitable
/// for sending over the network, and for usage with `shmffi_deserialize_raw`
/// and `shmffi_add_raw`. Returns an OCaml `bytes` object.
#[no_mangle]
pub extern "C" fn shmffi_serialize_raw(data: usize) -> usize {
catch_unwind(|| {
// Serialize and compress
let data = unsafe { Value::from_bits(data) };
let serialized = SerializedValue::from(data);
let compressed = serialized.maybe_compress();
let header = compressed.make_header(false /* evictable */);
// Encode header and data
let header_size = HeapValueHeader::RAW_SIZE;
let value_slice = compressed.as_slice();
let ocaml_bytes_len = header_size + value_slice.len();
// Safety: we assume that the OCaml runtime is suspended by a
// call into this function, and we're not calling into the OCaml
// runtime from any other thread. We are calling into the OCaml
// runtime here, but via ocamlpool, which cannot trigger a GC.
let pool = unsafe { ocamlrep_ocamlpool::Pool::new() };
let mut byte_string = pool.byte_string_with_len(ocaml_bytes_len);
byte_string.write_all(&header.to_raw()).unwrap();
byte_string.write_all(value_slice).unwrap();
byte_string.build().to_bits()
})
}
#[no_mangle]
pub extern "C" fn shmffi_mem(hash: u64) -> usize {
catch_unwind(|| {
let flag = with(|segment| segment.table.contains_key(&hash));
Value::int(flag as isize).to_bits()
})
}
#[no_mangle]
pub extern "C" fn shmffi_mem_status(hash: u64) -> usize {
let flag = with(|segment| segment.table.contains_key(&hash));
// From hh_shared.c: 1 = present, -1 = not present
let result = if flag { 1 } else { -1 };
Value::int(result).to_bits()
}
#[no_mangle]
pub extern "C" fn shmffi_get_size(hash: u64) -> usize {
let size = with(|segment| {
segment
.table
.read(&hash)
.get()
.map(|value| value.header.buffer_size())
});
let size = size.unwrap_or(0);
Value::int(size as isize).to_bits()
}
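/// Moves the value stored under `hash1` so that it is stored under `hash2`,
/// copying the payload into a freshly allocated buffer.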
#[no_mangle]
pub extern "C" fn shmffi_move(hash1: u64, hash2: u64) {
with(|segment| {
let (header, data) = segment.table.inspect_and_remove(&hash1, |value| {
let value = value.unwrap();
(value.header, <Box<[u8]>>::from(value.as_slice()))
});
segment.table.insert(
hash2,
Some(Layout::from_size_align(data.len(), 1).unwrap()),
header.is_evictable(),
|buffer| {
buffer.copy_from_slice(&data);
HeapValue {
header,
data: std::ptr::NonNull::new(buffer.as_mut_ptr()).unwrap(),
}
},
);
});
}
#[no_mangle]
pub extern "C" fn shmffi_remove(hash: u64) -> usize {
let size = with(|segment| {
segment
.table
.inspect_and_remove(&hash, |value| value.unwrap().as_slice().len())
});
Value::int(size as isize).to_bits()
}
#[no_mangle]
pub extern "C" fn shmffi_allocated_bytes() -> usize {
catch_unwind(|| {
let bytes = with(|segment| segment.table.allocated_bytes());
Value::int(bytes as isize).to_bits()
})
}
#[no_mangle]
pub extern "C" fn shmffi_num_entries() -> usize {
catch_unwind(|| {
let num_entries = with(|segment| segment.table.len());
Value::int(num_entries as isize).to_bits()
})
} |
Rust | hhvm/hphp/hack/src/shmffi/shm_store.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#![feature(allocator_api)]
use std::alloc::Layout;
use std::borrow::Borrow;
use std::borrow::Cow;
use std::hash::Hash;
use std::io::Read;
use std::io::Write;
use std::num::NonZeroUsize;
use anyhow::Result;
use md5::Digest;
use ocamlrep::ptr::UnsafeOcamlPtr;
use ocamlrep::FromOcamlRep;
use ocamlrep::ToOcamlRep;
use parking_lot::Mutex;
use serde::de::DeserializeOwned;
use serde::Serialize;
/// A `datastore::Store` which writes its values to sharedmem (via the `shmffi`
/// crate) as bincode-serialized values. Can be configured to compress the
/// bincode blobs using `Compression`.
pub struct ShmStore<K, V> {
/// An LRU cache of hashconsed values in front of the serialized shm heap.
cache: Mutex<lru::LruCache<K, V>>,
evictable: bool,
compression: Compression,
prefix: &'static str,
}
#[derive(Debug, Copy, Clone)]
pub enum Evictability {
NonEvictable,
Evictable,
}
#[derive(Debug, Copy, Clone)]
pub enum Compression {
None,
Lz4 { compression_level: u32 },
Zstd { compression_level: i32 },
}
impl Default for Compression {
fn default() -> Self {
Self::Lz4 {
compression_level: 1,
}
}
}
impl<K, V> ShmStore<K, V>
where
K: Key + Copy + Hash + Eq + Send + Sync + 'static,
V: Clone + Serialize + DeserializeOwned + Send + Sync + 'static,
{
pub fn new(prefix: &'static str, evictability: Evictability, compression: Compression) -> Self {
Self {
cache: Mutex::new(lru::LruCache::new(NonZeroUsize::new(1000).unwrap())),
evictable: matches!(evictability, Evictability::Evictable),
compression,
prefix,
}
}
fn hash_key(&self, key: K) -> u64 {
let mut hasher = md5::Md5::new();
hasher.update(self.prefix);
key.hash_key(&mut hasher);
// hh_shared just takes the first 64 bits of the 128-bit MD5 digest.
u64::from_ne_bytes((&hasher.finalize()[0..8]).try_into().unwrap())
}
#[rustfmt::skip]
fn log_serialize(&self, size_in_shm: usize) {
let size_in_shm = size_in_shm as f64;
// shmrs doesn't actually allow us to count the total including
// header/padding/alignment. `shmffi` just reuses the `compressed`
// number for this stat, so do the same for now.
measure::sample((self.prefix, "total bytes including header and padding"), size_in_shm);
measure::sample(("ALL bytes", "total bytes including header and padding"), size_in_shm);
measure::sample((self.prefix, "bytes serialized into shared heap"), size_in_shm);
measure::sample("ALL bytes serialized into shared heap", size_in_shm);
}
#[rustfmt::skip]
fn log_deserialize(&self, compressed_size: usize) {
measure::sample((self.prefix, "bytes deserialized from shared heap"), compressed_size as f64);
measure::sample("ALL bytes deserialized from shared heap", compressed_size as f64);
}
#[rustfmt::skip]
fn log_shmem_hit_rate(&self, is_hit: bool) {
measure::sample((self.prefix, "shmem cache hit rate"), is_hit as u8 as f64);
measure::sample("ALL shmem cache hit rate", is_hit as u8 as f64);
}
#[rustfmt::skip]
fn log_cache_hit_rate(&self, is_hit: bool) {
measure::sample((self.prefix, "rust cache hit rate"), is_hit as u8 as f64);
measure::sample("ALL rust cache hit rate", is_hit as u8 as f64);
}
}
impl<K, V> datastore::Store<K, V> for ShmStore<K, V>
where
K: Key + Copy + Hash + Eq + Send + Sync + 'static,
V: Clone + Serialize + DeserializeOwned + Send + Sync + 'static,
{
fn contains_key(&self, key: K) -> Result<bool> {
if self.cache.lock().contains(&key) {
return Ok(true);
}
Ok(shmffi::with(|segment| {
segment.table.contains_key(&self.hash_key(key))
}))
}
fn get(&self, key: K) -> Result<Option<V>> {
let cache_val_opt = self.cache.lock().get(&key).cloned();
self.log_cache_hit_rate(cache_val_opt.is_some());
if cache_val_opt.is_some() {
return Ok(cache_val_opt);
}
let hash = self.hash_key(key);
let val_opt: Option<V> = shmffi::with(|segment| {
segment
.table
.read(&hash)
.get()
.map(|heap_value| {
let bytes = heap_value.as_slice();
self.log_deserialize(bytes.len());
match self.compression {
Compression::None => deserialize(bytes),
Compression::Lz4 { .. } => lz4_decompress_and_deserialize(bytes),
Compression::Zstd { .. } => zstd_decompress_and_deserialize(bytes),
}
})
.transpose()
})?;
if let Some(val) = &val_opt {
self.cache.lock().put(key, val.clone());
}
self.log_shmem_hit_rate(val_opt.is_some());
Ok(val_opt)
}
fn insert(&self, key: K, val: V) -> Result<()> {
let blob = match self.compression {
Compression::None => serialize(&val)?,
Compression::Lz4 { compression_level } => {
serialize_and_lz4_compress(&val, compression_level)?
}
Compression::Zstd { compression_level } => {
serialize_and_zstd_compress(&val, compression_level)?
}
};
self.cache.lock().put(key, val);
let compressed_size = blob.len();
let blob = ocaml_blob::SerializedValue::BStr(&blob);
let did_insert = shmffi::with(|segment| {
segment.table.insert(
self.hash_key(key),
Some(Layout::from_size_align(blob.as_slice().len(), 1).unwrap()),
self.evictable,
|buffer| blob.to_heap_value_in(self.evictable, buffer),
)
});
if did_insert {
self.log_serialize(compressed_size);
}
Ok(())
}
fn move_batch(&self, keys: &mut dyn Iterator<Item = (K, K)>) -> Result<()> {
let mut cache = self.cache.lock();
for (old_key, new_key) in keys {
let old_hash = self.hash_key(old_key);
let new_hash = self.hash_key(new_key);
shmffi::with(|segment| {
let (header, data) = segment.table.inspect_and_remove(&old_hash, |value| {
let value = value.unwrap();
(value.header, <Box<[u8]>>::from(value.as_slice()))
});
cache.pop(&old_key);
segment.table.insert(
new_hash,
Some(Layout::from_size_align(data.len(), 1).unwrap()),
header.is_evictable(),
|buffer| {
buffer.copy_from_slice(&data);
ocaml_blob::HeapValue {
header,
data: std::ptr::NonNull::new(buffer.as_mut_ptr()).unwrap(),
}
},
);
// We choose not to `cache.put(new_key, ...)` here.
});
}
Ok(())
}
fn remove_batch(&self, keys: &mut dyn Iterator<Item = K>) -> Result<()> {
let mut cache = self.cache.lock();
for key in keys {
cache.pop(&key);
let hash = self.hash_key(key);
let contains = shmffi::with(|segment| segment.table.contains_key(&hash));
if !contains {
continue;
}
let _size = shmffi::with(|segment| {
segment
.table
.inspect_and_remove(&hash, |value| value.unwrap().as_slice().len())
});
}
Ok(())
}
}
fn serialize<T: Serialize>(val: &T) -> Result<Vec<u8>> {
let mut serialized = Vec::new();
bincode::serialize_into(&mut serialized, &intern::WithIntern(val))?;
Ok(serialized)
}
fn deserialize<T: DeserializeOwned>(serialized: &[u8]) -> Result<T> {
Ok(intern::WithIntern::strip(bincode::deserialize(serialized))?)
}
fn serialize_and_lz4_compress<T: Serialize>(val: &T, level: u32) -> Result<Vec<u8>> {
let encoder = lz4::EncoderBuilder::new().level(level).build(vec![])?;
let mut w = std::io::BufWriter::new(encoder);
bincode::serialize_into(&mut w, &intern::WithIntern(val))?;
w.flush()?;
let encoder = w.into_inner().expect("into_inner returned Err after flush");
let (compressed, result) = encoder.finish();
result?;
Ok(compressed)
}
fn lz4_decompress_and_deserialize<R: Read, T: DeserializeOwned>(r: R) -> Result<T> {
let r = lz4::Decoder::new(r)?;
let mut r = std::io::BufReader::new(r);
Ok(intern::WithIntern::strip(bincode::deserialize_from(
&mut r,
))?)
}
fn serialize_and_zstd_compress<T: Serialize>(val: &T, level: i32) -> Result<Vec<u8>> {
let mut compressed = vec![];
let w = zstd::Encoder::new(&mut compressed, level)?.auto_finish();
let mut w = std::io::BufWriter::new(w);
bincode::serialize_into(&mut w, &intern::WithIntern(val))?;
drop(w);
Ok(compressed)
}
fn zstd_decompress_and_deserialize<R: Read, T: DeserializeOwned>(r: R) -> Result<T> {
let r = zstd::Decoder::new(r)?;
let mut r = std::io::BufReader::new(r);
Ok(intern::WithIntern::strip(bincode::deserialize_from(
&mut r,
))?)
}
impl<K, V> std::fmt::Debug for ShmStore<K, V> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ShmStore").finish()
}
}
/// There seems to be a problem with using the impl of `Hash` for interned
/// symbols: since they're only 32-bit IDs, hashes based on them tend to
/// collide, which our shmrs library cannot tolerate. Instead, we use this
/// custom hashing trait and hash the entire string representation of the
/// symbol. We might want to revisit this later and see whether there's a way to
/// hash these less expensively.
///
/// If an implementor of this trait also implements `Borrow<[u8]>`, its impl of
/// `hash_key` must behave the same as `impl Key for [u8]` (which just invokes
/// `state.update(self)`).
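///
/// For example (an illustrative sketch, not a type in this crate), a newtype
/// `struct SymbolBytes(Box<[u8]>)` that implements `Borrow<[u8]>` should implement
/// `hash_key` as `state.update(&*self.0)`, so that hashing the key and hashing the
/// borrowed byte string produce the same digest.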
pub trait Key {
fn hash_key<H: Digest>(&self, state: &mut H);
}
impl Key for [u8] {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self);
}
}
impl Key for pos::TypeName {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self.as_bytes());
}
}
impl Key for pos::ModuleName {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self.as_bytes());
}
}
impl Key for pos::FunName {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self.as_bytes());
}
}
impl Key for pos::ConstName {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self.as_bytes());
}
}
impl<T: AsRef<str>> Key for (pos::TypeName, T) {
fn hash_key<H: Digest>(&self, state: &mut H) {
let type_name: &str = self.0.as_ref();
state.update(type_name);
let member_name: &str = self.1.as_ref();
state.update(member_name);
}
}
impl Key for pos::RelativePath {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update([self.prefix() as u8]);
state.update(self.suffix());
}
}
impl Key for hh24_types::ToplevelSymbolHash {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self.as_u64().to_ne_bytes())
}
}
impl Key for hh24_types::ToplevelCanonSymbolHash {
fn hash_key<H: Digest>(&self, state: &mut H) {
state.update(self.as_u64().to_ne_bytes())
}
}
extern "C" {
fn hh_log_level() -> ocamlrep::Value<'static>;
}
fn shm_log_level() -> isize {
// SAFETY: We rely on sharedmem having been initialized here.
unsafe { hh_log_level() }.as_int().unwrap()
}
/// A `datastore::Store` which writes its values to sharedmem (via the `shmffi`
/// crate) as OCaml-marshaled values. Can be configured to compress the
/// marshaled blobs using `Compression`.
pub struct OcamlShmStore<K, V> {
/// An LRU cache of hashconsed values in front of the serialized shm heap.
cache: Mutex<lru::LruCache<K, V>>,
evictable: bool,
compression: Compression,
prefix: &'static str,
}
impl<K, V> OcamlShmStore<K, V>
where
K: Key + Copy + Hash + Eq + Send + Sync + 'static,
V: Clone + Send + Sync + 'static,
{
pub fn new(prefix: &'static str, evictability: Evictability, compression: Compression) -> Self {
Self {
cache: Mutex::new(lru::LruCache::new(NonZeroUsize::new(1000).unwrap())),
evictable: matches!(evictability, Evictability::Evictable),
compression,
prefix,
}
}
/// Fetch the value corresponding to the given key and deserialize it
/// directly onto the OCaml heap.
///
/// # Safety
///
/// Must be invoked on the main thread. Calls into the OCaml runtime and may
/// trigger a GC, so no unrooted OCaml values may exist. The returned
/// `UnsafeOcamlPtr` is unrooted and could be invalidated if the GC is
/// triggered after this method returns.
pub unsafe fn get_ocaml(&self, key: K) -> Option<UnsafeOcamlPtr> {
self.get_ocaml_by_hash(self.hash_key(&key))
}
/// Fetch the value corresponding to the given key (when the key type `K`
/// can be represented with a byte string, and implements `Borrow<[u8]>`)
/// and deserialize it directly onto the OCaml heap.
///
/// # Safety
///
/// Must be invoked on the main thread. Calls into the OCaml runtime and may
/// trigger a GC, so no unrooted OCaml values may exist. The returned
/// `UnsafeOcamlPtr` is unrooted and could be invalidated if the GC is
/// triggered after this method returns.
pub unsafe fn get_ocaml_by_byte_string(&self, key: &[u8]) -> Option<UnsafeOcamlPtr>
where
K: Borrow<[u8]>,
{
self.get_ocaml_by_hash(self.hash_key(key))
}
unsafe fn get_ocaml_by_hash(&self, hash: u64) -> Option<UnsafeOcamlPtr> {
extern "C" {
fn caml_input_value_from_block(data: *const u8, size: usize) -> UnsafeOcamlPtr;
}
let bytes_opt = shmffi::with(|segment| {
segment.table.read(&hash).get().map(|heap_value| {
self.decompress(heap_value.as_slice(), heap_value.header.uncompressed_size())
.unwrap()
.into_owned()
})
});
let v = bytes_opt.map(|bytes| caml_input_value_from_block(bytes.as_ptr(), bytes.len()));
self.log_shmem_hit_rate(v.is_some());
v
}
fn decompress<'a>(&self, bytes: &'a [u8], uncompressed_size: usize) -> Result<Cow<'a, [u8]>> {
self.log_deserialize(bytes.len());
Ok(match self.compression {
Compression::None => Cow::Borrowed(bytes),
Compression::Lz4 { .. } => Cow::Owned(lz4_decompress(bytes, uncompressed_size)?),
Compression::Zstd { .. } => Cow::Owned(zstd_decompress(bytes)?),
})
}
fn hash_key<Q: ?Sized + Key>(&self, key: &Q) -> u64
where
K: Borrow<Q>,
{
let mut hasher = md5::Md5::new();
hasher.update(self.prefix);
key.hash_key(&mut hasher);
// hh_shared just takes the first 64 bits of the 128-bit MD5 digest.
u64::from_ne_bytes((&hasher.finalize()[0..8]).try_into().unwrap())
}
#[rustfmt::skip]
fn log_serialize(&self, compressed: usize, original: usize) {
if shm_log_level() < 1 {
return;
}
let compressed = compressed as f64;
let original = original as f64;
let saved = original - compressed;
let ratio = compressed / original;
// shmrs doesn't actually allow us to count the total including
// header/padding/alignment. `shmffi` just reuses the `compressed`
// number for this stat, so do the same for now.
measure::sample((self.prefix, "total bytes including header and padding"), compressed);
measure::sample(("ALL bytes", "total bytes including header and padding"), compressed);
measure::sample((self.prefix, "bytes serialized into shared heap"), compressed);
measure::sample("ALL bytes serialized into shared heap", compressed);
measure::sample((self.prefix, "bytes saved in shared heap due to compression"), saved);
measure::sample("ALL bytes saved in shared heap due to compression", saved);
measure::sample((self.prefix, "shared heap compression ratio"), ratio);
measure::sample("ALL bytes shared heap compression ratio", ratio);
}
#[rustfmt::skip]
fn log_deserialize(&self, compressed_size: usize) {
if shm_log_level() < 1 {
return;
}
measure::sample((self.prefix, "bytes deserialized from shared heap"), compressed_size as f64);
measure::sample("ALL bytes deserialized from shared heap", compressed_size as f64);
}
#[rustfmt::skip]
fn log_shmem_hit_rate(&self, is_hit: bool) {
if shm_log_level() < 1 {
return;
}
measure::sample((self.prefix, "shmem cache hit rate"), is_hit as u8 as f64);
measure::sample("ALL shmem cache hit rate", is_hit as u8 as f64);
}
#[rustfmt::skip]
fn log_cache_hit_rate(&self, is_hit: bool) {
if shm_log_level() < 1 {
return;
}
measure::sample((self.prefix, "rust cache hit rate"), is_hit as u8 as f64);
measure::sample("ALL rust cache hit rate", is_hit as u8 as f64);
}
}
impl<K, V> datastore::Store<K, V> for OcamlShmStore<K, V>
where
K: Key + Copy + Hash + Eq + Send + Sync + 'static,
V: ToOcamlRep + FromOcamlRep + Clone + Send + Sync + 'static,
{
fn contains_key(&self, key: K) -> Result<bool> {
if self.cache.lock().contains(&key) {
return Ok(true);
}
Ok(shmffi::with(|segment| {
segment.table.contains_key(&self.hash_key(&key))
}))
}
fn get(&self, key: K) -> Result<Option<V>> {
let cache_val_opt = self.cache.lock().get(&key).cloned();
self.log_cache_hit_rate(cache_val_opt.is_some());
if cache_val_opt.is_some() {
return Ok(cache_val_opt);
}
let bytes_opt = shmffi::with(|segment| {
segment
.table
.read(&self.hash_key(&key))
.get()
.map(|heap_value| -> Result<_> {
Ok(self
.decompress(heap_value.as_slice(), heap_value.header.uncompressed_size())?
.into_owned())
})
.transpose()
})?;
let val_opt: Option<V> = bytes_opt
.map(|bytes| -> Result<_> {
let arena = ocamlrep::Arena::new();
let value = unsafe { ocamlrep_marshal::input_value(&bytes, &arena) };
Ok(V::from_ocamlrep(value)?)
})
.transpose()?;
if let Some(val) = &val_opt {
self.cache.lock().put(key, val.clone());
}
self.log_shmem_hit_rate(val_opt.is_some());
Ok(val_opt)
}
fn insert(&self, key: K, val: V) -> Result<()> {
let arena = ocamlrep::Arena::new();
let ocaml_val = arena.add_root(&val);
let mut bytes = std::io::Cursor::new(Vec::with_capacity(4096));
ocamlrep_marshal::output_value(
&mut bytes,
ocaml_val,
ocamlrep_marshal::ExternFlags::empty(),
)?;
let bytes = bytes.into_inner();
let uncompressed_size = bytes.len();
let bytes = match self.compression {
Compression::None => bytes,
Compression::Lz4 { .. } => lz4_compress(&bytes)?,
Compression::Zstd { compression_level } => zstd_compress(&bytes, compression_level)?,
};
let compressed_size = bytes.len();
self.cache.lock().put(key, val);
let did_insert = shmffi::with(|segment| {
segment.table.insert(
self.hash_key(&key),
Some(Layout::from_size_align(bytes.len(), 1).unwrap()),
self.evictable,
|buffer| {
buffer.copy_from_slice(&bytes);
let header = ocaml_blob::HeapValueHeaderFields {
buffer_size: bytes.len(),
uncompressed_size,
is_serialized: true,
is_evictable: self.evictable,
};
ocaml_blob::HeapValue {
header: header.into(),
data: std::ptr::NonNull::from(buffer).cast(),
}
},
)
});
if did_insert {
self.log_serialize(compressed_size, uncompressed_size);
}
Ok(())
}
fn move_batch(&self, keys: &mut dyn Iterator<Item = (K, K)>) -> Result<()> {
let mut cache = self.cache.lock();
for (old_key, new_key) in keys {
let old_hash = self.hash_key(&old_key);
let new_hash = self.hash_key(&new_key);
shmffi::with(|segment| {
let (header, data) = segment.table.inspect_and_remove(&old_hash, |value| {
let value = value.unwrap();
(value.header, <Box<[u8]>>::from(value.as_slice()))
});
cache.pop(&old_key);
segment.table.insert(
new_hash,
Some(Layout::from_size_align(data.len(), 1).unwrap()),
header.is_evictable(),
|buffer| {
buffer.copy_from_slice(&data);
ocaml_blob::HeapValue {
header,
data: std::ptr::NonNull::new(buffer.as_mut_ptr()).unwrap(),
}
},
);
// We choose not to `cache.put(new_key, ...)` here.
});
}
Ok(())
}
fn remove_batch(&self, keys: &mut dyn Iterator<Item = K>) -> Result<()> {
let mut cache = self.cache.lock();
for key in keys {
cache.pop(&key);
let hash = self.hash_key(&key);
let contains = shmffi::with(|segment| segment.table.contains_key(&hash));
if !contains {
continue;
}
let _size = shmffi::with(|segment| {
segment
.table
.inspect_and_remove(&hash, |value| value.unwrap().as_slice().len())
});
}
Ok(())
}
}
fn lz4_compress(bytes: &[u8]) -> Result<Vec<u8>> {
Ok(lz4::block::compress(bytes, None, false)?)
}
fn lz4_decompress(compressed: &[u8], uncompressed_size: usize) -> Result<Vec<u8>> {
Ok(lz4::block::decompress(
compressed,
Some(uncompressed_size.try_into().unwrap()),
)?)
}
fn zstd_compress(mut bytes: &[u8], level: i32) -> Result<Vec<u8>> {
let mut compressed = vec![];
zstd::stream::copy_encode(&mut bytes, &mut compressed, level)?;
Ok(compressed)
}
fn zstd_decompress(mut compressed: &[u8]) -> Result<Vec<u8>> {
let mut decompressed = vec![];
zstd::stream::copy_decode(&mut compressed, &mut decompressed)?;
Ok(decompressed)
}
impl<K, V> std::fmt::Debug for OcamlShmStore<K, V> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("OcamlShmStore").finish()
}
} |
TOML | hhvm/hphp/hack/src/shmffi/cargo/ocaml_blob/Cargo.toml | # @generated by autocargo
[package]
name = "ocaml_blob"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../ocaml_blob.rs"
crate-type = ["lib", "staticlib"]
[dependencies]
libc = "0.2.139"
lz4 = "1.24.0"
ocamlrep = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
shmrs = { version = "0.0.0", path = "../../../shmrs" }
[dev-dependencies]
rand = { version = "0.8", features = ["small_rng"] } |
TOML | hhvm/hphp/hack/src/shmffi/cargo/shmffi/Cargo.toml | # @generated by autocargo
[package]
name = "shmffi"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../shmffi.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]
[dependencies]
libc = "0.2.139"
ocaml_blob = { version = "0.0.0", path = "../ocaml_blob" }
ocamlrep = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
once_cell = "1.12"
shmrs = { version = "0.0.0", path = "../../../shmrs" } |
TOML | hhvm/hphp/hack/src/shmffi/cargo/shm_store/Cargo.toml | # @generated by autocargo
[package]
name = "shm_store"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../shm_store.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]
[dependencies]
anyhow = "1.0.71"
bincode = "1.3.3"
datastore = { version = "0.0.0", path = "../../../hackrs/datastore" }
hh24_types = { version = "0.0.0", path = "../../../utils/hh24_types" }
intern = { version = "0.1.0", path = "../../../utils/intern" }
lru = "0.10.0"
lz4 = "1.24.0"
md-5 = "0.10"
measure = { version = "0.0.0", path = "../../../utils/rust/measure" }
ocaml_blob = { version = "0.0.0", path = "../ocaml_blob" }
ocamlrep = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_marshal = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
parking_lot = { version = "0.12.1", features = ["send_guard"] }
pos = { version = "0.0.0", path = "../../../hackrs/pos/cargo/pos" }
serde = { version = "1.0.176", features = ["derive", "rc"] }
shmffi = { version = "0.0.0", path = "../shmffi" }
zstd = { version = "0.11.2+zstd.1.5.2", features = ["experimental", "zstdmt"] } |
TOML | hhvm/hphp/hack/src/shmrs/Cargo.toml | # @generated by autocargo
[package]
name = "shmrs"
version = "0.0.0"
edition = "2021"
[lib]
path = "shmrs.rs"
[dependencies]
hashbrown = { version = "0.12.3", features = ["nightly", "raw", "serde"] }
libc = "0.2.139"
nohash-hasher = "0.2"
static_assertions = "1.1.0"
[dev-dependencies]
nix = "0.25"
rand = { version = "0.8", features = ["small_rng"] } |
Rust | hhvm/hphp/hack/src/shmrs/chashmap.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::alloc::AllocError;
use std::alloc::Allocator;
use std::alloc::Layout;
use std::hash::BuildHasher;
use std::hash::Hash;
use std::hash::Hasher;
use std::mem::MaybeUninit;
use std::ptr::NonNull;
use hashbrown::hash_map::DefaultHashBuilder;
use crate::filealloc::FileAlloc;
use crate::hashmap::Map;
use crate::shardalloc::ShardAlloc;
use crate::shardalloc::ShardAllocControlData;
use crate::shardalloc::SHARD_ALLOC_MIN_CHUNK_SIZE;
use crate::sync::RwLock;
use crate::sync::RwLockReadGuard;
use crate::sync::RwLockRef;
use crate::sync::RwLockWriteGuard;
/// Timeout for acquiring shard locks.
///
/// We'd like to not hang forever if another worker crashes (or is killed)
/// while holding the lock (e.g. because of an OOM kill).
pub const LOCK_TIMEOUT: Option<std::time::Duration> = Some(std::time::Duration::new(60, 0));
/// The number of shards.
///
/// DashMap uses (nproc * 4) rounded up to the next power of two. Let's do the
/// same under the assumption of 40 processors.
///
/// Must be a power of 2, as we use `trailing_zeros` for bitshifting.
pub const NUM_SHARDS: usize = 256;
static_assertions::const_assert!(NUM_SHARDS.is_power_of_two());
/// The non-evictable allocator itself allocates regions of memory in chunks.
const NON_EVICTABLE_CHUNK_SIZE: usize = 1024 * 1024;
/// Minimum evictable bytes for an evictable shard.
///
/// The program will panic if a `CMap` is initialized with less evictable
/// bytes per shard.
///
/// See also `CMap::initialize_with_hasher`
pub const MINIMUM_EVICTABLE_BYTES_PER_SHARD: usize = SHARD_ALLOC_MIN_CHUNK_SIZE;
/// This struct gives access to a shard, including its hashmap and its
/// allocators.
struct Shard<'shm, 'a, K, V, S> {
map: RwLockWriteGuard<'a, Map<'shm, K, V, S>>,
alloc_non_evictable: &'a ShardAlloc<'shm>,
alloc_evictable: &'a ShardAlloc<'shm>,
}
/// Each value stored in a concurrent hashmap needs to keep track of
/// some bookkeeping and the concurrent hashmap needs to be able to
/// access that bookkeeping.
///
/// We force the bookkeeping on the value type, because the value type
/// can optimize representation.
pub trait CMapValue {
/// A hash map contains both references to evictable and non-evictable data.
///
/// When we've removed evictable data from the evictable heaps, we also have
/// to remove any value that might reference that data. This function tells us
/// whether or not the value points to evictable data, and thus whether or not
/// it should be evicted.
fn points_to_evictable_data(&self) -> bool;
/// An evictable heap can contain data which, in addition to being evictable
/// upon memory pressure, can be removed via invalidation. We call this type
/// of data "flushable". This function tells us whether a value in the hash map
    /// points to flushable data in the evictable heap, so that such values can be removed.
fn points_to_flushable_data(&self) -> bool;
/// A hash map value holds a pointer to its corresponding data in the heap.
/// This function returns that pointer.
fn ptr(&self) -> &NonNull<u8>;
}
/// Represents a lookup for a particular key.
///
/// Holds a read lock on the shard for that key and can perform the lookup in
/// that shard for the value associated with the key. If `.get()` returns
/// `None`, the map does not contain the key.
///
/// Makes sure the underlying locks are released once the value goes out of
/// scope.
pub struct CMapValueReader<'shm, 'a, K, V, S> {
shard: RwLockReadGuard<'a, Map<'shm, K, V, S>>,
key: &'a K,
}
impl<'shm, 'a, K, V, S> CMapValueReader<'shm, 'a, K, V, S>
where
K: std::cmp::Eq + std::hash::Hash,
S: std::hash::BuildHasher,
{
pub fn get(&self) -> Option<&V> {
self.shard.get(self.key)
}
}
/// A concurrent hash map implemented as multiple sharded non-concurrent
/// hash maps.
///
/// This is the struct as laid out in memory. As such, it should live
/// in shared memory.
///
/// Use `initialize` or `attach` to get an interface into the map.
///
/// ## Invariants
/// There are important invariants about this data structure. Failing to
/// uphold these invariants might crash the program.
///
/// 1. Each sharded hashmap has two sharded allocators: one for
/// non-evictable items, and one for evictable items. Putting
/// non-evictable items in the evictable shard (and vice versa)
/// is an invariant violation.
/// 2. Each sharded hashmap can only contain pointers to values in
/// its own sharded allocators. Pointing to a value in a different
/// shard allocator is an invariant violation.
pub struct CMap<'shm, K, V, S = DefaultHashBuilder> {
hash_builder: S,
max_evictable_bytes_per_shard: usize,
file_alloc: &'shm FileAlloc,
shard_allocs_non_evictable: [RwLock<ShardAllocControlData>; NUM_SHARDS],
shard_allocs_evictable: [RwLock<ShardAllocControlData>; NUM_SHARDS],
maps: [RwLock<Map<'shm, K, V, S>>; NUM_SHARDS],
}
/// A reference to a concurrent hash map.
///
/// This struct is merely a reference to the shared memory data. As such,
/// it is process-local.
///
/// Obtained by calling `initialize` or `attach` on `CMap`.
pub struct CMapRef<'shm, K, V, S = DefaultHashBuilder> {
hash_builder: S,
pub max_evictable_bytes_per_shard: usize,
file_alloc: &'shm FileAlloc,
shard_allocs_non_evictable: Vec<ShardAlloc<'shm>>,
shard_allocs_evictable: Vec<ShardAlloc<'shm>>,
maps: Vec<RwLockRef<'shm, Map<'shm, K, V, S>>>,
}
impl<'shm, K, V> CMap<'shm, K, V, DefaultHashBuilder> {
/// Initialize a new concurrent hash map at the given location.
///
/// See `initialize_with_hasher`
pub unsafe fn initialize(
cmap: &'shm mut MaybeUninit<Self>,
file_alloc: &'shm FileAlloc,
max_evictable_bytes_per_shard: usize,
) -> CMapRef<'shm, K, V, DefaultHashBuilder> {
Self::initialize_with_hasher(
cmap,
DefaultHashBuilder::new(),
file_alloc,
max_evictable_bytes_per_shard,
)
}
}
impl<'shm, K, V, S: Clone> CMap<'shm, K, V, S> {
/// Initialize a new concurrent hash map at the given location.
///
/// Safety:
    /// - You must initialize this exactly once.
/// - Use `attach` to attach other processes to this memory location.
/// - The hash builder must not contain pointers to process-local memory.
/// - Don't mutate or read the shared memory segment outside this API!
///
/// Panics:
/// - If `file_size` is not large enough.
/// - If `max_evictable_bytes_per_shard` is less than
/// `MINIMUM_EVICTABLE_BYTES_PER_SHARD`
pub unsafe fn initialize_with_hasher(
cmap: &'shm mut MaybeUninit<Self>,
hash_builder: S,
file_alloc: &'shm FileAlloc,
max_evictable_bytes_per_shard: usize,
) -> CMapRef<'shm, K, V, S> {
// Initialize the memory properly.
//
// See MaybeUninit docs for examples.
let mut shard_allocs_non_evictable: [MaybeUninit<RwLock<ShardAllocControlData>>;
NUM_SHARDS] = MaybeUninit::uninit().assume_init();
for shard_alloc in &mut shard_allocs_non_evictable[..] {
*shard_alloc = MaybeUninit::new(RwLock::new(ShardAllocControlData::new()));
}
let mut shard_allocs_evictable: [MaybeUninit<RwLock<ShardAllocControlData>>; NUM_SHARDS] =
MaybeUninit::uninit().assume_init();
for shard_alloc in &mut shard_allocs_evictable[..] {
*shard_alloc = MaybeUninit::new(RwLock::new(ShardAllocControlData::new()));
}
let mut maps: [MaybeUninit<RwLock<Map<'shm, K, V, S>>>; NUM_SHARDS] =
MaybeUninit::uninit().assume_init();
for map in &mut maps[..] {
*map = MaybeUninit::new(RwLock::new(Map::new()));
}
cmap.as_mut_ptr().write(CMap {
hash_builder,
max_evictable_bytes_per_shard,
file_alloc,
shard_allocs_non_evictable: MaybeUninit::array_assume_init(shard_allocs_non_evictable),
shard_allocs_evictable: MaybeUninit::array_assume_init(shard_allocs_evictable),
maps: MaybeUninit::array_assume_init(maps),
});
let cmap = cmap.assume_init_mut();
// Initialize map locks.
let maps: Vec<RwLockRef<'shm, _>> = cmap
.maps
.iter_mut()
.map(|r| r.initialize().unwrap())
.collect();
// Initialize shard allocator locks.
let mut shard_allocs_non_evictable: Vec<ShardAlloc<'shm>> =
Vec::with_capacity(cmap.shard_allocs_non_evictable.len());
for lock in &mut cmap.shard_allocs_non_evictable {
shard_allocs_non_evictable.push(ShardAlloc::new(
lock.initialize().unwrap(),
cmap.file_alloc,
NON_EVICTABLE_CHUNK_SIZE,
false,
));
}
let mut shard_allocs_evictable: Vec<ShardAlloc<'shm>> =
Vec::with_capacity(cmap.shard_allocs_evictable.len());
for lock in &mut cmap.shard_allocs_evictable {
shard_allocs_evictable.push(ShardAlloc::new(
lock.initialize().unwrap(),
cmap.file_alloc,
max_evictable_bytes_per_shard,
true,
));
}
// Initialize maps themselves.
for map in maps.iter() {
map.write(LOCK_TIMEOUT)
.unwrap()
.reset_with_hasher(cmap.file_alloc, cmap.hash_builder.clone());
}
CMapRef {
hash_builder: cmap.hash_builder.clone(),
max_evictable_bytes_per_shard: cmap.max_evictable_bytes_per_shard,
file_alloc: cmap.file_alloc,
shard_allocs_non_evictable,
shard_allocs_evictable,
maps,
}
}
/// Attach to an already initialized concurrent hash map.
///
/// Safety:
/// - The map at this pointer must already be initialized by a different
/// process (or by the calling process itself).
pub unsafe fn attach(cmap: &'shm MaybeUninit<Self>) -> CMapRef<'shm, K, V, S> {
// Safety: already initialized!
let cmap = cmap.assume_init_ref();
// Attach to the map locks.
let maps: Vec<RwLockRef<'shm, _>> = cmap.maps.iter().map(|r| r.attach()).collect();
// Attach shard allocators.
let mut shard_allocs_non_evictable: Vec<ShardAlloc<'shm>> =
Vec::with_capacity(cmap.shard_allocs_non_evictable.len());
for lock in &cmap.shard_allocs_non_evictable {
shard_allocs_non_evictable.push(ShardAlloc::new(
lock.attach(),
cmap.file_alloc,
NON_EVICTABLE_CHUNK_SIZE,
false,
));
}
let mut shard_allocs_evictable: Vec<ShardAlloc<'shm>> =
Vec::with_capacity(cmap.shard_allocs_evictable.len());
for lock in &cmap.shard_allocs_evictable {
shard_allocs_evictable.push(ShardAlloc::new(
lock.attach(),
cmap.file_alloc,
cmap.max_evictable_bytes_per_shard,
true,
));
}
CMapRef {
hash_builder: cmap.hash_builder.clone(),
max_evictable_bytes_per_shard: cmap.max_evictable_bytes_per_shard,
file_alloc: cmap.file_alloc,
shard_allocs_non_evictable,
shard_allocs_evictable,
maps,
}
}
}
impl<'shm, K: Hash + Eq, V: CMapValue, S: BuildHasher> CMapRef<'shm, K, V, S> {
fn shard_index_for(&self, key: &K) -> usize {
if NUM_SHARDS == 1 {
return 0;
}
let mut hasher = self.hash_builder.build_hasher();
key.hash(&mut hasher);
let hash = hasher.finish();
// The higher bits are also used by hashbrown's HashMap.
// This is a cheap mixer to get some entropy for shard selection.
let hash: u64 = hash.wrapping_mul(0x9e3779b97f4a7c15);
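        // Example: with NUM_SHARDS = 256, trailing_zeros() is 8, so the shift below
        // keeps the top 8 bits of the mixed hash, yielding an index in 0..256.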
(hash >> (64 - NUM_SHARDS.trailing_zeros())) as usize
}
fn shard_for_writing<'a>(&'a self, key: &K) -> Shard<'shm, 'a, K, V, S> {
let shard_index = self.shard_index_for(key);
let map = self.maps[shard_index].write(LOCK_TIMEOUT).unwrap();
let alloc_non_evictable = &self.shard_allocs_non_evictable[shard_index];
let alloc_evictable = &self.shard_allocs_evictable[shard_index];
Shard {
map,
alloc_non_evictable,
alloc_evictable,
}
}
fn shard_for_reading<'a>(&'a self, key: &K) -> RwLockReadGuard<'a, Map<'shm, K, V, S>> {
let shard_index = self.shard_index_for(key);
self.maps[shard_index].read(LOCK_TIMEOUT).unwrap()
}
    /// Remove from the index the entries that don't satisfy the predicate
    /// and compact everything remaining.
#[allow(unused)]
fn filter_and_compact<'a, P: FnMut(&mut V) -> bool>(
shard: &mut Shard<'shm, 'a, K, V, S>,
mut f: P,
) {
let entries_to_invalidate = shard.map.drain_filter(|_, value| !f(value));
entries_to_invalidate.for_each(|(_, value)| {
let data = value.ptr();
shard.alloc_evictable.mark_as_unreachable(data)
});
shard.alloc_evictable.compact()
}
/// Empty a shard.
fn empty_shard<'a>(shard: &mut Shard<'shm, 'a, K, V, S>) {
// Remove all values that might point to evictable data.
shard
.map
.retain(|_, value| !value.points_to_evictable_data());
// Safety: We've just removed all pointers to values in the allocator
// on the previous line.
unsafe {
shard.alloc_evictable.reset();
}
}
/// Insert a value into the map.
///
/// If a layout is specified, this function will first allocate suitable
/// memory and pass it on to the `value` producer. If no layout is
/// specified, a reference to an empty byte slice will be used.
///
/// If `evictable` is true, the function might choose to not allocate
/// memory, in which case the `value` producer will not be called. The
/// return type indicates whether or not a value is inserted into the map.
///
    /// Note that the result of calling `points_to_evictable_data` on the produced
    /// value must match `evictable`.
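    ///
    /// (The `test_insert_many` integration test at the bottom of this file shows a
    /// representative call: `cmap.insert(key, None, false, |_buffer| U64Value(value))`.)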
pub fn insert(
&self,
key: K,
layout: Option<Layout>,
evictable: bool,
value: impl FnOnce(&mut [u8]) -> V,
) -> bool {
let empty_slice: &mut [u8] = &mut [];
let mut shard = self.shard_for_writing(&key);
let ptr_opt = match layout {
None => Some(NonNull::new(empty_slice as *mut [u8]).unwrap()),
Some(layout) => {
if evictable
&& layout.align() + layout.size() > self.max_evictable_bytes_per_shard / 2
{
// Requested memory is too large, do not allocate
None
} else if evictable {
match shard.alloc_evictable.allocate(layout) {
Ok(ptr) => Some(ptr),
Err(AllocError) => {
// The allocator is full, empty the shard and try again.
// This time allocation MUST succeed.
Self::empty_shard(&mut shard);
Some(shard.alloc_evictable.allocate(layout).unwrap())
}
}
} else {
Some(shard.alloc_non_evictable.allocate(layout).unwrap())
}
}
};
if let Some(mut ptr) = ptr_opt {
// Safety: we are the only ones with access to the allocated chunk
let buffer = unsafe { ptr.as_mut() };
let v = value(buffer);
assert!(v.points_to_evictable_data() == evictable);
shard.map.insert(key, v);
return true;
}
false
}
/// Acquire a read lock on the shard which may contain the value associated
/// with the given key. If the map contains a value for this key,
/// `.get()` will return `Some`.
pub fn read<'a>(&'a self, key: &'a K) -> CMapValueReader<'shm, '_, K, V, S> {
let shard = self.shard_for_reading(key);
CMapValueReader { shard, key }
}
/// Inspect a value, then remove it from the map.
///
    /// Ideally, we'd return the removed value directly. However, we can't,
    /// because it might contain invalid references. What
/// we can do is pass a reference to a closure, which limits the lifetime
/// of that value.
pub fn inspect_and_remove<R>(&self, key: &K, inspect: impl FnOnce(Option<&V>) -> R) -> R {
let mut shard_lock = self.shard_for_writing(key);
let value = shard_lock.map.remove(key);
// This is quite unsafe. We must make sure we hold the lock as long
// as the value is in use, because the value might point to data in
        // the heap, which might get evicted if another writer is active!
        //
        // As such, I am dropping the `value` and `shard_lock` (in that order!)
// manually as a coding hint.
let res = inspect(value.as_ref());
drop(value);
drop(shard_lock);
// DO NOT USE value HERE! The lock has been released.
res
}
/// Check if the map contains a value for a key.
pub fn contains_key(&self, key: &K) -> bool {
let shard = self.shard_for_reading(key);
shard.contains_key(key)
}
/// Return the total number of bytes allocated.
///
/// Note that this might include bytes that were later free'd, as we
/// (currently) don't free memory to the OS.
pub fn allocated_bytes(&self) -> usize {
self.file_alloc.allocated_bytes()
}
/// Return the number of total entries in the hash map.
///
/// Will loop over each shard.
pub fn len(&self) -> usize {
self.maps
.iter()
.map(|map| map.read(LOCK_TIMEOUT).unwrap().len())
.sum()
}
/// Return true if the hashmap is empty.
/// Will loop over each shard.
pub fn is_empty(&self) -> bool {
self.maps
.iter()
.all(|map| map.read(LOCK_TIMEOUT).unwrap().is_empty())
}
}
#[cfg(test)]
mod integration_tests {
use std::collections::HashMap;
use std::collections::HashSet;
use std::time::Duration;
use nix::sys::wait::WaitStatus;
use nix::unistd::ForkResult;
use rand::prelude::*;
use super::*;
struct U64Value(u64);
impl CMapValue for U64Value {
fn points_to_evictable_data(&self) -> bool {
false
}
fn points_to_flushable_data(&self) -> bool {
false
}
fn ptr(&self) -> &NonNull<u8> {
// Since none of the existing unit tests below actually write to
// memory, we should not invoke this function to attempt accessing it
panic!("This method should not be invoked!")
}
}
#[test]
fn test_insert_many() {
const NUM_PROCS: usize = 20;
const NUM_INSERTS_PER_PROC: usize = 1000;
const OP_SLEEP: Duration = Duration::from_micros(10);
const MEM_HEAP_SIZE: usize = 100 * 1024 * 1024;
struct Segment<'shm> {
file_alloc: MaybeUninit<FileAlloc>,
table: MaybeUninit<CMap<'shm, u64, U64Value>>,
}
let mut rng = StdRng::from_seed([0; 32]);
let scenarios: Vec<Vec<(u64, u64)>> = std::iter::repeat_with(|| {
std::iter::repeat_with(|| (rng.next_u64() % 10_000, rng.next_u64() % 10_000))
.take(NUM_INSERTS_PER_PROC)
.collect()
})
.take(NUM_PROCS)
.collect();
let mmap_ptr = unsafe {
libc::mmap(
std::ptr::null_mut(),
MEM_HEAP_SIZE,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED | libc::MAP_ANONYMOUS,
-1,
0,
)
};
assert_ne!(mmap_ptr, libc::MAP_FAILED);
let layout = std::alloc::Layout::new::<Segment<'_>>();
assert_eq!(mmap_ptr.align_offset(layout.align()), 0);
let segment = mmap_ptr as *mut MaybeUninit<Segment<'static>>;
let cmap = unsafe {
let segment = &mut *segment;
segment.write(Segment {
file_alloc: MaybeUninit::uninit(),
table: MaybeUninit::uninit(),
});
let segment = segment.assume_init_mut();
segment
.file_alloc
.write(FileAlloc::new(mmap_ptr, MEM_HEAP_SIZE, layout.size()));
let file_alloc = segment.file_alloc.assume_init_mut();
CMap::initialize(&mut segment.table, file_alloc, 128)
};
let mut child_procs = vec![];
for scenario in &scenarios {
match unsafe { nix::unistd::fork() }.unwrap() {
ForkResult::Parent { child } => {
child_procs.push(child);
}
ForkResult::Child => {
// Exercise attach as well.
let cmap: CMapRef<'static, u64, U64Value> = unsafe {
let segment = mmap_ptr as *const MaybeUninit<Segment<'static>>;
let segment = &*segment;
let segment = segment.assume_init_ref();
CMap::attach(&segment.table)
};
for &(key, value) in scenario.iter() {
cmap.insert(key, None, false, |_buffer| {
std::thread::sleep(OP_SLEEP);
U64Value(value)
});
}
std::process::exit(0)
}
}
}
for pid in child_procs {
match nix::sys::wait::waitpid(pid, None).unwrap() {
WaitStatus::Exited(_, status) => assert_eq!(status, 0),
status => panic!("unexpected status for pid {:?}: {:?}", pid, status),
}
}
let mut expected: HashMap<u64, HashSet<u64>> = HashMap::new();
for scenario in scenarios {
for (key, value) in scenario {
expected.entry(key).or_default().insert(value);
}
}
for (key, values) in expected {
let value = cmap.read(&key).get().unwrap().0;
assert!(values.contains(&value));
}
assert_eq!(unsafe { libc::munmap(mmap_ptr, MEM_HEAP_SIZE) }, 0);
}
} |
Rust | hhvm/hphp/hack/src/shmrs/error.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
/// A system error.
#[derive(Debug, Copy, Clone)]
pub struct Errno {
pub errno: i64,
}
impl Errno {
#[cfg(not(target_os = "macos"))]
fn errno() -> libc::c_int {
// Safety: Reads the errno global.
//
// Note that we don't call strerror as it is not thread-safe.
unsafe { *libc::__errno_location() }
}
#[cfg(target_os = "macos")]
fn errno() -> libc::c_int {
// Safety: Reads the errno global.
//
// Note that we don't call strerror as it is not thread-safe.
unsafe { *libc::__error() }
}
/// Get the global errno info by reading in errno and calling strerror.
pub fn get_global() -> Self {
Self {
errno: Self::errno() as i64,
}
}
/// Check that the given errno number is zero, otherwise, return
/// an `Errno` as a result.
pub fn from(errno: libc::c_int) -> Result<(), Self> {
if errno == 0 {
Ok(())
} else {
Err(Errno {
errno: errno as i64,
})
}
}
/// Read and return the global errno value if the given condition holds.
pub fn if_(cond: bool) -> Result<(), Self> {
if !cond {
Ok(())
} else {
Err(Self::get_global())
}
}
} |
Rust | hhvm/hphp/hack/src/shmrs/filealloc.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::alloc::AllocError;
use std::alloc::Allocator;
use std::alloc::Layout;
use std::ptr::NonNull;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
/// An allocator that can allocate chunks from a file.
pub struct FileAlloc {
file_start: usize,
file_size: usize,
next_free_byte: AtomicUsize,
}
impl FileAlloc {
/// Create a new file allocator.
///
/// - `file_start` points to the beginning of the file
/// - `file_size` is the total file size
    /// - `next_free_byte` is the initial offset of the next free byte; bytes before
    ///   it are treated as already allocated
pub fn new(file_start: *mut libc::c_void, file_size: usize, next_free_byte: usize) -> Self {
Self {
file_start: file_start as usize,
file_size,
next_free_byte: AtomicUsize::new(next_free_byte),
}
}
/// Return the total number of allocated bytes.
pub fn allocated_bytes(&self) -> usize {
self.next_free_byte.load(Ordering::Relaxed)
}
}
unsafe impl Allocator for FileAlloc {
fn allocate(&self, l: Layout) -> Result<NonNull<[u8]>, AllocError> {
// A read-cmpxchg loop. Try to move the next-free-byte pointer but
        // check for out-of-memory errors. Retry if another allocation has
// succeeded in the meantime.
loop {
// We are only dealing with access to one atomic. There's no
// other loads/stores that need to be synchronized. As such, we
// can use relaxed ordering throughout.
let next_free_byte = self.next_free_byte.load(Ordering::Relaxed);
let ptr: *mut u8 = self.file_start as *mut u8;
let ptr = unsafe { ptr.add(next_free_byte) };
let align_offset = ptr.align_offset(l.align());
let total_bytes = match align_offset.checked_add(l.size()) {
None => return Err(AllocError),
Some(total_bytes) => total_bytes,
};
if total_bytes > self.file_size - next_free_byte {
return Err(AllocError);
}
let new_next_free_byte = next_free_byte + total_bytes;
match self.next_free_byte.compare_exchange(
next_free_byte,
new_next_free_byte,
Ordering::Relaxed,
Ordering::Relaxed,
) {
Ok(_) => {
let ptr = unsafe { ptr.add(align_offset) };
let slice = unsafe { std::slice::from_raw_parts(ptr, l.size()) };
return Ok(NonNull::from(slice));
}
Err(_) => {
// Try again!
}
}
}
}
unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {
// Doesn't do anything
}
} |
Rust | hhvm/hphp/hack/src/shmrs/hashmap.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::ops::Deref;
use std::ops::DerefMut;
use hashbrown::hash_map::DefaultHashBuilder;
use hashbrown::HashMap;
use crate::filealloc::FileAlloc;
/// A hash map that lives in shared memory.
///
/// This is a wrapper around hashbrown's `HashMap`. The hash map lives
/// and allocates in shared memory.
pub struct Map<'shm, K, V, S = DefaultHashBuilder>(Option<HashMap<K, V, S, &'shm FileAlloc>>);
impl<'shm, K, V, S> Deref for Map<'shm, K, V, S> {
type Target = HashMap<K, V, S, &'shm FileAlloc>;
fn deref(&self) -> &Self::Target {
self.0.as_ref().unwrap()
}
}
impl<'shm, K, V, S> DerefMut for Map<'shm, K, V, S> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.0.as_mut().unwrap()
}
}
impl<'shm, K, V> Map<'shm, K, V, DefaultHashBuilder> {
/// Re-allocate the hash map.
///
/// See `reset_with_hasher`
pub fn reset(&mut self, alloc: &'shm FileAlloc) {
self.0 = Some(HashMap::new_in(alloc));
}
}
impl<'shm, K, V, S> Map<'shm, K, V, S> {
    /// Return a placeholder with the map and allocator left uninitialized.
///
/// Call `reset` to actually allocate the map.
pub fn new() -> Self {
Self(None)
}
/// Re-allocate the hash map.
///
/// Uses the given file allocator to allocate the hashmap's table.
pub fn reset_with_hasher(&mut self, alloc: &'shm FileAlloc, hash_builder: S) {
        // Apparently, if we construct the HashMap without a capacity, it won't
        // allocate any space for its table via `alloc`, but subsequent accesses
        // will assume that something was allocated. Giving it a non-zero capacity
        // avoids this.
let map = HashMap::with_capacity_and_hasher_in(1, hash_builder, alloc);
self.0 = Some(map);
}
}
#[cfg(test)]
mod integration_tests {
use std::collections::HashMap;
use std::collections::HashSet;
use std::mem::MaybeUninit;
use std::time::Duration;
use nix::sys::wait::WaitStatus;
use nix::unistd::ForkResult;
use rand::prelude::*;
use super::*;
use crate::filealloc::FileAlloc;
use crate::sync::RwLock;
struct InsertMany {
file_alloc: FileAlloc,
map: RwLock<Map<'static, u64, u64>>,
}
#[test]
fn test_insert_many() {
const NUM_PROCS: usize = 20;
const NUM_INSERTS_PER_PROC: usize = 1000;
const OP_SLEEP: Duration = Duration::from_micros(10);
const MEM_HEAP_SIZE: usize = 10 * 1024 * 1024;
let mut rng = StdRng::from_seed([0; 32]);
let scenarios: Vec<Vec<(u64, u64)>> = std::iter::repeat_with(|| {
std::iter::repeat_with(|| (rng.next_u64() % 1000, rng.next_u64() % 1000))
.take(NUM_INSERTS_PER_PROC)
.collect()
})
.take(NUM_PROCS)
.collect();
let mmap_ptr = unsafe {
libc::mmap(
std::ptr::null_mut(),
MEM_HEAP_SIZE,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED | libc::MAP_ANONYMOUS,
-1,
0,
)
};
assert_ne!(mmap_ptr, libc::MAP_FAILED);
let inserter_ptr: *mut MaybeUninit<InsertMany> = mmap_ptr as *mut _;
let inserter: &'static mut MaybeUninit<InsertMany> =
// Safety:
// - Pointer is not null
// - Pointer is aligned on a page
            // - This is the only reference to the data, and the lifetime is
            //   static as we don't unmap the memory
unsafe { &mut *inserter_ptr };
// Safety: Initialize the memory properly
let inserter = unsafe {
inserter.as_mut_ptr().write(InsertMany {
file_alloc: FileAlloc::new(
mmap_ptr,
MEM_HEAP_SIZE,
std::mem::size_of::<InsertMany>(),
),
map: RwLock::new(Map::new()),
});
inserter.assume_init_mut()
};
// Safety: We are the only ones to attach to this lock.
let map = unsafe { inserter.map.initialize() }.unwrap();
map.write(None).unwrap().reset(&inserter.file_alloc);
let mut child_procs = vec![];
for scenario in &scenarios {
match unsafe { nix::unistd::fork() }.unwrap() {
ForkResult::Parent { child } => {
child_procs.push(child);
}
ForkResult::Child => {
for &(key, value) in scenario.iter() {
let mut guard = map.write(None).unwrap();
guard.insert(key, value);
std::thread::sleep(OP_SLEEP);
// Make sure we sleep while holding the lock.
drop(guard);
}
std::process::exit(0)
}
}
}
for pid in child_procs {
match nix::sys::wait::waitpid(pid, None).unwrap() {
WaitStatus::Exited(_, status) => assert_eq!(status, 0),
status => panic!("unexpected status for pid {:?}: {:?}", pid, status),
}
}
let guard = map.read(None).unwrap();
let mut expected: HashMap<u64, HashSet<u64>> = HashMap::new();
for scenario in scenarios {
for (key, value) in scenario {
expected.entry(key).or_default().insert(value);
}
}
for (key, values) in expected {
let value = guard[&key];
assert!(values.contains(&value));
}
// Must drop! Otherwise, munmap will already have unmapped the lock!
drop(guard);
assert_eq!(unsafe { libc::munmap(mmap_ptr, MEM_HEAP_SIZE) }, 0);
}
} |
Rust | hhvm/hphp/hack/src/shmrs/segment.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::hash::BuildHasherDefault;
use std::mem::MaybeUninit;
use nohash_hasher::NoHashHasher;
use crate::chashmap::CMap;
use crate::chashmap::CMapRef;
use crate::filealloc::FileAlloc;
/// The u64s are the first 8 bytes of a shasum. As such, they should have
/// enough entropy to be used directly as keys into the hashmap.
///
/// Note that the underlying hash table implementation (hashbrown) requires
/// entropy in both the upper and the lower bits of this u64. If this
/// assumption ever changes, performance will suffer.
type HashBuilder = BuildHasherDefault<NoHashHasher<u64>>;
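// Added illustrative test (not part of the original source): a quick check that
// the `HashBuilder` above hashes a `u64` to itself, i.e. the key bytes are used
// verbatim as the hash, as the comment above describes.
#[cfg(test)]
mod hash_builder_example_tests {
    use std::hash::BuildHasher;
    use std::hash::Hash;
    use std::hash::Hasher;

    use super::*;

    #[test]
    fn no_hash_hasher_is_identity_for_u64() {
        let key: u64 = 0x0123_4567_89ab_cdef;
        let mut hasher = HashBuilder::default().build_hasher();
        key.hash(&mut hasher);
        assert_eq!(hasher.finish(), key);
    }
}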
/// Shared memory segment that is used to store the sharded hashtable.
///
/// The shared memory pointer that we obtain from `mmap` is directly cast
/// to this struct. Its fields are all `MaybeUninit` because we can't
/// initialize them all at once.
pub struct ShmemTableSegment<'shm, T> {
file_alloc: MaybeUninit<FileAlloc>,
table: MaybeUninit<CMap<'shm, u64, T, HashBuilder>>,
}
/// A reference to an attached shared memory segment.
///
/// This struct is merely a reference to the shared memory segment and
/// the data it contains. As such, it is process-local.
///
/// Obtained by calling `initialize` or `attach` on `ShmemTableSegment`.
pub struct ShmemTableSegmentRef<'shm, T> {
pub table: CMapRef<'shm, u64, T, HashBuilder>,
}
impl<'shm, T> ShmemTableSegment<'shm, T> {
/// Initialize a shared memory segment, by setting up the file allocator
/// and the hash tables.
///
/// Space requirements:
/// - There will be NUM_SHARDS "evictable" shards; each will, upon first write, allocate max_evictable_bytes_per_shard
/// which never subsequently grows: items will be allocated within it.
    ///   Also, each shard has a hashmap costing roughly the size of (K, V) per entry plus some overhead,
    ///   which is allocated with a small capacity at the start and grows with the number of items.
    /// - There will be NUM_SHARDS "non-evictable" shards; each will, upon first write,
    ///   allocate NON_EVICTABLE_CHUNK_SIZE bytes (currently 1024k), and each time a chunk fills up it will allocate another.
/// Additionally there's another hashmap, as above.
/// - And there's a fixed overhead of sizeof(ShmemTableSegment) at the start
/// - file_size must be large enough to fit all of these, else you'll get panics!
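    ///
    /// As a rough illustration (the numbers are an example, not a guarantee): with
    /// NUM_SHARDS = 256 and max_evictable_bytes_per_shard = 1 MiB, the evictable
    /// shards alone can eventually claim 256 MiB, the non-evictable shards claim at
    /// least another 256 MiB once all have been written to, and the hashmaps and
    /// fixed header come on top of that.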
///
/// Safety:
    /// - You must initialize this exactly once.
/// - Use `attach` to attach other processes to this memory location.
/// - Obviously, `file_start` and `file_size` shouldn't lie.
/// - Make sure the lifetime returned matches the lifetime of the shared
/// memory pointer.
/// - Don't mutate or read the shared memory segment outside this API!
///
/// Panics:
    /// - If `file_size` is not large enough.
pub unsafe fn initialize(
file_start: *mut libc::c_void,
file_size: usize,
max_evictable_bytes_per_shard: usize,
) -> ShmemTableSegmentRef<'shm, T> {
let (self_ptr, next_free_byte) =
Self::maybe_unint_ptr_and_next_free_byte(file_start, file_size);
// Safety: Doing this cast assumes:
// - The lifetime matches.
// - We are the sole users of the underlying memory.
let segment: &'shm mut MaybeUninit<Self> = &mut *self_ptr;
segment.write(ShmemTableSegment {
file_alloc: MaybeUninit::uninit(),
table: MaybeUninit::uninit(),
});
let segment = segment.assume_init_mut();
segment
.file_alloc
.write(FileAlloc::new(file_start, file_size, next_free_byte));
let file_alloc = segment.file_alloc.assume_init_mut();
let table = CMap::initialize_with_hasher(
&mut segment.table,
BuildHasherDefault::default(),
file_alloc,
max_evictable_bytes_per_shard,
);
ShmemTableSegmentRef { table }
}
/// Attach to an already initialized shared memory segment.
///
/// Safety:
/// - The segment at this pointer must already be initialized by a
/// different process (or by the calling process itself).
pub unsafe fn attach(
file_start: *mut libc::c_void,
file_size: usize,
) -> ShmemTableSegmentRef<'shm, T> {
let (self_ptr, _) = Self::maybe_unint_ptr_and_next_free_byte(file_start, file_size);
let segment: &'shm MaybeUninit<Self> = &*self_ptr;
let segment = segment.assume_init_ref();
let table = CMap::attach(&segment.table);
ShmemTableSegmentRef { table }
}
unsafe fn maybe_unint_ptr_and_next_free_byte(
file_start: *mut libc::c_void,
file_size: usize,
) -> (*mut MaybeUninit<Self>, usize) {
let layout = std::alloc::Layout::new::<Self>();
let file_start = file_start as *mut u8;
let align_offset = file_start.align_offset(layout.align());
let total_size = align_offset + layout.size();
assert!(file_size >= total_size);
let ptr = file_start.add(align_offset);
let ptr = ptr as *mut MaybeUninit<Self>;
(ptr, total_size)
}
} |
Rust | hhvm/hphp/hack/src/shmrs/shardalloc.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::alloc::AllocError;
use std::alloc::Allocator;
use std::alloc::Layout;
use std::ptr::NonNull;
use crate::filealloc::FileAlloc;
use crate::sync::RwLockRef;
/// A pointer to a chunk.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct ChunkPtr(*mut u8);
impl ChunkPtr {
fn null() -> Self {
ChunkPtr(std::ptr::null_mut())
}
fn is_null(self) -> bool {
self.0.is_null()
}
fn is_aligned(self) -> bool {
self.0.align_offset(std::mem::align_of::<ChunkPtr>()) == 0
}
fn next_chunk_ptr(self) -> *mut ChunkPtr {
assert!(!self.is_null());
assert!(self.is_aligned());
// Yes, you read that right, a pointer to a pointer!
let chunk_next_ptr: *mut ChunkPtr = self.0 as _;
chunk_next_ptr
}
fn get_next_chunk(self) -> ChunkPtr {
let chunk_next_ptr = self.next_chunk_ptr();
        // Safety: ptr is (1) valid, (2) aligned, and (3) points to an initialized value
unsafe { chunk_next_ptr.read() }
}
fn set_next_chunk(self, value: ChunkPtr) {
let chunk_next_ptr = self.next_chunk_ptr();
// Safety: ptr is (1) valid (2) aligned
unsafe { chunk_next_ptr.write(value) };
}
}
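// Added illustrative test (not part of the original source): the first word of a
// chunk stores the next-chunk pointer, so writing and reading it through
// `ChunkPtr` round-trips as described above.
#[cfg(test)]
mod chunk_ptr_example_tests {
    use super::*;

    #[test]
    fn next_chunk_pointer_round_trips_through_first_word() {
        // An aligned buffer standing in for a chunk.
        let mut storage = [0usize; 4];
        let chunk = ChunkPtr(storage.as_mut_ptr() as *mut u8);
        chunk.set_next_chunk(ChunkPtr::null());
        assert!(chunk.get_next_chunk().is_null());
    }
}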
/// 32-bit header storing information about the slice of data succeeding it.
#[derive(Debug)]
struct AllocHeader(u32);
const REACHABLE_MASK: u32 = 1 << 31;
impl AllocHeader {
fn new(data_size: usize) -> Self {
let mut header = 0;
// We use the first bit of the header to determine whether the data
// held in the next `data_size` bytes is reachable
header |= REACHABLE_MASK;
// Ensure that the size of data can fit within 31 bits
assert!(data_size as u32 <= (u32::MAX >> 1));
header |= data_size as u32;
Self(header)
}
fn mark_as_unreachable(&mut self) {
self.0 &= !REACHABLE_MASK;
}
#[allow(unused)]
fn reachable(&self) -> bool {
self.0 & REACHABLE_MASK != 0
}
}
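// Added illustrative test (not part of the original source): the header packs the
// data size into the low 31 bits and the "reachable" flag into the top bit.
#[cfg(test)]
mod alloc_header_example_tests {
    use super::*;

    #[test]
    fn header_packs_size_and_reachable_bit() {
        let mut header = AllocHeader::new(42);
        assert!(header.reachable());
        assert_eq!(header.0 & !REACHABLE_MASK, 42);

        // Clearing the reachable bit leaves the stored size untouched.
        header.mark_as_unreachable();
        assert!(!header.reachable());
        assert_eq!(header.0 & !REACHABLE_MASK, 42);
    }
}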
/// Structure that contains the control data for a shard allocator.
///
/// This structure should be allocated in shared memory. Turn it
/// into an actual allocator by combining it with a `FileAlloc` using
/// `ShardAlloc::new`.
pub struct ShardAllocControlData {
/// A linked-list of filled chunks. Might be null.
///
/// The first word is aligned and points to the next element of the
/// linked list.
filled_chunks: ChunkPtr,
/// A linked-list of free chunks. Might be null.
///
/// The first word is aligned and points to the next element of the
/// linked list.
free_chunks: ChunkPtr,
/// Pointer to the first byte of the current chunk.
///
/// Note that the first word of the chunk is reserved for metadata
/// (i.e. a pointer that can be set if the chunk is added to the
/// filled or free chunks list).
current_start: ChunkPtr,
/// Pointer to the next free byte in the current chunk.
///
/// Might be null if no current chunk has been initialized yet.
current_next: *mut u8,
/// End of the current chunk. Do not allocate past this pointer.
current_end: *mut u8,
}
/**
* Safety:
 * - The methods of ShardAllocControlData below all mutate the direct fields
 *   by taking &mut self, so there are no concurrent writes to the fields themselves.
 * - ChunkPtr is a bookkeeping struct, and we perform all mutations to the inner
 *   linked list pointers via methods taking &mut self, so again we are
 *   protected by the upper-level rwlock.
 * - current_next and current_end are simply raw pointer types for bookkeeping
 *   and are not dereferenced directly in a concurrent context.
*/
unsafe impl Sync for ShardAllocControlData {}
unsafe impl Send for ShardAllocControlData {}
impl ShardAllocControlData {
/// A new empty allocator. Useful as a placeholder.
pub fn new() -> Self {
Self {
filled_chunks: ChunkPtr::null(),
free_chunks: ChunkPtr::null(),
current_start: ChunkPtr::null(),
current_next: std::ptr::null_mut(),
current_end: std::ptr::null_mut(),
}
}
}
impl ShardAllocControlData {
/// Mark the current chunk as filled by adding it to the "filled chunks"
/// list.
fn mark_current_chunk_as_filled(&mut self) {
if self.current_start.is_null() {
return;
}
self.current_start.set_next_chunk(self.filled_chunks);
self.filled_chunks = self.current_start;
self.current_start = ChunkPtr::null();
self.current_next = std::ptr::null_mut();
self.current_end = std::ptr::null_mut();
}
/// Mark the currently filled chunks as free!
fn mark_filled_chunks_as_free(&mut self) {
// Find the last "filled chunk"
let mut last_filled = ChunkPtr::null();
let mut this_filled = self.filled_chunks;
while !this_filled.is_null() {
last_filled = this_filled;
this_filled = this_filled.get_next_chunk();
}
if last_filled.is_null() {
// Nothing to move
return;
}
// Update its next pointer.
last_filled.set_next_chunk(self.free_chunks);
self.free_chunks = self.filled_chunks;
self.filled_chunks = ChunkPtr::null();
}
fn set_current_chunk(&mut self, chunk_start: ChunkPtr, chunk_size: usize) {
chunk_start.set_next_chunk(ChunkPtr::null());
self.current_start = chunk_start;
self.current_next = unsafe { chunk_start.0.add(std::mem::size_of::<*mut u8>()) };
self.current_end = unsafe { chunk_start.0.add(chunk_size) };
}
/// Pop a free chunk of the free list. Update the current-chunk pointers.
///
/// Returns true on success, false if no free chunk was available.
fn pop_free_chunk(&mut self, chunk_size: usize) -> bool {
if self.free_chunks.is_null() {
return false;
}
let current_chunk = self.free_chunks;
self.free_chunks = current_chunk.get_next_chunk();
self.set_current_chunk(current_chunk, chunk_size);
true
}
/// Attempt to allocate a slice within the current chunk, and move `current_next`
/// by the newly allocated slice's size and alignment offset.
///
/// If the new slice is too large to fit within the current chunk, we return None,
/// signaling to the call site that an additional chunk needs to be allocated
/// to accommodate this slice.
///
/// Otherwise, return Some(ptr) with `ptr` pointing to the start of the
/// successfully allocated slice.
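    ///
    /// The resulting in-chunk layout is roughly
    /// `[padding so the header lands on max(l.align(), align_of::<AllocHeader>())][AllocHeader (4 bytes)][data (l.size() bytes)]`,
    /// with the returned pointer addressing the start of `data`.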
fn alloc_slice_within_chunk(&mut self, l: Layout) -> Option<NonNull<[u8]>> {
let size = l.size();
let header = AllocHeader::new(size);
let header_align = std::mem::align_of::<AllocHeader>();
let header_size = std::mem::size_of::<AllocHeader>();
// We must align to both the AllocHeader and provided layout, so that we can
// safely `ptr::write` the header at the address of `pointer` below.
let align_offset = self
.current_next
.align_offset(std::cmp::max(l.align(), header_align));
let mut pointer = unsafe { self.current_next.add(align_offset) };
let new_current = unsafe { pointer.add(header_size).add(size) };
if new_current > self.current_end {
return None;
}
debug_assert!(!new_current.is_null());
debug_assert_eq!(pointer.align_offset(std::mem::align_of::<AllocHeader>()), 0);
self.current_next = new_current;
// Write header into memory
unsafe { std::ptr::write(pointer as *mut AllocHeader, header) };
// Move pointer by header_size
pointer = unsafe { pointer.add(header_size) };
let slice = unsafe { std::slice::from_raw_parts(pointer, size) };
Some(NonNull::from(slice))
}
}
/// The minimum chunk size an allocator can be initialized with.
///
/// `ShardAlloc::new` will panic if given a smaller `chunk_size`.
pub const SHARD_ALLOC_MIN_CHUNK_SIZE: usize = 64;
/// An allocator used for shared memory hash maps.
///
/// For now, each shard allocator is a bumping allocator that requests chunks from
/// the underlying file allocator.
///
/// Since its control structures lives somewhere in shared memory, it's bound
/// by a lifetime parameter that represents the lifetime of the shared memory
/// region.
#[derive(Clone)]
pub struct ShardAlloc<'shm> {
/// Mutable control data.
control_data: RwLockRef<'shm, ShardAllocControlData>,
/// Underlying file allocator used to request new chunks and allocate
/// large chunks.
file_alloc: &'shm FileAlloc,
    /// Is the allocator of a fixed pre-allocated size? If this flag is true,
    /// the allocator will refuse further allocations once its first chunk is
    /// full.
is_fixed_size: bool,
/// The chunk size that the allocator will use to allocate new chunks.
chunk_size: usize,
}
impl<'shm> ShardAlloc<'shm> {
/// Create a new shard allocator using the given lock-protected control
/// data and a file allocator.
///
    /// This function will panic if `chunk_size` is less than `SHARD_ALLOC_MIN_CHUNK_SIZE`
    /// bytes, as some of the first bytes of a chunk are used as a header.
pub unsafe fn new(
control_data: RwLockRef<'shm, ShardAllocControlData>,
file_alloc: &'shm FileAlloc,
chunk_size: usize,
is_fixed_size: bool,
) -> Self {
assert!(chunk_size >= SHARD_ALLOC_MIN_CHUNK_SIZE);
Self {
control_data,
file_alloc,
is_fixed_size,
chunk_size,
}
}
fn alloc_large(&self, l: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.file_alloc.allocate(l)
}
/// Mark the current chunk as filled. Then pop one of the chunks that have
/// previously been marked as free to use as the new current chunk for writing.
/// If there are no free chunks, ask `FileAlloc` to allocate a new chunk.
fn alloc_chunk(&self, control_data: &mut ShardAllocControlData) -> Result<(), AllocError> {
control_data.mark_current_chunk_as_filled();
if !control_data.pop_free_chunk(self.chunk_size) {
let l =
Layout::from_size_align(self.chunk_size, std::mem::align_of::<ChunkPtr>()).unwrap();
let ptr = self.file_alloc.allocate(l)?;
control_data.set_current_chunk(ChunkPtr(ptr.as_ptr() as *mut u8), self.chunk_size);
}
Ok(())
}
/// Reset the allocator.
///
/// All previously allocated chunks will be marked as free.
///
/// Safety:
///
/// - Of course, all values that were previously allocated using this
/// allocator are now garbage. You shouldn't try to read them anymore!
pub unsafe fn reset(&self) {
let mut control_data = self.control_data.write(None).unwrap();
control_data.mark_current_chunk_as_filled();
control_data.mark_filled_chunks_as_free();
}
// TODO(milliechen): implement
pub fn compact(&self) {
unimplemented!()
}
/// Given a pointer to a slice of data, find the header preceding it
/// and unset its "reachable" bit. Once marked as unreachable, it will
/// not be retained during the compaction phase.
pub fn mark_as_unreachable(&self, ptr: &NonNull<u8>) {
let header_ptr = self.get_header(ptr);
let mut header = unsafe { header_ptr.read() };
header.mark_as_unreachable();
unsafe { header_ptr.write(header) }
}
#[allow(unused)]
fn is_data_reachable(&self, ptr: &NonNull<u8>) -> bool {
let header_ptr = self.get_header(ptr);
let header = unsafe { header_ptr.read() };
header.reachable()
}
fn get_header(&self, ptr: &NonNull<u8>) -> *mut AllocHeader {
let data_ptr = ptr.as_ptr() as *mut u8;
let header_size = std::mem::size_of::<AllocHeader>();
let header_ptr = unsafe { data_ptr.sub(header_size) as *mut AllocHeader };
debug_assert!(!header_ptr.is_null());
header_ptr
}
}
unsafe impl<'shm> Allocator for ShardAlloc<'shm> {
fn allocate(&self, l: Layout) -> Result<NonNull<[u8]>, AllocError> {
// Large allocations go directly to the underlying file allocator.
// We'll consider an allocation as large if it is larger than 5% of
        // the chunk size. That means unusable memory due to internal
// fragmentation will be less than 5%.
//
// We don't special case large allocations when the allocator size is
// fixed.
if !self.is_fixed_size && l.size() > self.chunk_size / 20 {
return self.alloc_large(l);
}
let mut control_data = self.control_data.write(None).unwrap();
match control_data.alloc_slice_within_chunk(l) {
Some(ptr) => Ok(ptr),
None => {
// Refuse to allocate another chunk if this allocator is marked to
// have a fixed size
if self.is_fixed_size && !control_data.current_next.is_null() {
return Err(AllocError);
}
// Allocate another chunk
self.alloc_chunk(&mut control_data)?;
                // Try to allocate the slice within the new chunk
let alloc_result = control_data.alloc_slice_within_chunk(l);
alloc_result.ok_or(AllocError)
}
}
}
unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {
// Doesn't do anything.
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::sync::RwLock;
const CHUNK_SIZE: usize = 200 * 1024;
const SLICE_SIZE: usize = 10 * 1024;
const FILE_ALLOC_SIZE: usize = 10 * 1024 * 1024;
fn with_file_alloc(f: impl FnOnce(&FileAlloc)) {
let mut vec: Vec<u8> = vec![0; FILE_ALLOC_SIZE];
let vec_ptr = vec.as_mut_ptr();
let file_alloc = FileAlloc::new(vec_ptr as *mut _, FILE_ALLOC_SIZE, 0);
f(&file_alloc);
drop(file_alloc);
drop(vec);
}
#[test]
fn test_alloc_size_zero() {
with_file_alloc(|file_alloc| {
let core_data = RwLock::new(ShardAllocControlData::new());
let core_data_ref = unsafe { core_data.initialize().unwrap() };
let alloc = unsafe { ShardAlloc::new(core_data_ref, file_alloc, CHUNK_SIZE, false) };
let layout = std::alloc::Layout::from_size_align(0, 1).unwrap();
let _ = alloc.allocate(layout).unwrap();
})
}
#[test]
fn test_alloc_many() {
with_file_alloc(|file_alloc| {
let core_data = RwLock::new(ShardAllocControlData::new());
let core_data_ref = unsafe { core_data.initialize().unwrap() };
let alloc = unsafe { ShardAlloc::new(core_data_ref, file_alloc, CHUNK_SIZE, false) };
let header_size = std::mem::size_of::<AllocHeader>();
let slice_size = CHUNK_SIZE / 20 - header_size;
let layout = std::alloc::Layout::from_size_align(slice_size, 1).unwrap();
// Allocate 20 slices.
// Each slice combined with its header occupies 1/20 of a chunk.
// But because of the 8-byte padding at the start of the chunk,
// the last slice we allocate will not fit within the current chunk
// and cause a new chunk to be created.
for _ in 0..20 {
let _ = alloc.allocate(layout).unwrap();
}
let control_data = alloc.control_data.write(None).unwrap();
// Check chunk bounds.
assert_eq!(
unsafe { control_data.current_start.0.add(CHUNK_SIZE) },
control_data.current_end
);
// Check the new chunk contains exactly one slice.
assert_eq!(
unsafe {
control_data
.current_start
.0
.add(std::mem::size_of::<*mut u8>())
.add(header_size)
.add(slice_size)
},
control_data.current_next
);
})
}
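    // A minimal sketch, not part of the original test suite: it assumes the
    // same `ShardAllocControlData`/`FileAlloc` setup used by the tests above.
    // With `is_fixed_size = true` the allocator should refuse to grow past
    // its first chunk and eventually return `AllocError`.
    #[test]
    fn test_fixed_size_refuses_growth() {
        with_file_alloc(|file_alloc| {
            let core_data = RwLock::new(ShardAllocControlData::new());
            let core_data_ref = unsafe { core_data.initialize().unwrap() };
            let alloc = unsafe { ShardAlloc::new(core_data_ref, file_alloc, CHUNK_SIZE, true) };
            let layout = std::alloc::Layout::from_size_align(SLICE_SIZE, 1).unwrap();
            // Keep allocating until the allocator's single chunk is full;
            // at that point the fixed-size allocator must report an error.
            let mut saw_error = false;
            for _ in 0..40 {
                if alloc.allocate(layout).is_err() {
                    saw_error = true;
                    break;
                }
            }
            assert!(saw_error);
        })
    }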
#[test]
fn test_mark_as_unreachable() {
with_file_alloc(|file_alloc| {
let core_data = RwLock::new(ShardAllocControlData::new());
let core_data_ref = unsafe { core_data.initialize().unwrap() };
let alloc = unsafe { ShardAlloc::new(core_data_ref, file_alloc, CHUNK_SIZE, false) };
let layout = std::alloc::Layout::from_size_align(SLICE_SIZE, 1).unwrap();
let slice = alloc.allocate(layout).unwrap();
let slice_ptr = NonNull::new(slice.as_ptr() as *mut u8).unwrap();
assert!(alloc.is_data_reachable(&slice_ptr));
alloc.mark_as_unreachable(&slice_ptr);
assert!(!alloc.is_data_reachable(&slice_ptr));
})
}
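    // A minimal sketch, not part of the original test suite: allocations
    // larger than 5% of the chunk size bypass the bump allocator and go to
    // the underlying file allocator, so they should still succeed and be
    // usable memory.
    #[test]
    fn test_alloc_large_goes_to_file_alloc() {
        with_file_alloc(|file_alloc| {
            let core_data = RwLock::new(ShardAllocControlData::new());
            let core_data_ref = unsafe { core_data.initialize().unwrap() };
            let alloc = unsafe { ShardAlloc::new(core_data_ref, file_alloc, CHUNK_SIZE, false) };
            // Well above the CHUNK_SIZE / 20 threshold.
            let large_size = CHUNK_SIZE / 2;
            let layout = std::alloc::Layout::from_size_align(large_size, 8).unwrap();
            let slice = alloc.allocate(layout).unwrap();
            let ptr = slice.as_ptr() as *mut u8;
            // The returned memory should be writable end to end.
            unsafe {
                std::ptr::write_bytes(ptr, 0xAB, large_size);
                assert_eq!(*ptr, 0xAB);
                assert_eq!(*ptr.add(large_size - 1), 0xAB);
            }
        })
    }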
} |
Rust | hhvm/hphp/hack/src/shmrs/shmrs.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#![feature(maybe_uninit_array_assume_init)]
#![feature(allocator_api)]
/// A Rust library for interacting with shared memory.
pub mod chashmap;
pub mod error;
pub mod filealloc;
pub mod hashmap;
pub mod segment;
pub mod shardalloc;
pub mod sync; |
Rust | hhvm/hphp/hack/src/shmrs/sync.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::cell::UnsafeCell;
use std::ops::Deref;
use std::ops::DerefMut;
use std::time::Duration;
use crate::error::Errno;
// Some pthread-functions are not exported in the `libc` crate.
//
// Note that these are not supported on all platforms, in particular
// on macOS.
#[cfg(target_os = "linux")]
extern "C" {
fn pthread_rwlock_timedwrlock(
lock: *mut libc::pthread_rwlock_t,
timespec: *const libc::timespec,
) -> libc::c_int;
fn pthread_rwlock_timedrdlock(
lock: *mut libc::pthread_rwlock_t,
timespec: *const libc::timespec,
) -> libc::c_int;
}
/// Errors that can occur while handling locks.
#[derive(Debug)]
pub enum LockError {
Errno(Errno),
Timeout,
}
impl From<Errno> for LockError {
fn from(errno: Errno) -> Self {
if errno.errno == libc::ETIMEDOUT.try_into().unwrap() {
Self::Timeout
} else {
Self::Errno(errno)
}
}
}
/// A reader-writer lock that can be used in a shared-memory
/// (inter-process) context.
///
/// This struct is moveable but unusable as long as you haven't
/// called `initialize` or `attach` on it.
///
/// Call `initialize` or `attach` to pin the memory location; from that
/// point on you can acquire the lock.
pub struct RwLock<T> {
lock: libc::pthread_rwlock_t,
data: UnsafeCell<T>,
}
/// A reference to an initialized or attached reader-writer lock.
///
/// After calling `initialize` or `attach` on `RwLock`, I'd like
/// the interface to be as safe as possible (to avoid users shooting
/// themselves in the foot). Therefore, I provide this wrapper type
/// around an immutable reference to `RwLock`.
///
/// Having this wrapper type ensures the underlying `RwLock` isn't
/// modified while a reference to it is live.
///
/// A side-effect of having two types (`RwLock` and `RwLockRef`) is
/// that it is impossible for a process to acquire a lock without
/// calling either `initialize` or `attach`.
pub struct RwLockRef<'a, T>(&'a RwLock<T>);
impl<T> RwLock<T> {
/// Create a new uninitialized lock.
///
/// Note that the lock can still be moved around freely. However, you
/// can't actually acquire it yet. Initialize or attach the lock by
    /// calling `initialize` or `attach`.
///
/// We are not using a RAII API here, because we can't guarantee that the
/// initializing process outlives other attaching processes. Destroying
/// a lock that's still in use by other processes is highly unsafe.
///
/// We use a two-step initialization procedure because we need two types:
/// One type to hold the actual data and one type that holds a reference
/// to that data, and as such avoids accidentally moving or modifying the
/// data.
pub fn new(value: T) -> Self {
Self {
lock: libc::PTHREAD_RWLOCK_INITIALIZER,
data: UnsafeCell::new(value),
}
}
/// Initialize the lock and return a reference to it.
///
/// The reference can be used to acquire the actual lock. Using a
/// reference prevents the underlying data from being moved while
/// the lock is in use.
///
/// Safety:
    /// - No other process should try to initialize the lock at the same time.
/// - The lock should not be held by another process.
/// - As long as the lock is in use, you can't initialize it again!
/// - You shouldn't be mutating the `RwLock` after `initialize`
    /// or `attach` is called.
pub unsafe fn initialize(&self) -> Result<RwLockRef<'_, T>, LockError> {
let mut attr: libc::pthread_rwlockattr_t = std::mem::zeroed();
Errno::from(libc::pthread_rwlockattr_init(&mut attr as *mut _))?;
Self::set_prefer_writer(&mut attr as *mut _)?;
// Allow access from multiple processes
Errno::from(libc::pthread_rwlockattr_setpshared(
&mut attr as *mut _,
libc::PTHREAD_PROCESS_SHARED,
))?;
Errno::from(libc::pthread_rwlock_init(
self.lock_ptr(),
&attr as *const _,
))?;
Ok(self.attach())
}
/// Attach to an already initialized lock.
///
/// Safety:
/// - The lock should already be initialized by another process
/// (or the calling process)
pub unsafe fn attach(&self) -> RwLockRef<'_, T> {
RwLockRef(self)
}
#[inline(always)]
fn lock_ptr(&self) -> *mut libc::pthread_rwlock_t {
&self.lock as *const _ as *mut _
}
#[cfg(target_os = "linux")]
unsafe fn set_prefer_writer(attr: *mut libc::pthread_rwlockattr_t) -> Result<(), LockError> {
// Not defined in the libc crate. Linux specific. See pthread.h.
const LIBC_PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP: libc::c_int = 2;
Errno::from(libc::pthread_rwlockattr_setkind_np(
attr,
LIBC_PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP,
))
.map_err(Into::into)
}
#[cfg(not(target_os = "linux"))]
unsafe fn set_prefer_writer(_attr: *mut libc::pthread_rwlockattr_t) -> Result<(), LockError> {
        // Non-Linux OSes don't have this flag.
Ok(())
}
/// Destroy the underlying lock.
///
/// Safety:
/// - Subsequent use of the lock is undefined until the lock is
/// reinitialized by calling `initialize`.
/// - No thread must hold the lock.
/// - Attempting to destroy an uninitialized lock is undefined behavior.
pub unsafe fn destroy(&mut self) -> Result<(), LockError> {
Errno::from(libc::pthread_rwlock_destroy(self.lock_ptr())).map_err(Into::into)
}
}
impl<'a, T> RwLockRef<'a, T> {
/// Locks this rwlock with shared read access, blocking the current
/// thread until it can be acquired.
///
/// Note that, according to the pthread_rwlock_rdlock manual page, this
/// does support recursive locking.
///
/// The optional [timeout] parameter can be specified to abort blocking
/// after the given duration. In that case a `LockError::Timeout` is returned.
/// Note that on non-Linux platforms the timeout option will be ignored.
pub fn read(self, timeout: Option<Duration>) -> Result<RwLockReadGuard<'a, T>, LockError> {
// Safety: A RwLockRef can only be obtained by calling
        // RwLock::initialize or RwLock::attach. Therefore, we should
// have a pointer to a valid rwlock.
let success = Self::try_fastlock(|| unsafe {
libc::pthread_rwlock_tryrdlock(self.0.lock_ptr()) == 0
});
if !success {
unsafe {
let errno = match timeout {
None => libc::pthread_rwlock_rdlock(self.0.lock_ptr()),
Some(timeout) => {
let timespec = Self::timespec_for_duration(timeout)?;
Self::pthread_rwlock_timedrdlock(self.0.lock_ptr(), ×pec)
}
};
Errno::from(errno)?;
}
}
Ok(RwLockReadGuard { lock: self })
}
/// Locks this rwlock with exclusive write access, blocking the current
/// thread until it can be acquired.
///
/// Note that, according to the pthread_rwlock_wrlock manual page, this
    /// returns EDEADLK if the current thread already holds the lock.
///
/// The optional [timeout] parameter can be specified to abort blocking
/// after the given duration. In that case a `LockError::Timeout` is returned.
/// Note that on non-Linux platforms the timeout option will be ignored.
pub fn write(self, timeout: Option<Duration>) -> Result<RwLockWriteGuard<'a, T>, LockError> {
// Safety: A RwLockRef can only be obtained by calling
        // RwLock::initialize or RwLock::attach. Therefore, we should
// have a pointer to a valid rwlock.
let success = Self::try_fastlock(|| unsafe {
libc::pthread_rwlock_trywrlock(self.0.lock_ptr()) == 0
});
if !success {
unsafe {
let errno = match timeout {
None => libc::pthread_rwlock_wrlock(self.0.lock_ptr()),
Some(timeout) => {
let timespec = Self::timespec_for_duration(timeout)?;
Self::pthread_rwlock_timedwrlock(self.0.lock_ptr(), ×pec)
}
};
Errno::from(errno)?;
}
}
Ok(RwLockWriteGuard { lock: self })
}
/// Unlocks the rwlock.
///
    /// Safety: The lock must be held, duh.
unsafe fn unlock(self) {
Errno::from(libc::pthread_rwlock_unlock(self.0.lock_ptr())).unwrap();
}
#[inline]
fn try_fastlock(try_lock: impl Fn() -> bool) -> bool {
for counter in 1..=20 {
if try_lock() {
return true;
}
if counter <= 10 {
for _ in 0..(4 << counter) {
std::hint::spin_loop();
}
} else {
std::thread::yield_now();
}
}
false
}
fn timespec_for_duration(duration: Duration) -> Result<libc::timespec, Errno> {
let mut timespec = libc::timespec {
tv_sec: 0,
tv_nsec: 0,
};
// Safety: timespec is a valid pointer
unsafe {
// pthread timeouts are based on `CLOCK_REALTIME` (see man page),
// so we have to use that one.
Errno::if_(libc::clock_gettime(libc::CLOCK_REALTIME, &mut timespec) != 0)?;
}
let now = Duration::new(
timespec.tv_sec.try_into().unwrap(),
timespec.tv_nsec.try_into().unwrap(),
);
let deadline = now + duration;
timespec.tv_sec = TryInto::<libc::time_t>::try_into(deadline.as_secs()).unwrap();
timespec.tv_nsec = TryInto::<libc::c_long>::try_into(deadline.subsec_nanos()).unwrap();
Ok(timespec)
}
#[cfg(target_os = "linux")]
unsafe fn pthread_rwlock_timedwrlock(
lock: *mut libc::pthread_rwlock_t,
timespec: *const libc::timespec,
) -> libc::c_int {
pthread_rwlock_timedwrlock(lock, timespec)
}
#[cfg(target_os = "linux")]
unsafe fn pthread_rwlock_timedrdlock(
lock: *mut libc::pthread_rwlock_t,
timespec: *const libc::timespec,
) -> libc::c_int {
pthread_rwlock_timedrdlock(lock, timespec)
}
#[cfg(not(target_os = "linux"))]
unsafe fn pthread_rwlock_timedwrlock(
lock: *mut libc::pthread_rwlock_t,
_timespec: *const libc::timespec,
) -> libc::c_int {
// On non-Linux platforms we ignore the timeouts because
// the timed pthread functions might not be available.
libc::pthread_rwlock_wrlock(lock)
}
#[cfg(not(target_os = "linux"))]
unsafe fn pthread_rwlock_timedrdlock(
lock: *mut libc::pthread_rwlock_t,
_timespec: *const libc::timespec,
) -> libc::c_int {
// On non-Linux platforms we ignore the timeouts because
// the timed pthread functions might not be available.
libc::pthread_rwlock_rdlock(lock)
}
}
impl<'a, T> Clone for RwLockRef<'a, T> {
fn clone(&self) -> Self {
Self(self.0)
}
}
impl<'a, T> Copy for RwLockRef<'a, T> {}
/// Read guard for `RwLock`
pub struct RwLockReadGuard<'a, T> {
lock: RwLockRef<'a, T>,
}
impl<T> Deref for RwLockReadGuard<'_, T> {
type Target = T;
fn deref(&self) -> &T {
// Safety: lock semantics!
unsafe { &*self.lock.0.data.get() }
}
}
impl<T> Drop for RwLockReadGuard<'_, T> {
fn drop(&mut self) {
// Safety: we have a locked lock!
unsafe {
self.lock.unlock();
}
}
}
/// Write guard for `RwLock`
pub struct RwLockWriteGuard<'a, T> {
lock: RwLockRef<'a, T>,
}
impl<T> Deref for RwLockWriteGuard<'_, T> {
type Target = T;
fn deref(&self) -> &T {
// Safety: lock semantics!
unsafe { &*self.lock.0.data.get() }
}
}
impl<T> DerefMut for RwLockWriteGuard<'_, T> {
fn deref_mut(&mut self) -> &mut T {
// Safety: lock semantics!
unsafe { &mut *self.lock.0.data.get() }
}
}
impl<T> Drop for RwLockWriteGuard<'_, T> {
fn drop(&mut self) {
// Safety: we have a locked lock!
unsafe {
self.lock.unlock();
}
}
}
unsafe impl<T: Send> Send for RwLock<T> {}
unsafe impl<T: Send + Sync> Sync for RwLock<T> {}
unsafe impl<T: Sync> Sync for RwLockReadGuard<'_, T> {}
unsafe impl<T: Sync> Sync for RwLockWriteGuard<'_, T> {}
#[cfg(test)]
mod integration_tests {
use std::mem::MaybeUninit;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Duration;
use nix::sys::wait::WaitStatus;
use nix::unistd::ForkResult;
use rand::prelude::*;
use super::*;
struct Incr {
counter: RwLock<u64>,
}
#[test]
fn test_incrementor() {
        // Test scenario: Launch NUM_PROCS processes that each increment a
        // counter NUM_INCRS_PER_PROC times. On each iteration a process
        // either (with probability 0.5) increments the counter by 1 and
        // sleeps a bit while holding the write lock, or does a read-sleep
        // cycle NUM_CONSEQ_READS times while holding the read lock,
        // asserting that the value doesn't change.
let mmap_size = std::mem::size_of::<Incr>();
let mmap_ptr = unsafe {
libc::mmap(
std::ptr::null_mut(),
mmap_size,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED | libc::MAP_ANONYMOUS,
-1,
0,
)
};
assert_ne!(mmap_ptr, libc::MAP_FAILED);
let incr_ptr: *mut MaybeUninit<Incr> = mmap_ptr as *mut _;
let incr: &'static mut MaybeUninit<Incr> =
// Safety:
// - Pointer is not null
// - Pointer is aligned on a page
// - This is the only reference to the data, and the lifetime is
// static as we don't unmap the memory.
unsafe { &mut *incr_ptr };
// Safety: Initialize the memory properly
let incr = unsafe {
incr.as_mut_ptr().write(Incr {
counter: RwLock::new(0),
});
incr.assume_init_mut()
};
// Safety: We are the only ones to attach to this lock!
let lock = unsafe { incr.counter.initialize().unwrap() };
const NUM_PROCS: u64 = 10;
const NUM_INCRS_PER_PROC: u64 = 100;
const NUM_CONSEQ_READS: u64 = 2;
const OP_SLEEP: Duration = Duration::from_millis(1);
let mut child_procs = vec![];
for child_index in 0..NUM_PROCS {
match unsafe { nix::unistd::fork() }.unwrap() {
ForkResult::Parent { child } => {
child_procs.push(child);
}
ForkResult::Child => {
let mut seed: [u8; 32] = [0; 32];
seed[0..8].copy_from_slice(&child_index.to_be_bytes());
let mut rng = StdRng::from_seed(seed);
let mut num_incrs = 0;
while num_incrs < NUM_INCRS_PER_PROC {
if rng.gen_bool(0.5) {
let mut guard = lock.write(None).unwrap();
*guard += 1;
std::thread::sleep(OP_SLEEP);
drop(guard);
num_incrs += 1;
} else {
let guard = lock.read(None).unwrap();
let init_val = *guard;
for _ in 0..NUM_CONSEQ_READS {
std::thread::sleep(OP_SLEEP);
assert_eq!(*guard, init_val);
}
drop(guard);
}
}
std::process::exit(0)
}
}
}
for pid in child_procs {
match nix::sys::wait::waitpid(pid, None).unwrap() {
WaitStatus::Exited(_, status) => assert_eq!(status, 0),
status => panic!("unexpected status for pid {:?}: {:?}", pid, status),
}
}
assert_eq!(*lock.read(None).unwrap(), NUM_PROCS * NUM_INCRS_PER_PROC);
assert_eq!(unsafe { libc::munmap(mmap_ptr, mmap_size) }, 0);
}
struct TimeoutSetup {
lock: RwLock<()>,
has_locked: AtomicBool,
}
#[test]
fn test_timeout() {
// Scenario:
// 1. the child process takes the lock
        // 2. the child process notifies the master that it has taken the
// lock by setting the atomic bool
// 3. the child now waits forever
// 4. the master tries to take the lock with a timeout
// 5. the timeout triggers
// 6. the master kills the child and cleans up
let mmap_size = std::mem::size_of::<TimeoutSetup>();
let mmap_ptr = unsafe {
libc::mmap(
std::ptr::null_mut(),
mmap_size,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_SHARED | libc::MAP_ANONYMOUS,
-1,
0,
)
};
assert_ne!(mmap_ptr, libc::MAP_FAILED);
let setup_ptr: *mut MaybeUninit<TimeoutSetup> = mmap_ptr as *mut _;
let setup: &'static mut MaybeUninit<TimeoutSetup> =
// Safety:
// - Pointer is not null
// - Pointer is aligned on a page
// - This is the only reference to the data, and the lifetime is
// static as we don't unmap the memory.
unsafe { &mut *setup_ptr };
// Safety: Initialize the memory properly
let setup = unsafe {
setup.as_mut_ptr().write(TimeoutSetup {
lock: RwLock::new(()),
has_locked: AtomicBool::new(false),
});
setup.assume_init_mut()
};
// Safety: We are the only ones to attach to this lock!
let lock = unsafe { setup.lock.initialize().unwrap() };
match unsafe { nix::unistd::fork() }.unwrap() {
ForkResult::Parent { child } => {
// Wait until the child has acquired the lock
while !setup.has_locked.load(Ordering::SeqCst) {
std::thread::yield_now();
}
// Acquiring the lock should now trigger the timeout
match lock.write(Some(Duration::from_millis(200))) {
Err(LockError::Timeout) => {}
Err(e) => panic!("expected a timeout, but got Err({:?})", e),
Ok(..) => panic!("expected a timeout, but acquired the lock"),
};
// Kill the child and wait for it
nix::sys::signal::kill(child, nix::sys::signal::Signal::SIGTERM).unwrap();
nix::sys::wait::waitpid(child, None).unwrap();
}
ForkResult::Child => {
// Take the lock indefinitely
let _guard = lock.write(None).unwrap();
// Inform the master process that the lock has been taken
setup.has_locked.store(true, Ordering::SeqCst);
loop {
std::thread::sleep(Duration::from_secs(1));
}
// This is unreachable, so the guard is never dropped!
}
}
}
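    // A minimal sketch, not part of the original test suite: the two-step
    // `RwLock::new` + `initialize` API also works within a single process,
    // with no shared memory mapping involved.
    #[test]
    fn test_single_process_write_then_read() {
        let lock = RwLock::new(0u64);
        // Safety: we are the only user of this lock.
        let lock_ref = unsafe { lock.initialize().unwrap() };
        {
            let mut guard = lock_ref.write(None).unwrap();
            *guard += 42;
        }
        assert_eq!(*lock_ref.read(None).unwrap(), 42);
    }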
} |
hhvm/hphp/hack/src/socket/dune | (library
(name socket)
(wrapped false)
(modules socket)
(libraries opaque_digest sys_utils utils_exit)) |
|
OCaml | hhvm/hphp/hack/src/socket/socket.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* Initializes the unix domain socket *)
let unix_socket sock_name =
try
Sys_utils.with_umask 0o111 (fun () ->
Sys_utils.mkdir_no_fail (Filename.dirname sock_name);
if Sys.file_exists sock_name then Sys.remove sock_name;
let (domain, addr) =
if Sys.win32 then
Unix.(PF_INET, Unix.ADDR_INET (inet_addr_loopback, 0))
else
Unix.(PF_UNIX, Unix.ADDR_UNIX sock_name)
in
let sock = Unix.socket domain Unix.SOCK_STREAM 0 in
let () = Unix.set_close_on_exec sock in
let () = Unix.setsockopt sock Unix.SO_REUSEADDR true in
let () = Unix.bind sock addr in
let () = Unix.listen sock 10 in
let () =
match Unix.getsockname sock with
| Unix.ADDR_UNIX _ -> ()
| Unix.ADDR_INET (_, port) ->
let oc = open_out_bin sock_name in
output_binary_int oc port;
close_out oc
in
sock)
with
| Unix.Unix_error (err, _, _) ->
Printf.eprintf "%s\n" (Unix.error_message err);
Exit.exit Exit_status.Socket_error
(* So the sockaddr_un structure puts a strict limit on the length of a socket
* address. This appears to be 104 chars on mac os x and 108 chars on my
* centos box. *)
let max_addr_length = 103
let min_name_length = 17
let get_path path =
(* Path will resolve the realpath, in case two processes are referring to the
   * same socket using different paths (like with symlinks) *)
let path = path |> Path.make |> Path.to_string in
let dir = Filename.dirname path ^ "/" in
let filename = Filename.basename path in
let root_part = Filename.chop_extension filename in
let root_length = String.length root_part in
let extension_length = String.length filename - root_length in
let extension = String.sub filename root_length extension_length in
(* It's possible that the directory path is too long. If so, let's give up and
* use /tmp/ *)
let dir =
if String.length dir > max_addr_length - min_name_length then
Sys_utils.temp_dir_name
else
dir
in
let max_root_part_length =
max_addr_length - String.length dir - extension_length
in
let root_part =
if root_length > max_root_part_length then
let len = String.length root_part in
let prefix = String.sub root_part 0 5 in
let suffix = String.sub root_part (len - 5) 5 in
let digest = OpaqueDigest.to_hex (OpaqueDigest.string root_part) in
(* 5 char prefix + 5 char suffix + 2 periods *)
let max_digest_length = max_root_part_length - 12 in
let digest_part =
if String.length digest > max_digest_length then
String.sub digest 0 max_digest_length
else
digest
in
prefix ^ "." ^ digest_part ^ "." ^ suffix
else
root_part
in
Filename.concat dir (Printf.sprintf "%s%s" root_part extension)
let init_unix_socket socket_file = unix_socket (get_path socket_file) |
OCaml | hhvm/hphp/hack/src/stubs/ai.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let do_ _ _ _ = ()
module InfoService = struct
type target_type =
| Function
| Method
| Constructor
type fun_call = {
name: string;
type_: target_type;
pos: Pos.absolute;
caller: string;
callees: string list; (* includes overrides, etc. *)
}
type throws = {
thrower: string;
(* the name of a function or method that throws/leaks *)
filename: string;
(* location of the function or method *)
exceptions: string list; (* names of types of thrown exceptions *)
}
type result = {
fun_calls: fun_call list;
throws: throws list;
}
let empty_result = { fun_calls = []; throws = [] }
let go _ _ _ _ _ = empty_result
end
module ServerFindDepFiles = struct
let go _ _ _ = []
end
module ServerFindRefs = struct
type member =
| Method of string
| Property of string
| Class_const of string
| Typeconst of string
type action =
| Class of string
| Member of string * member
| Function of string
| GConst of string
| LocalVar of {
filename: Relative_path.t;
file_content: string;
line: int;
char: int;
}
let go _ _ _ = []
end
module TraceService = struct
type member =
| Method of string
| Property of string
| Class_const of string
| Typeconst of string
type action =
| Class of string
| Member of string * member
| Function of string
| GConst of string
| LocalVar of {
filename: Relative_path.t;
file_content: string;
line: int;
char: int;
}
let go _ _ _ _ = ""
end |
OCaml | hhvm/hphp/hack/src/stubs/ai_options.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t = { analyses: string list }
let prepare ~server:_ _ = { analyses = [] }
let modify_shared_mem _options config = config |
OCaml | hhvm/hphp/hack/src/stubs/artifactStore.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type artifact =
(* Just a plain value *)
| Inline_artifact of string
(* An artifact that is a directory *)
| Path_artifact of Path.t
[@@deriving show]
type file_system_mode =
| Local
| Distributed
[@@deriving show]
type config = {
mode: file_system_mode;
temp_dir: Path.t;
max_cas_bytes: int;
max_inline_bytes: int;
}
let file_system_mode_of_string mode_str =
match mode_str with
| "Local" -> Some Local
| "Distributed" -> Some Distributed
| _ -> None
let string_of_file_system_mode file_system_mode =
match file_system_mode with
| Local -> "Local"
| Distributed -> "Distributed"
let default_config ~temp_dir =
{
mode = Distributed;
temp_dir;
max_cas_bytes = 50_000;
max_inline_bytes = 2000;
} |
OCaml | hhvm/hphp/hack/src/stubs/buildMain.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let go _ _ _ _ = ()
let get_live_targets _ = ([], []) |
OCaml | hhvm/hphp/hack/src/stubs/ci_util.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type status = StatusUnknown [@@deriving enum, show]
type info = {
id: string;
owner: string;
status: status;
(* Enqueued timestamp *)
created_t: float;
(* Dequeued timestamp; 0 if still in queue *)
started_t: float;
}
[@@deriving show]
let begin_get_info () : info option Future.t = Future.of_value None
let end_get_info (future : info option Future.t option) =
ignore future;
None |
OCaml | hhvm/hphp/hack/src/stubs/clientMessages.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let ignore_ide_from _ = false
let waiting_for_server_to_be_started_doc = ""
let angery_reaccs_only () = false |
OCaml | hhvm/hphp/hack/src/stubs/clientRage.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type env = {
root: Path.t;
from: string;
rageid: string option;
desc: string;
lsp_log: string option;
}
let main _ _ = Exit.exit Exit_status.No_error
let verify_typechecker_err_src () = () |
OCaml | hhvm/hphp/hack/src/stubs/clowder_paste.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let clowder_download ?(timeout : float option) (handle : string) =
ignore (timeout, handle);
failwith "not implemented in this build"
let clowder_upload_and_get_handle ?(timeout : float option) (input : string) =
ignore (timeout, input);
failwith "not implemented in this build"
let clowder_upload_and_get_url ?(timeout : float option) (input : string) =
ignore (timeout, input);
failwith "not implemented in this build"
let clowder_upload_and_get_shellscript
?(timeout : float option) (input : string) =
ignore (timeout, input);
failwith "not implemented in this build" |
OCaml | hhvm/hphp/hack/src/stubs/decl_service_client.ml | type t = unit
let connect _ ~decl_state_dir:_ = failwith "not implemented"
let rpc_get_fun _ _ = failwith "not implemented"
let rpc_get_class _ _ = failwith "not implemented"
let rpc_get_typedef _ _ = failwith "not implemented"
let rpc_get_gconst _ _ = failwith "not implemented"
let rpc_get_module _ _ = failwith "not implemented"
let rpc_get_folded_class _ _ = failwith "not implemented"
let rpc_store_folded_class _ _ _ = failwith "not implemented"
let rpc_get_type_kind _ _ = failwith "not implemented"
module Positioned = struct
let rpc_get_typedef_path _ _ = failwith "not implemented"
let rpc_get_full_pos _ _ _ _ = failwith "not implemented"
end
module Slow = struct
let rpc_get_gconst_path _ _ = failwith "not implemented"
let rpc_get_fun_path _ _ = failwith "not implemented"
let rpc_get_type_path _ _ = failwith "not implemented"
let rpc_get_module_path _ _ = failwith "not implemented"
let rpc_get_fun_canon_name _ _ = failwith "not implemented"
let rpc_get_type_canon_name _ _ = failwith "not implemented"
end |
OCaml | hhvm/hphp/hack/src/stubs/depgraph_decompress_ffi.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let decompress ~compressed_dg_path:_ = failwith "decompress not implemented" |
hhvm/hphp/hack/src/stubs/dune | (library
(name ai_stubs)
(wrapped false)
(modules ai)
(libraries ai_options errors)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name ai_options_stubs)
(wrapped false)
(modules ai_options)
(libraries errors)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name build)
(wrapped false)
(modules buildMain)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name rust_ffi_stubs)
(wrapped false)
(modules)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name rust_provider_backend_stubs)
(wrapped false)
(modules rust_provider_backend)
(libraries
file_info
relative_path)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name server_local_config_knobs_stubs)
(wrapped false)
(modules serverLocalConfigKnobs)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name linting_stubs)
(wrapped false)
(modules linting_service)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name load_script_stubs)
(wrapped false)
(modules load_script loadScriptConfig loadScriptUtils)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name messages_stubs)
(wrapped false)
(modules clientMessages)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name state_loader_stubs)
(wrapped false)
(modules saved_state_loader state_loader_futures state_loader_lwt)
(libraries
hg
typechecker_options)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name decl_service_client)
(wrapped false)
(modules decl_service_client)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name remote_old_decls_ffi)
(wrapped false)
(modules
remote_old_decls_ffi)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name hh_distc_ffi)
(wrapped false)
(modules
hh_distc_ffi)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name depgraph_decompress_ffi)
(wrapped false)
(modules
depgraph_decompress_ffi)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name experiments_config_file_stubs)
(wrapped false)
(modules experiments_config_file)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name extract_method_plugins_stubs)
(wrapped false)
(modules extract_method_plugins))
(library
(name ci_util_stubs)
(wrapped false)
(modules ci_util)
(libraries future)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name exec_command_stubs)
(wrapped false)
(modules exec_command)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name clowder_paste_stubs)
(wrapped false)
(modules clowder_paste)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name flytrap_stubs)
(wrapped false)
(modules flytrap)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name rage_stubs)
(wrapped false)
(modules clientRage)
(libraries utils_exit utils_core)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name security_stubs)
(wrapped false)
(modules security)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name folly_stubs)
(wrapped false)
(modules folly)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name startup_initializer_stubs)
(wrapped false)
(modules startup_initializer)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name formatting_stubs)
(wrapped false)
(modules formatting)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name shape_analysis_scuba_stubs)
(wrapped false)
(modules shape_analysis_scuba))
(library
(name file_scuba_logger_ffi_externs)
(wrapped false)
(modules file_scuba_logger_ffi_externs)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(library
(name sdt_analysis_remote_logging_stubs)
(wrapped false)
(modules sdt_analysis_remote_logging)) |
|
OCaml | hhvm/hphp/hack/src/stubs/exec_command.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t =
| Current_executable
| Hg
| For_use_in_testing_only of string
| Gstack
| Hackfmt of string
| Hh_server of string
| Hh
| Ls
| Pgrep
| Ps
| Pstack
| Shell
| Strobeclient
| Watchman
| Watchman_diag
let to_string = function
| Current_executable -> Sys.executable_name
| Hg -> "hg"
| For_use_in_testing_only path -> path
| Gstack -> "gstack"
| Hackfmt path -> path
| Hh_server path -> path
| Hh -> "hh"
| Ls -> "ls"
| Pgrep -> "pgrep"
| Ps -> "ps"
| Pstack -> "pstack"
| Shell ->
if Sys.win32 then
"cmd.exe"
else
"/bin/sh"
| Strobeclient -> "strobeclient"
| Watchman -> "watchman"
| Watchman_diag -> "watchman-diag" |
OCaml | hhvm/hphp/hack/src/stubs/experiments_config_file.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let update
~(silent : bool) ~(file : string) ~(source : string option) ~(ttl : float) :
(string, string) result =
ignore (silent, file, source, ttl);
Ok "Experiments config update: nothing to do" |
OCaml | hhvm/hphp/hack/src/stubs/extract_method_plugins.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let find ~selection:_ ~entry:_ _ _ = [] |
OCaml | hhvm/hphp/hack/src/stubs/file_scuba_logger_ffi_externs.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type logger
type logger_guard
let from_config_file _ = failwith "not implemented"
let make_env_term _ = failwith "not implemented"
let close _ = failwith "not implemented"
let log_file_path _ = failwith "not implemented"
let scuba_dataset_name _ = failwith "not implemented" |
OCaml | hhvm/hphp/hack/src/stubs/folly.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let ensure_folly_init () = () |
OCaml | hhvm/hphp/hack/src/stubs/formatting.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let is_formattable _ : bool = false |
OCaml | hhvm/hphp/hack/src/stubs/hh_distc_ffi.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type handle
let spawn ~root:_ ~ss_dir:_ ~hhdg_path:_ _ = failwith "start not implemented"
let join _ = failwith "join_handle not implemented"
let cancel _ = failwith "cancel not implemented"
let is_finished _ = failwith "is_finished not implemented"
let get_fd _ = failwith "get_fd not implemented"
let recv _ = failwith "recv not implemented" |
OCaml | hhvm/hphp/hack/src/stubs/hulk_rpc_ffi.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let upload_payloads _ = failwith "upload_payloads not implemented"
let download_payload _ = failwith "download_payload not implemented" |
OCaml | hhvm/hphp/hack/src/stubs/jobRunner.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* The remote service mode *)
type remote_mode =
(* Real remote mode *)
| Remote
(* Pseudo remote on the same machine as client *)
| PseudoRemote
let get _ = failwith "not implemented" |
OCaml | hhvm/hphp/hack/src/stubs/linting_service.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let untyped_linters = []
let typed_linters = []
let lint_tast _ _ = () |
OCaml | hhvm/hphp/hack/src/stubs/loadScriptConfig.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t = unit
let createLoadScriptConfig _ _ = ()
let default = ()
let saved_state_load_type_to_string _ = ""
let saved_state_load_type_ _ = ()
let use_sql _ = false |
OCaml | hhvm/hphp/hack/src/stubs/loadScriptUtils.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let delete_corrupted_saved_state _ = ()
let lock_saved_state _ = () |
OCaml | hhvm/hphp/hack/src/stubs/mercurialUtils.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let wait_until_stable_repository _ = true |
OCaml | hhvm/hphp/hack/src/stubs/prolog.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let facts_of_defs _ _ _ _ _ _ = [] |
OCaml | hhvm/hphp/hack/src/stubs/prologMain.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let output_facts _ _ = ()
let go _ _ = "" |
OCaml | hhvm/hphp/hack/src/stubs/remote_old_decls_ffi.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let get_decls _ _ _ = failwith "get_decls not implemented"
let put_decls ~silent:_ _ _ = failwith "put_decls not implemented" |
OCaml | hhvm/hphp/hack/src/stubs/rust_provider_backend.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t
let make _ = failwith "unimplemented"
let set _ : unit = failwith "unimplemented"
let push_local_changes _ : unit = failwith "unimplemented"
let pop_local_changes _ : unit = failwith "unimplemented"
type find_symbol_fn = string -> (FileInfo.pos * FileInfo.name_type) option
type ctx_proxy = {
get_entry_contents: Relative_path.t -> string option;
is_pos_in_ctx: FileInfo.pos -> bool;
find_fun_canon_name_in_context: string -> string option;
find_type_canon_name_in_context: string -> string option;
find_const_in_context: find_symbol_fn;
find_fun_in_context: find_symbol_fn;
find_type_in_context: find_symbol_fn;
find_module_in_context: find_symbol_fn;
}
module Decl = struct
let direct_decl_parse_and_cache _ _ _ = failwith "unimplemented"
let add_shallow_decls _ _ = failwith "unimplemented"
let get_fun _ _ _ = failwith "unimplemented"
let get_shallow_class _ _ _ = failwith "unimplemented"
let get_typedef _ _ _ = failwith "unimplemented"
let get_gconst _ _ _ = failwith "unimplemented"
let get_module _ _ _ = failwith "unimplemented"
let get_folded_class _ _ _ = failwith "unimplemented"
let declare_folded_class _ _ _ = failwith "unimplemented"
let get_old_shallow_classes_batch _ _ = failwith "unimplemented"
let get_old_defs _ _ = failwith "unimplemented"
let oldify_defs _ _ : unit = failwith "unimplemented"
let remove_defs _ _ : unit = failwith "unimplemented"
let remove_old_defs _ _ : unit = failwith "unimplemented"
end
module File = struct
type file_type =
| Disk of string
| Ide of string
let get _ _ = failwith "unimplemented"
let get_contents _ _ = failwith "unimplemented"
let provide_file_for_tests _ _ _ = failwith "unimplemented"
let provide_file_for_ide _ _ _ = failwith "unimplemented"
let provide_file_hint _ _ _ = failwith "unimplemented"
let remove_batch _ _ = failwith "unimplemented"
end
module Naming = struct
module Types = struct
let add _ _ _ = failwith "unimplemented"
let get_pos _ _ _ = failwith "unimplemented"
let remove_batch _ _ = failwith "unimplemented"
let get_canon_name _ _ _ = failwith "unimplemented"
end
module Funs = struct
let add _ _ _ = failwith "unimplemented"
let get_pos _ _ _ = failwith "unimplemented"
let remove_batch _ _ = failwith "unimplemented"
let get_canon_name _ _ _ = failwith "unimplemented"
end
module Consts = struct
let add _ _ _ = failwith "unimplemented"
let get_pos _ _ _ = failwith "unimplemented"
let remove_batch _ _ = failwith "unimplemented"
end
module Modules = struct
let add _ _ _ = failwith "unimplemented"
let get_pos _ _ _ = failwith "unimplemented"
let remove_batch _ _ = failwith "unimplemented"
end
let get_db_path _ = failwith "unimplemented"
let set_db_path _ _ = failwith "unimplemented"
let get_filenames_by_hash _ _ = failwith "unimplemented"
end |
OCaml | hhvm/hphp/hack/src/stubs/saved_state_loader.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type process_success = {
command_line: string;
stdout: string;
}
type process_failure = string
type config = string * string SMap.t
module Watchman_options = struct
type t = {
root: Path.t;
sockname: Path.t option;
}
end
module Naming_and_dep_table_info = struct
type main_artifacts = {
naming_table_path: Path.t;
naming_sqlite_table_path: Path.t;
dep_table_path: Path.t;
errors_path: Path.t;
}
type dirty_files = {
master_changes: Relative_path.Set.t;
local_changes: Relative_path.Set.t;
}
type additional_info = {
mergebase_global_rev: Hg.global_rev option;
dirty_files_promise: dirty_files Future.t;
saved_state_distance: int option;
saved_state_age: int option;
}
end
module Naming_table_info = struct
type main_artifacts = { naming_table_path: Path.t }
type additional_info = unit
end
module Shallow_decls_info = struct
type main_artifacts = { shallow_decls_path: Path.t }
type additional_info = unit
end
type _ saved_state_type =
| Naming_and_dep_table_distc
: (Naming_and_dep_table_info.main_artifacts
* Naming_and_dep_table_info.additional_info)
saved_state_type
| Naming_table
: (Naming_table_info.main_artifacts * Naming_table_info.additional_info)
saved_state_type
| Shallow_decls
: (Shallow_decls_info.main_artifacts * Shallow_decls_info.additional_info)
saved_state_type
(** List of files changed since the saved-state's commit. This list of files may
include files other than Hack files, so the caller should filter the given list
as necessary. *)
type changed_files = Relative_path.t list
type ('main_artifacts, 'additional_info) load_result = {
main_artifacts: 'main_artifacts;
additional_info: 'additional_info;
manifold_path: string;
changed_files: changed_files;
corresponding_rev: Hg.hg_rev;
mergebase_rev: Hg.hg_rev;
is_cached: bool;
}
module LoadError = struct
type t = string
(* Please do not throw an exception here; it breaks hack for open source users *)
let short_user_message_of_error _ =
"Saved states are not supported in this build."
let medium_user_message_of_error _ =
"Saved states are not supported in this build."
let long_user_message_of_error _ =
"Saved states are not supported in this build."
let saved_state_manifold_api_key_of_error _ = None
let debug_details_of_error _ = ""
let category_of_error _ = ""
let is_error_actionable _ = false
end
let get_project_name _ = ""
let ignore_saved_state_version_mismatch ~ignore_hh_version = ignore_hh_version
let get_query_for_root ~root:_ ~relative_root:_ _ _ = "" |
OCaml | hhvm/hphp/hack/src/stubs/sdt_analysis_remote_logging.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t = unit
let create :
strategy:[< `CodemodSdtCumulative | `CodemodSdtIndependent ] ->
log_remotely:bool ->
tag:string ->
t =
(fun ~strategy:_ ~log_remotely:_ ~tag:_ -> ())
let submit_patch_result :
t ->
patched_ids:string list ->
error_count:int ->
line_index:int ->
target_kind:[< `ClassLike | `Function ] ->
unit =
(fun _ ~patched_ids:_ ~error_count:_ ~line_index:_ ~target_kind:_ -> ()) |
OCaml | hhvm/hphp/hack/src/stubs/security.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type error = string
type success = Checks_skipped [@@deriving show]
let check_credentials ~attempt_fix =
ignore attempt_fix;
Ok Checks_skipped
let to_error_message_string error = error
let to_error_kind_string error = error |
OCaml | hhvm/hphp/hack/src/stubs/serverLocalConfigKnobs.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let apply_justknobs_overrides ~silent:_ config ~from:_ = config |
OCaml | hhvm/hphp/hack/src/stubs/shape_analysis_scuba.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let log_events_remotely _ _ = () |