language (stringlengths 0-24) | filename (stringlengths 9-214) | code (stringlengths 99-9.93M) |
---|---|---|
OCaml | hhvm/hphp/hack/src/server/serverIsSubtype.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Typing_defs
type pos = Relative_path.t * int * int
type type_spec =
| TSpos of pos
| TSjson of Hh_json.json
type query =
| PosJson of pos * Hh_json.json
| JsonPos of Hh_json.json * pos
| JsonJson of Hh_json.json * Hh_json.json
let expand_path file =
let path = Path.make file in
if Path.file_exists path then
Ok (Path.to_string path)
else
let file = Filename.concat (Sys.getcwd ()) file in
let path = Path.make file in
if Path.file_exists path then
Ok (Path.to_string path)
else
Error (Printf.sprintf "File not found: %s" file)
let get_type_at_pos ctx tast_map pos :
(Typing_env_types.env * locl_ty, string) result =
let (path, line, col) = pos in
let tast = Relative_path.Map.find tast_map path in
match ServerInferType.type_at_pos ctx tast line col with
| Some (env, ty) -> Ok (Tast_env.tast_env_as_typing_env env, ty)
| _ ->
Error
(Printf.sprintf
"Failed to get type for pos %s:%d:%d"
(Relative_path.to_absolute path)
line
col)
(* Returns list of error strings *)
let rec validate_free_type env locl_ty =
match get_node locl_ty with
(* Notably, we don't validate the arity of the type arguments.
Extra args appear to be ignored when subtyping, and
missing args only result in "true" if both types are missing that arg. *)
| Tclass ((_, class_id), _exact, tyargs) ->
(match Typing_env.get_class env class_id with
| None -> ["Unbound class name " ^ class_id]
| Some _ -> validate_l env tyargs)
| Tunion tyargs
| Tintersection tyargs
| Ttuple tyargs ->
validate_l env tyargs
| Toption locl_ty -> validate_free_type env locl_ty
| Tnonnull
| Tdynamic
| Tprim _
(* json_to_locl_ty already validates the name for unapplied_alias *)
| Tunapplied_alias _
| Tany _ ->
[]
| Tvec_or_dict (kty, vty) ->
validate_free_type env kty @ validate_free_type env vty
| Tfun tfun ->
validate_l env (List.map tfun.ft_params ~f:(fun fp -> fp.fp_type.et_type))
@ validate_free_type env tfun.ft_ret.et_type
| Tshape { s_origin = _origin; s_unknown_value = _kind; s_fields = fm } ->
let field_tys =
List.map (TShapeMap.values fm) ~f:(fun field -> field.sft_ty)
in
validate_l env field_tys
| Tnewtype (_name, tyargs, as_ty) ->
(* Typing_print.json_to_locl_ty already validates the name.
Interestingly, it doesn't validate that the given "as" matches
the defined one. *)
validate_l env tyargs @ validate_free_type env as_ty
(* These aren't even created by Typing_print.json_to_locl_ty *)
| Tneg _
| Tvar _
| Taccess _
| Tdependent _
(* Unsupported b/c relative/erroneous *)
| Tgeneric _ ->
[Printf.sprintf "Unsupported free type %s" (Typing_print.full env locl_ty)]
and validate_l env locl_tyl =
List.concat_map locl_tyl ~f:(validate_free_type env)
let get_type_from_json ctx json : (locl_ty, string list) result =
let locl_ty = Typing_print.json_to_locl_ty ~keytrace:[] ctx json in
match locl_ty with
| Ok locl_ty ->
let env = Typing_env_types.empty ctx Relative_path.default ~droot:None in
(match validate_free_type env locl_ty with
| [] -> Ok locl_ty
| errl -> Error errl)
| Error err -> Error [show_deserialization_error err]
let get_type_spec_from_json json : (type_spec, string) result =
match Hh_json.Access.get_string "kind" (json, []) with
| Ok (value, _keytrace) ->
(match value with
| "type" ->
(match Hh_json.Access.get_obj "type" (json, []) with
| Ok (json, _keytrace) -> Ok (TSjson json)
| Error failure -> Error (Hh_json.Access.access_failure_to_string failure))
| "pos" ->
(match Hh_json.Access.get_string "pos" (json, []) with
| Ok ((pos_str : string), _keytrace) ->
(match String.split ~on:':' pos_str with
| [file; line; col] ->
(match expand_path file with
| Ok file ->
let path = Relative_path.create_detect_prefix file in
Ok (TSpos (path, int_of_string line, int_of_string col))
| Error e -> Error e)
| _ ->
Error
(Printf.sprintf
"Position %s is malformed. Expected file:line:column"
pos_str))
| Error failure -> Error (Hh_json.Access.access_failure_to_string failure))
| bad_kind -> Error ("Unexpected kind " ^ bad_kind))
| Error failure -> Error (Hh_json.Access.access_failure_to_string failure)
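(* Illustrative sketch only: the JSON shapes implied by [get_type_spec_from_json]
   above and by [check] below. Each type spec is either
     {"kind": "pos",  "pos": "<file>:<line>:<column>"}
   or
     {"kind": "type", "type": { ...a serialized locl_ty, whose exact format is
                                owned by Typing_print.json_to_locl_ty... }}
   and [check] consumes a JSON array of two-element arrays [spec_left, spec_right],
   answering each pair with {"ok": {...}} or {"errors": [...]}. The file name in
   the example below is made up, not something this module requires. *)
let _example_pos_spec_string = {|{"kind": "pos", "pos": "foo.php:12:8"}|}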
type is_subtype_result = {
is_subtype: bool;
ty_left: string;
ty_right: string;
}
let is_subtype env l_ty r_ty : is_subtype_result =
{
is_subtype = Typing_subtype.is_sub_type env l_ty r_ty;
ty_left = Typing_print.full env l_ty;
ty_right = Typing_print.full env r_ty;
}
let helper
acc
ctx
(query_with_path_alist : (int * Relative_path.t option * query) list) :
(int * (is_subtype_result, string list) result) list =
let (ctx, tast_map) =
let paths =
List.filter_map query_with_path_alist ~f:(fun (_, path_opt, _) ->
path_opt)
in
ServerInferTypeBatch.get_tast_map ctx paths
in
List.map query_with_path_alist ~f:(fun (i, _path_opt, query) ->
let result =
match query with
| JsonJson (json_l, json_r) ->
Result.combine
(get_type_from_json ctx json_l)
(get_type_from_json ctx json_r)
~ok:(fun l r ->
let env =
Typing_env_types.empty ctx Relative_path.default ~droot:None
in
is_subtype env l r)
~err:(fun a b -> a @ b)
| JsonPos (json, pos) ->
Result.combine
(get_type_from_json ctx json)
(Result.map_error (get_type_at_pos ctx tast_map pos) ~f:(fun e ->
[e]))
~ok:(fun l (env, r) -> is_subtype env l r)
~err:(fun a b -> a @ b)
| PosJson (pos, json) ->
Result.combine
(Result.map_error (get_type_at_pos ctx tast_map pos) ~f:(fun e ->
[e]))
(get_type_from_json ctx json)
~ok:(fun (env, l) r -> is_subtype env l r)
~err:(fun a b -> a @ b)
in
(i, result))
@ acc
let parallel_helper workers ctx query_with_path_alist :
(int * (is_subtype_result, string list) result) list =
let add_query_to_map_if_some_path map (i, path_opt, query) =
match path_opt with
| Some path ->
let entry = (i, path_opt, query) in
Relative_path.Map.update
path
(function
| None -> Some [entry]
| Some others -> Some (entry :: others))
map
| None -> map
in
let (query_with_some_path_alist, query_with_none_path_alist) =
List.partition_tf query_with_path_alist ~f:(fun (_, path_opt, _) ->
Option.is_some path_opt)
in
let query_with_some_path_alists_by_file =
List.fold
~init:Relative_path.Map.empty
~f:add_query_to_map_if_some_path
query_with_some_path_alist
|> Relative_path.Map.values
in
let query_with_path_alists =
query_with_some_path_alists_by_file @ [query_with_none_path_alist]
in
MultiWorker.call
workers
~job:(fun acc query_with_path_alist ->
helper acc ctx (List.concat query_with_path_alist))
~neutral:[]
~merge:List.rev_append
~next:(MultiWorker.next workers query_with_path_alists)
let check workers str env =
let ctx = Provider_utils.ctx_from_server_env env in
match Hh_json.json_of_string str with
| Hh_json.JSON_Array json_l ->
let spec_pair_result_alist =
List.mapi json_l ~f:(fun i json ->
let pair =
match json with
| Hh_json.JSON_Array [json_l; json_r] ->
let get_type_spec_from_json_with_el json =
Result.map_error (get_type_spec_from_json json) ~f:(fun e ->
[e])
in
Result.combine
(get_type_spec_from_json_with_el json_l)
(get_type_spec_from_json_with_el json_r)
~ok:(fun l r -> (l, r))
~err:(fun l r -> l @ r)
| _ -> Error ["Expected JSON array of size 2"]
in
(i, pair))
in
let (query_with_path_alist, error_alist) =
List.partition_map spec_pair_result_alist ~f:(fun (i, x) ->
match x with
| Ok (TSpos ((path, _, _) as pos), TSjson json) ->
Either.first (i, Some path, PosJson (pos, json))
| Ok (TSjson json_l, TSjson json_r) ->
Either.first (i, None, JsonJson (json_l, json_r))
| Ok (TSjson json, TSpos ((path, _, _) as pos)) ->
Either.first (i, Some path, JsonPos (json, pos))
| Ok (TSpos _, TSpos _) ->
Either.second
(i, Error ["Cannot provide a position for both types"])
| Error e -> Either.second (i, Error e))
in
let num_files =
query_with_path_alist
|> List.filter_map ~f:(fun (_, path_opt, _) -> path_opt)
|> Relative_path.Set.of_list
|> Relative_path.Set.cardinal
in
(* TODO: Should we remove duplicates of the same query?
Or somehow even avoid multiple type-at-pos lookups for positions
that appear in multiple distinct queries? *)
let result_alist =
if num_files < 10 then
helper [] ctx query_with_path_alist
else
parallel_helper workers ctx query_with_path_alist
in
let result_alist = result_alist @ error_alist in
let result_map_by_i = Int.Map.of_alist_exn result_alist in
let result_list =
List.mapi json_l ~f:(fun i _ -> Int.Map.find_exn result_map_by_i i)
in
let json_result_list =
List.map result_list ~f:(fun res ->
let (status, value) =
match res with
| Ok is_subtype_result ->
( "ok",
Hh_json.JSON_Object
[
("is_subtype", Hh_json.bool_ is_subtype_result.is_subtype);
("left", Hh_json.string_ is_subtype_result.ty_left);
("right", Hh_json.string_ is_subtype_result.ty_right);
] )
| Error l ->
("errors", Hh_json.JSON_Array (List.map l ~f:Hh_json.string_))
in
Hh_json.JSON_Object [(status, value)])
in
Ok (Hh_json.json_to_string (Hh_json.JSON_Array json_result_list))
| _ -> Error "Expected JSON array" |
OCaml Interface | hhvm/hphp/hack/src/server/serverIsSubtype.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val check :
MultiWorker.worker list option ->
string ->
ServerEnv.env ->
(string, string) result |
OCaml | hhvm/hphp/hack/src/server/serverLazyInit.ml | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* Lazy Initialization:
During Lazy initialization, hh_server tries to do as little work as possible.
The init_from_saved_state behavior is this:
Load from saved state -> Parse dirty files -> Naming -> Dirty Typecheck
The full_init behavior is this: (similar to eager init, but with lazy decl)
Full Parsing -> Naming -> Full Typecheck (with lazy decl)
*)
(* module Hack_bucket = Bucket *)
open Hh_prelude
(* module Bucket = Hack_bucket *)
open GlobalOptions
open Result.Export
open Reordered_argument_collections
open SearchServiceRunner
open ServerEnv
open ServerInitTypes
module SLC = ServerLocalConfig
type deptable = CustomDeptable of string
let deptable_with_filename (fn : string) : deptable = CustomDeptable fn
let lock_and_load_deptable
~(base_file_name : string)
~(deptable : deptable)
~(ignore_hh_version : bool) : unit =
match deptable with
| CustomDeptable fn ->
let () =
if
not
(Saved_state_loader.ignore_saved_state_version_mismatch
~ignore_hh_version)
then
let build_revision =
SaveStateService.saved_state_build_revision_read ~base_file_name
in
if not (String.equal build_revision Build_id.build_revision) then
raise
@@ Failure
(Printf.sprintf
("Saved-state build mismatch, this saved-state was built "
^^ " for version '%s', but we expected '%s'")
build_revision
Build_id.build_revision)
in
(* The new dependency graph is threaded through function calls
* instead of stored in a global *)
Hh_logger.log "Custom dependency graph will be loaded lazily from %s" fn
let run_saved_state_future
(genv : genv)
(ctx : Provider_context.t)
(dependency_table_saved_state_future :
( ( Saved_state_loader.Naming_and_dep_table_info.main_artifacts,
Saved_state_loader.Naming_and_dep_table_info.additional_info )
Saved_state_loader.load_result,
ServerInitTypes.load_state_error )
result
Future.t) : (loaded_info, load_state_error) result =
let t = Unix.gettimeofday () in
match Future.get dependency_table_saved_state_future ~timeout:60 with
| Error error ->
Hh_logger.log
"Unhandled Future.error from state loader: %s"
(Future.error_to_string error);
let e = Exception.wrap_unraised (Future.error_to_exn error) in
Error (Load_state_unhandled_exception e)
| Ok (Error error) -> Error error
| Ok (Ok deptable_result) ->
let (_ : float) =
Hh_logger.log_duration
"Finished downloading naming table and dependency graph."
t
in
let {
Saved_state_loader.main_artifacts;
additional_info;
changed_files;
manifold_path;
corresponding_rev;
mergebase_rev;
is_cached = _;
} =
deptable_result
in
let {
Saved_state_loader.Naming_and_dep_table_info.naming_table_path =
deptable_naming_table_blob_path;
dep_table_path;
compressed_dep_table_path;
naming_sqlite_table_path;
errors_path;
} =
main_artifacts
in
let {
Saved_state_loader.Naming_and_dep_table_info.mergebase_global_rev;
dirty_files_promise;
saved_state_distance;
saved_state_age;
} =
additional_info
in
let ignore_hh_version =
ServerArgs.ignore_hh_version genv.ServerEnv.options
in
let use_compressed_dep_graph =
genv.local_config.ServerLocalConfig.use_compressed_dep_graph
in
let deptable_fn =
if use_compressed_dep_graph then
let deptable_result =
Depgraph_decompress_ffi.decompress
~compressed_dg_path:(Path.to_string compressed_dep_table_path)
in
match deptable_result with
| Ok decompressed_depgraph_path ->
let deptable = deptable_with_filename decompressed_depgraph_path in
Hh_logger.log "Done decompressing dep graph";
lock_and_load_deptable
~base_file_name:(Path.to_string deptable_naming_table_blob_path)
~deptable
~ignore_hh_version;
decompressed_depgraph_path
| Error error ->
failwith (Printf.sprintf "Failed to decompress dep graph: %s" error)
else
let deptable = deptable_with_filename (Path.to_string dep_table_path) in
lock_and_load_deptable
~base_file_name:(Path.to_string deptable_naming_table_blob_path)
~deptable
~ignore_hh_version;
Path.to_string dep_table_path
in
let naming_table_fallback_path =
if Sys.file_exists (Path.to_string naming_sqlite_table_path) then (
Hh_logger.log "Using sqlite naming table from hack/64 saved state";
Some (Path.to_string naming_sqlite_table_path)
) else
ServerCheckUtils.get_naming_table_fallback_path genv
in
let (old_naming_table, old_errors) =
SaveStateService.load_saved_state_exn
ctx
~naming_table_fallback_path
~errors_path:(Path.to_string errors_path)
in
let t = Unix.time () in
(match
dirty_files_promise
|> Future.get
~timeout:
genv.local_config.SLC.load_state_natively_dirty_files_timeout
with
| Error error -> Error (Load_state_dirty_files_failure error)
| Ok
{
Saved_state_loader.Naming_and_dep_table_info.master_changes =
dirty_master_files;
local_changes = dirty_local_files;
} ->
let () = HackEventLogger.state_loader_dirty_files t in
let dirty_naming_files = Relative_path.Set.of_list changed_files in
let dirty_master_files = dirty_master_files in
let dirty_local_files = dirty_local_files in
let naming_table_manifold_path = Some manifold_path in
let saved_state_delta =
match (saved_state_distance, saved_state_age) with
| (_, None)
| (None, _) ->
None
| (Some distance, Some age) -> Some { distance; age }
in
Ok
{
naming_table_fn = Path.to_string naming_sqlite_table_path;
naming_table_fallback_fn = naming_table_fallback_path;
deptable_fn;
corresponding_rev = Hg.Hg_rev corresponding_rev;
mergebase_rev = mergebase_global_rev;
mergebase = Some mergebase_rev;
dirty_naming_files;
dirty_master_files;
dirty_local_files;
old_naming_table;
old_errors;
saved_state_delta;
naming_table_manifold_path;
})
(** [report update p ~other] is called because we just got a report that progress-meter [p]
should be updated with latest information [update]. The behavior of this function
is to (1) update progress-meter [p], and (2) report overall hh_server status,
making a judgment-call about what message to synthesize out of the progress
of both [p] and [other]. *)
let report
(update : (Exec_command.t * string list) option)
(progress : (string * float) option ref)
~(other : (string * float) option ref) =
begin
match update with
| None -> progress := None
| Some (cmd, _args) ->
progress :=
Some
(Exec_command.to_string cmd |> Filename.basename, Unix.gettimeofday ())
end;
(* To keep reporting human-understandable, we have to account for the fact that there
are actually two concurrent progress-meters going on, and we want to keep things
simple, so we only report the single longest-running of the two progress-meters' updates. *)
let msg =
match (!progress, !other) with
| (None, None) -> "loading saved state" (* if neither is going on *)
| (None, Some (msg, _time))
| (Some (msg, _time), None) ->
Printf.sprintf "waiting on %s..." msg
| (Some (msg1, time1), Some (_msg2, time2)) when Float.(time1 < time2) ->
Printf.sprintf "waiting on %s..." msg1
| (Some _, Some (msg2, _)) -> Printf.sprintf "waiting on %s..." msg2
in
ServerProgress.write "%s" msg;
()
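(* A minimal sketch (not part of the server code) of the rule [report] applies
   above when both progress meters are active: prefer the meter with the earlier
   start time, i.e. the one that has been waiting longest. *)
let _pick_longest_running a b =
  match (a, b) with
  | (None, None) -> None
  | (Some m, None)
  | (None, Some m) ->
    Some m
  | (Some ((_, t1) as m1), Some (_, t2)) when Float.(t1 < t2) -> Some m1
  | (Some _, Some m2) -> Some m2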
let download_and_load_state_exn
~(genv : ServerEnv.genv) ~(ctx : Provider_context.t) ~(root : Path.t) :
(loaded_info, load_state_error) result =
let ignore_hh_version = ServerArgs.ignore_hh_version genv.options in
let (progress_naming_table_load, progress_dep_table_load) =
(ref None, ref None)
in
let ssopt = genv.local_config.ServerLocalConfig.saved_state in
let dependency_table_saved_state_future :
( ( Saved_state_loader.Naming_and_dep_table_info.main_artifacts,
Saved_state_loader.Naming_and_dep_table_info.additional_info )
Saved_state_loader.load_result,
ServerInitTypes.load_state_error )
result
Future.t =
Hh_logger.log "Downloading dependency graph from DevX infra";
let loader_future =
State_loader_futures.load
~ssopt
~progress_callback:(fun update ->
report
update
progress_dep_table_load
~other:progress_naming_table_load)
~watchman_opts:
Saved_state_loader.Watchman_options.{ root; sockname = None }
~ignore_hh_version
~saved_state_type:Saved_state_loader.Naming_and_dep_table_distc
|> Future.with_timeout
~timeout:genv.local_config.SLC.load_state_natively_download_timeout
in
let loader_future =
Future.continue_with loader_future @@ function
| Error e -> Error (Load_state_saved_state_loader_failure e)
| Ok v -> Ok v
in
loader_future
in
run_saved_state_future genv ctx dependency_table_saved_state_future
let calculate_state_distance_and_age_from_hg
(root : Path.t) (corresponding_base_revision : string) :
Hg.hg_rev option * Hg.global_rev option * ServerEnv.saved_state_delta option
=
let root = Path.to_string root in
let future =
Future.continue_with_future (Hg.current_mergebase_hg_rev root)
@@ fun mergebase_rev ->
Future.continue_with_future
(Hg.get_closest_global_ancestor mergebase_rev root)
@@ fun mergebase_global_rev ->
Future.continue_with_future
(Hg.get_closest_global_ancestor corresponding_base_revision root)
@@ fun corresponding_base_global_rev ->
Future.continue_with_future
(Hg.get_hg_revision_time (Hg.Hg_rev corresponding_base_revision) root)
@@ fun corresponding_time ->
Future.continue_with_future
(Hg.get_hg_revision_time (Hg.Hg_rev mergebase_rev) root)
@@ fun mergebase_time ->
let state_distance =
abs (mergebase_global_rev - corresponding_base_global_rev)
in
let state_age = abs (mergebase_time - corresponding_time) in
let saved_state_delta = { age = state_age; distance = state_distance } in
Future.of_value (mergebase_rev, mergebase_global_rev, saved_state_delta)
in
match Future.get future with
| Ok (a, b, c) -> (Some a, Some b, Some c)
| Error e ->
Hh_logger.log
"[serverLazyInit]: calculate_state_distance_and_age_from_hg failed: %s"
(Future.error_to_string e);
(None, None, None)
let use_precomputed_state_exn
~(root : Path.t)
(genv : ServerEnv.genv)
(ctx : Provider_context.t)
(info : ServerArgs.saved_state_target_info)
(cgroup_steps : CgroupProfiler.step_group) : loaded_info =
let {
ServerArgs.naming_table_path;
corresponding_base_revision;
deptable_fn;
compressed_deptable_fn;
changes;
naming_changes;
prechecked_changes;
} =
info
in
let ignore_hh_version = ServerArgs.ignore_hh_version genv.ServerEnv.options in
CgroupProfiler.step_start_end cgroup_steps "load deptable"
@@ fun _cgroup_step ->
let use_compressed_dep_graph =
genv.local_config.ServerLocalConfig.use_compressed_dep_graph
in
let deptable_fn =
if use_compressed_dep_graph && Option.is_some compressed_deptable_fn then
let compressed_deptable_fn = Option.value_exn compressed_deptable_fn in
let deptable_result =
Depgraph_decompress_ffi.decompress
~compressed_dg_path:compressed_deptable_fn
in
match deptable_result with
| Ok decompressed_depgraph_path ->
let deptable = deptable_with_filename decompressed_depgraph_path in
Hh_logger.log "Done decompressing dep graph";
lock_and_load_deptable
~base_file_name:naming_table_path
~deptable
~ignore_hh_version;
decompressed_depgraph_path
| Error error ->
failwith (Printf.sprintf "Failed to decompress dep graph: %s" error)
else (
if use_compressed_dep_graph && Option.is_none compressed_deptable_fn then (
Hh_logger.log
"Not using compressed dep graph because it's not available.";
HackEventLogger.tried_to_load_non_existant_compressed_dep_graph ()
);
let deptable = deptable_with_filename deptable_fn in
lock_and_load_deptable
~base_file_name:naming_table_path
~deptable
~ignore_hh_version;
deptable_fn
)
in
let changes = Relative_path.set_of_list changes in
let naming_changes = Relative_path.set_of_list naming_changes in
let prechecked_changes = Relative_path.set_of_list prechecked_changes in
let naming_sqlite_table_path =
ServerArgs.naming_sqlite_path_for_target_info info
in
let naming_table_fallback_path =
if Sys.file_exists naming_sqlite_table_path then (
Hh_logger.log "Using sqlite naming table from saved state";
Some naming_sqlite_table_path
) else
ServerCheckUtils.get_naming_table_fallback_path genv
in
let errors_path = ServerArgs.errors_path_for_target_info info in
let (old_naming_table, old_errors) =
CgroupProfiler.step_start_end cgroup_steps "load saved state"
@@ fun _cgroup_step ->
SaveStateService.load_saved_state_exn
ctx
~naming_table_fallback_path
~errors_path
in
let log_saved_state_age_and_distance =
ctx
|> Provider_context.get_tcopt
|> TypecheckerOptions.log_saved_state_age_and_distance
in
let (mergebase, mergebase_rev, saved_state_delta) =
if log_saved_state_age_and_distance then
calculate_state_distance_and_age_from_hg root corresponding_base_revision
else
(None, None, None)
in
{
naming_table_fn = naming_table_path;
naming_table_fallback_fn = naming_table_fallback_path;
deptable_fn;
corresponding_rev = Hg.Hg_rev corresponding_base_revision;
mergebase_rev;
mergebase;
dirty_naming_files = naming_changes;
dirty_master_files = prechecked_changes;
dirty_local_files = changes;
old_naming_table;
old_errors;
saved_state_delta;
naming_table_manifold_path = None;
}
(** This function ensures the naming-table is ready for us to do [update_reverse_naming_table_from_env_and_get_duplicate_name_errors]
on the [env.naming_table] forward-naming-table files.
- In the case of saved-state-init, [env.naming_table] has dirty files so we'll
remove them from the global naming table
- In the case of full init, I don't believe this is ever called...!
*)
let remove_items_from_reverse_naming_table_or_build_new_reverse_naming_table
(ctx : Provider_context.t)
(old_naming_table : Naming_table.t)
(parsing_files : Relative_path.Set.t)
(naming_table_fallback_fn : string option)
(t : float)
~(cgroup_steps : CgroupProfiler.step_group) : float =
CgroupProfiler.step_start_end cgroup_steps "naming from saved state"
@@ fun _cgroup_step ->
begin
match naming_table_fallback_fn with
| Some _ ->
(* Set the SQLite fallback path for the reverse naming table, then block out all entries in
any dirty files to make sure we properly handle file deletes. *)
Relative_path.Set.iter parsing_files ~f:(fun k ->
let open FileInfo in
match Naming_table.get_file_info old_naming_table k with
| None ->
(* If we can't find the file in [old_naming_table] we don't consider that an error, since
* it could be a new file that was added. *)
()
| Some
{
hash = _;
file_mode = _;
funs;
classes;
typedefs;
consts;
modules;
comments = _;
} ->
let backend = Provider_context.get_backend ctx in
let snd (_, x, _) = x in
Naming_provider.remove_type_batch
backend
(classes |> List.map ~f:snd);
Naming_provider.remove_type_batch
backend
(typedefs |> List.map ~f:snd);
Naming_provider.remove_fun_batch backend (funs |> List.map ~f:snd);
Naming_provider.remove_const_batch
backend
(consts |> List.map ~f:snd);
Naming_provider.remove_module_batch
backend
(modules |> List.map ~f:snd))
| None ->
HackEventLogger.invariant_violation_bug
"unexpected saved-state build-new-reverse-naming-table";
(* Name all the files from the old naming-table (except the new ones we parsed since
they'll be named by our caller, next). We assume the old naming-table came from a clean
state, which is why we skip checking for "already bound" conditions. *)
let old_hack_names =
Naming_table.filter old_naming_table ~f:(fun k _v ->
not (Relative_path.Set.mem parsing_files k))
in
Naming_table.fold old_hack_names ~init:() ~f:(fun k info () ->
Naming_global.ndecl_file_skip_if_already_bound ctx k info)
end;
HackEventLogger.naming_from_saved_state_end t;
Hh_logger.log_duration "NAMING_FROM_SAVED_STATE_END" t
(* Prechecked files are gated with a flag and not supported in AI/check/saving
* of saved state modes. *)
let use_prechecked_files (genv : ServerEnv.genv) : bool =
ServerPrecheckedFiles.should_use genv.options genv.local_config
&& Option.is_none (ServerArgs.ai_mode genv.options)
&& (not (ServerArgs.check_mode genv.options))
&& Option.is_none (ServerArgs.save_filename genv.options)
let get_old_and_new_defs_in_files
(old_naming_table : Naming_table.t)
(new_naming_table : Naming_table.t)
(files : Relative_path.Set.t) : FileInfo.names Relative_path.Map.t =
Relative_path.Set.fold
files
~f:
begin
fun path acc ->
let old_defs_in_file =
Naming_table.get_file_info old_naming_table path
|> Option.map ~f:FileInfo.simplify
in
let new_defs_in_file =
Naming_table.get_file_info new_naming_table path
|> Option.map ~f:FileInfo.simplify
in
let all_defs =
Option.merge
old_defs_in_file
new_defs_in_file
~f:FileInfo.merge_names
in
match all_defs with
| Some all_defs -> Relative_path.Map.add acc ~key:path ~data:all_defs
| None -> acc
end
~init:Relative_path.Map.empty
let names_to_deps (names : FileInfo.names) : Typing_deps.DepSet.t =
let open Typing_deps in
let { FileInfo.n_funs; n_classes; n_types; n_consts; n_modules } = names in
let add_deps_of_sset dep_ctor sset depset =
SSet.fold sset ~init:depset ~f:(fun n acc ->
DepSet.add acc (Dep.make (dep_ctor n)))
in
let deps = add_deps_of_sset (fun n -> Dep.Fun n) n_funs (DepSet.make ()) in
let deps = add_deps_of_sset (fun n -> Dep.Type n) n_classes deps in
let deps = add_deps_of_sset (fun n -> Dep.Type n) n_types deps in
let deps = add_deps_of_sset (fun n -> Dep.GConst n) n_consts deps in
let deps = add_deps_of_sset (fun n -> Dep.GConstName n) n_consts deps in
let deps = add_deps_of_sset (fun n -> Dep.Module n) n_modules deps in
deps
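(* Illustrative only: the per-name step of [names_to_deps] above, shown for a
   single hypothetical function name. A file's funs, classes, typedefs, consts
   and modules are all folded into the DepSet this way. *)
let _dep_of_single_fun name =
  Typing_deps.(DepSet.add (DepSet.make ()) (Dep.make (Dep.Fun name)))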
let log_fanout_information to_recheck_deps files_to_recheck =
(* we use lazy here to avoid expensive string generation when logging
* is not enabled *)
Hh_logger.log_lazy ~category:"fanout_information"
@@ lazy
Hh_json.(
json_to_string
@@ JSON_Object
[
("tag", string_ "saved_state_init_fanout");
( "hashes",
array_
string_
Typing_deps.(
List.map ~f:Dep.to_hex_string
@@ DepSet.elements to_recheck_deps) );
( "files",
array_
string_
Relative_path.(
List.map ~f:suffix @@ Set.elements files_to_recheck) );
])
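(* Shape of the JSON logged above (illustrative):
   { "tag": "saved_state_init_fanout",
     "hashes": [ "<hex dep hash>", ... ],
     "files": [ "<path suffix>", ... ] } *)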
(** Compare declarations loaded from the saved state to declarations based on
the current versions of dirty files. This lets us check a smaller set of
files than the set we'd check if old declarations were not available. *)
let get_files_to_recheck
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(old_naming_table : Naming_table.t)
(new_naming_table : Naming_table.t)
(defs_per_dirty_file : FileInfo.names Relative_path.Map.t)
(files_to_redeclare : Relative_path.Set.t) : Relative_path.Set.t =
let bucket_size = genv.local_config.SLC.type_decl_bucket_size in
let defs_per_file_to_redeclare =
Relative_path.Set.fold
files_to_redeclare
~init:Relative_path.Map.empty
~f:(fun path acc ->
match Relative_path.Map.find_opt defs_per_dirty_file path with
| Some info -> Relative_path.Map.add acc ~key:path ~data:info
| None -> acc)
in
let get_old_and_new_classes path : SSet.t =
let old_names =
Naming_table.get_file_info old_naming_table path
|> Option.map ~f:FileInfo.simplify
in
let new_names =
Naming_table.get_file_info new_naming_table path
|> Option.map ~f:FileInfo.simplify
in
let classes_from_names x = x.FileInfo.n_classes in
let old_classes = Option.map old_names ~f:classes_from_names in
let new_classes = Option.map new_names ~f:classes_from_names in
Option.merge old_classes new_classes ~f:SSet.union
|> Option.value ~default:SSet.empty
in
let dirty_names =
Relative_path.Map.fold
defs_per_dirty_file
~init:FileInfo.empty_names
~f:(fun _ -> FileInfo.merge_names)
in
let ctx = Provider_utils.ctx_from_server_env env in
Decl_redecl_service.oldify_type_decl
ctx
~bucket_size
genv.workers
get_old_and_new_classes
~defs:dirty_names;
let { Decl_redecl_service.fanout = { Fanout.to_recheck; _ }; _ } =
Decl_redecl_service.redo_type_decl
~bucket_size
ctx
~during_init:true
genv.workers
get_old_and_new_classes
~previously_oldified_defs:dirty_names
~defs:defs_per_file_to_redeclare
in
Decl_redecl_service.remove_old_defs ctx ~bucket_size genv.workers dirty_names;
let files_to_recheck = Naming_provider.get_files ctx to_recheck in
log_fanout_information to_recheck files_to_recheck;
files_to_recheck
(* We start off with a list of files that have changed since the state was
* saved (dirty_files), the naming table from the saved state (old_naming_table)
* and a map of the current class / function declarations per file (new_defs_per_file).
* We grab the declarations from both, to account for both the declarations
* that were deleted and those that are newly created.
* Then we use the deptable to figure out the files that
* referred to them. Finally we recheck the lot.
*
* Args:
*
* genv, env : environments
* old_naming_table: naming table at the time of the saved state
* new_naming_table: naming table after changes
* dirty_master_files and dirty_local_files: we need to typecheck these and,
* since their decls have changed, also all of their dependencies
* similar_files: we only need to typecheck these,
* not their dependencies since their decls are unchanged
* *)
let calculate_fanout_and_defer_or_do_type_check
(genv : ServerEnv.genv)
(env : ServerEnv.env)
~(old_naming_table : Naming_table.t)
~(new_naming_table : Naming_table.t)
~(dirty_master_files_unchanged_decls : Relative_path.Set.t)
~(dirty_master_files_changed_decls : Relative_path.Set.t)
~(dirty_local_files_unchanged_decls : Relative_path.Set.t)
~(dirty_local_files_changed_decls : Relative_path.Set.t)
(t : float)
(cgroup_steps : CgroupProfiler.step_group) : ServerEnv.env * float =
let start_t = Unix.gettimeofday () in
let dirty_files_unchanged_decls =
Relative_path.Set.union
dirty_master_files_unchanged_decls
dirty_local_files_unchanged_decls
in
let dirty_files_changed_decls =
Relative_path.Set.union
dirty_master_files_changed_decls
dirty_local_files_changed_decls
in
let old_and_new_defs_per_dirty_files_changed_decls =
get_old_and_new_defs_in_files
old_naming_table
new_naming_table
dirty_files_changed_decls
in
let old_and_new_defs_per_dirty_files =
ServerCheckUtils.extend_defs_per_file
genv
old_and_new_defs_per_dirty_files_changed_decls
env.naming_table
dirty_files_unchanged_decls
in
let old_and_new_defs_in_files files : FileInfo.names =
Relative_path.Map.fold
old_and_new_defs_per_dirty_files_changed_decls
~f:
begin
fun k v acc ->
if Relative_path.Set.mem files k then
FileInfo.merge_names v acc
else
acc
end
~init:FileInfo.empty_names
in
let ctx = Provider_utils.ctx_from_server_env env in
let master_deps =
old_and_new_defs_in_files dirty_master_files_changed_decls |> names_to_deps
in
let local_deps =
old_and_new_defs_in_files dirty_local_files_changed_decls |> names_to_deps
in
let get_files_to_recheck files_to_redeclare =
get_files_to_recheck
genv
env
old_naming_table
new_naming_table
old_and_new_defs_per_dirty_files
files_to_redeclare
in
let (env, to_recheck) =
if use_prechecked_files genv then
(* Start with dirty files and fan-out of local changes only *)
let to_recheck =
if genv.local_config.SLC.fetch_remote_old_decls then
get_files_to_recheck dirty_local_files_changed_decls
else
let deps = Typing_deps.add_all_deps env.deps_mode local_deps in
let files = Naming_provider.get_files ctx deps in
log_fanout_information deps files;
files
in
let env =
ServerPrecheckedFiles.init
env
~dirty_local_deps:local_deps
~dirty_master_deps:master_deps
in
(env, to_recheck)
else
(* Start with full fan-out immediately *)
let to_recheck =
if genv.local_config.SLC.fetch_remote_old_decls then
get_files_to_recheck dirty_files_changed_decls
else
let deps = Typing_deps.DepSet.union master_deps local_deps in
let deps = Typing_deps.add_all_deps env.deps_mode deps in
let files = Naming_provider.get_files ctx deps in
log_fanout_information deps files;
files
in
(env, to_recheck)
in
(* We still need to typecheck files whose declarations did not change *)
let to_recheck =
to_recheck
|> Relative_path.Set.union dirty_files_unchanged_decls
|> Relative_path.Set.union dirty_files_changed_decls
in
(* HACK: dump the fanout that we calculated and exit. This is for
`hh_fanout`'s regression testing vs. `hh_server`. This can be deleted once
we no longer worry about `hh_fanout` regressing vs. `hh_server`. Deletion
is tracked at T65464119. *)
if ServerArgs.dump_fanout genv.options then (
Hh_json.json_to_multiline_output
stdout
(Hh_json.JSON_Object
[
( "recheck_files",
Hh_json.JSON_Array
(Relative_path.Set.elements to_recheck
|> List.map ~f:Relative_path.to_absolute
|> List.map ~f:Hh_json.string_) );
]);
exit 0
) else
let env = { env with changed_files = dirty_files_changed_decls } in
let to_recheck =
if
not
genv.ServerEnv.local_config
.ServerLocalConfig.enable_type_check_filter_files
then
to_recheck
else
ServerCheckUtils.user_filter_type_check_files
~to_recheck
~reparsed:
(Relative_path.Set.union
dirty_files_unchanged_decls
dirty_files_changed_decls)
~is_ide_file:(fun _ -> false)
in
let (state_distance, state_age) =
match env.init_env.saved_state_delta with
| None -> (None, None)
| Some { distance; age } -> (Some distance, Some age)
in
let init_telemetry =
ServerEnv.Init_telemetry.make
ServerEnv.Init_telemetry.Init_lazy_dirty
(Telemetry.create ()
|> Telemetry.float_ ~key:"start_time" ~value:start_t
|> Telemetry.int_
~key:"dirty_master_files_unchanged_decls"
~value:
(Relative_path.Set.cardinal dirty_master_files_unchanged_decls)
|> Telemetry.int_
~key:"dirty_master_files_changed_decls"
~value:
(Relative_path.Set.cardinal dirty_master_files_changed_decls)
|> Telemetry.int_
~key:"dirty_local_files_unchanged_decls"
~value:
(Relative_path.Set.cardinal dirty_local_files_unchanged_decls)
|> Telemetry.int_
~key:"dirty_local_files_changed_decls"
~value:(Relative_path.Set.cardinal dirty_local_files_changed_decls)
|> Telemetry.int_
~key:"dirty_files_unchanged_decls"
~value:(Relative_path.Set.cardinal dirty_files_unchanged_decls)
|> Telemetry.int_
~key:"dirty_files_changed_decls"
~value:(Relative_path.Set.cardinal dirty_files_changed_decls)
|> Telemetry.int_
~key:"to_recheck"
~value:(Relative_path.Set.cardinal to_recheck)
|> Telemetry.int_opt ~key:"state_distance" ~value:state_distance
|> Telemetry.int_opt ~key:"state_age" ~value:state_age)
in
let result =
ServerInitCommon.defer_or_do_type_check
genv
env
(Relative_path.Set.elements to_recheck)
init_telemetry
t
~telemetry_label:"type_check_dirty"
~cgroup_steps
in
HackEventLogger.type_check_dirty
~start_t
~dirty_count:(Relative_path.Set.cardinal dirty_files_changed_decls)
~recheck_count:(Relative_path.Set.cardinal to_recheck);
Hh_logger.log
"ServerInit type_check_dirty count: %d. recheck count: %d"
(Relative_path.Set.cardinal dirty_files_changed_decls)
(Relative_path.Set.cardinal to_recheck);
result
let get_updates_exn ~(genv : ServerEnv.genv) ~(root : Path.t) :
Relative_path.Set.t * Watchman.clock option =
let start_t = Unix.gettimeofday () in
Hh_logger.log "Getting files changed while parsing...";
ServerNotifier.wait_until_ready genv.notifier;
let (changes, clock) = ServerNotifier.get_changes_async genv.notifier in
let files_changed_while_parsing =
match changes with
| ServerNotifier.StateEnter _
| ServerNotifier.StateLeave _
| ServerNotifier.Unavailable ->
Relative_path.Set.empty
| ServerNotifier.SyncChanges updates
| ServerNotifier.AsyncChanges updates ->
let root = Path.to_string root in
let filter p =
String.is_prefix p ~prefix:root && FindUtils.file_filter p
in
SSet.filter updates ~f:filter
|> Relative_path.relativize_set Relative_path.Root
in
ignore
(Hh_logger.log_duration
"Finished getting files changed while parsing"
start_t
: float);
Hh_logger.log "Watchclock: %s" (ServerEnv.show_clock clock);
HackEventLogger.changed_while_parsing_end start_t;
(files_changed_while_parsing, clock)
let initialize_naming_table
(progress_message : string)
?(fnl : Relative_path.t list option = None)
?(do_naming : bool = false)
~(cache_decls : bool)
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(cgroup_steps : CgroupProfiler.step_group) : ServerEnv.env * float =
ServerProgress.write "%s" progress_message;
let (get_next, count, t) =
match fnl with
| Some fnl ->
( MultiWorker.next genv.workers fnl,
Some (List.length fnl),
Unix.gettimeofday () )
| None ->
let (get_next, t) =
ServerInitCommon.directory_walk ~telemetry_label:"lazy.nt.indexing" genv
in
(get_next, None, t)
in
(* full init - too many files to trace all of them *)
let trace = false in
let (env, t) =
ServerInitCommon.parse_files_and_update_forward_naming_table
genv
env
~get_next
?count
t
~trace
~cache_decls
~telemetry_label:"lazy.nt.parsing"
~cgroup_steps
~worker_call:MultiWorker.wrapper
in
if do_naming then
ServerInitCommon
.update_reverse_naming_table_from_env_and_get_duplicate_name_errors
env
t
~telemetry_label:"lazy.nt.do_naming.naming"
~cgroup_steps
else
(env, t)
let write_symbol_info
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(cgroup_steps : CgroupProfiler.step_group)
(t : float) : ServerEnv.env * float =
let (env, t) =
ServerInitCommon
.update_reverse_naming_table_from_env_and_get_duplicate_name_errors
env
t
~telemetry_label:"write_symbol_info.naming"
~cgroup_steps
in
let namespace_map = ParserOptions.auto_namespace_map env.tcopt in
let paths = env.swriteopt.symbol_write_index_paths in
let paths_file = env.swriteopt.symbol_write_index_paths_file in
let referenced_file = env.swriteopt.symbol_write_referenced_out in
let include_hhi = env.swriteopt.symbol_write_include_hhi in
let ignore_paths = env.swriteopt.symbol_write_ignore_paths in
let incremental = env.swriteopt.symbol_write_sym_hash_in in
let gen_sym_hash = env.swriteopt.symbol_write_sym_hash_out in
let files =
if List.length paths > 0 || Option.is_some paths_file then
Symbol_indexable.from_options ~paths ~paths_file ~include_hhi
else
Symbol_indexable.from_naming_table
env.naming_table
~include_hhi
~ignore_paths
in
match env.swriteopt.symbol_write_index_paths_file_output with
| Some output ->
List.map
~f:(fun Symbol_indexable.{ path; _ } ->
Relative_path.storage_to_string path)
files
|> Out_channel.write_lines output;
(env, t)
| None ->
let out_dir =
match ServerArgs.write_symbol_info genv.options with
| None -> failwith "No write directory specified for --write-symbol-info"
| Some s -> s
in
(* Ensure we are writing to fresh files *)
let is_invalid =
try
if not (Sys.is_directory out_dir) then
true
else
Array.length (Sys.readdir out_dir) > 0
with
| _ ->
Sys_utils.mkdir_p out_dir;
false
in
if is_invalid then failwith "JSON write directory is invalid or non-empty";
Hh_logger.log "Indexing: %d files" (List.length files);
Hh_logger.log "Writing JSON to: %s" out_dir;
(match incremental with
| Some t -> Hh_logger.log "Reading hashtable from: %s" t
| None -> ());
let incremental =
Option.map ~f:(fun path -> Symbol_sym_hash.read ~path) incremental
in
let ctx = Provider_utils.ctx_from_server_env env in
let root_path = env.swriteopt.symbol_write_root_path in
let hhi_path = env.swriteopt.symbol_write_hhi_path in
let ownership = env.swriteopt.symbol_write_ownership in
Hh_logger.log "Ownership mode: %b" ownership;
Hh_logger.log "Gen_sym_hash: %b" gen_sym_hash;
Symbol_entrypoint.go
genv.workers
ctx
~referenced_file
~namespace_map
~gen_sym_hash
~ownership
~out_dir
~root_path
~hhi_path
~incremental
~files;
(env, t)
let write_symbol_info_full_init
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(cgroup_steps : CgroupProfiler.step_group) : ServerEnv.env * float =
let (env, t) =
initialize_naming_table
~cache_decls:true
"write symbol info initialization"
genv
env
cgroup_steps
in
write_symbol_info genv env cgroup_steps t
(* If we fail to load a saved state, fall back to typechecking everything *)
let full_init
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(cgroup_steps : CgroupProfiler.step_group) : ServerEnv.env * float =
let init_telemetry =
ServerEnv.Init_telemetry.make
ServerEnv.Init_telemetry.Init_lazy_full
(Telemetry.create ()
|> Telemetry.float_ ~key:"start_time" ~value:(Unix.gettimeofday ()))
in
let is_check_mode = ServerArgs.check_mode genv.options in
let existing_name_count =
Naming_table.fold env.naming_table ~init:0 ~f:(fun _ _ i -> i + 1)
in
if existing_name_count > 0 then begin
let desc = "full_init_naming_not_empty" in
Hh_logger.log
"INVARIANT_VIOLATION_BUG [%s] count=%d"
desc
existing_name_count;
HackEventLogger.invariant_violation_bug desc ~data_int:existing_name_count
end;
Hh_logger.log "full init";
let (env, t) =
initialize_naming_table
~do_naming:true
~cache_decls:true
"full initialization"
genv
env
cgroup_steps
in
ServerInitCommon.validate_no_errors env.errorl;
if not is_check_mode then
SearchServiceRunner.update_fileinfo_map
env.naming_table
~source:SearchUtils.Init;
let fnl = Naming_table.get_files env.naming_table in
ServerInitCommon.defer_or_do_type_check
genv
env
fnl
init_telemetry
t
~telemetry_label:"lazy.full.type_check"
~cgroup_steps
let parse_only_init
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(cgroup_steps : CgroupProfiler.step_group) : ServerEnv.env * float =
initialize_naming_table
~cache_decls:false
"parse-only initialization"
genv
env
cgroup_steps
let post_saved_state_initialization
~(do_indexing : bool)
~(genv : ServerEnv.genv)
~(env : ServerEnv.env)
~(state_result : loaded_info * Relative_path.Set.t * Watchman.clock option)
(cgroup_steps : CgroupProfiler.step_group) : ServerEnv.env * float =
let ((loaded_info : ServerInitTypes.loaded_info), changed_while_parsing, clock)
=
state_result
in
let trace = genv.local_config.SLC.trace_parsing in
let {
naming_table_fallback_fn;
dirty_naming_files;
dirty_local_files;
dirty_master_files;
old_naming_table;
mergebase_rev;
mergebase;
old_errors;
deptable_fn;
naming_table_fn = _;
corresponding_rev = _;
saved_state_delta;
naming_table_manifold_path;
} =
loaded_info
in
if genv.local_config.SLC.hg_aware then
if ServerArgs.is_using_precomputed_saved_state genv.options then begin
HackEventLogger.tried_to_be_hg_aware_with_precomputed_saved_state_warning
();
Hh_logger.log
"Warning: disabling restart on rebase (server was started with precomputed saved-state)"
end else
Option.iter mergebase_rev ~f:ServerRevisionTracker.initialize;
let env =
{
env with
init_env =
{
env.init_env with
mergebase;
naming_table_manifold_path;
saved_state_delta;
};
deps_mode =
(match ServerArgs.save_64bit genv.options with
| Some new_edges_dir ->
let human_readable_dep_map_dir =
ServerArgs.save_human_readable_64bit_dep_map genv.options
in
Typing_deps_mode.SaveToDiskMode
{
graph = Some deptable_fn;
new_edges_dir;
human_readable_dep_map_dir;
}
| None -> Typing_deps_mode.InMemoryMode (Some deptable_fn));
}
in
Hh_logger.log
"Number of files with errors: %d"
(Relative_path.Set.cardinal old_errors);
(* Load and parse PACKAGES.toml if it exists at the root. *)
let (errors, package_info) = PackageConfig.load_and_parse () in
let tcopt =
{ env.ServerEnv.tcopt with GlobalOptions.tco_package_info = package_info }
in
let env =
ServerEnv.{ env with tcopt; errorl = Errors.merge env.errorl errors }
in
(***********************************************************
INVARIANTS.
These might help make sense of the rest of the function. *)
(* Invariant: old_naming_table is Backed, and has empty delta *)
begin
match Naming_table.get_backed_delta_TEST_ONLY old_naming_table with
| None ->
HackEventLogger.invariant_violation_bug
"saved-state naming table not backed"
| Some { Naming_sqlite.file_deltas; _ }
when not (Relative_path.Map.is_empty file_deltas) ->
HackEventLogger.invariant_violation_bug
"saved-state naming table has deltas"
| Some _ -> ()
end;
(* Invariant: env.naming_table is Unbacked and empty *)
begin
match Naming_table.get_backed_delta_TEST_ONLY env.naming_table with
| None -> ()
| Some _ ->
HackEventLogger.invariant_violation_bug
"ServerLazyInit env.naming_table is backed"
end;
let count =
Naming_table.fold env.naming_table ~init:0 ~f:(fun _ _ acc -> acc + 1)
in
if count > 0 then
HackEventLogger.invariant_violation_bug
"ServerLazyInit env.naming_table is non-empty"
~data_int:count;
(* Invariant: env.disk_needs_parsing and env.needs_recheck are empty *)
if not (Relative_path.Set.is_empty env.disk_needs_parsing) then
HackEventLogger.invariant_violation_bug
"SeverLazyInit env.disk_needs_parsing is non-empty";
if not (Relative_path.Set.is_empty env.needs_recheck) then
HackEventLogger.invariant_violation_bug
"SeverLazyInit env.needs_recheck is non-empty";
(***********************************************************
NAMING TABLE.
Plan: we'll adjust the forward and reverse naming table to reflect
changes to files which changed since the saved-state. We'll also
reflect changes in files which had [phase=Errors.(Naming|Parsing)].
Notionally that's because our current mechanism for handling
duplicate-name-errors requires all affected files to go through
the "update reverse naming table" procedure. (but it's redundant
because, elsewhere, we don't allow saved-state-generation in case
of naming errors...)
The actual implementation is confusing because it stores fragments
of naming-table in places you wouldn't expect:
1. [old_naming_table], which we got from the saved-state, is a [NamingTable.t]
that's "backed" i.e. it reflects just the sqlite file plus a delta, initially empty.
2. [env.naming_table] starts out as [Naming_table.empty] as it was created in
[ServerMain.setup_server]. We will add to it the forward-naming-table FileInfo.t
for all files discussed above, [parsing_files]
3. The reverse naming-table is made up of global mutable shmem delta with
eventual fallback to sqlite. We will write into that delta the reverse-names
that arise from the [parsing_files]. The same step also gathers any
duplicate-name errors.
4. Finally we'll merge the [env.naming_table] (which covers only [parsing_files] at the moment)
into [old_naming_table] (which represents the sqlite file), and store the result
back in [env.naming_table]. At this point the naming-table, forward and reverse,
is complete. *)
let t = Unix.gettimeofday () in
let naming_files =
List.fold
~init:Relative_path.Set.empty
~f:Relative_path.Set.union
[
dirty_naming_files;
dirty_master_files;
dirty_local_files;
changed_while_parsing;
]
|> Relative_path.Set.filter ~f:FindUtils.path_filter
in
( CgroupProfiler.step_start_end cgroup_steps "remove fixmes"
@@ fun _cgroup_step -> Fixme_provider.remove_batch naming_files );
(* Parse dirty files only *)
let (env, t) =
ServerInitCommon.parse_files_and_update_forward_naming_table
genv
env
~get_next:
(MultiWorker.next
genv.workers
(Relative_path.Set.elements naming_files))
~count:(Relative_path.Set.cardinal naming_files)
t
~trace
~cache_decls:false (* Don't overwrite old decls loaded from saved state *)
~telemetry_label:"post_ss1.parsing"
~cgroup_steps
~worker_call:MultiWorker.wrapper
in
SearchServiceRunner.update_fileinfo_map
env.naming_table
~source:SearchUtils.TypeChecker;
let ctx = Provider_utils.ctx_from_server_env env in
let t =
remove_items_from_reverse_naming_table_or_build_new_reverse_naming_table
ctx
old_naming_table
naming_files
naming_table_fallback_fn
t
~cgroup_steps
in
if do_indexing then
write_symbol_info genv env cgroup_steps t
else
(* Do global naming on all dirty files *)
let (env, t) =
ServerInitCommon
.update_reverse_naming_table_from_env_and_get_duplicate_name_errors
env
t
~telemetry_label:"post_ss1.naming"
~cgroup_steps
in
ServerInitCommon.validate_no_errors env.errorl;
let new_naming_table = env.naming_table in
let env =
{
env with
clock;
naming_table = Naming_table.combine old_naming_table env.naming_table;
disk_needs_parsing = Relative_path.Set.empty;
needs_recheck = Relative_path.Set.union env.needs_recheck old_errors;
}
in
(***********************************************************
FANOUT.
What files should be checked?
We've already said that files-with-errors from the dirty saved state must be
rechecked. And we've already produced "duplicate name" errors if needed from
all the changed files. The question remaining is, what fanout should be checked?
Here, for each changed file, we compare its hash to the one saved
in the saved state. If the hashes are the same, then the declarations
on the file have not changed and we only need to retypecheck that file,
not all of its dependencies.
We call these files "similar" to their previous versions.
A similar check is also made later, inside [calculate_fanout_and_defer_or_do_type_check]
when it calls [get_files_to_recheck] which calls [Decl_redecl_service.redo_type_decl].
That obtains old decls from an online service and uses that for decl-diffing.
Anyway, the effect of this phase is to calculate the fanout, using decl-diffing
and the prechecked algorithm where applicable. Then, the fanout files are
combined into [env.needs_recheck], as a way of deferring the check until
the first iteration of ServerTypeCheck.
*)
let partition_unchanged_hash dirty_files =
Relative_path.Set.partition
(fun f ->
let old_info = Naming_table.get_file_info old_naming_table f in
let new_info = Naming_table.get_file_info env.naming_table f in
match (old_info, new_info) with
| (Some x, Some y) ->
(match (x.FileInfo.hash, y.FileInfo.hash) with
| (Some x, Some y) -> Int64.equal x y
| _ -> false)
| _ -> false)
dirty_files
in
let (dirty_master_files_unchanged_decls, dirty_master_files_changed_decls) =
partition_unchanged_hash dirty_master_files
in
let (dirty_local_files_unchanged_decls, dirty_local_files_changed_decls) =
partition_unchanged_hash dirty_local_files
in
let (env, t) =
calculate_fanout_and_defer_or_do_type_check
genv
env
~old_naming_table
~new_naming_table
~dirty_master_files_unchanged_decls
~dirty_master_files_changed_decls
~dirty_local_files_unchanged_decls
~dirty_local_files_changed_decls
t
cgroup_steps
in
(env, t)
let saved_state_init
~(do_indexing : bool)
~(load_state_approach : load_state_approach)
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(root : Path.t)
(cgroup_steps : CgroupProfiler.step_group) :
( (ServerEnv.env * float) * (loaded_info * Relative_path.Set.t),
load_state_error )
result =
let t = Unix.gettimeofday () in
let attempt_fix = genv.local_config.SLC.attempt_fix_credentials in
let () =
match Security.check_credentials ~attempt_fix with
| Ok success ->
HackEventLogger.credentials_check_end
(Printf.sprintf "saved_state_init: %s" (Security.show_success success))
t
| Error error ->
let kind = Security.to_error_kind_string error in
let message = Security.to_error_message_string error in
Hh_logger.log "Error kind: %s\nError message: %s" kind message;
HackEventLogger.credentials_check_failure
(Printf.sprintf "saved_state_init: [%s]" kind)
t
in
ServerProgress.write "loading saved state";
let ctx = Provider_utils.ctx_from_server_env env in
let do_ () : (loaded_info, load_state_error) result =
let state_result =
CgroupProfiler.step_start_end cgroup_steps "load saved state"
@@ fun _cgroup_step ->
match load_state_approach with
| Precomputed info ->
Ok (use_precomputed_state_exn ~root genv ctx info cgroup_steps)
| Load_state_natively -> download_and_load_state_exn ~genv ~ctx ~root
in
state_result
in
let t = Unix.gettimeofday () in
let state_result =
try
match do_ () with
| Error error -> Error error
| Ok loaded_info ->
let (changed_while_parsing, clock) = get_updates_exn ~genv ~root in
Ok (loaded_info, changed_while_parsing, clock)
with
| exn ->
let e = Exception.wrap exn in
Error (Load_state_unhandled_exception e)
in
HackEventLogger.saved_state_download_and_load_done
~load_state_approach:(show_load_state_approach load_state_approach)
~success:(Result.is_ok state_result)
~state_result:
(match state_result with
| Error _ -> None
| Ok (i, _, _) -> Some (show_loaded_info i))
t;
match state_result with
| Error err -> Error err
| Ok (loaded_info, changed_while_parsing, clock) ->
ServerProgress.write "loading saved state succeeded";
Hh_logger.log "Watchclock: %s" (ServerEnv.show_clock clock);
let (env, t) =
post_saved_state_initialization
~do_indexing
~state_result:(loaded_info, changed_while_parsing, clock)
~env
~genv
cgroup_steps
in
Ok ((env, t), (loaded_info, changed_while_parsing)) |
OCaml Interface | hhvm/hphp/hack/src/server/serverLazyInit.mli | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open ServerInitTypes
val full_init :
ServerEnv.genv ->
ServerEnv.env ->
CgroupProfiler.step_group ->
ServerEnv.env * float
val parse_only_init :
ServerEnv.genv ->
ServerEnv.env ->
CgroupProfiler.step_group ->
ServerEnv.env * float
val write_symbol_info_full_init :
ServerEnv.genv ->
ServerEnv.env ->
CgroupProfiler.step_group ->
ServerEnv.env * float
(** If [do_indexing] is true, call the Glean indexer after init; otherwise typecheck. *)
val saved_state_init :
do_indexing:bool ->
load_state_approach:load_state_approach ->
ServerEnv.genv ->
ServerEnv.env ->
Path.t ->
CgroupProfiler.step_group ->
( (ServerEnv.env * float) * (loaded_info * files_changed_while_parsing),
load_state_error )
result |
OCaml | hhvm/hphp/hack/src/server/serverLint.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open ServerEnv
open Utils
module Hack_bucket = Bucket
open Hh_prelude
module RP = Relative_path
(* For linting from stdin, we pass the file contents in directly because there's
no other way to get ahold of the contents of stdin from a worker process. But
when linting from disk, we want each individual worker to read the file off disk
by itself, so that we don't need to read all the files at the beginning and hold
them all in memory. *)
type lint_target = {
filename: RP.t;
contents: string option;
}
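(* Illustrative only, per the comment above: an stdin-style target carries its
   contents inline, while a disk-style target leaves [contents] as None so the
   worker reads the file from disk itself (as [go] and [go_stdin] below do). *)
let _stdin_style_target filename contents =
  { filename = RP.create RP.Root filename; contents = Some contents }
let _disk_style_target filename =
  { filename = RP.create RP.Root filename; contents = None }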
let lint tcopt _acc (files_with_contents : lint_target list) =
List.fold_left
files_with_contents
~f:
begin
fun acc { filename; contents } ->
let (errs, ()) =
Lints_core.do_ (fun () ->
Errors.ignore_ (fun () ->
let contents =
match contents with
| Some contents -> contents
| None ->
Sys_utils.cat (Relative_path.to_absolute filename)
in
Linting_main.lint tcopt filename contents))
in
errs @ acc
end
~init:[]
let lint_and_filter tcopt code acc fnl =
let lint_errs = lint tcopt acc fnl in
List.filter lint_errs ~f:(fun err -> Lints_core.get_code err = code)
let lint_all genv ctx code =
let next =
compose
(fun lst ->
lst
|> List.map ~f:(fun fn ->
{ filename = RP.create RP.Root fn; contents = None })
|> Hack_bucket.of_list)
(genv.indexer FindUtils.is_hack)
in
let errs =
MultiWorker.call
genv.workers
~job:(lint_and_filter ctx code)
~merge:List.rev_append
~neutral:[]
~next
in
List.map errs ~f:Lints_core.to_absolute
let create_rp : string -> RP.t = Relative_path.create Relative_path.Root
let prepare_errors_for_output errs =
errs
|> List.sort ~compare:(fun x y ->
Pos.compare (Lints_core.get_pos x) (Lints_core.get_pos y))
|> List.map ~f:Lints_core.to_absolute
let go genv ctx fnl =
let files_with_contents =
List.map fnl ~f:(fun filename ->
{ filename = create_rp filename; contents = None })
in
let errs =
if List.length files_with_contents > 10 then
MultiWorker.call
genv.workers
~job:(lint ctx)
~merge:List.rev_append
~neutral:[]
~next:(MultiWorker.next genv.workers files_with_contents)
else
lint ctx [] files_with_contents
in
prepare_errors_for_output errs
let go_stdin ctx ~(filename : string) ~(contents : string) =
let file_with_contents =
{ filename = create_rp filename; contents = Some contents }
in
lint ctx [] [file_with_contents] |> prepare_errors_for_output |
OCaml | hhvm/hphp/hack/src/server/serverLintTypes.ml | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Lint = Lints_core
type result = Pos.absolute Lint.t list
let output_json ?(pretty = false) oc el =
let errors_json = List.map el ~f:Lint.to_json in
let res =
Hh_json.JSON_Object
[
("errors", Hh_json.JSON_Array errors_json);
("version", Hh_json.JSON_String Hh_version.version);
]
in
Out_channel.output_string oc (Hh_json.json_to_string ~pretty res);
Out_channel.flush oc
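(* Shape of the value written above (illustrative):
   { "errors": [ ...one object per lint, as produced by Lint.to_json... ],
     "version": "<Hh_version.version>" }
   The per-lint object layout is owned by Lints_core and not reproduced here. *)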
let output_text oc el format =
(* Essentially the same as type error output, except that we only have one
* message per error, and no additional 'typing reasons' *)
(if List.is_empty el then
Out_channel.output_string oc "No lint errors!\n"
else
let f =
match format with
| Errors.Context -> Lint.to_contextual_string
| Errors.Raw
| Errors.Plain ->
Lint.to_string
| Errors.Highlighted -> Lint.to_highlighted_string
in
let sl = List.map el ~f in
List.iter sl ~f:(fun s -> Printf.fprintf oc "%s\n%!" s));
Out_channel.flush oc |
OCaml | hhvm/hphp/hack/src/server/serverLocalConfig.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Config_file.Getters
module Hack_bucket = Bucket
open Hh_prelude
open Option.Monad_infix
module Bucket = Hack_bucket
let output_config_section title output_config =
Printf.eprintf "** %s:\n%!" title;
output_config ();
Printf.eprintf "\n%!";
()
module Watchman = struct
type t = {
(* use_watchman *)
enabled: bool;
    debug_logging: bool;
    (* in seconds *)
    init_timeout: int;
sockname: string option;
subscribe: bool;
(* in seconds *)
synchronous_timeout: int;
}
let default =
{
debug_logging = false;
enabled = false;
(* buck and hgwatchman use a 10 second timeout, too *)
init_timeout = 10;
sockname = None;
subscribe = false;
synchronous_timeout = 120;
}
let load ~current_version ~default config =
let use_watchman =
bool_if_min_version
"use_watchman"
~default:default.enabled
~current_version
config
in
let enabled =
bool_if_min_version
"watchman_enabled"
~default:use_watchman
~current_version
config
in
let init_timeout =
int_ "watchman_init_timeout" ~default:default.init_timeout config
in
let sockname = string_opt "watchman_sockname" config in
let subscribe =
bool_if_min_version
"watchman_subscribe_v2"
~default:default.subscribe
~current_version
config
in
let synchronous_timeout =
int_
"watchman_synchronous_timeout"
~default:default.synchronous_timeout
config
in
let debug_logging =
bool_if_min_version
"watchman_debug_logging"
~default:default.debug_logging
~current_version
config
in
{
debug_logging;
enabled;
init_timeout;
sockname;
subscribe;
synchronous_timeout;
}
end
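(* Sketch of the corresponding hh.conf keys read by [Watchman.load] above
   (illustrative values; assumes the usual key = value hh.conf syntax):
     use_watchman = true
     watchman_subscribe_v2 = true
     watchman_init_timeout = 10
     watchman_synchronous_timeout = 120
     watchman_debug_logging = false *)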
(** Allows typechecking only a certain quantile of the workload. *)
type quantile = {
count: int;
(** The number of quantiles we want.
If this is n, we'll divide the workload in n groups. *)
index: int;
(** The index of the subgroup we'll process.
If this is i, we'll typecheck group number i out of the n groups.
      This should be in the interval [0; n]. *)
}
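(* Worked example: { count = 4; index = 1 } divides the workload into four equal
   groups and typechecks only the second one (groups are 0-indexed). *)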
type t = {
saved_state: GlobalOptions.saved_state;
min_log_level: Hh_logger.Level.t;
attempt_fix_credentials: bool;
(** Indicates whether we attempt to fix the credentials if they're broken *)
log_categories: string list;
log_large_fanouts_threshold: int option;
(** If a fanout is greater than this value, log stats about that fanout. *)
log_init_proc_stack_also_on_absent_from: bool;
(** A few select events like to log the init_proc_stack, but it's voluminous!
The default behavior is to log init_proc_stack only when "--from" is absent.
This flag lets us log it also when "--from" is present. *)
experiments: string list;
(** the list of experiments from the experiments config *)
experiments_config_meta: string; (** a free-form diagnostic string *)
use_saved_state: bool;
(** should we attempt to load saved-state? (subject to further options) *)
use_saved_state_when_indexing: bool;
(** should we attempt to load saved-state when running glean indexing? *)
require_saved_state: bool;
(** if attempting saved-state, should we fail upon failure? *)
load_state_natively: bool;
(** make hh_server query and download saved state. *)
load_state_natively_download_timeout: int; (** in seconds *)
load_state_natively_dirty_files_timeout: int; (** in seconds *)
type_decl_bucket_size: int;
extend_defs_per_file_bucket_size: int;
enable_on_nfs: bool;
enable_fuzzy_search: bool;
lazy_parse: bool;
lazy_init: bool;
max_purgatory_clients: int;
(** Monitor: Limit the number of clients that can sit in purgatory waiting
for a server to be started because we don't want this to grow unbounded. *)
search_chunk_size: int;
io_priority: int;
cpu_priority: int;
shm_dirs: string list;
shm_use_sharded_hashtbl: bool;
shm_cache_size: int;
(** Maximum shared memory cache size for evictable data.
If this is set to a negative value, eviction is disabled. *)
max_workers: int option;
use_dummy_informant: bool; (** See Informant. *)
informant_min_distance_restart: int;
use_full_fidelity_parser: bool;
interrupt_on_watchman: bool;
interrupt_on_client: bool;
trace_parsing: bool;
prechecked_files: bool;
enable_type_check_filter_files: bool;
(** Let the user configure which files to type check and
which files to ignore. This flag is not expected to be
rolled out broadly, rather it is meant to be used by
power users only. *)
ide_max_num_decls: int; (** tuning of clientIdeDaemon local cache *)
ide_max_num_shallow_decls: int; (** tuning of clientIdeDaemon local cache *)
ide_symbolindex_search_provider: string;
(** like [symbolindex_search_provider] but for IDE *)
predeclare_ide: bool;
max_typechecker_worker_memory_mb: int option;
(** if set, the worker will stop early at the end of a file if its heap exceeds this number *)
use_max_typechecker_worker_memory_for_decl_deferral: bool;
(** if set, the worker will perform the same check as for [max_typechecker_worker_memory_mb] after each decl
and, if over the limit, will defer *)
longlived_workers: bool;
hg_aware: bool;
hg_aware_parsing_restart_threshold: int;
hg_aware_redecl_restart_threshold: int;
hg_aware_recheck_restart_threshold: int;
ide_parser_cache: bool;
store_decls_in_saved_state: bool;
(** When enabled, save hot class declarations (for now, specified in a special
file in the repository) when generating a saved state. *)
idle_gc_slice: int;
(** Size of Gc.major_slice to be performed when server is idle. 0 to disable *)
populate_member_heaps: bool;
(** Populate the member signature heaps.
If disabled, instead load lazily from shallow classes. *)
fetch_remote_old_decls: bool;
(** Option to fetch old decls from remote decl store *)
skip_hierarchy_checks: bool;
(** Skip checks on hierarchy e.g. overrides, require extend, etc.
Set to true only for debugging purposes! *)
skip_tast_checks: bool;
(** Skip checks implemented using TAST visitors.
Set to true only for debugging purposes! *)
num_local_workers: int option;
(** If None, only the type check delegate's logic will be used.
If the delegate fails to type check, the typing check service as a whole
will fail. *)
defer_class_declaration_threshold: int option;
(** If set, defers class declarations after N lazy declarations; if not set,
always lazily declares classes not already in cache. *)
produce_streaming_errors: bool;
(** whether hh_server should write errors to errors.bin file *)
consume_streaming_errors: bool;
(** whether hh_client should read errors from errors.bin file *)
rust_provider_backend: bool;
(** Use Provider_backend.Rust_provider_backend as the global provider
* backend, servicing File_provider, Naming_provider, and Decl_provider
* using the hackrs implementation. *)
rust_elab: bool;
(** Use the Rust implementation of naming elaboration and NAST checks. *)
naming_sqlite_path: string option;
(** Enables the reverse naming table to fall back to SQLite for queries. *)
enable_naming_table_fallback: bool;
symbolindex_search_provider: string;
(** Selects a search provider for autocomplete and symbol search; see also [ide_symbolindex_search_provider] *)
symbolindex_quiet: bool;
symbolindex_file: string option;
tico_invalidate_files: bool;
(** Allows hh_server to invalidate units in hhvm based on local changes *)
tico_invalidate_smart: bool; (** Use finer grain hh_server dependencies *)
per_file_profiling: HackEventLogger.PerFileProfilingConfig.t;
  memtrace_dir: string option;
      (** turns on memtrace .ctf writes to this directory *)
go_to_implementation: bool;
(** Allows the IDE to show the 'find all implementations' button *)
allow_unstable_features: bool;
(** Allows unstable features to be enabled within a file via the '__EnableUnstableFeatures' attribute *)
watchman: Watchman.t;
workload_quantile: quantile option;
      (** Allows typechecking only a certain quantile of the workload. *)
rollout_group: string option;
(** A string from hh.conf, written to HackEventLogger telemetry. Before it got
into here, [t], it was first used as a lookup in ServerLocalConfigKnobs.
Intended meaning: what class of user is running hh_server, hence what experiments
should they be subject to. *)
specify_manifold_api_key: bool;
remote_old_decls_no_limit: bool;
(** Remove remote old decl fetching limit *)
cache_remote_decls: bool;
(** Configure whether fetch and cache remote decls *)
disable_naming_table_fallback_loading: bool;
(** Stop loading from OCaml marshalled naming table if sqlite table is missing. *)
use_type_alias_heap: bool; (** optimize type alias expansions *)
override_load_state_natively: bool;
(** Overrides load_state_natively on Sandcastle when true *)
use_server_revision_tracker_v2: bool;
(** control serverRevisionTracker.ml watchman subscription event tracking *)
use_hh_distc_instead_of_hulk: bool;
(** use hh_distc instead of hulk for remote typechecking *)
hh_distc_fanout_threshold: int;
(** POC: @bobren - fanout threshold where we trigger hh_distc *)
ide_load_naming_table_on_disk: bool;
(** POC: @nzthomas - allow ClientIdeDaemon to grab any naming table from disk before trying Watchman / Manifold *)
ide_naming_table_update_threshold: int;
(** POC: @nzthomas, if clientIDEDaemon is loading a naming table from disk instead of Manifold, set a globalrev distance threshold *)
ide_batch_process_changes: bool;
      (** clientIdeDaemon should synchronously process file changes, both for saved-state changes and incremental changes *)
dump_tast_hashes: bool;
      (** Dump tast hashes into /tmp/hh_server/tast_hashes *)
use_compressed_dep_graph: bool;
(** POC: @bobren, use new fancy compressed dep graph that is 25% the size of the old one *)
glean_v2: bool; (** use newer glean database schema *)
}
let default =
{
saved_state = GlobalOptions.default_saved_state;
min_log_level = Hh_logger.Level.Info;
attempt_fix_credentials = false;
log_categories = [];
log_large_fanouts_threshold = None;
log_init_proc_stack_also_on_absent_from = false;
experiments = [];
experiments_config_meta = "";
use_saved_state = false;
use_saved_state_when_indexing = false;
require_saved_state = false;
load_state_natively = false;
load_state_natively_download_timeout = 60;
load_state_natively_dirty_files_timeout = 200;
type_decl_bucket_size = 1000;
extend_defs_per_file_bucket_size = 2000;
enable_on_nfs = false;
enable_fuzzy_search = true;
lazy_parse = false;
lazy_init = false;
max_purgatory_clients = 400;
search_chunk_size = 0;
io_priority = 7;
cpu_priority = 10;
shm_dirs = [GlobalConfig.shm_dir; GlobalConfig.tmp_dir];
shm_use_sharded_hashtbl = false;
shm_cache_size = -1;
max_workers = None;
use_dummy_informant = true;
informant_min_distance_restart = 100;
use_full_fidelity_parser = true;
interrupt_on_watchman = false;
interrupt_on_client = false;
trace_parsing = false;
prechecked_files = false;
enable_type_check_filter_files = false;
ide_max_num_decls = 5000;
ide_max_num_shallow_decls = 10000;
predeclare_ide = false;
max_typechecker_worker_memory_mb = None;
use_max_typechecker_worker_memory_for_decl_deferral = false;
longlived_workers = false;
hg_aware = false;
hg_aware_parsing_restart_threshold = 0;
hg_aware_redecl_restart_threshold = 0;
hg_aware_recheck_restart_threshold = 0;
ide_parser_cache = false;
store_decls_in_saved_state = false;
idle_gc_slice = 0;
populate_member_heaps = true;
fetch_remote_old_decls = false;
skip_hierarchy_checks = false;
skip_tast_checks = false;
num_local_workers = None;
defer_class_declaration_threshold = None;
produce_streaming_errors = true;
consume_streaming_errors = false;
rust_elab = false;
rust_provider_backend = false;
naming_sqlite_path = None;
enable_naming_table_fallback = false;
symbolindex_search_provider = "SqliteIndex";
(* the code actually doesn't use this default for ide_symbolindex_search_provider;
it defaults to whatever was computed for symbolindex_search_provider. *)
ide_symbolindex_search_provider = "SqliteIndex";
symbolindex_quiet = false;
symbolindex_file = None;
tico_invalidate_files = false;
tico_invalidate_smart = false;
per_file_profiling = HackEventLogger.PerFileProfilingConfig.default;
memtrace_dir = None;
go_to_implementation = true;
allow_unstable_features = false;
watchman = Watchman.default;
workload_quantile = None;
rollout_group = None;
specify_manifold_api_key = false;
remote_old_decls_no_limit = false;
cache_remote_decls = false;
disable_naming_table_fallback_loading = false;
use_type_alias_heap = false;
override_load_state_natively = false;
use_server_revision_tracker_v2 = false;
use_hh_distc_instead_of_hulk = true;
use_compressed_dep_graph = false;
(* Cutoff derived from https://fburl.com/scuba/hh_server_events/jvja9qns *)
hh_distc_fanout_threshold = 500_000;
ide_load_naming_table_on_disk = true;
ide_naming_table_update_threshold = 1000;
ide_batch_process_changes = true;
dump_tast_hashes = false;
glean_v2 = false;
}
let system_config_path =
let dir =
try Sys.getenv "HH_LOCALCONF_PATH" with
| _ -> BuildOptions.system_config_path
in
Filename.concat dir "hh.conf"
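(* Example resolution (hypothetical path): with HH_LOCALCONF_PATH=/opt/hh_conf in the
   environment, the config is read from /opt/hh_conf/hh.conf; otherwise it falls back to
   Filename.concat BuildOptions.system_config_path "hh.conf". *)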
let apply_overrides ~silent ~current_version ~config ~from ~overrides =
(* We'll apply CLI overrides now at the start so that JustKnobs and experiments_config
can be informed about them, e.g. "--config rollout_group=foo" will be able
to guide the manner in which JustKnobs picks up values, and "--config use_justknobs=false"
will be able to disable it. Don't worry though -- we'll apply CLI overrides again at the end,
so they overwrite any changes brought by JustKnobs and experiments_config. *)
let config =
Config_file.apply_overrides ~config ~overrides ~log_reason:None
in
(* Now is the time for JustKnobs *)
let use_justknobs = bool_opt "use_justknobs" config in
let config =
match (use_justknobs, Sys_utils.deterministic_behavior_for_tests ()) with
| (Some false, _)
(* --config use_justknobs=false (or in hh.conf) will force JK off, regardless of anything else *)
| (None, true)
(* if use_justknobs isn't set, HH_TEST_MODE=1 (used in tests) will still turn JK off *)
->
config
| (Some true, _)
(* --config use_justknobs=true (or in hh.conf) will force JK on, regardless of anything else *)
| (None, false)
(* if use_justknobs isn't set, then HH_TEST_MODE unset or =0 will leave JK on *)
->
ServerLocalConfigKnobs.apply_justknobs_overrides ~silent config ~from
in
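  (* Decision table for the match above:
       use_justknobs = Some false              -> JustKnobs overrides never applied
       use_justknobs = Some true               -> JustKnobs overrides always applied
       use_justknobs unset, HH_TEST_MODE set   -> not applied (deterministic tests)
       use_justknobs unset, otherwise          -> applied *)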
(* Now is the time for experiments_config overrides *)
let experiments_enabled =
bool_if_min_version
"experiments_config_enabled"
~default:false
~current_version
config
in
let (experiments_meta, config) =
if experiments_enabled then begin
Disk.mkdir_p GlobalConfig.tmp_dir;
let dir =
string_ "experiments_config_path" ~default:GlobalConfig.tmp_dir config
in
let owner = Sys_utils.get_primary_owner () in
let file =
Filename.concat dir (Printf.sprintf "hh.%s.experiments" owner)
in
let update =
bool_if_min_version
"experiments_config_update"
~default:false
~current_version
config
in
let ttl =
float_of_int
(int_ "experiments_config_ttl_seconds" ~default:86400 config)
in
let source = string_opt "experiments_config_source" config in
let meta =
if update then
match Experiments_config_file.update ~silent ~file ~source ~ttl with
| Ok meta -> meta
| Error message -> message
else
"Updating experimental config not enabled"
in
if Disk.file_exists file then
(* Apply the experiments overrides *)
let experiment_overrides = Config_file.parse_local_config file in
let config =
Config_file.apply_overrides
~config
~overrides:experiment_overrides
~log_reason:(Option.some_if (not silent) "Experiment_overrides")
in
(meta, config)
else
("Experimental config not found on disk", config)
end else
("Experimental config not enabled", config)
in
(* Finally, reapply the CLI overrides, since they should take
precedence over the experiments_config and JustKnobs. *)
let config =
Config_file.apply_overrides
~config
~overrides
~log_reason:(Option.some_if (not silent) "--config")
in
(experiments_meta, config)
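(* Net precedence after [apply_overrides], lowest to highest:
   hh.conf < JustKnobs < experiments_config file < --config CLI overrides.
   Illustrative example: if hh.conf sets prechecked_files=false and the command line passes
   --config prechecked_files=true, the CLI value wins even if an experiment also sets it. *)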
let load_
system_config_path
~silent
~current_version
~current_rolled_out_flag_idx
~deactivate_saved_state_rollout
~from
overrides : t =
let config = Config_file.parse_local_config system_config_path in
let (experiments_config_meta, config) =
apply_overrides ~silent ~current_version ~config ~from ~overrides
in
(if not silent then
output_config_section "Combined config" @@ fun () ->
Config_file.print_to_stderr config);
let experiments =
string_list "experiments" ~default:default.experiments config
in
let log_categories =
string_list "log_categories" ~default:default.log_categories config
in
let log_large_fanouts_threshold =
int_opt "log_large_fanouts_threshold" config
in
let log_init_proc_stack_also_on_absent_from =
bool_ "log_init_proc_stack_also_on_absent_from" ~default:false config
in
let min_log_level =
match
Hh_logger.Level.of_enum_string
(String.lowercase
(string_
"min_log_level"
~default:(Hh_logger.Level.to_enum_string default.min_log_level)
config))
with
| Some level -> level
| None -> Hh_logger.Level.Debug
in
let use_saved_state =
bool_if_min_version
"use_mini_state"
~default:default.use_saved_state
~current_version
config
in
let use_saved_state_when_indexing =
bool_if_min_version
"use_mini_state_when_indexing"
~default:default.use_saved_state_when_indexing
~current_version
config
in
let require_saved_state =
bool_if_min_version
"require_saved_state"
~default:default.require_saved_state
~current_version
config
in
let saved_state_flags =
Saved_state_rollouts.make
~get_default:(fun name -> bool_ name ~default:false config)
~current_rolled_out_flag_idx
~deactivate_saved_state_rollout
~force_flag_value:(string_opt "ss_force" config)
in
(if not silent then
output_config_section "Saved state rollout flags" @@ fun () ->
Saved_state_rollouts.output saved_state_flags);
let project_metadata_w_flags =
bool_
"project_metadata_w_flags"
~default:default.saved_state.GlobalOptions.project_metadata_w_flags
config
in
let attempt_fix_credentials =
bool_if_min_version
"attempt_fix_credentials"
~default:default.attempt_fix_credentials
~current_version
config
in
let enable_on_nfs =
bool_if_min_version
"enable_on_nfs"
~default:default.enable_on_nfs
~current_version
config
in
let enable_fuzzy_search =
bool_if_min_version
"enable_fuzzy_search"
~default:default.enable_fuzzy_search
~current_version
config
in
let lazy_parse =
bool_if_min_version
"lazy_parse"
~default:default.lazy_parse
~current_version
config
in
let lazy_init =
bool_if_min_version
"lazy_init2"
~default:default.lazy_init
~current_version
config
in
let max_purgatory_clients =
int_ "max_purgatory_clients" ~default:default.max_purgatory_clients config
in
let search_chunk_size =
int_ "search_chunk_size" ~default:default.search_chunk_size config
in
let load_state_natively =
bool_if_min_version
"load_state_natively_v4"
~default:default.load_state_natively
~current_version
config
in
let load_state_natively_download_timeout =
int_
"load_state_natively_download_timeout"
~default:default.load_state_natively_download_timeout
config
in
let load_state_natively_dirty_files_timeout =
int_
"load_state_natively_dirty_files_timeout"
~default:default.load_state_natively_dirty_files_timeout
config
in
let use_dummy_informant =
bool_if_min_version
"use_dummy_informant"
~default:default.use_dummy_informant
~current_version
config
in
let informant_min_distance_restart =
int_
"informant_min_distance_restart"
~default:default.informant_min_distance_restart
config
in
let type_decl_bucket_size =
int_ "type_decl_bucket_size" ~default:default.type_decl_bucket_size config
in
let extend_defs_per_file_bucket_size =
int_
"extend_defs_per_file_bucket_size"
~default:default.extend_defs_per_file_bucket_size
config
in
let io_priority = int_ "io_priority" ~default:default.io_priority config in
let cpu_priority = int_ "cpu_priority" ~default:default.cpu_priority config in
let shm_dirs =
string_list "shm_dirs" ~default:default.shm_dirs config
|> List.map ~f:(fun dir -> Path.(to_string @@ make dir))
in
let shm_use_sharded_hashtbl =
bool_if_min_version
"shm_use_sharded_hashtbl"
~default:default.shm_use_sharded_hashtbl
~current_version
config
in
let shm_cache_size =
int_ "shm_cache_size" ~default:default.shm_cache_size config
in
let max_workers = int_opt "max_workers" config in
let interrupt_on_watchman =
bool_if_min_version
"interrupt_on_watchman"
~default:default.interrupt_on_watchman
~current_version
config
in
let interrupt_on_client =
bool_if_min_version
"interrupt_on_client"
~default:default.interrupt_on_client
~current_version
config
in
let use_full_fidelity_parser =
bool_if_min_version
"use_full_fidelity_parser"
~default:default.use_full_fidelity_parser
~current_version
config
in
let trace_parsing =
bool_if_min_version
"trace_parsing"
~default:default.trace_parsing
~current_version
config
in
let prechecked_files =
bool_if_min_version
"prechecked_files"
~default:default.prechecked_files
~current_version
config
in
let enable_type_check_filter_files =
bool_if_min_version
"enable_type_check_filter_files"
~default:default.enable_type_check_filter_files
~current_version
config
in
let ide_max_num_decls =
int_ "ide_max_num_decls" ~default:default.ide_max_num_decls config
in
let ide_max_num_shallow_decls =
int_
"ide_max_num_shallow_decls"
~default:default.ide_max_num_shallow_decls
config
in
let predeclare_ide =
bool_if_min_version
"predeclare_ide"
~default:default.predeclare_ide
~current_version
config
in
let max_typechecker_worker_memory_mb =
int_opt "max_typechecker_worker_memory_mb" config
in
let use_max_typechecker_worker_memory_for_decl_deferral =
bool_
"use_max_typechecker_worker_memory_for_decl_deferral"
~default:default.use_max_typechecker_worker_memory_for_decl_deferral
config
in
let longlived_workers =
bool_if_min_version
"longlived_workers"
~default:default.longlived_workers
~current_version
config
in
let hg_aware =
bool_if_min_version
"hg_aware"
~default:default.hg_aware
~current_version
config
in
let store_decls_in_saved_state =
bool_if_min_version
"store_decls_in_saved_state"
~default:default.store_decls_in_saved_state
~current_version
config
in
let hg_aware_parsing_restart_threshold =
int_
"hg_aware_parsing_restart_threshold"
~default:default.hg_aware_parsing_restart_threshold
config
in
let hg_aware_redecl_restart_threshold =
int_
"hg_aware_redecl_restart_threshold"
~default:default.hg_aware_redecl_restart_threshold
config
in
let hg_aware_recheck_restart_threshold =
int_
"hg_aware_recheck_restart_threshold"
~default:default.hg_aware_recheck_restart_threshold
config
in
let ide_parser_cache =
bool_if_min_version
"ide_parser_cache"
~default:default.ide_parser_cache
~current_version
config
in
let idle_gc_slice =
int_ "idle_gc_slice" ~default:default.idle_gc_slice config
in
let populate_member_heaps =
bool_if_min_version
"populate_member_heaps"
~default:default.populate_member_heaps
~current_version
config
in
let fetch_remote_old_decls =
bool_if_min_version
"fetch_remote_old_decls"
~default:default.fetch_remote_old_decls
~current_version
config
in
let skip_hierarchy_checks =
bool_if_min_version
"skip_hierarchy_checks"
~default:default.skip_hierarchy_checks
~current_version
config
in
let skip_tast_checks =
bool_if_min_version
"skip_tast_checks"
~default:default.skip_tast_checks
~current_version
config
in
let num_local_workers = int_opt "num_local_workers" config in
let defer_class_declaration_threshold =
int_opt "defer_class_declaration_threshold" config
in
let produce_streaming_errors =
bool_
"produce_streaming_errors"
~default:default.produce_streaming_errors
config
in
let consume_streaming_errors =
bool_
"consume_streaming_errors"
~default:default.consume_streaming_errors
config
in
let watchman =
Watchman.load ~current_version ~default:default.watchman config
in
let enable_naming_table_fallback =
bool_if_min_version
"enable_naming_table_fallback"
~default:default.enable_naming_table_fallback
~current_version
config
in
let naming_sqlite_path =
if enable_naming_table_fallback then
string_opt "naming_sqlite_path" config
else
None
in
let symbolindex_search_provider =
string_
"symbolindex_search_provider"
~default:default.symbolindex_search_provider
config
in
let ide_symbolindex_search_provider =
string_
"ide_symbolindex_search_provider"
~default:symbolindex_search_provider
config
in
let symbolindex_quiet =
bool_if_min_version
"symbolindex_quiet"
~default:default.symbolindex_quiet
~current_version
config
in
let symbolindex_file = string_opt "symbolindex_file" config in
let tico_invalidate_files =
bool_if_min_version
"tico_invalidate_files"
~default:default.tico_invalidate_files
~current_version
config
in
let tico_invalidate_smart =
bool_if_min_version
"tico_invalidate_smart"
~default:default.tico_invalidate_smart
~current_version
config
in
let profile_log =
bool_if_min_version
"profile_log"
~default:HackEventLogger.PerFileProfilingConfig.(default.profile_log)
~current_version
config
in
let profile_type_check_duration_threshold =
float_
"profile_type_check_duration_threshold"
~default:
HackEventLogger.PerFileProfilingConfig.(
default.profile_type_check_duration_threshold)
config
in
let profile_type_check_memory_threshold_mb =
int_
"profile_type_check_memory_threshold_mb"
~default:
HackEventLogger.PerFileProfilingConfig.(
default.profile_type_check_memory_threshold_mb)
config
in
let profile_type_check_twice =
bool_if_min_version
"profile_type_check_twice"
~default:
HackEventLogger.PerFileProfilingConfig.(
default.profile_type_check_twice)
~current_version
config
in
let profile_decling =
match string_ "profile_decling" ~default:"off" config with
| "off" -> HackEventLogger.PerFileProfilingConfig.DeclingOff
| "top_counts" -> HackEventLogger.PerFileProfilingConfig.DeclingTopCounts
| "all_telemetry" ->
HackEventLogger.PerFileProfilingConfig.DeclingAllTelemetry
{ callstacks = false }
| "all_telemetry_callstacks" ->
HackEventLogger.PerFileProfilingConfig.DeclingAllTelemetry
{ callstacks = true }
| _ ->
failwith
"profile_decling: off | top_counts | all_telemetry | all_telemetry_callstacks"
in
let profile_owner = string_opt "profile_owner" config in
let profile_desc = string_opt "profile_desc" config in
let profile_slow_threshold =
float_
"profile_slow_threshold"
~default:
HackEventLogger.PerFileProfilingConfig.(default.profile_slow_threshold)
config
in
let memtrace_dir = string_opt "memtrace_dir" config in
let go_to_implementation =
bool_if_min_version
"go_to_implementation"
~default:default.go_to_implementation
~current_version
config
in
let allow_unstable_features =
bool_if_min_version
"allow_unstable_features"
~default:default.allow_unstable_features
~current_version
config
in
let log_saved_state_age_and_distance =
bool_if_min_version
"log_saved_state_age_and_distance"
~default:
GlobalOptions.(
default_saved_state_loading.log_saved_state_age_and_distance)
~current_version
config
in
let use_manifold_cython_client =
bool_if_min_version
"use_manifold_cython_client"
~default:
GlobalOptions.(default_saved_state_loading.use_manifold_cython_client)
~current_version
config
in
let workload_quantile =
int_list_opt "workload_quantile" config >>= fun l ->
match l with
| [m; n] ->
if 0 <= m && m <= n then
Some { count = n; index = m }
else if 0 <= n && n <= m then
Some { count = m; index = n }
else
None
| _ -> None
in
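  (* Example: a config value parsed as [1; 4] (or [4; 1]; the order is normalized above)
     yields Some { count = 4; index = 1 }, i.e. typecheck the second of four slices. *)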
let rollout_group = string_opt "rollout_group" config in
let specify_manifold_api_key =
bool_if_min_version
"specify_manifold_api_key"
~default:default.specify_manifold_api_key
~current_version
config
in
let remote_old_decls_no_limit =
bool_if_min_version
"remote_old_decls_no_limit"
~default:default.remote_old_decls_no_limit
~current_version
config
in
let saved_state_manifold_api_key =
(* overriding the local_config value so consumers of saved_state_manifold_api_key
* don't need to explicitly check for specify_manifold_api_key.
*)
if specify_manifold_api_key then
string_opt "saved_state_manifold_api_key" config
else
None
in
let rust_elab =
bool_if_min_version
"rust_elab"
~default:default.rust_elab
~current_version
config
in
let rust_provider_backend =
bool_if_min_version
"rust_provider_backend"
~default:default.rust_provider_backend
~current_version
config
in
let rust_provider_backend =
if rust_provider_backend && not shm_use_sharded_hashtbl then (
Hh_logger.warn
"You have rust_provider_backend=true but shm_use_sharded_hashtbl=false. This is incompatible. Turning off rust_provider_backend";
false
) else
rust_provider_backend
in
let rust_provider_backend =
if rust_provider_backend && populate_member_heaps then (
Hh_logger.warn
"You have rust_provider_backend=true but populate_member_heaps=true. This is incompatible. Turning off rust_provider_backend";
false
) else
rust_provider_backend
in
let cache_remote_decls =
bool_if_min_version
"cache_remote_decls"
~default:default.cache_remote_decls
~current_version
config
in
let disable_naming_table_fallback_loading =
bool_if_min_version
"disable_naming_table_fallback_loading"
~default:default.disable_naming_table_fallback_loading
~current_version
config
in
let use_type_alias_heap =
bool_if_min_version
"use_type_alias_heap"
~default:default.use_type_alias_heap
~current_version
config
in
let override_load_state_natively =
bool_if_min_version
"override_load_state_natively"
~default:default.override_load_state_natively
~current_version
config
in
let use_server_revision_tracker_v2 =
bool_if_min_version
"use_server_revision_tracker_v2"
~default:default.use_server_revision_tracker_v2
~current_version
config
in
let use_hh_distc_instead_of_hulk =
bool_if_min_version
"use_hh_distc_instead_of_hulk"
~default:default.use_hh_distc_instead_of_hulk
~current_version
config
in
let use_compressed_dep_graph =
bool_if_min_version
"use_compressed_dep_graph"
~default:default.use_compressed_dep_graph
~current_version
config
in
let hh_distc_fanout_threshold =
int_
"hh_distc_fanout_threshold"
~default:default.hh_distc_fanout_threshold
config
in
let ide_load_naming_table_on_disk =
bool_if_min_version
"ide_load_naming_table_on_disk"
~default:default.ide_load_naming_table_on_disk
~current_version
config
in
let ide_naming_table_update_threshold =
int_
"ide_naming_table_update_threshold"
~default:default.ide_naming_table_update_threshold
config
in
let ide_batch_process_changes =
bool_
"ide_batch_process_changes"
~default:default.ide_batch_process_changes
config
in
let dump_tast_hashes =
bool_ "dump_tast_hashes" ~default:default.dump_tast_hashes config
in
let glean_v2 = bool_ "glean_v2" ~default:default.glean_v2 config in
{
saved_state =
{
GlobalOptions.loading =
{
GlobalOptions.saved_state_manifold_api_key;
log_saved_state_age_and_distance;
use_manifold_cython_client;
};
rollouts = saved_state_flags;
project_metadata_w_flags;
};
min_log_level;
attempt_fix_credentials;
log_categories;
log_large_fanouts_threshold;
log_init_proc_stack_also_on_absent_from;
experiments;
experiments_config_meta;
use_saved_state;
use_saved_state_when_indexing;
require_saved_state;
load_state_natively;
load_state_natively_download_timeout;
load_state_natively_dirty_files_timeout;
max_purgatory_clients;
type_decl_bucket_size;
extend_defs_per_file_bucket_size;
enable_on_nfs;
enable_fuzzy_search;
lazy_parse;
lazy_init;
search_chunk_size;
io_priority;
cpu_priority;
shm_dirs;
shm_use_sharded_hashtbl;
shm_cache_size;
max_workers;
use_dummy_informant;
informant_min_distance_restart;
use_full_fidelity_parser;
interrupt_on_watchman;
interrupt_on_client;
trace_parsing;
prechecked_files;
enable_type_check_filter_files;
ide_max_num_decls;
ide_max_num_shallow_decls;
ide_symbolindex_search_provider;
predeclare_ide;
max_typechecker_worker_memory_mb;
use_max_typechecker_worker_memory_for_decl_deferral;
longlived_workers;
hg_aware;
hg_aware_parsing_restart_threshold;
hg_aware_redecl_restart_threshold;
hg_aware_recheck_restart_threshold;
ide_parser_cache;
store_decls_in_saved_state;
idle_gc_slice;
populate_member_heaps;
fetch_remote_old_decls;
skip_hierarchy_checks;
skip_tast_checks;
num_local_workers;
defer_class_declaration_threshold;
produce_streaming_errors;
consume_streaming_errors;
rust_elab;
rust_provider_backend;
naming_sqlite_path;
enable_naming_table_fallback;
symbolindex_search_provider;
symbolindex_quiet;
symbolindex_file;
tico_invalidate_files;
tico_invalidate_smart;
per_file_profiling =
{
HackEventLogger.PerFileProfilingConfig.profile_log;
profile_type_check_duration_threshold;
profile_type_check_memory_threshold_mb;
profile_type_check_twice;
profile_decling;
profile_owner;
profile_desc;
profile_slow_threshold;
};
memtrace_dir;
go_to_implementation;
allow_unstable_features;
watchman;
workload_quantile;
rollout_group;
specify_manifold_api_key;
remote_old_decls_no_limit;
cache_remote_decls;
disable_naming_table_fallback_loading;
use_type_alias_heap;
override_load_state_natively;
use_server_revision_tracker_v2;
use_hh_distc_instead_of_hulk;
use_compressed_dep_graph;
hh_distc_fanout_threshold;
ide_load_naming_table_on_disk;
ide_naming_table_update_threshold;
ide_batch_process_changes;
dump_tast_hashes;
glean_v2;
}
(** Loads the config from the system config path (hh.conf). Uses JustKnobs and ExperimentsConfig
    to override. On top of that, applies the given overrides. Unless [silent], prints what it's
    doing to stderr. *)
let load :
silent:bool ->
current_version:Config_file_version.version ->
current_rolled_out_flag_idx:int ->
deactivate_saved_state_rollout:bool ->
from:string ->
Config_file_common.t ->
t =
load_ system_config_path
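(* Illustrative call site (argument values hypothetical):
     let local_config =
       ServerLocalConfig.load
         ~silent:false
         ~current_version
         ~current_rolled_out_flag_idx:0
         ~deactivate_saved_state_rollout:false
         ~from:"command_line"
         config_overrides
     in
     ... *)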
let to_rollout_flags (options : t) : HackEventLogger.rollout_flags =
HackEventLogger.
{
log_saved_state_age_and_distance =
GlobalOptions.(
options.saved_state.loading.log_saved_state_age_and_distance);
fetch_remote_old_decls = options.fetch_remote_old_decls;
ide_max_num_decls = options.ide_max_num_decls;
ide_max_num_shallow_decls = options.ide_max_num_shallow_decls;
max_workers = Option.value options.max_workers ~default:(-1);
max_typechecker_worker_memory_mb =
Option.value options.max_typechecker_worker_memory_mb ~default:(-1);
use_max_typechecker_worker_memory_for_decl_deferral =
options.use_max_typechecker_worker_memory_for_decl_deferral;
specify_manifold_api_key = options.specify_manifold_api_key;
remote_old_decls_no_limit = options.remote_old_decls_no_limit;
populate_member_heaps = options.populate_member_heaps;
shm_use_sharded_hashtbl = options.shm_use_sharded_hashtbl;
shm_cache_size = options.shm_cache_size;
use_manifold_cython_client =
GlobalOptions.(options.saved_state.loading.use_manifold_cython_client);
disable_naming_table_fallback_loading =
options.disable_naming_table_fallback_loading;
use_type_alias_heap = options.use_type_alias_heap;
override_load_state_natively = options.override_load_state_natively;
use_server_revision_tracker_v2 = options.use_server_revision_tracker_v2;
rust_provider_backend = options.rust_provider_backend;
use_hh_distc_instead_of_hulk = options.use_hh_distc_instead_of_hulk;
use_compressed_dep_graph = options.use_compressed_dep_graph;
consume_streaming_errors = options.consume_streaming_errors;
hh_distc_fanout_threshold = options.hh_distc_fanout_threshold;
rust_elab = options.rust_elab;
ide_load_naming_table_on_disk = options.ide_load_naming_table_on_disk;
ide_naming_table_update_threshold =
options.ide_naming_table_update_threshold;
ide_batch_process_changes = options.ide_batch_process_changes;
glean_v2 = options.glean_v2;
} |
OCaml | hhvm/hphp/hack/src/server/serverMain.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerEnv
open Reordered_argument_collections
open Option.Monad_infix
(*****************************************************************************)
(* Main initialization *)
(*****************************************************************************)
let () = Printexc.record_backtrace true
let force_break_recheck_loop_for_test_ref = ref false
let force_break_recheck_loop_for_test x =
force_break_recheck_loop_for_test_ref := x
module MainInit : sig
val go :
genv ->
ServerArgs.options ->
(unit -> env) ->
(* init function to run while we have init lock *)
env
end = struct
  (* This code is only executed when the option --check is NOT present *)
let go genv options init_fun =
let root = ServerArgs.root options in
let t = Unix.gettimeofday () in
let pid = Unix.getpid () in
begin
match ProcFS.first_cgroup_for_pid pid with
| Ok cgroup ->
Hh_logger.log "Server Pid: %d" pid;
Hh_logger.log "Server cGroup: %s" cgroup
| _ -> ()
end;
Hh_logger.log "Initializing Server (This might take some time)";
(* note: we only run periodical tasks on the root, not extras *)
let env = init_fun () in
Hh_logger.log "Server is partially ready";
ServerIdle.init genv root;
let t' = Unix.gettimeofday () in
Hh_logger.log "Took %f seconds." (t' -. t);
HackEventLogger.server_is_partially_ready ();
env
end
module Program = struct
let preinit () =
(* Warning: Global references inited in this function, should
be 'restored' in the workers, because they are not 'forked'
anymore. See `ServerWorker.{save/restore}_state`. *)
Sys_utils.set_signal
Sys.sigusr1
(Sys.Signal_handle Typing.debug_print_last_pos);
Sys_utils.set_signal
Sys.sigusr2
(Sys.Signal_handle
(fun _ ->
Hh_logger.log "Got sigusr2 signal. Going to shut down.";
Exit.exit
~msg:
"Hh_server received a stop signal. This can happen from a large rebase/update"
Exit_status.Server_shutting_down_due_to_sigusr2))
let run_once_and_exit
genv
env
(save_state_result : SaveStateServiceTypes.save_state_result option) =
let recheck_stats =
Option.map
~f:ServerEnv.RecheckLoopStats.to_user_telemetry
env.ServerEnv.last_recheck_loop_stats_for_actual_work
in
ServerError.print_error_list
stdout
~stale_msg:None
~output_json:(ServerArgs.json_mode genv.options)
~error_list:
(List.map (Errors.get_error_list env.errorl) ~f:User_error.to_absolute)
~save_state_result
~recheck_stats;
WorkerController.force_quit_all ();
let has_errors = not (Errors.is_empty env.errorl) in
let is_saving_state_and_ignoring_errors =
ServerArgs.gen_saved_ignore_type_errors genv.options
&& Option.is_some (ServerArgs.save_filename genv.options)
in
let error_code =
if has_errors then
if Option.is_some (ServerArgs.write_symbol_info genv.options) then
32
else if not is_saving_state_and_ignoring_errors then
1
else
0
else
0
in
exit error_code
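  (* Exit-code summary for the logic above:
     32 -> errors were found during a symbol-info write (ServerArgs.write_symbol_info);
     1  -> errors were found in a normal run;
     0  -> no errors, or errors are deliberately ignored while saving state
           (ServerArgs.gen_saved_ignore_type_errors together with a save filename). *)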
  (* Exit the server if .hhconfig changed incompatibly, or if the package config changed at all. *)
let exit_if_critical_update genv ~(raw_updates : SSet.t) : unit =
let hhconfig_in_updates =
SSet.mem
raw_updates
(Relative_path.to_absolute ServerConfig.repo_config_path)
in
if hhconfig_in_updates then begin
let (new_config, _) = ServerConfig.load ~silent:false genv.options in
if not (ServerConfig.is_compatible genv.config new_config) then (
Hh_logger.log
"%s changed in an incompatible way; please restart %s.\n"
(Relative_path.suffix ServerConfig.repo_config_path)
GlobalConfig.program_name;
(* TODO: Notify the server monitor directly about this. *)
Exit.exit Exit_status.Hhconfig_changed
)
end;
let package_config_in_updates =
SSet.mem
raw_updates
(Relative_path.to_absolute PackageConfig.repo_config_path)
in
if package_config_in_updates then begin
Hh_logger.log
"%s changed; please restart %s.\n"
(Relative_path.suffix PackageConfig.repo_config_path)
GlobalConfig.program_name;
Exit.exit Exit_status.Package_config_changed
end
end
let finalize_init init_env typecheck_telemetry init_telemetry =
(* rest is just logging/telemetry *)
let t' = Unix.gettimeofday () in
let hash_telemetry = ServerUtils.log_and_get_sharedmem_load_telemetry () in
let telemetry =
Telemetry.create ()
|> Telemetry.duration ~start_time:init_env.init_start_t
|> Telemetry.object_
~key:"init"
~value:(ServerEnv.Init_telemetry.get init_telemetry)
|> Telemetry.object_ ~key:"typecheck" ~value:typecheck_telemetry
|> Telemetry.object_ ~key:"hash" ~value:hash_telemetry
|> Telemetry.int_
~key:"heap_size"
~value:(SharedMem.SMTelemetry.heap_size ())
in
HackEventLogger.server_is_ready telemetry;
Hh_logger.log
"SERVER_IS_READY. Took %f seconds to init. Telemetry:\n%s"
(t' -. init_env.init_start_t)
(Telemetry.to_string telemetry);
()
(*****************************************************************************)
(* The main loop *)
(*****************************************************************************)
(** Query for changed files. This is a hard to understand method...
[let (env, changes, new_clock, may_be_stale, telemetry) = query_notifier genv env query_kind start_time].
CARE! [query_kind] is hard to understand...
* [`Sync] -- a watchman sync query, i.e. guarantees to have picked up all updates
up to the moment it was invoked. Watchman does this by writing a dummy file
and waiting until the OS notifies about it; the OS guarantees to send notifications in order.
* [`Async] -- picks up changes that watchman has pushed over the subscription, but we don't
do a sync, so therefore there might be changes on disk that watchman will tell us about
in future.
* [`Skip] -- CARE! Despite its name, this also behaves much like [`Async].
The return value [may_be_stale] indicates that the most recent watchman event that got pushed
was not a "sync" to the dummy file. This will never be true for [`Sync] query kind (which deliberately
waits for the sync), but it might be true or false for [`Async] and [`Skip]. The caller can
use this as a hint that we don't know whether there are more disk changes.
Personally, I've never actually seen it be true. It's surfaced to the user in clientCheckStatus.ml
with the message "this may be stale, probably due to watchman being unresponsive". *)
let query_notifier
(genv : ServerEnv.genv)
(env : ServerEnv.env)
(query_kind : [> `Async | `Skip | `Sync ])
(start_time : float) :
ServerEnv.env
* Relative_path.Set.t
* Watchman.clock option
* bool
* Telemetry.t =
let telemetry =
Telemetry.create () |> Telemetry.duration ~key:"start" ~start_time
in
let (env, (raw_updates, clock)) =
match query_kind with
| `Sync ->
( env,
(try ServerNotifier.get_changes_sync genv.notifier with
| Watchman.Timeout -> (ServerNotifier.Unavailable, None)) )
| `Async ->
( { env with last_notifier_check_time = start_time },
ServerNotifier.get_changes_async genv.notifier )
| `Skip -> (env, (ServerNotifier.AsyncChanges SSet.empty, None))
in
let telemetry = Telemetry.duration telemetry ~key:"notified" ~start_time in
let unpack_updates = function
| ServerNotifier.Unavailable -> (true, SSet.empty)
| ServerNotifier.StateEnter _ -> (true, SSet.empty)
| ServerNotifier.StateLeave _ -> (true, SSet.empty)
| ServerNotifier.AsyncChanges updates -> (true, updates)
| ServerNotifier.SyncChanges updates -> (false, updates)
in
let (updates_stale, raw_updates) = unpack_updates raw_updates in
let rec pump_async_updates acc acc_clock =
match ServerNotifier.async_reader_opt genv.notifier with
| Some reader when Buffered_line_reader.is_readable reader ->
let (changes, clock) = ServerNotifier.get_changes_async genv.notifier in
let (_, raw_updates) = unpack_updates changes in
pump_async_updates (SSet.union acc raw_updates) clock
| _ -> (acc, acc_clock)
in
let (raw_updates, clock) = pump_async_updates raw_updates clock in
let telemetry = Telemetry.duration telemetry ~key:"pumped" ~start_time in
Program.exit_if_critical_update genv ~raw_updates;
let updates =
FindUtils.post_watchman_filter_from_fully_qualified_raw_updates
~root:(ServerArgs.root genv.options)
~raw_updates
in
(* CARE! For streaming-errors to work in clientCheckStatus.ml, the test
which hh_server uses to determine "is there a non-empty set of changes which prompt me
to start a new check" must be at least as strict as the one in clientCheckStatus.
They're identical, in fact, because they both use the same watchman filter
(FilesToIgnore.watchman_server_expression_terms) and the same post-watchman-filter. *)
let telemetry =
telemetry
|> Telemetry.duration ~key:"processed" ~start_time
|> Telemetry.int_ ~key:"raw_updates" ~value:(SSet.cardinal raw_updates)
|> Telemetry.int_ ~key:"updates" ~value:(Relative_path.Set.cardinal updates)
in
if not @@ Relative_path.Set.is_empty updates then
HackEventLogger.notifier_returned start_time (SSet.cardinal raw_updates);
(env, updates, clock, updates_stale, telemetry)
let update_stats_after_recheck :
RecheckLoopStats.t ->
ServerTypeCheck.CheckStats.t ->
check_kind:ServerTypeCheck.CheckKind.t ->
telemetry:Telemetry.t ->
start_time:seconds_since_epoch ->
RecheckLoopStats.t =
fun {
RecheckLoopStats.total_changed_files_count;
per_batch_telemetry;
total_rechecked_count;
updates_stale;
recheck_id;
last_iteration_start_time = _;
duration;
time_first_result = _;
any_full_checks;
}
{
ServerTypeCheck.CheckStats.total_rechecked_count =
total_rechecked_count_in_iteration;
reparse_count;
time_first_result;
}
~check_kind
~telemetry
~start_time ->
{
RecheckLoopStats.total_changed_files_count =
total_changed_files_count + reparse_count;
per_batch_telemetry = telemetry :: per_batch_telemetry;
total_rechecked_count =
total_rechecked_count + total_rechecked_count_in_iteration;
updates_stale;
recheck_id;
last_iteration_start_time = start_time;
duration = duration +. (Unix.gettimeofday () -. start_time);
time_first_result;
any_full_checks =
any_full_checks || ServerTypeCheck.CheckKind.is_full_check check_kind;
}
(* This function loops until it has processed all outstanding changes.
*
* One reason for doing this is so that, if a client makes a request,
* then we can process all outstanding changes prior to handling that request.
* That way the client will get an up-to-date answer.
*
* Another reason is to get meaningful logging in case of watchman events.
* When a rebase occurs, Watchman/dfind takes a while to give us the full list
* of updates, and it often comes in batches. To get an accurate measurement
* of rebase time, we use the heuristic that any changes that come in
* right after one rechecking round finishes to be part of the same
* rebase, and we don't log the recheck_end event until the update list
* is no longer getting populated.
*
* The above doesn't apply in presence of interruptions / cancellations -
* it's possible for client to request current recheck to be stopped.
*)
let rec recheck_until_no_changes_left stats genv env select_outcome :
RecheckLoopStats.t * env =
let start_time = Unix.gettimeofday () in
(* this is telemetry for the current batch, i.e. iteration: *)
let telemetry =
Telemetry.create () |> Telemetry.float_ ~key:"start_time" ~value:start_time
in
(* When a new client connects, we use the synchronous notifier.
This is to get synchronous file system changes when invoking
hh_client in terminal.
CARE! The [`Skip] option doesn't in fact skip. It will still
retrieve queued-up watchman updates.
NB: This also uses synchronous notify on establishing a persistent
connection. This is harmless, but could maybe be filtered away. *)
let query_kind =
match select_outcome with
| ClientProvider.Select_new _ -> `Sync
| ClientProvider.Select_nothing
| ClientProvider.Select_exception _
| ClientProvider.Not_selecting_hg_updating ->
if Float.(start_time - env.last_notifier_check_time > 0.5) then
`Async
else
`Skip
| ClientProvider.Select_persistent ->
(* Do not aggressively process any disk changes when there are pending persistent
client requests - some of them might be edits, and we don't want to
do analysis on mid-edit state of the world. (Nevertheless, [`Skip] still may
pick up updates...) *)
`Skip
in
let (env, updates, clock, updates_stale, query_telemetry) =
query_notifier genv env query_kind start_time
in
(* The response from [query_notifier] is tricky to unpick...
* For [`Sync | `Async] it will always return [clock=Some] and updates may be empty or not.
* For [`Skip] it might return [clock=Some] and updates empty or not; or it might
return [clock=None] with updates empty. *)
let telemetry =
telemetry
|> Telemetry.object_ ~key:"query" ~value:query_telemetry
|> Telemetry.duration ~key:"query_done" ~start_time
in
let stats = { stats with RecheckLoopStats.updates_stale } in
let is_idle =
(match select_outcome with
| ClientProvider.Select_persistent -> false
| _ -> true)
&& (* "average person types [...] between 190 and 200 characters per minute"
* 60/200 = 0.3 *)
Float.(start_time - env.last_command_time > 0.3)
in
(* saving any file is our trigger to start full recheck *)
let env =
if Option.is_some clock && not (Option.equal String.equal clock env.clock)
then begin
Hh_logger.log "Recheck at watchclock %s" (ServerEnv.show_clock clock);
{ env with clock }
end else
env
in
let env =
if Relative_path.Set.is_empty updates then
env
else
let disk_needs_parsing =
Relative_path.Set.union updates env.disk_needs_parsing
in
match env.full_recheck_on_file_changes with
| Paused _ ->
let () = Hh_logger.log "Skipping full check due to `hh --pause`" in
{ env with disk_needs_parsing; full_check_status = Full_check_needed }
| _ ->
{ env with disk_needs_parsing; full_check_status = Full_check_started }
in
let telemetry = Telemetry.duration telemetry ~key:"got_updates" ~start_time in
(* If the client went away (e.g. user pressed Ctrl+C while waiting for the typecheck),
let's clean up now. *)
let env =
match env.nonpersistent_client_pending_command_needs_full_check with
| Some (_command, _reason, client)
when is_full_check_needed env.full_check_status
&& Float.(start_time - env.last_command_time > 5.0) -> begin
try
ClientProvider.ping client;
env
with
| ClientProvider.Client_went_away ->
ClientProvider.shutdown_client client;
{
env with
nonpersistent_client_pending_command_needs_full_check = None;
}
end
| _ -> env
in
(* If a typecheck had been suspended due to IDE edits, then we want to resume it eventually...
To recap: if an IDE edit comes in, it takes us time to suspend the current typecheck,
handle the edit, then resume the current typecheck. If we did this every edit then we'd
get sluggish perf. We have two "debounce" mechanisms to avoid resuming too eagerly:
* If we handled any IDE action, then we won't accept any further CLI clients until
the IDE tells us it has no further pending work which it does by sending IDE_IDLE.
The code to deny further clients is when [ServerMain.serve_one_iteration] calls
[ClientProvider.sleep_and_check]. The code to reset upon IDE_IDLE is in [ServerRpc.handle]
when it receives IDE_IDLE.
* If we handled an IDE edit action, then we won't resume any typechecking work until either
5.0s has elapsed or there was a disk change. The code to suspend typechecking work is when
[ServerCommand.handle] returns [Needs_writes {recheck_restart_is_needed=false}] and
its caller [ServerMain.persistent_client_interrupt_handler] sets [env.full_check_status=Full_check_needed].
This has effect because [ServerMain.recheck_until_no_changes_left] is in charge of deciding
whether a check is needed, and it decides "no" unless [ServerEnv.is_full_check_started].
The code to resume typechecking work is right here! We'll restart only after 5.0s.
* These mechanisms notwithstanding, we'll still start full recheck immediately
upon any file save. *)
let env =
if
is_full_check_needed env.full_check_status
&& Float.(start_time > env.last_command_time + 5.0)
then begin
Hh_logger.log "Restarting full check after 5.0s";
{ env with full_check_status = Full_check_started }
end else
env
in
(* Same as above, but for persistent clients *)
let env =
match env.persistent_client_pending_command_needs_full_check with
| Some (_command, reason) when is_full_check_needed env.full_check_status ->
Hh_logger.log "Restarting full check due to %s" reason;
{ env with full_check_status = Full_check_started }
| _ -> env
in
let telemetry =
Telemetry.duration telemetry ~key:"sorted_out_client" ~start_time
in
(* We have some new, or previously un-processed updates *)
let full_check =
ServerEnv.is_full_check_started env.full_check_status
(* Prioritize building search index over full rechecks. *)
&& (Queue.is_empty SearchServiceRunner.SearchServiceRunner.queue
(* Unless there is something actively waiting for this *)
|| Option.is_some
env.nonpersistent_client_pending_command_needs_full_check)
in
let lazy_check =
(not @@ Relative_path.Set.is_empty env.ide_needs_parsing) && is_idle
in
let telemetry =
telemetry
|> Telemetry.bool_ ~key:"full_check" ~value:full_check
|> Telemetry.bool_ ~key:"lazy_check" ~value:lazy_check
|> Telemetry.duration ~key:"figured_check_kind" ~start_time
in
if (not full_check) && not lazy_check then
let telemetry =
Telemetry.string_ telemetry ~key:"check_kind" ~value:"None"
in
let stats =
{
stats with
RecheckLoopStats.per_batch_telemetry =
telemetry :: stats.RecheckLoopStats.per_batch_telemetry;
}
in
(stats, env)
else
let check_kind =
if lazy_check then
ServerTypeCheck.CheckKind.Lazy
else
ServerTypeCheck.CheckKind.Full
in
let check_kind_str = ServerTypeCheck.CheckKind.to_string check_kind in
let env = { env with can_interrupt = not lazy_check } in
let needed_full_init = env.init_env.why_needed_full_check in
let old_errorl = Errors.get_error_list env.errorl in
(* HERE'S WHERE WE DO THE HEAVY WORK! **)
let telemetry =
telemetry
|> Telemetry.string_ ~key:"check_kind" ~value:check_kind_str
|> Telemetry.duration ~key:"type_check_start" ~start_time
in
let (env, check_stats, type_check_telemetry) =
CgroupProfiler.step_group check_kind_str ~log:(not lazy_check)
@@ ServerTypeCheck.type_check genv env check_kind start_time
in
let telemetry =
telemetry
|> Telemetry.object_ ~key:"type_check" ~value:type_check_telemetry
|> Telemetry.duration ~key:"type_check_end" ~start_time
in
(* END OF HEAVY WORK *)
(* Final telemetry and cleanup... *)
let env = { env with can_interrupt = true } in
begin
match (needed_full_init, env.init_env.why_needed_full_check) with
| (Some needed_full_init, None) ->
finalize_init env.init_env telemetry needed_full_init
| _ -> ()
end;
ServerStamp.touch_stamp_errors old_errorl (Errors.get_error_list env.errorl);
let telemetry =
Telemetry.duration telemetry ~key:"finalized_and_touched" ~start_time
in
let stats =
update_stats_after_recheck
stats
check_stats
~check_kind
~start_time
~telemetry
in
(* Avoid batching ide rechecks with disk rechecks - there might be
* other ide edits to process first and we want to give the main loop
* a chance to process them first.
* Similarly, if a recheck was interrupted because of arrival of command
* that needs writes, break the recheck loop to give that command chance
* to be handled in main loop.
* Finally, tests have ability to opt-out of batching completely. *)
if
lazy_check
|| Option.is_some env.pending_command_needs_writes
|| !force_break_recheck_loop_for_test_ref
then
(stats, env)
else
recheck_until_no_changes_left stats genv env select_outcome
let new_serve_iteration_id () = Random_id.short_string ()
(* This is safe to run only in the main loop, when workers are not doing
* anything. *)
let main_loop_command_handler client_kind client result =
match result with
| ServerUtils.Done env -> env
| ServerUtils.Needs_full_recheck { env; finish_command_handling; reason } ->
begin
match client_kind with
| `Non_persistent ->
(* We should not accept any new clients until this is cleared *)
assert (
Option.is_none env.nonpersistent_client_pending_command_needs_full_check);
{
env with
nonpersistent_client_pending_command_needs_full_check =
Some (finish_command_handling, reason, client);
}
| `Persistent ->
(* Persistent client will not send any further commands until previous one
* is handled. *)
assert (
Option.is_none env.persistent_client_pending_command_needs_full_check);
{
env with
persistent_client_pending_command_needs_full_check =
Some (finish_command_handling, reason);
}
end
| ServerUtils.Needs_writes
{
env;
finish_command_handling;
recheck_restart_is_needed = _;
reason = _;
} ->
finish_command_handling env
let generate_and_update_recheck_id env =
let recheck_id = new_serve_iteration_id () in
let env =
{
env with
ServerEnv.init_env =
{ env.ServerEnv.init_env with ServerEnv.recheck_id = Some recheck_id };
}
in
(env, recheck_id)
let idle_if_no_client env waiting_client =
match waiting_client with
| ClientProvider.Select_nothing
| ClientProvider.Select_exception _
| ClientProvider.Not_selecting_hg_updating ->
let {
RecheckLoopStats.per_batch_telemetry;
total_changed_files_count;
total_rechecked_count;
_;
} =
env.last_recheck_loop_stats
in
(* Ugly hack: We want GC_SHAREDMEM_RAN to record the last rechecked
* count so that we can figure out if the largest reclamations
* correspond to massive rebases. However, the logging call is done in
* the SharedMem module, which doesn't know anything about Server stuff.
* So we wrap the call here. *)
HackEventLogger.with_rechecked_stats
~update_batch_count:(List.length per_batch_telemetry)
~total_changed_files:total_changed_files_count
~total_rechecked:total_rechecked_count
(fun () -> SharedMem.GC.collect `aggressive);
let t = Unix.gettimeofday () in
if Float.(t -. env.last_idle_job_time > 0.5) then
let env = ServerIdle.go env in
{ env with last_idle_job_time = t }
else
env
| ClientProvider.Select_new _
| ClientProvider.Select_persistent ->
env
(** Push diagnostics (typechecker errors in the IDE are called diagnostics) to IDE.
Return a reason why nothing was pushed and optionally the timestamp of the push. *)
let push_diagnostics env : env * string * seconds_since_epoch option =
let (diagnostic_pusher, time_errors_pushed) =
Diagnostic_pusher.push_whats_left env.diagnostic_pusher
in
let env = { env with diagnostic_pusher } in
(env, "pushed any leftover", time_errors_pushed)
let log_recheck_end (stats : ServerEnv.RecheckLoopStats.t) ~errors ~diag_reason
=
let telemetry =
ServerEnv.RecheckLoopStats.to_user_telemetry stats
|> Telemetry.string_ ~key:"diag_reason" ~value:diag_reason
|> Telemetry.object_ ~key:"errors" ~value:(Errors.as_telemetry errors)
in
let {
RecheckLoopStats.duration;
total_changed_files_count;
total_rechecked_count;
per_batch_telemetry;
any_full_checks;
recheck_id;
updates_stale = _;
last_iteration_start_time = _;
time_first_result = _;
} =
stats
in
HackEventLogger.recheck_end
~last_recheck_duration:duration
~update_batch_count:(List.length per_batch_telemetry - 1)
~total_changed_files:total_changed_files_count
~total_rechecked:total_rechecked_count
(Option.some_if any_full_checks telemetry);
Hh_logger.log
"RECHECK_END (recheck_id %s):\n%s"
recheck_id
(Telemetry.to_string telemetry);
()
let exit_if_parent_dead () =
(* Cross-platform compatible way; parent PID becomes 1 when parent dies. *)
if Unix.getppid () = 1 then (
Hh_logger.log "Server's parent has died; exiting.\n";
Exit.exit Exit_status.Lost_parent_monitor
)
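(* Concretely (on typical Linux setups without a subreaper): if the monitor is
   killed, this server process is reparented to init, Unix.getppid () starts
   returning 1, and the next iteration of the main loop exits with
   Lost_parent_monitor. *)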
let serve_one_iteration genv env client_provider =
let (env, recheck_id) = generate_and_update_recheck_id env in
exit_if_parent_dead ();
let acceptable_new_client_kind =
let has_default_client_pending =
Option.is_some env.nonpersistent_client_pending_command_needs_full_check
in
let use_tracker_v2 =
genv.local_config.ServerLocalConfig.use_server_revision_tracker_v2
in
let can_accept_clients =
not @@ ServerRevisionTracker.is_hg_updating use_tracker_v2
in
match (can_accept_clients, has_default_client_pending) with
(* If we are already blocked on some client, do not accept more of them.
* Other clients (that connect through priority pipe, or persistent clients)
* can still be handled - unless we are in hg.update state, where we want to
* stop accepting any new clients, with the exception of forced ones. *)
| (true, true) -> Some `Priority
| (true, false) -> Some `Any
| (false, true) -> None
| (false, false) -> Some `Force_dormant_start_only
in
let selected_client =
match acceptable_new_client_kind with
| None -> ClientProvider.Not_selecting_hg_updating
| Some client_kind ->
ClientProvider.sleep_and_check
client_provider
(Ide_info_store.get_client ())
~ide_idle:env.ide_idle
~idle_gc_slice:genv.local_config.ServerLocalConfig.idle_gc_slice
client_kind
in
(* ServerProgress: By updating status now at the start of the serve_one_iteration,
* it means there's no obligation on the "doing work" part of the previous
* iteration to clean up its own status-reporting once done.
*
* Caveat: that's not quite true, since ClientProvider.sleep_and_check will
* wait up to 0.1s if there are no pending requests. So theoretically we
* won't update our status for up to 0.1s after the previous work is done.
* That doesn't really matter, since (1) if there are no pending requests
* then no client will even ask for status, and (2) it's worth it to
* keep the code clean and simple.
*
* By the same token, we will be writing "ready" once every 0.1s to the status file.
* Think of it as a heartbeat!
*
* Note: the message here might soon be replaced. If we discover disk changes
* that prompt a typecheck, then typechecking sends its own status updates.
* And if the selected_client was a request, then once we discover the nature
* of that request then ServerCommand.handle will send its own status updates too.
*)
begin
match selected_client with
| ClientProvider.(Select_nothing | Select_exception _) when not env.ide_idle
->
ServerProgress.write ~include_in_logs:false "hh_client:active"
| ClientProvider.(Select_nothing | Select_exception _) ->
(* There's some subtle IDE behavior, described in [ServerCommand.handle]
and [ServerMain.recheck_until_no_changes_left]... If an EDIT was received
over the persistent connection, then we won't resume typechecking
until either a file-save comes in or 5.0s has elapsed. *)
let (disposition, msg) =
match env.full_check_status with
| Full_check_needed -> (ServerProgress.DWorking, "will resume")
| Full_check_started -> (ServerProgress.DWorking, "typechecking")
| Full_check_done -> (ServerProgress.DReady, "ready")
in
ServerProgress.write ~include_in_logs:false ~disposition "%s" msg
| ClientProvider.Not_selecting_hg_updating ->
ServerProgress.write ~include_in_logs:false "hg-transaction"
| ClientProvider.Select_new _
| ClientProvider.Select_persistent ->
ServerProgress.write ~include_in_logs:false "working"
end;
let env = idle_if_no_client env selected_client in
let stage =
if Option.is_some env.init_env.why_needed_full_check then
`Init
else
`Recheck
in
HackEventLogger.with_id ~stage recheck_id @@ fun () ->
(* We'll first do "recheck_until_no_changes_left" to handle all outstanding changes, so that
* after that we'll be able to give an up-to-date answer to the client.
* Except: this might be stopped early in some cases, e.g. IDE checks. *)
let t_start_recheck = Unix.gettimeofday () in
let (stats, env) =
recheck_until_no_changes_left
(RecheckLoopStats.empty ~recheck_id)
genv
env
selected_client
in
let t_done_recheck = Unix.gettimeofday () in
let (env, diag_reason, time_errors_pushed) = push_diagnostics env in
let t_sent_diagnostics = Unix.gettimeofday () in
let stats =
ServerEnv.RecheckLoopStats.record_result_sent_ts stats time_errors_pushed
in
let did_work = stats.RecheckLoopStats.total_rechecked_count > 0 in
let env =
{
env with
last_recheck_loop_stats = stats;
last_recheck_loop_stats_for_actual_work =
(if did_work then
Some stats
else
env.last_recheck_loop_stats_for_actual_work);
}
in
if did_work then log_recheck_end stats ~errors:env.errorl ~diag_reason;
let env =
match selected_client with
| ClientProvider.Select_persistent
| ClientProvider.Select_nothing
| ClientProvider.Select_exception _
| ClientProvider.Not_selecting_hg_updating ->
env
| ClientProvider.Select_new { ClientProvider.client; m2s_sequence_number }
-> begin
try
Hh_logger.log
"Serving new client obtained from monitor handoff #%d"
m2s_sequence_number;
(* client here is the new client (not the existing persistent client)
* whose request we're going to handle. *)
ClientProvider.track
client
~key:Connection_tracker.Server_start_recheck
~time:t_start_recheck;
ClientProvider.track
client
~key:Connection_tracker.Server_done_recheck
~time:t_done_recheck;
ClientProvider.track
client
~key:Connection_tracker.Server_sent_diagnostics
~time:t_sent_diagnostics;
let env =
Client_command_handler.handle_client_command_or_persistent_connection
genv
env
client
`Non_persistent
|> main_loop_command_handler `Non_persistent client
in
HackEventLogger.handled_connection t_start_recheck;
env
with
| exn ->
let e = Exception.wrap exn in
HackEventLogger.handle_connection_exception "outer" e;
Hh_logger.log
"HANDLE_CONNECTION_EXCEPTION(outer) [ignoring request] %s"
(Exception.to_string e);
env
end
in
let env =
match Ide_info_store.get_client () with
| None -> env
| Some client ->
      (* Test whether at the beginning of this iteration of the main loop
* there was a request to read and handle.
       * If yes, we'll try to do it, but it's possible that we have run a recheck
* in-between those two events, and if this recheck was non-blocking, we
* might have already handled this command there. Proceeding to
* handle_connection would then block reading a request that is not there
* anymore, so we need to call has_persistent_connection_request again. *)
if ClientProvider.has_persistent_connection_request client then (
HackEventLogger.got_persistent_client_channels t_start_recheck;
try
let env =
Client_command_handler
.handle_client_command_or_persistent_connection
genv
env
client
`Persistent
|> main_loop_command_handler `Persistent client
in
HackEventLogger.handled_persistent_connection t_start_recheck;
env
with
| exn ->
let e = Exception.wrap exn in
HackEventLogger.handle_persistent_connection_exception
"outer"
e
~is_fatal:true;
Hh_logger.log
"HANDLE_PERSISTENT_CONNECTION_EXCEPTION(outer) [ignoring request] %s"
(Exception.to_string e);
env
) else
env
in
let env =
match env.pending_command_needs_writes with
| Some f -> { (f env) with pending_command_needs_writes = None }
| None -> env
in
let env =
match env.persistent_client_pending_command_needs_full_check with
| Some (f, _reason) when is_full_check_done env.full_check_status ->
{ (f env) with persistent_client_pending_command_needs_full_check = None }
| _ -> env
in
let env =
match env.nonpersistent_client_pending_command_needs_full_check with
| Some (f, _reason, _client) when is_full_check_done env.full_check_status
->
{
(f env) with
nonpersistent_client_pending_command_needs_full_check = None;
}
| _ -> env
in
env
(** This synthesizes a [MultiThreadedCall.Cancel] in the event that we want
a typecheck cancelled due to files changing on disk. It constructs the
human-readable [user_message] and also [log_message] appropriately. *)
let cancel_due_to_watchman
(updates : Relative_path.Set.t) (clock : Watchman.clock option) :
MultiThreadedCall.interrupt_result =
assert (not (Relative_path.Set.is_empty updates));
let size = Relative_path.Set.cardinal updates in
let examples =
(List.take (Relative_path.Set.elements updates) 5
|> List.map ~f:Relative_path.suffix)
@
if size > 5 then
["..."]
else
[]
in
let timestamp = Unix.gettimeofday () in
let tm = Unix.localtime timestamp in
MultiThreadedCall.Cancel
{
MultiThreadedCall.user_message =
Printf.sprintf
"Files have changed on disk! [%02d:%02d:%02d] %s"
tm.Unix.tm_hour
tm.Unix.tm_min
tm.Unix.tm_sec
(List.hd_exn examples);
log_message =
Printf.sprintf
"watchman interrupt handler at clock %s. %d files changed. [%s]"
(ServerEnv.show_clock clock)
(Relative_path.Set.cardinal updates)
(String.concat examples ~sep:",");
timestamp;
}
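(* For illustration: with a change set of, say, 7 files whose first suffix is
 * foo/bar.php, the Cancel above carries roughly
 *   user_message: "Files have changed on disk! [14:03:21] foo/bar.php"
 *   log_message:  "watchman interrupt handler at clock ... 7 files changed. [foo/bar.php,<4 more suffixes>,...]"
 * i.e. the examples list is capped at 5 suffixes plus a trailing "..." entry,
 * and only the first suffix is surfaced to the user. *)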
let watchman_interrupt_handler genv : env MultiThreadedCall.interrupt_handler =
fun env ->
let start_time = Unix.gettimeofday () in
let (env, updates, clock, updates_stale, _telemetry) =
query_notifier genv env `Async start_time
in
(* Async updates can always be stale, so we don't care *)
ignore updates_stale;
let size = Relative_path.Set.cardinal updates in
if size > 0 then (
Hh_logger.log
"Interrupted by Watchman message: %d files changed at watchclock %s"
size
(ServerEnv.show_clock clock);
( {
env with
disk_needs_parsing =
Relative_path.Set.union env.disk_needs_parsing updates;
clock;
},
cancel_due_to_watchman updates clock )
) else
(env, MultiThreadedCall.Continue)
(** Handler for events on the priority socket, which is used for priority commands
    that must be served immediately. *)
let priority_client_interrupt_handler genv client_provider :
env MultiThreadedCall.interrupt_handler =
fun env ->
let t = Unix.gettimeofday () in
Hh_logger.log "Handling message on priority socket.";
  (* For non-persistent clients that don't synchronize file contents, users
   * expect that a query they make immediately after saving a file will reflect
   * that file's contents. Async notifications are not always fast enough to
   * guarantee it, so we need an additional sync query before accepting such
   * a client. *)
let (env, updates, clock, _updates_stale, _telemetry) =
query_notifier genv env `Sync t
in
let size = Relative_path.Set.cardinal updates in
if size > 0 then (
Hh_logger.log
"Interrupted by Watchman sync query: %d files changed at watchclock %s"
size
(ServerEnv.show_clock clock);
( {
env with
disk_needs_parsing =
Relative_path.Set.union env.disk_needs_parsing updates;
clock;
},
cancel_due_to_watchman updates clock )
) else
let idle_gc_slice = genv.local_config.ServerLocalConfig.idle_gc_slice in
let use_tracker_v2 =
genv.local_config.ServerLocalConfig.use_server_revision_tracker_v2
in
let select_outcome =
if ServerRevisionTracker.is_hg_updating use_tracker_v2 then (
Hh_logger.log "Won't handle client message: hg is updating.";
ClientProvider.Not_selecting_hg_updating
) else
ClientProvider.sleep_and_check
client_provider
(Ide_info_store.get_client ())
~ide_idle:env.ide_idle
~idle_gc_slice
`Priority
in
let env =
match select_outcome with
| ClientProvider.Select_persistent ->
failwith "should only be looking at new priority clients"
| ClientProvider.Select_nothing ->
      (* This is possible because the client might have gone away during
       * sleep_and_check. *)
Hh_logger.log "Client went away.";
env
| ClientProvider.Select_exception e ->
Hh_logger.log
"Exception during client FD select: %s"
(Exception.get_ctor_string e);
env
| ClientProvider.Not_selecting_hg_updating ->
Hh_logger.log "hg is updating.";
env
| ClientProvider.Select_new { ClientProvider.client; m2s_sequence_number }
->
Hh_logger.log
"Serving new client obtained from monitor handoff #%d"
m2s_sequence_number;
(match
Client_command_handler.handle_client_command_or_persistent_connection
genv
env
client
`Non_persistent
with
| ServerUtils.Needs_full_recheck { reason; _ } ->
failwith
("unexpected command needing full recheck in priority channel: "
^ reason)
| ServerUtils.Needs_writes { reason; _ } ->
failwith
("unexpected command needing writes in priority channel: " ^ reason)
| ServerUtils.Done env -> env)
in
(* Global rechecks in response to file changes can be paused.
Here, we check if the user requested global rechecks to be paused during
the current recheck (the one that we're in the middle of). The above call
to `handle_connection` could have resulted in this state change if
the RPC was `PAUSE true`.
If the state did change to `Paused` during the current recheck,
we should cancel the current recheck.
Note that `PAUSE false`, which resumes global rechecks in response to
file changes, requires a full recheck by policy - see ServerCommand's
`rpc_command_needs_full_check`. Commands that require a full recheck
do not use `priority pipe`, so they don't end up handled here.
Such commands don't interrupt MultiWorker calls, by design.
The effect of `PAUSE true` during a recheck is that the recheck will be
canceled, while the result of `PAUSE false` is that the client will wait
for the recheck to be finished. *)
let decision =
match (env.full_recheck_on_file_changes, env.init_env.recheck_id) with
| ( Paused { paused_recheck_id = Some paused_recheck_id; _ },
Some recheck_id )
when String.equal paused_recheck_id recheck_id ->
MultiThreadedCall.Cancel
{
MultiThreadedCall.user_message = "Pause via 'hh --pause'";
log_message = "";
timestamp = Unix.gettimeofday ();
}
| _ -> MultiThreadedCall.Continue
in
(env, decision)
let persistent_client_interrupt_handler genv :
env MultiThreadedCall.interrupt_handler =
fun env ->
Hh_logger.info "Handling message on persistent client socket.";
match Ide_info_store.get_client () with
(* Several handlers can become ready simultaneously and one of them can remove
* the persistent client before we get to it. *)
| None -> (env, MultiThreadedCall.Continue)
| Some client ->
(match
Client_command_handler.handle_client_command_or_persistent_connection
genv
env
client
`Persistent
with
| ServerUtils.Needs_full_recheck { env; finish_command_handling; reason } ->
ServerProgress.write "typechecking";
(* This should not be possible, because persistent client will not send
* the next command before receiving results from the previous one. *)
assert (
Option.is_none env.persistent_client_pending_command_needs_full_check);
( {
env with
persistent_client_pending_command_needs_full_check =
Some (finish_command_handling, reason);
},
MultiThreadedCall.Continue )
| ServerUtils.Needs_writes
{ env; finish_command_handling; recheck_restart_is_needed; reason } ->
let full_check_status =
match env.full_check_status with
| Full_check_started when not recheck_restart_is_needed ->
Full_check_needed
| x -> x
in
(* this should not be possible, because persistent client will not send
* the next command before receiving results from the previous one *)
assert (Option.is_none env.pending_command_needs_writes);
( {
env with
pending_command_needs_writes = Some finish_command_handling;
full_check_status;
},
MultiThreadedCall.Cancel
{
MultiThreadedCall.user_message =
Printf.sprintf
"Interrupted [%s]\n(Sorry about this nuisance... we're working to fix it T92870399)"
reason;
log_message = "";
timestamp = Unix.gettimeofday ();
} )
| ServerUtils.Done env ->
ServerProgress.write "typechecking";
(env, MultiThreadedCall.Continue))
let setup_interrupts env client_provider =
{
env with
interrupt_handlers =
(fun genv env ->
let { ServerLocalConfig.interrupt_on_watchman; interrupt_on_client; _ }
=
genv.local_config
in
let handlers = [] in
let handlers =
let interrupt_on_watchman =
interrupt_on_watchman && env.can_interrupt
in
match ServerNotifier.async_reader_opt genv.notifier with
| Some reader when interrupt_on_watchman ->
(Buffered_line_reader.get_fd reader, watchman_interrupt_handler genv)
:: handlers
| _ -> handlers
in
let handlers =
let interrupt_on_client = interrupt_on_client && env.can_interrupt in
match ClientProvider.priority_fd client_provider with
| Some fd when interrupt_on_client ->
(fd, priority_client_interrupt_handler genv client_provider)
:: handlers
| _ -> handlers
in
let handlers =
match
Ide_info_store.get_client () >>= ClientProvider.get_client_fd
with
| Some fd when interrupt_on_client ->
(fd, persistent_client_interrupt_handler genv) :: handlers
| _ -> handlers
in
handlers);
}
let serve genv env in_fds =
if genv.local_config.ServerLocalConfig.ide_parser_cache then
Ide_parser_cache.enable ();
  (* During the server lifetime the dependency table can be out of date. Because of
   * that, we ban access to it by default, forcing the code trying to read it to
   * take it into account, either by explicitly enabling reads (and being fine
   * with stale results), or declaring (in ServerCommand) that it requires a full
   * check to be completed before being executed. *)
let (_ : bool) =
Typing_deps.allow_dependency_table_reads env.deps_mode false
in
let () = Errors.set_allow_errors_in_default_path false in
MultiThreadedCall.on_exception (fun e -> ServerUtils.exit_on_exception e);
let client_provider = ClientProvider.provider_from_file_descriptors in_fds in
(* This is needed when typecheck_after_init option is disabled.
* We're just filling it with placeholder telemetry values since
* we don't much care about this scenario. *)
let init_telemetry =
ServerEnv.Init_telemetry.make
ServerEnv.Init_telemetry.Init_typecheck_disabled_after_init
(Telemetry.create ()
|> Telemetry.string_
~key:"mode"
~value:"serve_due_to_disabled_typecheck_after_init")
in
let typecheck_telemetry = Telemetry.create () in
if Option.is_none env.init_env.why_needed_full_check then
finalize_init env.init_env typecheck_telemetry init_telemetry;
let env = setup_interrupts env client_provider in
let env = ref env in
while true do
let new_env = serve_one_iteration genv !env client_provider in
env := new_env
done
(* Rules for whether+how to load saved-state...
* 1. If hh.conf lacks "use_mini_state = true", then don't load it.
* 2. If hh_server --no-load, then don't load it.
* 3. If hh_server --save-mini or -s, then save but don't load it.
* 4. If "hh_server --with-mini-state", then load the one specified there!
* 5. If hh.conf lacks "load_state_natively_v4", then don't load it
* 6. Otherwise, load it normally!
*)
let resolve_init_approach genv : ServerInit.init_approach * string =
if
Option.is_some (ServerArgs.save_naming_filename genv.options)
&& Option.is_none (ServerArgs.save_filename genv.options)
then
(ServerInit.Parse_only_init, "Server_args_saving_naming")
else if ServerArgs.no_load genv.options then
(ServerInit.Full_init, "Server_args_no_load")
else if Option.is_some (ServerArgs.save_filename genv.options) then
(ServerInit.Full_init, "Server_args_saving_state")
else if
(not genv.local_config.ServerLocalConfig.use_saved_state)
&& Option.is_none (ServerArgs.write_symbol_info genv.options)
then
(ServerInit.Full_init, "Local_config_saved_state_disabled")
else if Option.is_some (ServerArgs.write_symbol_info genv.options) then
match
( genv.local_config.ServerLocalConfig.use_saved_state_when_indexing,
ServerArgs.with_saved_state genv.options )
with
| (false, None) ->
(ServerInit.Write_symbol_info, "Server_args_writing_symbol_info")
| (true, None) ->
( ServerInit.Write_symbol_info_with_state ServerInit.Load_state_natively,
"Server_args_writing_symbol_info_load_native" )
| (_, Some (ServerArgs.Saved_state_target_info target)) ->
( ServerInit.Write_symbol_info_with_state (ServerInit.Precomputed target),
"Server_args_writing_symbol_info_precomputed" )
else
match
( genv.local_config.ServerLocalConfig.load_state_natively,
ServerArgs.with_saved_state genv.options )
with
| (_, Some (ServerArgs.Saved_state_target_info target)) ->
( ServerInit.Saved_state_init (ServerInit.Precomputed target),
"Precomputed" )
| (false, None) -> (ServerInit.Full_init, "No_native_loading_or_precomputed")
| (true, None) ->
(* Use native loading only if the config specifies a load script,
* and the local config prefers native. *)
( ServerInit.Saved_state_init ServerInit.Load_state_natively,
"Load_state_natively" )
let program_init genv env =
Hh_logger.log "Init id: %s" env.init_env.init_id;
ServerProgress.write "initializing...";
ServerProgress.enable_error_production
genv.local_config.ServerLocalConfig.produce_streaming_errors;
Exit.add_hook_upon_clean_exit (fun _finale_data ->
ServerProgress.ErrorsWrite.unlink_at_server_stop ());
let env =
{
env with
init_env =
{ env.init_env with ci_info = Some (Ci_util.begin_get_info ()) };
}
in
let (init_approach, approach_name) = resolve_init_approach genv in
Hh_logger.log "Initing with approach: %s" approach_name;
let (env, init_type, init_error, init_error_telemetry, saved_state_delta) =
let (env, init_result) = ServerInit.init ~init_approach genv env in
match init_approach with
| ServerInit.Write_symbol_info
| ServerInit.Full_init ->
(env, "fresh", None, None, None)
| ServerInit.Parse_only_init -> (env, "parse-only", None, None, None)
| ServerInit.Write_symbol_info_with_state _
| ServerInit.Saved_state_init _ -> begin
match init_result with
| ServerInit.Load_state_succeeded saved_state_delta ->
let init_type =
match
Naming_table.get_forward_naming_fallback_path env.naming_table
with
| None -> "state_load_blob"
| Some _ -> "state_load_sqlite"
in
(env, init_type, None, None, saved_state_delta)
| ServerInit.Load_state_failed (err, telemetry) ->
(env, "state_load_failed", Some err, Some telemetry, None)
| ServerInit.Load_state_declined reason ->
(env, "state_load_declined", Some reason, None, None)
end
in
let env =
{
env with
init_env =
{
env.init_env with
saved_state_delta;
approach_name;
init_error;
init_type;
};
}
in
Hh_logger.log "Waiting for daemon(s) to be ready...";
ServerProgress.write "wrapping up init...";
ServerNotifier.wait_until_ready genv.notifier;
ServerStamp.touch_stamp ();
EventLogger.set_init_type init_type;
let telemetry =
ServerUtils.log_and_get_sharedmem_load_telemetry ()
|> Telemetry.object_opt ~key:"init_error" ~value:init_error_telemetry
|> Telemetry.json_
~key:"deps_mode"
~value:(Typing_deps_mode.to_opaque_json env.deps_mode)
in
HackEventLogger.init_lazy_end telemetry ~approach_name ~init_error ~init_type;
env
let num_workers options local_config =
  (* The number of workers is set both in hh.conf and as an optional server argument.
     If the two numbers given as an argument and in hh.conf are different, we always
     take the minimum of the two.
  *)
let max_procs_opt =
Option.merge
~f:(fun a b ->
if Int.equal a b then
a
else (
Hh_logger.log
("Warning: both an argument --max-procs and a local config "
^^ "for max workers are given. Choosing minimum of the two.");
min a b
))
(ServerArgs.max_procs options)
local_config.ServerLocalConfig.max_workers
in
let nbr_procs = Sys_utils.nbr_procs in
match max_procs_opt with
| None -> nbr_procs
| Some max_procs ->
if max_procs <= nbr_procs then
max_procs
else (
Hh_logger.log
"Warning: max workers is higher than the number of processors. Ignoring.";
nbr_procs
)
(* The hardware we are running on is Intel Skylake and Haswell family
   processors with 80, 56, or 48 cores. It turns out that only half of these
   are physical cores; the rest are hyperthreads. Using worker processes for
   hyperthreads is slower than using just the number of actual computation
   cores. *)
let modify_worker_count hack_worker_count =
let n_procs = Sys_utils.nbr_procs in
let workers =
if hack_worker_count < n_procs then
(* Already limited, use what we have *)
hack_worker_count
else
(* Use half. *)
max 1 (n_procs / 2)
in
workers
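(* Worked example (hypothetical figures): on an 80-logical-core host with
   --max-procs 64 and max_workers = 48 in hh.conf, num_workers takes
   min 64 48 = 48 (and would fall back to the processor count if the result
   exceeded it), and modify_worker_count leaves it at 48 since it is already
   below 80. With neither limit configured, the count starts at 80 and
   modify_worker_count halves it to 40. *)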
let setup_server ~informant_managed ~monitor_pid options config local_config =
let num_workers = num_workers options local_config |> modify_worker_count in
let handle =
SharedMem.init ~num_workers (ServerConfig.sharedmem_config config)
in
let init_id = Random_id.short_string () in
let pid = Unix.getpid () in
  (* There are three files which are used for IPC.
     1. server_finale_file - we unlink it now upon startup,
     and upon clean exit we'll write finale data to it.
     2. server_receipt_to_monitor_file - we'll unlink it now upon startup,
     and upon clean exit we'll unlink it.
     3. server_progress_file - we write "starting up" to it now upon startup,
     and upon clean exit we'll write "shutting down" to it.
     In both cases of clean exit and abrupt exit there'll be leftover files.
     We'll rely upon tmpclean to eventually clean them up. *)
ServerProgress.set_root (ServerArgs.root options);
let server_finale_file = ServerFiles.server_finale_file pid in
let server_receipt_to_monitor_file =
ServerFiles.server_receipt_to_monitor_file pid
in
(try Unix.unlink server_finale_file with
| _ -> ());
(try Unix.unlink server_receipt_to_monitor_file with
| _ -> ());
ServerProgress.write "starting up";
Exit.add_hook_upon_clean_exit (fun finale_data ->
begin
try Unix.unlink server_receipt_to_monitor_file with
| _ -> ()
end;
begin
try
Sys_utils.with_umask 0o000 (fun () ->
let oc = Stdlib.open_out_bin server_finale_file in
Marshal.to_channel oc finale_data [];
Stdlib.close_out oc)
with
| _ -> ()
end;
begin
try ServerProgress.write "shutting down" with
| _ -> ()
end;
());
Hh_logger.log "Version: %s" Hh_version.version;
Hh_logger.log "Hostname: %s" (Unix.gethostname ());
let root = ServerArgs.root options in
let deps_mode =
match ServerArgs.save_64bit options with
| Some new_edges_dir ->
let human_readable_dep_map_dir =
ServerArgs.save_human_readable_64bit_dep_map options
in
Typing_deps_mode.SaveToDiskMode
{ graph = None; new_edges_dir; human_readable_dep_map_dir }
| None -> Typing_deps_mode.InMemoryMode None
in
(* The OCaml default is 500, but we care about minimizing the memory
* overhead *)
let gc_control = Caml.Gc.get () in
Caml.Gc.set { gc_control with Caml.Gc.max_overhead = 200 };
let { ServerLocalConfig.cpu_priority; io_priority; enable_on_nfs; _ } =
local_config
in
let hhconfig_version =
config |> ServerConfig.version |> Config_file.version_to_string_opt
in
List.iter (ServerConfig.ignored_paths config) ~f:FilesToIgnore.ignore_path;
let logging_init init_id ~is_worker =
Hh_logger.Level.set_min_level local_config.ServerLocalConfig.min_log_level;
Hh_logger.Level.set_categories local_config.ServerLocalConfig.log_categories;
if not (Sys_utils.enable_telemetry ()) then
EventLogger.init_fake ()
else if is_worker then
HackEventLogger.init_worker
~root
~hhconfig_version
~init_id
~custom_columns:(ServerArgs.custom_telemetry_data options)
~rollout_flags:(ServerLocalConfig.to_rollout_flags local_config)
~rollout_group:local_config.ServerLocalConfig.rollout_group
~time:(Unix.gettimeofday ())
~per_file_profiling:local_config.ServerLocalConfig.per_file_profiling
else
HackEventLogger.init
~root
~hhconfig_version
~init_id
~custom_columns:(ServerArgs.custom_telemetry_data options)
~informant_managed
~rollout_flags:(ServerLocalConfig.to_rollout_flags local_config)
~rollout_group:local_config.ServerLocalConfig.rollout_group
~time:(Unix.gettimeofday ())
~max_workers:num_workers
~per_file_profiling:local_config.ServerLocalConfig.per_file_profiling
in
logging_init init_id ~is_worker:false;
HackEventLogger.init_start
~experiments_config_meta:
local_config.ServerLocalConfig.experiments_config_meta
(Memory_stats.get_host_hw_telemetry ());
let root_s = Path.to_string root in
let check_mode = ServerArgs.check_mode options in
if (not check_mode) && Sys_utils.is_nfs root_s && not enable_on_nfs then (
Hh_logger.log "Refusing to run on %s: root is on NFS!" root_s;
HackEventLogger.nfs_root ();
Exit.exit Exit_status.Nfs_root
);
if
ServerConfig.warn_on_non_opt_build config && not Build_id.is_build_optimized
then begin
let msg =
Printf.sprintf
"hh_server binary was built in \"%s\" mode, "
Build_id.build_mode
^ "is running with Rust version of parser enabled, "
^ "and this repository's .hhconfig specifies warn_on_non_opt_build option. "
^ "Parsing with non-opt build will take significantly longer"
in
if ServerArgs.allow_non_opt_build options then
Hh_logger.log
"Warning: %s. Initializing anyway due to --allow-non-opt-build option."
msg
else
let msg =
Printf.sprintf
"Error: %s. Recompile the server in opt or dbgo mode, or pass --allow-non-opt-build to continue anyway."
msg
in
Hh_logger.log "%s" msg;
Exit.exit ~msg Exit_status.Server_non_opt_build_mode
end;
Program.preinit ();
Sys_utils.set_priorities ~cpu_priority ~io_priority;
  (* This is to transform SIGPIPE into an exception. A SIGPIPE can happen when
   * someone Ctrl-C's the client.
   *)
Sys_utils.set_signal Sys.sigpipe Sys.Signal_ignore;
PidLog.init (ServerFiles.pids_file root);
Option.iter monitor_pid ~f:(fun monitor_pid ->
PidLog.log ~reason:"monitor" monitor_pid);
PidLog.log ~reason:"main" (Unix.getpid ());
(* Make a sub-init_id because we use it to name temporary files for piping to
scuba logging processes. *)
let worker_logging_init () =
logging_init (init_id ^ "." ^ Random_id.short_string ()) ~is_worker:true
in
let gc_control = ServerConfig.gc_control config in
let workers =
ServerWorker.make
~longlived_workers:local_config.ServerLocalConfig.longlived_workers
~nbr_procs:num_workers
gc_control
handle
~logging_init:worker_logging_init
in
(workers, ServerEnvBuild.make_env config ~init_id ~deps_mode)
let run_once options config local_config =
assert (ServerArgs.check_mode options);
let (workers, env) =
setup_server
options
config
local_config
~informant_managed:false
~monitor_pid:None
in
let genv = ServerEnvBuild.make_genv options config local_config workers in
(* The type-checking happens here *)
let env = program_init genv env in
(* All of saving state happens here *)
let (env, save_state_results) =
match ServerArgs.save_filename genv.options with
| None -> (env, None)
| Some filename -> (env, ServerInit.save_state genv env filename)
in
let _naming_table_rows_changed =
match ServerArgs.save_naming_filename genv.options with
| None -> None
| Some filename ->
Disk.mkdir_p (Filename.dirname filename);
let save_result = Naming_table.save env.naming_table filename in
Hh_logger.log
"Inserted symbols into the naming table:\n%s"
(Naming_sqlite.show_save_result save_result);
if List.length save_result.Naming_sqlite.errors > 0 then begin
Sys_utils.rm_dir_tree filename;
failwith "Naming table state had errors - deleting output file!"
end else
Some save_result
in
(* Finish up by generating the output and the exit code *)
match ServerArgs.concatenate_prefix genv.options with
| Some prefix ->
let prefix =
Relative_path.from_root ~suffix:prefix |> Relative_path.to_absolute
in
let text = ServerConcatenateAll.go genv env [prefix] in
print_endline text;
Exit.exit Exit_status.No_error
| _ ->
Hh_logger.log "Running in check mode";
Program.run_once_and_exit genv env save_state_results
(*
* The server monitor will pass client connections to this process
* via ic.
*)
let daemon_main_exn ~informant_managed options monitor_pid in_fds =
assert (not (ServerArgs.check_mode options));
Folly.ensure_folly_init ();
Printexc.record_backtrace true;
let (config, local_config) = ServerConfig.load ~silent:false options in
Option.iter local_config.ServerLocalConfig.memtrace_dir ~f:(fun dir ->
Daemon.start_memtracing (Filename.concat dir "memtrace.server.ctf"));
let (workers, env) =
setup_server
options
config
local_config
~informant_managed
~monitor_pid:(Some monitor_pid)
in
let genv = ServerEnvBuild.make_genv options config local_config workers in
HackEventLogger.with_id ~stage:`Init env.init_env.init_id @@ fun () ->
let env = MainInit.go genv options (fun () -> program_init genv env) in
serve genv env in_fds
type params = {
informant_managed: bool;
state: ServerGlobalState.t;
options: ServerArgs.options;
monitor_pid: int;
priority_in_fd: Unix.file_descr;
force_dormant_start_only_in_fd: Unix.file_descr;
}
let daemon_main
{
informant_managed;
state;
options;
monitor_pid;
priority_in_fd;
force_dormant_start_only_in_fd;
}
(default_ic, _default_oc) =
(* Avoid leaking this fd further *)
let () = Unix.set_close_on_exec priority_in_fd in
let () = Unix.set_close_on_exec force_dormant_start_only_in_fd in
let default_in_fd = Daemon.descr_of_in_channel default_ic in
(* Restore the root directory and other global states from monitor *)
ServerGlobalState.restore state ~worker_id:0;
(match ServerArgs.custom_hhi_path options with
| None ->
(* Restore hhi files every time the server restarts
in case the tmp folder changes *)
ignore (Hhi.get_hhi_root ())
| Some path ->
if Disk.file_exists path && Disk.is_directory path then (
Hh_logger.log "Custom hhi directory set to %s." path;
Hhi.set_custom_hhi_root (Path.make path)
) else (
Hh_logger.log "Custom hhi directory %s not found." path;
Exit.exit Exit_status.Input_error
));
ServerUtils.with_exit_on_exception @@ fun () ->
daemon_main_exn
~informant_managed
options
monitor_pid
(default_in_fd, priority_in_fd, force_dormant_start_only_in_fd)
let entry = Daemon.register_entry_point "ServerMain.daemon_main" daemon_main |
OCaml Interface | hhvm/hphp/hack/src/server/serverMain.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type params = {
informant_managed: bool;
state: ServerGlobalState.t;
options: ServerArgs.options;
monitor_pid: int;
priority_in_fd: Unix.file_descr;
force_dormant_start_only_in_fd: Unix.file_descr;
}
(* The in/out channels don't actually take type unit -- we write directly
* to the underlying file descriptor -- but we have to declare some type for
* these phantom types because OCaml doesn't allow polymorphic values that
* are not functions. *)
val entry : (params, unit, unit) Daemon.entry
val run_once : ServerArgs.options -> ServerConfig.t -> ServerLocalConfig.t -> 'a
val serve_one_iteration :
ServerEnv.genv -> ServerEnv.env -> ClientProvider.t -> ServerEnv.env
(* Main loop can choose to batch several rechecks together. Setting this will
* disable this behavior, forcing only one recheck per serve_one_iteration
* call. This is useful in tests to observe intermediate state. *)
val force_break_recheck_loop_for_test : bool -> unit
val program_init : ServerEnv.genv -> ServerEnv.env -> ServerEnv.env
val setup_server :
informant_managed:bool ->
monitor_pid:int option ->
ServerArgs.options ->
ServerConfig.t ->
ServerLocalConfig.t ->
MultiWorker.worker list * ServerEnv.env |
OCaml | hhvm/hphp/hack/src/server/serverMethodJumps.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open ServerEnv
let go class_ find_children env genv oc =
let ctx = Provider_utils.ctx_from_server_env env in
let res_list =
(* Might raise {!Naming_table.File_info_not_found} *)
MethodJumps.get_inheritance
ctx
class_
~find_children
env.naming_table
genv.workers
in
Marshal.to_channel oc res_list [];
flush oc;
() |
OCaml | hhvm/hphp/hack/src/server/serverMethodJumpsBatch.ml | (*
* Copyright (c) 2017, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let get_ancestors_single ctx class_ ~filter =
let class_ = MethodJumps.add_ns class_ in
let class_ = Decl_provider.get_class ctx class_ in
Option.map class_ ~f:(fun c ->
MethodJumps.get_ancestor_classes_and_methods ctx c ~filter [])
let get_ancestors_multiple ctx acc classes ~filter =
let result =
List.concat
(List.filter_map classes ~f:(fun class_ ->
get_ancestors_single ctx class_ ~filter))
in
result :: acc
let parallel_helper ctx workers classes filter =
MultiWorker.call
workers
~job:(get_ancestors_multiple ctx ~filter)
~neutral:[]
~merge:List.rev_append
~next:(MultiWorker.next workers classes)
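(* Usage note for [go] below (hypothetical input): a batch such as
   ["B"; "A"; "A"] is sorted and collapsed to ["A"; "B"] before dispatch;
   batches with fewer than 10 entries after dedup are handled inline, larger
   ones fan out to workers via [parallel_helper] above. *)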
(* Entry Point *)
let go :
Provider_context.t ->
MultiWorker.worker list option ->
Decl_provider.type_key list ->
ServerCommandTypes.Method_jumps.filter ->
ServerCommandTypes.Method_jumps.result list =
fun ctx workers classes filter ->
(* Sort and dedup identical queries *)
let deduped =
List.remove_consecutive_duplicates
~equal:String.( = )
(List.sort ~compare:String.compare classes)
in
let results =
if List.length deduped < 10 then
get_ancestors_multiple ctx [] deduped ~filter
else
parallel_helper ctx workers deduped filter
in
List.concat results |
OCaml | hhvm/hphp/hack/src/server/serverNotifier.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type changes =
| Unavailable
(** e.g. because DFind is not available, or watchman subscription is down *)
| SyncChanges of SSet.t
(** contains all changes up to the point that the notifier was invoked *)
| AsyncChanges of SSet.t
(** contains whatever changes have been pushed up to this moment *)
| StateEnter of string * Hh_json.json option
| StateLeave of string * Hh_json.json option
type t =
| IndexOnly of { root: Path.t }
| Dfind of {
root: Path.t;
ready: bool ref;
dfind: DfindLib.t;
}
| Watchman of {
wenv: Watchman.env;
watchman: Watchman.watchman_instance ref;
      (** Watchman state can change during requests (see Watchman.Watchman_dead and Watchman_alive).
This reference will be updated as necessary to the new instance. *)
root: Path.t;
local_config: ServerLocalConfig.t;
num_workers: int;
}
| MockChanges of {
get_changes_async: unit -> changes;
get_changes_sync: unit -> changes;
}
type indexer = (string -> bool) -> unit -> string list
(** This returns an "indexer", i.e. unit -> string list, which when invoked
will return all files under root. *)
let indexer (t : t) (filter : string -> bool) : unit -> string list =
match t with
| Dfind { root; _ }
| IndexOnly { root; _ } ->
Find.make_next_files ~name:"root" ~filter root
| MockChanges _ -> failwith "indexer not mocked"
| Watchman { wenv; num_workers; _ } ->
let files = Watchman.get_all_files wenv in
Bucket.make_list ~num_workers (List.filter ~f:filter files)
let init
(options : ServerArgs.options)
(local_config : ServerLocalConfig.t)
~(num_workers : int) : t * indexer =
let root = ServerArgs.root options in
let ServerLocalConfig.Watchman.
{ enabled; sockname; subscribe; init_timeout; debug_logging; _ } =
local_config.ServerLocalConfig.watchman
in
(* helper to construct Dfind *)
let init_dfind () =
Hh_logger.log "Using dfind";
let in_fd = Daemon.null_fd () in
let log_link = ServerFiles.dfind_log root in
let log_file = Sys_utils.make_link_of_timestamped log_link in
let log_fd = Daemon.fd_of_path log_file in
let dfind =
DfindLib.init
(in_fd, log_fd, log_fd)
(GlobalConfig.scuba_table_name, [root])
in
Dfind { root; ready = ref false; dfind }
in
(* helper to try to construct Watchman, or return None if failed *)
let try_init_watchman () =
Hh_logger.log "Using watchman";
let wenv =
Watchman.init
{
Watchman.init_timeout = Watchman.Explicit_timeout (float init_timeout);
subscribe_mode =
(if subscribe then
Some Watchman.Defer_changes
else
None);
expression_terms = FilesToIgnore.watchman_server_expression_terms;
debug_logging =
ServerArgs.watchman_debug_logging options || debug_logging;
sockname;
subscription_prefix = "hh_type_check_watcher";
roots = [root];
}
()
in
Option.map wenv ~f:(fun wenv ->
HackEventLogger.set_use_watchman ();
Watchman
{
wenv;
watchman = ref (Watchman.Watchman_alive wenv);
root;
local_config;
num_workers;
})
in
let notifier =
if ServerArgs.check_mode options then begin
Hh_logger.log "Not using dfind or watchman";
IndexOnly { root }
end else if not enabled then
init_dfind ()
else
match try_init_watchman () with
| Some t -> t
| None -> init_dfind ()
in
(notifier, indexer notifier)
let init_mock
~(get_changes_async : unit -> changes) ~(get_changes_sync : unit -> changes)
: t =
MockChanges { get_changes_async; get_changes_sync }
let init_null () : t =
let f () = SyncChanges SSet.empty in
init_mock ~get_changes_async:f ~get_changes_sync:f
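(* Sketch of how a test might stub the notifier. [mock_single_change] is a
   hypothetical helper, shown only to illustrate the [init_mock] contract:

     let mock_single_change path =
       let delivered = ref false in
       init_mock
         ~get_changes_async:(fun () -> AsyncChanges SSet.empty)
         ~get_changes_sync:(fun () ->
           if !delivered then
             SyncChanges SSet.empty
           else begin
             delivered := true;
             SyncChanges (SSet.singleton path)
           end)

   The first sync query reports the single changed path; subsequent sync and
   async queries report empty change sets. *)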
let wait_until_ready (t : t) : unit =
match t with
| Dfind { ready; dfind; _ } ->
if !ready then
()
else begin
DfindLib.wait_until_ready dfind;
ready := true
end
| IndexOnly _ -> ()
| MockChanges _ -> ()
| Watchman _ ->
(* The initial watch-project command blocks until watchman's crawl is
done, so we don't have anything else to wait for here. *)
()
(** Helper conversion function, from a single watchman-changes to ServerNotifier.changes *)
let async_changes_from_watchman_changes
~(root : Path.t)
~(local_config : ServerLocalConfig.t)
(watchman_changes : Watchman.pushed_changes) : changes =
let use_tracker_v2 =
local_config.ServerLocalConfig.use_server_revision_tracker_v2
in
match watchman_changes with
| Watchman.Changed_merge_base _ ->
let () =
Hh_logger.log "Error: Typechecker does not use Source Control Aware mode"
in
raise Exit_status.(Exit_with Watchman_invalid_result)
| Watchman.State_enter (name, metadata) ->
if local_config.ServerLocalConfig.hg_aware then
ServerRevisionTracker.on_state_enter name use_tracker_v2;
StateEnter (name, metadata)
| Watchman.State_leave (name, metadata) ->
if local_config.ServerLocalConfig.hg_aware then
ServerRevisionTracker.on_state_leave root name metadata use_tracker_v2;
StateLeave (name, metadata)
| Watchman.Files_changed changes ->
ServerRevisionTracker.files_changed local_config (SSet.cardinal changes);
AsyncChanges changes
(** Helper conversion function, from a list of watchman-changes to combined ServerNotifier.changes *)
let sync_changes_from_watchman_changes_list
~(root : Path.t)
~(local_config : ServerLocalConfig.t)
(watchman_changes_list : Watchman.pushed_changes list) : changes =
let set =
List.fold_left
watchman_changes_list
~f:(fun acc changes ->
match
async_changes_from_watchman_changes ~root ~local_config changes
with
| Unavailable
| StateEnter _
| StateLeave _ ->
acc
| SyncChanges changes
| AsyncChanges changes ->
SSet.union acc changes)
~init:SSet.empty
in
SyncChanges set
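(* Illustrative example: folding a pushed-changes list such as
 *   [State_enter ("hg.update", None); Files_changed {a.php}; Files_changed {b.php}]
 * through the helper above yields SyncChanges {a.php; b.php}; state
 * transitions only feed ServerRevisionTracker (when hg_aware is set) and
 * contribute no paths. *)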
let get_changes_sync (t : t) : changes * Watchman.clock option =
match t with
| IndexOnly _ -> (SyncChanges SSet.empty, None)
| MockChanges { get_changes_sync; _ } -> (get_changes_sync (), None)
| Dfind { dfind; _ } ->
let set =
try
Timeout.with_timeout
~timeout:120
~on_timeout:(fun (_ : Timeout.timings) ->
Exit.exit Exit_status.Dfind_unresponsive)
~do_:(fun t -> DfindLib.get_changes ~timeout:t dfind)
with
| _ -> Exit.exit Exit_status.Dfind_died
in
(SyncChanges set, None)
| Watchman { local_config; watchman; root; _ } ->
let (watchman', changes) =
Watchman.get_changes_synchronously
~timeout:
local_config.ServerLocalConfig.watchman
.ServerLocalConfig.Watchman.synchronous_timeout
!watchman
in
watchman := watchman';
let changes =
sync_changes_from_watchman_changes_list ~root ~local_config changes
in
let clock = Watchman.get_clock !watchman in
(changes, Some clock)
let get_changes_async (t : t) : changes * Watchman.clock option =
match t with
| IndexOnly _ -> (SyncChanges SSet.empty, None)
| MockChanges { get_changes_async; _ } -> (get_changes_async (), None)
| Dfind _ -> get_changes_sync t
| Watchman { watchman; root; local_config; _ } ->
let (watchman', changes) = Watchman.get_changes !watchman in
watchman := watchman';
let changes =
match changes with
| Watchman.Watchman_unavailable -> Unavailable
| Watchman.Watchman_pushed changes ->
async_changes_from_watchman_changes ~root ~local_config changes
| Watchman.Watchman_synchronous changes ->
sync_changes_from_watchman_changes_list ~root ~local_config changes
in
let clock = Watchman.get_clock !watchman in
(changes, Some clock)
let async_reader_opt (t : t) : Buffered_line_reader.t option =
match t with
| Dfind _
| IndexOnly _ ->
None
| MockChanges _ -> None
| Watchman { watchman; _ } -> Watchman.get_reader !watchman |
OCaml Interface | hhvm/hphp/hack/src/server/serverNotifier.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type changes =
| Unavailable
(** e.g. because DFind is not available, or watchman subscription is down *)
| SyncChanges of SSet.t
(** contains all changes up to the point that the notifier was invoked *)
| AsyncChanges of SSet.t
(** contains whatever changes have been pushed up to this moment *)
| StateEnter of string * Hh_json.json option
| StateLeave of string * Hh_json.json option
type t
(** This takes a filter, and returns all files under root that match *)
type indexer = (string -> bool) -> unit -> string list
val init :
ServerArgs.options -> ServerLocalConfig.t -> num_workers:int -> t * indexer
val init_null : unit -> t
val init_mock :
get_changes_async:(unit -> changes) -> get_changes_sync:(unit -> changes) -> t
val wait_until_ready : t -> unit
(** This might return AsyncChanges (the ones that we happen to have received by now)
    or SyncChanges, depending on the underlying notifier's state *)
val get_changes_async : t -> changes * Watchman.clock option
(** This will always return SyncChanges, all changes up to the point this was invoked. *)
val get_changes_sync : t -> changes * Watchman.clock option
val async_reader_opt : t -> Buffered_line_reader.t option |
OCaml | hhvm/hphp/hack/src/server/serverPopulateRemoteDecls.ml | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
(*
 * This module is used in saved-state jobs to create two primary artifacts for
 * each changed file in a mergebase commit:
 * 1) a marshalled OCaml blob representing the decls in said file
 * 2) a JSON blob representing the "fan in", in other words the decls
 * needed to typecheck said file
 *)
open Hh_prelude
let ( >>= ) res f =
Future.Promise.bind res (function
| Ok r -> f r
| Error e -> Future.Promise.return (Error e))
let return_ok x = Future.Promise.return (Ok x)
let return_err x = Future.Promise.return (Error x)
let get_hhconfig_version ~(repo : Path.t) :
(string, string) result Future.Promise.t =
let hhconfig_path =
Path.to_string
(Path.concat repo Config_file.file_path_relative_to_repo_root)
in
(if Disk.file_exists hhconfig_path then
return_ok (Config_file.parse_hhconfig hhconfig_path)
else
let error =
"Attempted to parse .hhconfig.\nBut it doesn't exist at this path:\n"
^ hhconfig_path
in
return_err error)
>>= fun (_, config) ->
let version = Config_file.Getters.string_opt "version" config in
begin
match version with
| None -> failwith "Failed to parse hh version"
| Some version ->
let version = "v" ^ String_utils.lstrip version "^" in
return_ok version
end
>>= fun hhconfig_version -> return_ok hhconfig_version
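(* Worked example (hypothetical version string): an .hhconfig line
 * `version = ^4.103.0` resolves here to "v4.103.0" -- the leading caret is
 * stripped and a "v" prefix is added before the value is used downstream to
 * build the manifold path. *)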
let get_version ~repo =
if Build_id.is_dev_build then
Remote_old_decl_client.Utils.get_dev_build_version ()
else
match Future.get @@ get_hhconfig_version ~repo with
| Ok (Ok result) -> result
| Ok (Error e) -> failwith (Printf.sprintf "%s" e)
| Error e -> failwith (Printf.sprintf "%s" (Future.error_to_string e))
let go
(env : ServerEnv.env)
(genv : ServerEnv.genv)
(workers : MultiWorker.worker list option)
(files : Relative_path.t list option) : unit =
let ctx = Provider_utils.ctx_from_server_env env in
(* TODO: the following is a bug! *)
let repo = Wwwroot.interpret_command_line_root_parameter [] in
let version = get_version ~repo in
let manifold_dir = Remote_old_decl_client.Utils.make_manifold_path ~version in
Hh_logger.log "Will upload to manifold directory %s" manifold_dir;
let cmd = Printf.sprintf "manifold mkdirs %s" manifold_dir in
ignore (Sys.command cmd);
let job (acc : (string * string) list) (fnl : Relative_path.t list) :
(string * string) list =
List.fold_left
~init:acc
~f:(fun acc fn ->
Hh_logger.log
"Saving decls for prefetching: %s"
(Relative_path.suffix fn);
match Direct_decl_utils.direct_decl_parse ctx fn with
| None -> acc
| Some parsed_file ->
let class_decls = parsed_file.Direct_decl_parser.pfh_decls in
let decls_to_upload =
List.map class_decls ~f:(fun (name, decl, decl_hash) ->
let decl_hash_64 = Int64.to_string decl_hash in
let symbol_to_shallow_decl = SMap.singleton name decl in
let marshalled_symbol_to_shallow_decl =
Marshal.to_string symbol_to_shallow_decl []
in
(decl_hash_64, marshalled_symbol_to_shallow_decl))
in
decls_to_upload @ acc)
fnl
in
let results =
match files with
| None ->
MultiWorker.call
workers
~job
~neutral:[]
~merge:List.rev_append
~next:
(ServerUtils.make_next
~hhi_filter:(fun _ -> true)
~indexer:(genv.ServerEnv.indexer FindUtils.file_filter)
~extra_roots:(ServerConfig.extra_paths genv.ServerEnv.config))
| Some files -> job [] files
in
let _ = Remote_old_decls_ffi.put_decls ~silent:false version results in
Hh_logger.log "Processed %d decls" (List.length results);
() |
OCaml Interface | hhvm/hphp/hack/src/server/serverPopulateRemoteDecls.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val go :
ServerEnv.env ->
ServerEnv.genv ->
MultiWorker.worker list option ->
Relative_path.t list option ->
unit |
OCaml | hhvm/hphp/hack/src/server/serverPos.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let resolve : Tast_env.env -> Pos_or_decl.t -> Pos.t =
(fun env p -> Naming_provider.resolve_position (Tast_env.get_ctx env) p) |
OCaml Interface | hhvm/hphp/hack/src/server/serverPos.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val resolve : Tast_env.env -> Pos_or_decl.t -> Pos.t |
OCaml | hhvm/hphp/hack/src/server/serverPrecheckedFiles.ml | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerEnv
let should_use options local_config =
Option.value
(ServerArgs.prechecked options)
~default:local_config.ServerLocalConfig.prechecked_files
let set env prechecked_files = { env with prechecked_files }
let init env ~dirty_local_deps ~dirty_master_deps =
set
env
(Initial_typechecking
{
rechecked_files = Relative_path.Set.empty;
dirty_local_deps;
dirty_master_deps;
clean_local_deps = Typing_deps.(DepSet.make ());
})
(** Update env.needs_recheck and return new value for [dirty_master_deps]
using the following pseudo-code:
needs_recheck = files(fanout(fanout(deps) /\ dirty_master_deps)) \ rechecked_files
dirty_master_deps = dirty_master_deps \ fanout(deps)
*)
let intersect_with_master_deps
~ctx ~deps ~dirty_master_deps ~rechecked_files genv env =
let t0 = Unix.gettimeofday () in
let deps_mode = Provider_context.get_deps_mode ctx in
(* Compute maximum fan-out of input dep set *)
let deps = Typing_deps.add_all_deps deps_mode deps in
let t1 = Unix.gettimeofday () in
(* See if it intersects in any way with dirty_master_deps *)
let common_deps = Typing_deps.DepSet.inter deps dirty_master_deps in
let t2 = Unix.gettimeofday () in
(* Expand the common part *)
let more_deps = Typing_deps.add_all_deps deps_mode common_deps in
let t3 = Unix.gettimeofday () in
  (* Remove the common part from dirty_master_deps (because after expanding it's
   * no longer dirty). *)
let dirty_master_deps =
Typing_deps.DepSet.diff dirty_master_deps common_deps
in
let t4 = Unix.gettimeofday () in
(* Translate the dependencies to files that need to be rechecked. *)
let needs_recheck0 = Naming_provider.get_files ctx more_deps in
let t5 = Unix.gettimeofday () in
let needs_recheck = Relative_path.Set.diff needs_recheck0 rechecked_files in
let t6 = Unix.gettimeofday () in
let size = Relative_path.Set.cardinal needs_recheck in
let env =
if size = 0 then
env
else (
ServerRevisionTracker.typing_changed genv.local_config size;
Hh_logger.log "Adding %d files to recheck" size;
let needs_recheck =
Relative_path.Set.union env.needs_recheck needs_recheck
in
{ env with needs_recheck }
)
in
let duration key tend tstart telemetry =
Telemetry.int_
telemetry
~key
~value:(int_of_float ((tend -. tstart) *. 1000.))
in
let telemetry =
Telemetry.create ()
|> duration "t1" t1 t0
|> duration "t2" t2 t1
|> duration "t3" t3 t2
|> duration "t4" t4 t3
|> duration "t5" t5 t4
|> duration "t6" t6 t5
|> Telemetry.int_ ~key:"deps" ~value:(Typing_deps.DepSet.cardinal deps)
|> Telemetry.int_
~key:"common_deps"
~value:(Typing_deps.DepSet.cardinal common_deps)
|> Telemetry.int_
~key:"more_deps"
~value:(Typing_deps.DepSet.cardinal more_deps)
|> Telemetry.int_
~key:"needs_recheck0"
~value:(Relative_path.Set.cardinal needs_recheck0)
|> Telemetry.int_
~key:"needs_recheck"
~value:(Relative_path.Set.cardinal needs_recheck)
in
(env, dirty_master_deps, size, telemetry)
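(* Small worked example (made-up dep sets): if [deps] expands to the fanout
 * {A; B} and [dirty_master_deps] is {B; C}, then [common_deps] = {B},
 * [needs_recheck] becomes files(fanout {B}) minus the already-rechecked
 * files, and the function returns {C} as the new [dirty_master_deps]. *)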
(** Update env.prechecked_files by adding to the recheck_files field of each variant *)
let update_rechecked_files env rechecked =
let t = Unix.gettimeofday () in
let add_rechecked (dirty_deps : dirty_deps) : dirty_deps =
{
dirty_deps with
rechecked_files =
Relative_path.Set.union rechecked dirty_deps.rechecked_files;
}
in
let env =
set env
@@
match env.prechecked_files with
| Prechecked_files_disabled -> Prechecked_files_disabled
| Initial_typechecking dirty_deps ->
Initial_typechecking (add_rechecked dirty_deps)
| Prechecked_files_ready dirty_deps ->
Prechecked_files_ready (add_rechecked dirty_deps)
in
HackEventLogger.prechecked_update_rechecked t;
env
(** Update:
- env.needs_recheck,
- env.full_check_status,
- env.prechecked_files,
- env.init_env.why_needed_full_check
in esoteric ways
*)
let update_after_recheck genv env rechecked ~start_time =
let ctx = Provider_utils.ctx_from_server_env env in
let telemetry =
Telemetry.create ()
|> Telemetry.duration ~key:"start" ~start_time
|> Telemetry.int_
~key:"rechecked"
~value:(Relative_path.Set.cardinal rechecked)
in
let env = update_rechecked_files env rechecked in
let telemetry = Telemetry.duration telemetry ~key:"end" ~start_time in
match (env.full_check_status, env.prechecked_files) with
| ( Full_check_done,
Initial_typechecking
{
dirty_local_deps;
dirty_master_deps;
rechecked_files;
clean_local_deps;
} ) ->
let t = Unix.gettimeofday () in
assert (Typing_deps.DepSet.is_empty clean_local_deps);
Hh_logger.log "Finished rechecking dirty files, evaluating their fanout";
(* Take any prechecked files that could have been affected by local changes
* and expand them too *)
let (env, dirty_master_deps, size, _telemetry) =
intersect_with_master_deps
~ctx
~deps:dirty_local_deps
~dirty_master_deps
~rechecked_files
genv
env
in
let env =
if size = 0 then
env
else
let full_check_status = Full_check_started in
let why_needed_full_check =
ServerEnv.Init_telemetry.make
ServerEnv.Init_telemetry.Init_prechecked_fanout
(Telemetry.create ()
|> Telemetry.float_ ~key:"time" ~value:(Unix.gettimeofday ())
|> Telemetry.string_ ~key:"reason" ~value:"prechecked_fanout"
|> Telemetry.object_opt
~key:"prev"
~value:
(Option.map
env.init_env.why_needed_full_check
~f:ServerEnv.Init_telemetry.get))
|> Option.some
in
let init_env = { env.init_env with why_needed_full_check } in
{ env with init_env; full_check_status }
in
let clean_local_deps = dirty_local_deps in
let dirty_local_deps = Typing_deps.DepSet.make () in
HackEventLogger.prechecked_evaluate_init t size;
let telemetry =
telemetry
|> Telemetry.duration ~key:"fanout_end" ~start_time
|> Telemetry.int_ ~key:"size" ~value:size
in
let env =
set
env
(Prechecked_files_ready
{
dirty_local_deps;
dirty_master_deps;
rechecked_files;
clean_local_deps;
})
in
(env, telemetry)
| _ -> (env, telemetry)
(** Update:
- env.needs_recheck,
- env.full_check_status,
- env.prechecked_files,
in esoteric ways
*)
let update_after_local_changes genv env changes ~start_time =
let ctx = Provider_utils.ctx_from_server_env env in
let telemetry =
Telemetry.create ()
|> Telemetry.duration ~key:"start" ~start_time
|> Telemetry.int_
~key:"changes"
~value:(Typing_deps.DepSet.cardinal changes)
in
let (env, telemetry) =
match env.prechecked_files with
| Prechecked_files_disabled ->
let telemetry =
telemetry |> Telemetry.string_ ~key:"mode" ~value:"disabled"
in
(env, telemetry)
| Initial_typechecking dirty_deps ->
(* Add [changes] dep set to [dirty_local_deps] *)
let env =
set
env
(Initial_typechecking
{
dirty_deps with
dirty_local_deps =
Typing_deps.DepSet.union changes dirty_deps.dirty_local_deps;
})
in
let telemetry =
telemetry |> Telemetry.string_ ~key:"mode" ~value:"initial"
in
(env, telemetry)
| Prechecked_files_ready dirty_deps ->
(* This is cleared during transition from Initial_typechecking to
* Prechecked_files_ready and should not be populated again *)
assert (Typing_deps.DepSet.is_empty dirty_deps.dirty_local_deps);
let changes =
Typing_deps.DepSet.diff changes dirty_deps.clean_local_deps
in
if Typing_deps.DepSet.is_empty changes then
let telemetry =
Telemetry.string_ telemetry ~key:"mode" ~value:"ready_empty"
in
(env, telemetry)
else
let t = Unix.gettimeofday () in
let clean_local_deps =
Typing_deps.DepSet.union dirty_deps.clean_local_deps changes
in
let (env, dirty_master_deps, size, intersect_telemetry) =
intersect_with_master_deps
~ctx
~deps:changes
~dirty_master_deps:dirty_deps.dirty_master_deps
~rechecked_files:dirty_deps.rechecked_files
genv
env
in
let env =
if size = 0 then
env
else
let full_check_status =
match env.full_check_status with
| Full_check_done -> Full_check_needed
| x -> x
in
{ env with full_check_status }
in
let telemetry =
telemetry
|> Telemetry.string_ ~key:"mode" ~value:"ready_changes"
|> Telemetry.int_ ~key:"size" ~value:size
|> Telemetry.int_
~key:"changes"
~value:(Typing_deps.DepSet.cardinal changes)
|> Telemetry.int_
~key:"dirty_local_deps"
~value:(Typing_deps.DepSet.cardinal dirty_deps.dirty_local_deps)
|> Telemetry.int_
~key:"dirty_master_deps"
~value:(Typing_deps.DepSet.cardinal dirty_deps.dirty_master_deps)
|> Telemetry.int_
~key:"dirty_master_deps_new"
~value:(Typing_deps.DepSet.cardinal dirty_master_deps)
|> Telemetry.int_
~key:"rechecked_files"
~value:(Relative_path.Set.cardinal dirty_deps.rechecked_files)
|> Telemetry.int_
~key:"clean_local_deps"
~value:(Typing_deps.DepSet.cardinal dirty_deps.clean_local_deps)
|> Telemetry.int_
~key:"clean_local_deps_new"
~value:(Typing_deps.DepSet.cardinal clean_local_deps)
|> Telemetry.int_
~key:"needs_recheck_new"
~value:(Relative_path.Set.cardinal env.needs_recheck)
|> Telemetry.string_
~key:"full_check_status"
~value:(ServerEnv.show_full_check_status env.full_check_status)
|> Telemetry.object_ ~key:"intersect" ~value:intersect_telemetry
in
HackEventLogger.prechecked_evaluate_incremental t size;
let env =
set
env
(Prechecked_files_ready
{ dirty_deps with dirty_master_deps; clean_local_deps })
in
(env, telemetry)
in
let telemetry = Telemetry.duration telemetry ~key:"end" ~start_time in
(env, telemetry)
let expand_all env =
match env.prechecked_files with
| Prechecked_files_disabled -> env
| Initial_typechecking dirty_deps
| Prechecked_files_ready dirty_deps ->
let ctx = Provider_utils.ctx_from_server_env env in
let deps_mode = Provider_context.get_deps_mode ctx in
let deps =
Typing_deps.add_all_deps deps_mode dirty_deps.dirty_master_deps
in
let needs_recheck = Naming_provider.get_files ctx deps in
let needs_recheck =
Relative_path.Set.diff needs_recheck dirty_deps.rechecked_files
in
let env =
if Relative_path.Set.is_empty needs_recheck then
env
else (
Hh_logger.log
"Adding %d files to recheck after expanding all master deps"
(Relative_path.Set.cardinal needs_recheck);
let needs_recheck =
Relative_path.Set.union env.needs_recheck needs_recheck
in
{ env with needs_recheck; full_check_status = Full_check_started }
)
in
set
env
(Prechecked_files_ready
{ dirty_deps with dirty_master_deps = Typing_deps.DepSet.make () }) |
OCaml Interface | hhvm/hphp/hack/src/server/serverPrecheckedFiles.mli | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
val should_use : ServerArgs.options -> ServerLocalConfig.t -> bool
val expand_all : ServerEnv.env -> ServerEnv.env
val init :
ServerEnv.env ->
dirty_local_deps:Typing_deps.DepSet.t ->
dirty_master_deps:Typing_deps.DepSet.t ->
ServerEnv.env
val update_after_recheck :
ServerEnv.genv ->
ServerEnv.env ->
Relative_path.Set.t ->
start_time:float ->
ServerEnv.env * Telemetry.t
val update_after_local_changes :
ServerEnv.genv ->
ServerEnv.env ->
Typing_deps.DepSet.t ->
start_time:float ->
ServerEnv.env * Telemetry.t |
OCaml | hhvm/hphp/hack/src/server/serverPrepareCallHierarchy.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let go_quarantined
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(line : int)
~(column : int) : Lsp.PrepareCallHierarchy.result =
let all_sym_occs =
IdentifySymbolService.go_quarantined ~ctx ~entry ~line ~column
in
let matching_sym_occs =
List.filter
(fun s -> Pos.inside s.SymbolOccurrence.pos line column)
all_sym_occs
in
let (_, get_def) = ServerDepsUtil.get_ast_getdef ctx entry in
let get_def_opt (sym_occ : Relative_path.t SymbolOccurrence.t) :
Relative_path.t SymbolDefinition.t option =
if sym_occ.SymbolOccurrence.is_declaration then
get_def sym_occ
else
None
in
let def_opts = List.map get_def_opt matching_sym_occs in
let items =
List.map2 Lsp_helpers.symbol_to_lsp_call_item matching_sym_occs def_opts
in
Some items |
OCaml Interface | hhvm/hphp/hack/src/server/serverPrepareCallHierarchy.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val go_quarantined :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
line:int ->
column:int ->
Lsp.PrepareCallHierarchy.result |
OCaml | hhvm/hphp/hack/src/server/serverProgress.ml | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type disposition =
| DStopped [@value 1]
| DWorking [@value 2]
| DReady [@value 3]
[@@deriving show { with_path = false }, enum]
let _unused = (min_disposition, max_disposition)
(* to suppress "unused" warning *)
type t = {
pid: int;
disposition: disposition;
message: string;
timestamp: float;
}
(** The caller must set this before attempting to send progress; otherwise an exception is raised *)
let root : Path.t option ref = ref None
let set_root (r : Path.t) : unit = root := Some r
let disable () : unit = root := Some Path.dummy_path
let server_progress_file () =
match !root with
| None -> failwith "ServerProgress.set_root must be called first"
| Some root when Path.equal root Path.dummy_path -> None
| Some root -> Some (ServerFiles.server_progress_file root)
let try_delete () : unit =
match server_progress_file () with
| None -> ()
| Some server_progress_file ->
(try Unix.unlink server_progress_file with
| _ -> ())
(** This writes to the specified progress file. It first acquires
an exclusive (writer) lock. (Locks on unix are advisory; we trust
[read] below to also acquire a lock). It overwrites
whatever was there before. In case of failure, it logs but does not
raise. That's on the principle that defects in
progress-reporting should never break hh_server. *)
let write_file (t : t) : unit =
match server_progress_file () with
| None -> ()
| Some server_progress_file ->
let open Hh_json in
let { pid; disposition; message; timestamp } = t in
let content =
JSON_Object
[
("pid", int_ pid);
("disposition", int_ (disposition_to_enum disposition));
("progress", string_ message);
("timestamp", float_ timestamp);
]
|> json_to_multiline
in
(try Sys_utils.protected_write_exn server_progress_file content with
| exn ->
let e = Exception.wrap exn in
Hh_logger.log
"SERVER_PROGRESS_EXCEPTION(write) %s\n%s"
(Exception.get_ctor_string e)
(Exception.get_backtrace_string e |> Exception.clean_stack);
HackEventLogger.server_progress_write_exn ~server_progress_file e;
())
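(* Illustrative sketch (not part of the original source): the progress file
   written by [write_file] above is a small multiline JSON object. For example,
   a server with pid 12345 that is mid-typecheck might write something like
     {
       "pid": 12345,
       "disposition": 2,
       "progress": "typechecking 150/1000 files (15.0%)",
       "timestamp": 1680000000.0
     }
   where "disposition" is the integer produced by [disposition_to_enum]
   (2 = DWorking). The pid, message and timestamp here are made up for
   illustration only. *)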
(** This reads the specified progress file, which is assumed to exist.
It first acquires a non-exclusive (reader) lock. (Locks on unix are
advisory; we trust [write_file] above to also acquire a writer
lock). If there are failures, we log, and return a synthesized [t]
whose message indicates why. *)
let read () : t =
let synthesize_stopped message =
{
pid = 0;
disposition = DStopped;
message;
timestamp = Unix.gettimeofday ();
}
in
match server_progress_file () with
| None -> failwith "ServerProgress.disable: can't read it"
| Some server_progress_file ->
let content = ref "[not yet read content]" in
(try
content := Sys_utils.protected_read_exn server_progress_file;
let json = Some (Hh_json.json_of_string !content) in
let pid = Hh_json_helpers.Jget.int_exn json "pid" in
let message = Hh_json_helpers.Jget.string_exn json "progress" in
let timestamp = Hh_json_helpers.Jget.float_exn json "timestamp" in
let disposition =
Hh_json_helpers.Jget.int_opt json "disposition"
|> Option.bind ~f:disposition_of_enum
|> Option.value ~default:DReady
in
(* If the status had been left behind on disk by a process that terminated without deleting it,
well, we'll return the same 'unknown' as if the file didn't exist. *)
if Proc.is_alive ~pid ~expected:"" then
{ pid; message; disposition; timestamp }
else
synthesize_stopped "stopped"
with
| Unix.Unix_error (Unix.ENOENT, _, _) -> synthesize_stopped "stopped"
| exn ->
let e = Exception.wrap exn in
Hh_logger.log
"SERVER_PROGRESS_EXCEPTION(read) %s\n%s\n%s"
(Exception.get_ctor_string e)
(Exception.get_backtrace_string e |> Exception.clean_stack)
!content;
HackEventLogger.server_progress_read_exn ~server_progress_file e;
synthesize_stopped "unknown state")
let write ?(include_in_logs = true) ?(disposition = DWorking) fmt =
let f message =
begin
if include_in_logs then Hh_logger.log "[progress] %s" message;
let timestamp = Unix.gettimeofday () in
write_file { pid = Unix.getpid (); disposition; message; timestamp }
end
in
Printf.ksprintf f fmt
(* The message will look roughly like this:
<operation> <done_count>/<total_count> <unit> <percent done> <extra>*)
let make_percentage_progress_message
~(operation : string)
~(done_count : int)
~(total_count : int)
~(unit : string)
~(extra : string option) : string =
let unit =
if String.equal unit "" then
unit
else
unit ^ " "
in
let percent =
Float.round_down
(1000.0 *. float_of_int done_count /. float_of_int total_count)
/. 10.0 (* so that 999999/1000000 will show as 99.9%, not 100.0% *)
in
let main_message =
Printf.sprintf
"%s %d/%d %s(%.1f%%)"
operation
done_count
total_count
unit
percent
in
match extra with
| Some extra -> main_message ^ " " ^ extra
| None -> main_message
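(* Illustrative example (not part of the original source): calling
     make_percentage_progress_message
       ~operation:"typechecking" ~done_count:150 ~total_count:1000
       ~unit:"files" ~extra:None
   yields "typechecking 150/1000 files (15.0%)"; and with ~done_count:999999
   ~total_count:1000000 the percentage renders as 99.9% rather than rounding
   up to 100.0%, as noted above. *)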
let write_percentage
~(operation : string)
~(done_count : int)
~(total_count : int)
~(unit : string)
~(extra : string option) : unit =
write
~include_in_logs:false
"%s"
(make_percentage_progress_message
~operation
~done_count
~total_count
~unit
~extra)
type errors_file_error =
| NothingYet
| Complete of Telemetry.t [@printer (fun fmt _t -> fprintf fmt "Complete")]
| Restarted of {
user_message: string;
log_message: string;
} [@printer (fun fmt _ _ -> fprintf fmt "Restarted")]
| Stopped
| Killed of Exit_status.finale_data option
[@printer (fun fmt _ -> fprintf fmt "Killed")]
| Build_id_mismatch
[@@deriving show { with_path = false }]
type errors_file_item =
| Errors of {
errors: Errors.finalized_error list Relative_path.Map.t;
(** we convert to finalized_error (i.e. turn paths absolute) before writing in the file,
because consumers don't know hhi paths. As for the [Relative_path.Map.t], it
is guaranteed to only have root-relative paths. (we don't have a type to indicate
"I am the suffix of a root-relative path" so this is the best we can do.) *)
timestamp: float;
(** the errors were detected by reading the files not later than this time. *)
}
| Telemetry of Telemetry.t
let is_complete (e : errors_file_error) : bool =
match e with
| Complete _ -> true
| _ -> false
let is_production_enabled = ref true
let enable_error_production (b : bool) : unit = is_production_enabled := b
let errors_file_path () =
match !root with
| None -> failwith "ServerProgress.set_root must be called first"
| Some _ when not !is_production_enabled -> None
| Some root when Path.equal root Path.dummy_path -> None
| Some root -> Some (ServerFiles.errors_file_path root)
(** This is an internal module concerned with the binary format of the errors-file. *)
module ErrorsFile = struct
(** The errors-file is a binary format consisting of a sequence of messages
written by the hh_server process.
The first two messages are [VersionHeader] followed by [Header], and these
always exist (they are placed atomically at error-file creation by
[ErrorsWrite.new_empty_file]). Then there are zero or more [Item], each
one appended by a call to [ErrorsWrite.report]. Finally, there may be an [End],
appended by a call to [ErrorsWrite.complete] or [unlink_at_server_stop] or [new_empty_file].
Each message consists of a 5-byte preamble (one sync byte, 4 size bytes)
followed by a marshalled [message] structure. *)
type message =
| VersionHeader of {
version: string;
(** from Build_id.build_revision, or empty if dev build or --ignore-hh-version *)
extra: Hh_json.json;
(** CARE! The hh_client binary might read the [VersionHeader] message that was written by either
an older or a newer version of the hh_server binary. Therefore, it is impossible to
change the datatype! Any new fields will have to be added to 'extra' if they're needed
cross-version, or more commonly just placed in [Header] since it is version-safe. *)
}
| Header of {
pid: int;
(** the pid of the hh_server that wrote this file; clients send signal 0 (i.e. "kill -0 <pid>") to see if it's still alive *)
cmdline: string;
(** the /proc/pid/cmdline of hh_server; clients check that the current /proc/pid/cmdline is the same (proving that the pid hasn't been recycled) *)
timestamp: float; (** the time at which the typecheck began *)
clock: Watchman.clock option;
(** the watchclock at which the typecheck began, i.e. reflecting all file-changes up to here *)
}
| Item of errors_file_item
| End of {
error: errors_file_error;
timestamp: float;
log_message: string;
}
(** This helper acquires an exclusive lock on the file, appends the message, then releases the lock.
It does not do any state validation - that's left to its caller. *)
let write_message (fd : Unix.file_descr) (message : message) : unit =
let payload = Marshal.to_bytes message [] in
let preamble = Marshal_tools.make_preamble (Bytes.length payload) in
Sys_utils.with_lock fd Unix.F_LOCK ~f:(fun () ->
Sys_utils.write_non_intr fd preamble 0 (Bytes.length preamble);
Sys_utils.write_non_intr fd payload 0 (Bytes.length payload))
let read_message (fd : Unix.file_descr) : message =
let synthesize_end (error : errors_file_error) (log_message : string) :
message =
End { error; timestamp = Unix.gettimeofday (); log_message }
in
Sys_utils.with_lock fd Unix.F_RLOCK ~f:(fun () ->
let preamble =
Sys_utils.read_non_intr fd Marshal_tools.expected_preamble_size
in
match preamble with
| None -> synthesize_end NothingYet "no additional bytes"
| Some preamble ->
let size = Marshal_tools.parse_preamble preamble in
(* This assert is in case the file is garbled, and we read a crazy-big size,
to avoid allocating say a 20gb bytes array and having the machine get stuck. *)
assert (size < 20_000_000);
(match Sys_utils.read_non_intr fd size with
| None -> synthesize_end (Killed None) "no payload"
| Some payload ->
let message : message = Marshal.from_bytes payload 0 in
message))
end
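(* Illustrative sketch (not part of the original source): the framing used by
   [write_message]/[read_message] above is just a Marshal_tools preamble
   followed by the marshalled message. Ignoring locking and the
   non-interruptible writes, each appended record is equivalent to
     let frame (m : message) : bytes =
       let payload = Marshal.to_bytes m [] in
       Bytes.cat (Marshal_tools.make_preamble (Bytes.length payload)) payload
   so a reader only needs the preamble to know how many payload bytes follow. *)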
module ErrorsWrite = struct
(** This structure represents hh_server's current knowledge about the errors-file.
Only hh_server should be manipulating the errors-file; hence, this knowledge is authoritative. *)
type write_state =
| Absent
(** The errors-file either doesn't exist because this hh_server instance hasn't
yet called [new_empty_file], or because it called [unlink_at_server_stop]. *)
| Reporting of Unix.file_descr * int
[@printer (fun fmt (_fd, n) -> fprintf fmt "Reporting[%d]" n)]
(** The errors-file does exist,
due to a previous call to [new_empty_file], and it's fine to call [report] or [complete]. *)
| Closed
(** The errors-file has an end-marker due to a call to [complete]. *)
[@@deriving show { with_path = false }]
(** This mutable value tracks the current state of the errors-file belonging to this
instance of hh_server.
The errors-file is deemed absent when hh_server starts up. Even if there had been a leftover
errors-file from a previous hh_server instance, any attempt to read it will fail because it
necessarily must have come from a now-dead PID. *)
let write_state : write_state ref = ref Absent
(** This helper is called by [new_empty_file] and [unlink_at_server_stop]...
1. It unlinks the current file, if any (in states [Reporting], [Closed])
2. It calls the [after_unlink] callback
3. It writes an End message if needed (in state [Reporting])
4. It closes the file-descriptor for the current file (in state [Reporting]).
For example: in the case of caller [new_empty_file], its [after_unlink] callback creates a new errors file.
In this way, if a client with an existing file-descriptor should read a sentinel, then it knows
for sure it can immediately close that file-descriptor and re-open the new errors file. *)
let unlink_sentinel_close
(error : errors_file_error)
~(log_message : string)
~(errors_file_path : string)
~(after_unlink : unit -> 'a) =
begin
try Unix.unlink errors_file_path with
| _ -> ()
end;
let result = after_unlink () in
begin
match !write_state with
| Reporting (fd, count) ->
Hh_logger.log
"Errors-file: ending old %s with its %d reports, with sentinel %s"
(Sys_utils.show_inode fd)
count
(show_errors_file_error error);
ErrorsFile.write_message
fd
(ErrorsFile.End
{ error; timestamp = Unix.gettimeofday (); log_message });
Unix.close fd
| _ -> ()
end;
result
let new_empty_file
~(clock : Watchman.clock option)
~(ignore_hh_version : bool)
~(cancel_reason : string * string) : unit =
match errors_file_path () with
| None -> ()
| Some errors_file_path -> begin
(* (1) unlink the old errors file, (2) atomically create a new errors-file with
Version_header+Header messages in it, (3) write a End marker into the old errors file.
**Atomicity** is so that a client can be assured that if they open an errors-file
then it will necessarily have those two header messages in it.
**Sentinel-after-new** is so that a client can be assured that if they encounter
a sentinel then there's already a new errors-file ready to be read immediately
or, if not, then the server must have died.
Both mechanisms are there to make the client-side code easier to write! *)
let pid = Unix.getpid () in
let version =
if ignore_hh_version then
""
else
Build_id.build_revision
in
let version_header =
ErrorsFile.VersionHeader { version; extra = Hh_json.JSON_Object [] }
in
let header =
ErrorsFile.Header
{
pid;
cmdline = Proc.get_cmdline pid |> Result.ok_or_failwith;
timestamp = Unix.gettimeofday ();
clock;
}
in
let (user_message, log_message) = cancel_reason in
let fd =
unlink_sentinel_close
(Restarted { user_message; log_message })
~log_message:"new_empty_file"
~errors_file_path
~after_unlink:(fun () ->
let fd =
Sys_utils.atomically_create_and_init_file
errors_file_path
~rd:false
~wr:true
0o666
~init:(fun fd ->
ErrorsFile.write_message fd version_header;
ErrorsFile.write_message fd header)
in
match fd with
| None ->
failwith "Errors-file was created by someone else under our feet"
| Some fd ->
Hh_logger.log
"Errors-file: starting new %s at clock %s"
(Sys_utils.show_inode fd)
(Option.value clock ~default:"[none]");
fd)
in
write_state := Reporting (fd, 0)
end
let report (errors : Errors.t) : unit =
match errors_file_path () with
| None -> ()
| Some _ -> begin
match !write_state with
| Absent
| Closed ->
failwith ("Cannot report in state " ^ show_write_state !write_state)
| Reporting _ when Errors.is_empty ~drop_fixmed:true errors -> ()
| Reporting (fd, n) ->
let n = n + 1 in
if n <= 5 then
Hh_logger.log
"Errors-file: report#%d on %s: %d new errors%s"
n
(Sys_utils.show_inode fd)
(Errors.count errors)
(if n = 5 then
" [won't report any more this typecheck...]"
else
"");
(* sort and dedupe *)
let errors =
errors
|> Errors.drop_fixmed_errors_in_files
|> Errors.as_map
|> Relative_path.Map.filter ~f:(fun path _errors ->
let is_root =
Relative_path.is_root (Relative_path.prefix path)
in
if not is_root then
HackEventLogger.invariant_violation_bug
"error in file outside root"
~path;
is_root)
|> Relative_path.Map.map ~f:(fun errors ->
errors |> Errors.sort |> List.map ~f:User_error.to_absolute)
in
ErrorsFile.write_message
fd
(ErrorsFile.Item (Errors { timestamp = Unix.gettimeofday (); errors }));
write_state := Reporting (fd, n)
end
let telemetry (telemetry : Telemetry.t) : unit =
match errors_file_path () with
| None -> ()
| Some _ -> begin
match !write_state with
| Absent
| Closed ->
failwith ("Cannot report in state " ^ show_write_state !write_state)
| Reporting (fd, _n) ->
ErrorsFile.write_message fd (ErrorsFile.Item (Telemetry telemetry))
end
let complete (telemetry : Telemetry.t) : unit =
match errors_file_path () with
| None -> ()
| Some _ -> begin
match !write_state with
| Absent
| Closed ->
failwith ("Cannot complete in state " ^ show_write_state !write_state)
| Reporting (fd, n) ->
Hh_logger.log
"Errors-file: completing %s after %d reports"
(Sys_utils.show_inode fd)
n;
ErrorsFile.write_message
fd
(ErrorsFile.End
{
error = Complete telemetry;
timestamp = Unix.gettimeofday ();
log_message = "complete";
});
write_state := Closed
end
let unlink_at_server_stop () : unit =
match errors_file_path () with
| None -> ()
| Some errors_file_path ->
unlink_sentinel_close
Stopped
~log_message:"unlink"
~errors_file_path
~after_unlink:(fun () -> ());
write_state := Absent
let get_state_FOR_TEST () : string = show_write_state !write_state
let create_file_FOR_TEST ~(pid : int) ~(cmdline : string) : unit =
let fd =
Unix.openfile
(Option.value_exn (errors_file_path ()))
[Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC]
0o666
in
ErrorsFile.write_message
fd
(ErrorsFile.VersionHeader { version = ""; extra = Hh_json.JSON_Object [] });
ErrorsFile.write_message
fd
(ErrorsFile.Header { pid; cmdline; timestamp = 0.0; clock = None });
Unix.close fd
end
module ErrorsRead = struct
type log_message = string
type open_success = {
pid: int;
timestamp: float;
clock: Watchman.clock option;
}
type read_result = (errors_file_item, errors_file_error * log_message) result
let openfile (fd : Unix.file_descr) :
(open_success, errors_file_error * log_message) result =
let message1 = ErrorsFile.read_message fd in
let message2 = ErrorsFile.read_message fd in
match (message1, message2) with
| ( ErrorsFile.VersionHeader { version; _ },
ErrorsFile.Header { pid; cmdline; clock; timestamp } ) ->
if
String.length version > 16
&& String.length Build_id.build_revision > 16
&& not (String.equal version Build_id.build_revision)
then
(* This is a version mismatch which we can't ignore. (We always ignore mismatch from dev-builds...
version="" means a buck dev build, and version.length<=16 means a dune dev build.) *)
let msg =
Printf.sprintf
"errors-file is version %s, but we are %s"
version
Build_id.build_revision
in
Error (Build_id_mismatch, msg)
else if not (Proc.is_alive ~pid ~expected:cmdline) then
let server_finale_file = ServerFiles.server_finale_file pid in
let finale_data = Exit_status.get_finale_data server_finale_file in
Error (Killed finale_data, "Errors-file is from defunct PID")
else
Ok { pid; clock; timestamp }
| _ -> failwith "impossible message combination"
let read_next_errors (fd : Unix.file_descr) : read_result =
match ErrorsFile.read_message fd with
| ErrorsFile.VersionHeader _
| ErrorsFile.Header _ ->
failwith
"do ServerProgress.ErrorsRead.openfile before read_next_error or ServerProgressLwt.watch_errors_file"
| ErrorsFile.Item item -> Ok item
| ErrorsFile.End { error; log_message; _ } -> Error (error, log_message)
end |
OCaml Interface | hhvm/hphp/hack/src/server/serverProgress.mli | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
(** All functions in this file will throw unless you've already called set_root
at the start of your process. *)
val set_root : Path.t -> unit
(** use this in tests, instead of set_root,
to disable progress-logging and error-streaming. *)
val disable : unit -> unit
type disposition =
| DStopped
(** Hh_server has failed in some way, so will be unable to handle future work until it's been fixed. *)
| DWorking
(** Hh_server is working on something, e.g. doing a typecheck or handling a request in ServerRpc *)
| DReady
(** Hh_server is ready to handle requests, i.e. not doing any work. *)
[@@deriving show]
(** Progress is a file in /tmp/hh_server/<repo>.progress.json which is written
by monitor+server. It lives from the moment the monitor starts up until the
moment it finally dies or is killed. You should only read it by the [read]
call, since that protects against races. It also protects against server death,
because if you try to [read] a progress.json that had been created by a now-dead
server PID then it detects that fact and returns an "unknown" status.
The state fields [disposition] and [message] are intended to be shown to the
user, not acted upon in code -- they're slightly handwavey in places (e.g. there's
an interval from when a server dies until the monitor realizes that fact, during which
[read] will return "unknown"). *)
type t = {
pid: int; (** pid of the process that wrote this status *)
disposition: disposition;
message: string; (** e.g. "typechecking 5/15 files" *)
timestamp: float;
}
(** Reads the current progress.json file. If there is none, or if there is one
but it came from a dead PID, or if it was corrupt, this function synthesizes
a [DStopped] response with an appropriate human-readable message that reflects
the precise reason, but simply says "stopped" in the typical case of absent
file or dead PID. *)
val read : unit -> t
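(* Illustrative example (not part of the original source): a client could poll
   the overall status like this (field punning on the record above):
     let { ServerProgress.disposition; message; _ } = ServerProgress.read () in
     Printf.printf "%s: %s\n" (ServerProgress.show_disposition disposition) message
   which would print something like "DWorking: typechecking 150/1000 files (15.0%)". *)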
(** [write ~include_in_logs ~disposition fmt_string] writes
[disposition] and the formatted string [fmt_string] to progress.json.
The signature [('a, unit, string, unit) format4 -> 'a]
is simply the signature of [Printf.printf].
Default disposition is [DWorking]. If you want to indicate that the server is stopped
or ready, you must provide a disposition explicitly. *)
val write :
?include_in_logs:bool ->
?disposition:disposition ->
('a, unit, string, unit) format4 ->
'a
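(* Illustrative examples (not part of the original source):
     ServerProgress.write "typechecking %d files" 42
   records a [DWorking] progress message, while
     ServerProgress.write ~include_in_logs:false ~disposition:ServerProgress.DReady "ready"
   marks the server as ready without adding a line to the logs. *)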
(** Shorthand for [write ~include_in_logs:false ~disposition:DWorking "%s" message]
for the message "<operation> <done_count>/<total_count> <unit> <percent done> <extra>". *)
val write_percentage :
operation:string ->
done_count:int ->
total_count:int ->
unit:string ->
extra:string option ->
unit
(** Call this upon monitor shutdown to delete the progress file *)
val try_delete : unit -> unit
(**********************************************************************
OVERVIEW OF STREAMING ERRORS FILE
Errors is a file at /tmp/hh_server/<repo>/errors.bin which is written
by server during a typecheck. The file is first created when the server
starts its first typecheck, gets errors appended to it as the typecheck
discovers them, is unlinked and then another one with the same name
created each time the server starts another typecheck, and the file is finally
unlinked upon server exit. A "typecheck" can be thought of as a typecheck of the
entire program, and hence the errors file will contain every error reported in WWW.
But in reality the server does tricks to typecheck a smaller number of files just
so long as it still manages to report every error into the file.
Therefore: a client can at any time open an file descriptor to this file,
and `tail -f -n +1` (i.e. read from the start and then follow) to follow
the typecheck that is currently underway or most recently completed, from its
start to its end. It will still be able to read the file descriptor through
to its end even if, in the meantime, a new typecheck has started and the old
file has been unlinked. Note that a given file-descriptor will only ever point
to a file which monotonically grows.
The server can have its typecheck interrupted. Some interruptions like watchman
will cause the current typecheck to be cancelled, then a new typecheck started.
In this case the existing errors file will be unlinked, a new errors file created
for the new typecheck, and a sentinel will be written into the old file descriptor
to show that it ended before completing. Other interruptions, e.g. from
`hh --type-at-pos`, will have no effect: the current typecheck file can continue
being read just fine. If a client reads the sentinel from its file descriptor, that is
a guarantee that a new errors file is already in place and can be opened (or,
if no errors file exists, that can only be because the server has terminated).
If the file does not exist, the client knows that either there is no server, or there
is one but it has not yet started typechecking (it might still be loading saved state).
In either case it is appropriate for the client to clientConnect {autostart_server=true}
in the normal way, until it establishes a connection to the server, and then it can
wait for the errors file.
SEMANTICS OF ERRORS-FILE CONTENTS
* An errors-file is tied to a particular watchman clock value. It reflects
all file-changes that happened prior to the clock. (As to whether it reflects
any file-changes that happened after the clock, that is impossible to tell.)
* As mentioned above, when the errors-file is complete, it contains the full
set of errors that hh_server knows about for the project.
* The error file is in a binary format consisting of a header followed by a series of
Errors.error list Relative_path.map along with timestamp,
followed by an "end sentinel" if the typecheck has finished or been interrupted.
Let's call each of these maps an error report. Each encompasses
several files, and for each file it has a sorted list of errors. Currently we make
one report for all "duplicate name" errors across the project if any, followed by
one report per batch that had errors. This means that a file might be mentioned
twice, once in a "duplicate name" report, once later. This will change in future
so that each file is reported only once.
* It currently does not write empty reports, though that might change in future
(e.g. we might decide to write "no errors in file a.php" for a file which
previously did have errors).
* Within a single report, the error list has been sorted and de-duped.
LIFECYCLE SEMANTICS OF PRODUCING AND CONSUMING THE ERRORS-FILE
The code that produces errors-file lives in ServerMain.ml (which registers an on-exit
hook to delete the file), ServerTypeCheck.ml (to manage the previous and new
errors-file immediately before and after a typecheck is performed),
and typing_check_service.ml (to make the actual error reports).
1. When hh_server is launched, it either eventually exits or eventually writes
an errors.bin file with some clock value that came at or after its launch.
2. When files on disk are changed that pass FilesToIgnore.watchman_server_expression_terms
and FindUtils.post_watchman_filter, then eventually either a new errors.bin will
be written which reflects the clock after those files, or eventually it will terminate.
These invariants imply how the client should connect:
1. If there's no errors.bin, then doing an RPC connect to the server that succeeds
means that it's fine to just wait around waiting for an errors.bin to succeed.
(Except for one vulnerability: if the server handles the RPC but then crashes
before starting its first check).
2. If there is an errors.bin but files have changed on disk since error's watchclock,
it's fine for the client to just wait until a new errors.bin gets created.
*)
(** If we don't succeed in reading the next errors report, here's why. *)
type errors_file_error =
| NothingYet
(** There are no new errors yet available, not until server calls [ErrorsWrite.report]. *)
| Complete of Telemetry.t
(** The typecheck has finished, i.e. server called [ErrorsWrite.complete]. *)
| Restarted of {
user_message: string;
log_message: string;
}
(** The typecheck didn't complete; a new typecheck in a new errors-file has started.
i.e. server called [ErrorsWrite.new_empty_file] before [ErrorsWrite.complete].
[user_message] is a human-facing reason for why it was restarted, and [log_message]
contains extra logging information. *)
| Stopped
(** Hh_server was stopped gracefully so we can't read errors. i.e. server called [ErrorsWrite.unlink]. *)
| Killed of Exit_status.finale_data option
(** Hh_server was killed so we can't read errors. *)
| Build_id_mismatch
(** The hh_server that produced these errors is incompatible with the current binary. *)
[@@deriving show]
(** Each item that a consumer reads from the errors-file is one of these. *)
type errors_file_item =
| Errors of {
errors: Errors.finalized_error list Relative_path.Map.t;
timestamp: float;
}
| Telemetry of Telemetry.t
val is_complete : errors_file_error -> bool
val enable_error_production : bool -> unit
module ErrorsWrite : sig
(** To be called at start of typechecking.
This creates a new errors file. If there had been a previous errors file, then the previous
one gets unlinked; and if the previous error file was not yet complete then anyone
reading from the previous errors file will now get an [Error (Restarted cancel_reason)]. *)
val new_empty_file :
clock:Watchman.clock option ->
ignore_hh_version:bool ->
cancel_reason:string * string ->
unit
(** To be called during typechecking.
Anyone reading the current errors file will get this error report as [Ok errors].
This call will failwith if called before [new_empty_file], or after [complete]/[unlink_at_server_stop]. *)
val report : Errors.t -> unit
(** To be called during typechecking.
Anyone reading the current errors file will get this as [Ok Telemetry].
This call will failwith if called before [new_empty_file], or after [complete]/[unlink_at_server_stop]. *)
val telemetry : Telemetry.t -> unit
(** To be called at the end of typechecking.
After this, anyone reading the errors file will get [Error Complete].
This call will failwith if called before [new_empty_file] or after [complete]/[unlink_at_server_stop]. *)
val complete : Telemetry.t -> unit
(** To be called upon server shutdown, e.g. after "hh stop" or .hhconfig change.
After this, anyone reading the errors file will get [Error Stopped]. *)
val unlink_at_server_stop : unit -> unit
(** Internal, for testing only. *)
val get_state_FOR_TEST : unit -> string
(** Internal, for testing only *)
val create_file_FOR_TEST : pid:int -> cmdline:string -> unit
end
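(* Illustrative sketch (not part of the original source): the intended
   server-side calling sequence over one typecheck. [check] stands for some
   function returning an [Errors.t]; the arguments are made up.
     let typecheck_with_streaming_errors ~clock ~check () =
       ServerProgress.ErrorsWrite.new_empty_file
         ~clock
         ~ignore_hh_version:false
         ~cancel_reason:("restarting typecheck", "new_typecheck");
       let errors = check () in
       ServerProgress.ErrorsWrite.report errors;
       ServerProgress.ErrorsWrite.complete (Telemetry.create ())
   [unlink_at_server_stop] is then called once, at server shutdown. *)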
module ErrorsRead : sig
(** A [log_message] is a string which should go to logs, but not be surfaced to users. *)
type log_message = string
(** A successful call to [openfile] returns this. *)
type open_success = {
pid: int;
(** the PID of the server process that produced this errors file *)
timestamp: float; (** The time at which the typecheck started. *)
clock: Watchman.clock option;
(** The watchman clock at which this typecheck started. *)
}
(** [openfile fd] opens an error-file for reading, one that has been created
through [ErrorsWrite.new_empty_file]. The only error conditions this can
return are [Error Killed] or [Error Build_id_mismatch]. *)
val openfile :
Unix.file_descr -> (open_success, errors_file_error * log_message) result
(** This is the return type for [read_next_errors]. In case of success, it includes
a timestamp when they were reported. The paths in the [Relative_path.Map.t] are guaranteed
to all be root-relative. (it doesn't even make sense to report errors on other files...) *)
type read_result = (errors_file_item, errors_file_error * log_message) result
(** Attempt to get the next batch of errors. It returns based on a queue
of what the server has written to the errors file...
* For each time the server did [ErrorsWrite.report errors], this function will return [Ok (Errors {errors; timestamp})].
* If the server hasn't yet done further [ErrorsWrite.report], this will return [Error NothingYet].
* If the server did [ErrorsWrite.complete] then this will return [Error Complete].
* If the server did [ErrorsWrite.new_empty_file] then this will return [Error Restarted].
* If the server did [ErrorsWrite.unlink_at_server_stop] then this will return [Error Stopped].
* If the server was killed, then this will return [Error Killed]. *)
val read_next_errors : Unix.file_descr -> read_result
end |
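(* Illustrative sketch (not part of the original source): a minimal blocking
   consumer, assuming [errors_bin] is the errors-file path for the current root
   and that the server has already created it; error handling is elided.
     let rec drain fd =
       match ServerProgress.ErrorsRead.read_next_errors fd with
       | Ok (ServerProgress.Errors { errors; _ }) ->
         Printf.printf "errors reported in %d files\n%!"
           (Relative_path.Map.cardinal errors);
         drain fd
       | Ok (ServerProgress.Telemetry _) -> drain fd
       | Error (ServerProgress.NothingYet, _) ->
         Unix.sleepf 0.2;
         drain fd
       | Error (e, log) ->
         Printf.printf
           "finished: %s (%s)\n%!"
           (ServerProgress.show_errors_file_error e)
           log
     let watch_once errors_bin =
       let fd = Unix.openfile errors_bin [Unix.O_RDONLY] 0o640 in
       match ServerProgress.ErrorsRead.openfile fd with
       | Ok _ -> drain fd
       | Error (e, log) ->
         Printf.printf
           "could not open: %s (%s)\n%!"
           (ServerProgress.show_errors_file_error e)
           log
   (ServerProgressLwt.watch_errors_file offers a non-blocking alternative.) *)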
OCaml | hhvm/hphp/hack/src/server/serverProgressLwt.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** This long-lived Lwt routine will keep polling the file, and add each
report it finds to the queue. If it gets an error then it sticks that
in the queue and terminates.
Exception: if it gets a NothingYet error, then it either continues polling
the file, or ends the queue with ServerProgress.Killed, depending on whether
the producing PID is still alive. *)
let rec watch
~(pid : int)
~(pid_future : unit Lwt.t)
(fd : Unix.file_descr)
(add : ServerProgress.ErrorsRead.read_result option -> unit) : unit Lwt.t =
match ServerProgress.ErrorsRead.read_next_errors fd with
| Ok errors ->
add (Some (Ok errors));
watch ~pid ~pid_future fd add
| Error (ServerProgress.NothingYet, _) when Lwt.is_sleeping pid_future ->
let%lwt () = Lwt_unix.sleep 0.2 in
watch ~pid ~pid_future fd add
| Error (ServerProgress.NothingYet, _) ->
let server_finale_file = ServerFiles.server_finale_file pid in
let finale_data = Exit_status.get_finale_data server_finale_file in
add (Some (Error (ServerProgress.Killed finale_data, "pid")));
add None;
Lwt.return_unit
| Error e ->
add (Some (Error e));
add None;
Lwt.return_unit
(** This returns an Lwt future which will complete once <pid> dies.
It implements this by sending signal 0 ([Unix.kill pid 0]) every 5s. *)
let rec watch_pid (pid : int) : unit Lwt.t =
let%lwt () = Lwt_unix.sleep 5.0 in
let is_alive =
try
Unix.kill pid 0;
true
with
| _ -> false
in
if is_alive then
watch_pid pid
else
Lwt.return_unit
let watch_errors_file ~(pid : int) (fd : Unix.file_descr) :
ServerProgress.ErrorsRead.read_result Lwt_stream.t =
let (q, add) = Lwt_stream.create () in
let pid_future = watch_pid pid in
let _watcher_future =
watch ~pid ~pid_future fd add |> Lwt.map (fun () -> Lwt.cancel pid_future)
in
q |
OCaml Interface | hhvm/hphp/hack/src/server/serverProgressLwt.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** This function kicks off a long-running task which watches the errors.bin file that the
caller has already opened, and has already called openfile upon.
It returns a Lwt_stream.t as follows.
* Shortly after a new set of errors has been reported to the file,
"Ok errors" will be placed in the stream.
* Shortly after the file is finished cleanly (completed, restarted, killed),
"Error" will be placed in the stream and the stream will be closed
and the long-running task will finish.
* If the producing PID gets killed without a clean finish, then that too will
be detected and reported with "Error Killed" in the stream, albeit not quite so soon.
(It only performs this inter-process polling every 5s).
* There's no way to cancel the long-running task. *)
val watch_errors_file :
pid:int ->
Unix.file_descr ->
ServerProgress.ErrorsRead.read_result Lwt_stream.t |
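(* Illustrative sketch (not part of the original source): consuming the stream,
   assuming [fd] has already been opened and validated with
   [ServerProgress.ErrorsRead.openfile], and [pid] comes from the resulting
   [open_success]:
     let rec consume stream =
       match%lwt Lwt_stream.get stream with
       | None -> Lwt.return_unit
       | Some (Ok _item) ->
         (* an [Errors] or [Telemetry] item: handle it, then keep reading *)
         consume stream
       | Some (Error (_error, _log)) ->
         (* the typecheck ended one way or another; the stream will close *)
         Lwt.return_unit
     let go ~pid fd = consume (ServerProgressLwt.watch_errors_file ~pid fd)
*)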
OCaml | hhvm/hphp/hack/src/server/serverRage.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let go (_genv : ServerEnv.genv) (env : ServerEnv.env) : ServerRageTypes.result =
let open ServerRageTypes in
let open ServerEnv in
(* Gather up the contents of all files that hh_server believes are different
in the IDE from what's on disk *)
let unsaved_items =
ServerFileSync.get_unsaved_changes env
|> Relative_path.Map.map ~f:fst
|> Relative_path.Map.elements
|> List.map ~f:(fun (relPath, data) ->
{ title = "unsaved:" ^ Relative_path.to_absolute relPath; data })
in
(* include PIDs that we know *)
let pids_data =
Printf.sprintf
"hh_server pid=%d ppid=%d\n"
(Unix.getpid ())
(Unix.getppid ())
in
(* is it paused? *)
let paused_data =
Printf.sprintf
"\n%s... disk_needs_parsing:\n%s\n"
(match env.ServerEnv.full_recheck_on_file_changes with
| ServerEnv.Not_paused -> "hh"
| ServerEnv.Paused _ -> "hh --pause"
| ServerEnv.Resumed -> "hh --resume")
(Relative_path.Set.elements env.ServerEnv.disk_needs_parsing
|> List.map ~f:Relative_path.to_absolute
|> String.concat ~sep:"\n")
in
(* include current state of client, as we know it *)
let client_data =
match Ide_info_store.get_client () with
| None -> "no client"
| Some client ->
Printf.sprintf
("client_has_message: %b\n"
^^ "ide_needs_parsing: %b\n"
^^ "error_count: %d\n")
(ClientProvider.client_has_message client)
(not (Relative_path.Set.is_empty env.ide_needs_parsing))
(Errors.count env.errorl)
in
(* that's it! *)
let data =
Printf.sprintf
"PIDS\n%s\n\nPAUSED\n%s\n\nSUBSCRIPTION\n%s\n\nIDE FILES\n%s\n"
pids_data
paused_data
client_data
(List.map unsaved_items ~f:(fun item -> item.title)
|> String.concat ~sep:"\n")
in
{ title = "status"; data } :: unsaved_items |
OCaml | hhvm/hphp/hack/src/server/serverRageTypes.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** To make the rage output more useful, it's broken into rageItems. *)
type rageItem = {
title: string;
data: string;
}
type result = rageItem list |
OCaml | hhvm/hphp/hack/src/server/serverRecheckCapture.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type snapshot = unit
let update_env env changed_files =
{
env with
ServerEnv.changed_files =
Relative_path.Set.union env.ServerEnv.changed_files changed_files;
}
let update_before_recheck
genv
env
~(changed_files : Relative_path.Set.t)
~(to_recheck_count : int)
~(parse_t : float) : ServerEnv.env * snapshot =
ignore (genv, to_recheck_count, parse_t);
(update_env env changed_files, ())
let update_after_recheck
genv
env
snapshot
~(cancelled_files : Relative_path.Set.t)
~(rechecked_files : Relative_path.Set.t)
~(changed_files : Relative_path.Set.t)
~(recheck_errors : Errors.t)
~(all_errors : Errors.t) : ServerEnv.env * string Future.t option =
ignore
( genv,
snapshot,
cancelled_files,
rechecked_files,
recheck_errors,
all_errors );
(update_env env changed_files, None)
let set_up_replay_environment
~(handle : string)
~(root : Path.t)
~(temp_dir : Path.t)
~(fanout_input_path : Path.t)
~(expected_errors_path : Path.t) : (unit, string) result =
ignore (handle, root, temp_dir, fanout_input_path, expected_errors_path);
Error "Not implemented" |
OCaml | hhvm/hphp/hack/src/server/serverRename.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerEnv
open ServerRenameTypes
let maybe_add_dollar s =
if not (Char.equal s.[0] '$') then
"$" ^ s
else
s
let get_fixme_patches codes (env : env) =
let fixmelist =
Errors.get_error_list ~drop_fixmed:false env.errorl
|> List.filter ~f:(fun e -> e.User_error.is_fixmed)
|> List.map ~f:(fun e -> (User_error.get_pos e, User_error.get_code e))
in
let poslist =
Fixme_provider.get_unused_fixmes
~codes
~applied_fixmes:fixmelist
~fold:(Naming_table.fold ~warn_on_naming_costly_iter:true)
~files_info:env.naming_table
in
List.map ~f:(fun pos -> Remove (Pos.to_absolute pos)) poslist
let get_dead_unsafe_cast_patches (env : env) =
Remove_dead_unsafe_casts.get_patches
~is_test:false
~fold:(Naming_table.fold ~warn_on_naming_costly_iter:true)
~files_info:env.naming_table
let get_lambda_parameter_rewrite_patches ctx files =
List.concat_map files ~f:(fun file ->
ServerRewriteLambdaParameters.get_patches
ctx
(Relative_path.from_root ~suffix:file))
let find_def_filename current_filename definition =
SymbolDefinition.(
if Relative_path.equal (Pos.filename definition.pos) Relative_path.default
then
(* When the definition is in an IDE buffer with local changes, the filename
in the definition will be empty. *)
current_filename
else
Pos.filename definition.pos)
(*
We construct the text for the deprecated wrapper here.
Example of deprecated wrapper & its relation to the newly named function:
<<__Deprecated("Deprecated: Use `newlyNamedFunction` instead")>>
public function oldFunctionName(int $x, SomeClass $y, ...$nums): string {
return $this->newlyNamedFunction($x, $y, ...$nums);
}
/**
* Some docblock
*
*/
public function newlyNamedFunction(int $x, SomeClass $y, ...$nums): string {
// some function body
}
*)
let construct_deprecated_wrapper_stub
~(func_decl_text : string)
~(params_text_list : string list)
~(col_start : int)
~(returns_void : bool)
~(is_async : bool)
~(func_ref : deprecated_wrapper_function_ref)
(new_name : string) : string =
(* Since the starting column position points to the beginning of the function
declaration header, we can use it to figure out the indentation level
of the function, and insert whitespace accordingly *)
let base_indentation = String.make col_start ' ' in
let deprecated_header =
base_indentation ^ "<<__Deprecated(\"Use `" ^ new_name ^ "` instead\")>>"
in
let func_decl = base_indentation ^ func_decl_text in
(* The immediate body of a function is indented by 2 extra spaces *)
let func_body_indentation = String.make 2 ' ' in
let return_indentation = base_indentation ^ func_body_indentation in
let parameter_input = String.concat ~sep:", " params_text_list in
let maybe_return =
if returns_void then
""
else
"return "
in
let maybe_await =
if is_async then
"await "
else
""
in
let maybe_this_or_self =
match func_ref with
| DeprecatedStaticMethodRef -> "self::"
| DeprecatedNonStaticMethodRef -> "$this->"
| DeprecatedFunctionRef -> ""
in
let return_statement =
return_indentation
^ maybe_return
^ maybe_await
^ maybe_this_or_self
^ new_name
^ "("
^ parameter_input
^ ");"
in
"\n"
^ deprecated_header
^ "\n"
^ func_decl
^ " {"
^ "\n"
^ return_statement
^ "\n"
^ base_indentation
^ "}"
^ "\n"
let get_pos_before_docblock_from_cst_node filename node =
Full_fidelity_positioned_syntax.(
let source_text = source_text node in
let start_offset = leading_start_offset node in
SourceText.relative_pos filename source_text start_offset start_offset)
(* This function will capture a variadic parameter and give it a name if it is
* anonymous. Example:
*
* public static function newName(int $x, ...): string {
*
* would become:
*
* public static function newName(int $x, mixed ...$args): string {
*
*)
let fixup_anonymous_variadic
(func_decl : Full_fidelity_positioned_syntax.t)
(has_anonymous_variadic : bool) : string =
Full_fidelity_positioned_syntax.(
if has_anonymous_variadic then
let r = Str.regexp "\\.\\.\\." in
Str.global_replace r "mixed ...$args" (text func_decl)
else
text func_decl)
(* Contains just enough information to properly wrap a function *)
type wrapper_call_signature_info = {
params_text_list: string list;
returns_void: bool;
is_async: bool;
is_static: bool;
has_anonymous_variadic: bool;
}
(* Identify key information about a function so we can produce a deprecated wrapper *)
let get_call_signature_for_wrap (func_decl : Full_fidelity_positioned_syntax.t)
: wrapper_call_signature_info =
Full_fidelity_positioned_syntax.(
match syntax func_decl with
| FunctionDeclarationHeader
{
function_parameter_list = params;
function_type = ret_type;
function_modifiers = modifiers;
_;
} ->
let params_text_list =
match syntax params with
| SyntaxList params ->
let params_text_list =
List.map params ~f:(fun param ->
let param =
match syntax param with
| ListItem { list_item; _ } -> list_item
| _ -> failwith "Expected ListItem"
in
match syntax param with
(* NOTE:
`ParameterDeclaration` includes regular params like "$x" and
_named_ variadic parameters like "...$nums". For the latter case,
calling `text parameter_name` will return the entire "...$nums"
string, including the ellipsis.
`VariadicParameter` addresses the unnamed variadic parameter
"...". In this case, we provide as a parameter a function call
that outputs only the variadic params (and dropping the
non-variadic ones).
*)
| ParameterDeclaration { parameter_name = name; _ } -> text name
| VariadicParameter _ -> "...$args"
| _ -> failwith "Expected some parameter type")
in
params_text_list
| Missing -> []
| _ -> []
in
let has_anonymous_variadic =
match syntax params with
| SyntaxList params ->
List.exists params ~f:(fun param ->
let param =
match syntax param with
| ListItem { list_item; _ } -> list_item
| _ -> failwith "Expected ListItem"
in
match syntax param with
| VariadicParameter _ -> true
| _ -> false)
| Missing -> false
| _ -> false
in
let returns_void =
match syntax ret_type with
| GenericTypeSpecifier
{
generic_class_type = generic_type;
generic_argument_list =
{
syntax =
TypeArguments
{
type_arguments_types =
{
syntax =
SyntaxList
[
{
syntax =
ListItem
{
list_item =
{
syntax =
SimpleTypeSpecifier
{
simple_type_specifier =
type_spec;
};
_;
};
_;
};
_;
};
];
_;
};
_;
};
_;
};
_;
} ->
String.equal (text generic_type) "Awaitable"
&& String.equal (text type_spec) "void"
| SimpleTypeSpecifier { simple_type_specifier = type_spec } ->
String.equal (text type_spec) "void"
| _ -> false
in
let (is_async, is_static) =
match syntax modifiers with
| SyntaxList modifiers ->
let is_async =
List.exists modifiers ~f:(fun modifier ->
String.equal (text modifier) "async")
in
let is_static =
List.exists modifiers ~f:(fun modifier ->
String.equal (text modifier) "static")
in
(is_async, is_static)
| _ -> (false, false)
in
{
params_text_list;
returns_void;
is_async;
is_static;
has_anonymous_variadic;
}
| _ ->
{
params_text_list = [];
returns_void = false;
is_async = false;
is_static = false;
has_anonymous_variadic = false;
})
let classish_is_interface (ctx : Provider_context.t) (name : string) : bool =
match Decl_provider.get_class ctx (Utils.add_ns name) with
| None -> false
| Some cls ->
(match Decl_provider.Class.kind cls with
| Ast_defs.Cinterface -> true
| _ -> false)
(* Produce a "deprecated" version of the old [definition] so that calls to it can be rerouted.
If [definition] is None, this is a no-op. *)
let get_deprecated_wrapper_patch
~(definition : Relative_path.t SymbolDefinition.t option)
~(ctx : Provider_context.t)
(new_name : string) : patch option =
let filename =
Option.bind definition ~f:(fun definition ->
let filename = Pos.filename definition.SymbolDefinition.pos in
let is_dummy = Relative_path.equal filename Relative_path.default in
if is_dummy then
HackEventLogger.invariant_violation_bug
"--refactor has empty filename";
Option.some_if (not is_dummy) filename)
in
SymbolDefinition.(
Full_fidelity_positioned_syntax.(
Option.Monad_infix.(
filename >>= fun filename ->
definition >>= fun definition ->
(* We need the number of spaces that the function declaration is offsetted so that we can
format our wrapper properly with the correct indent (i.e. we need 0-indexed columns).
However, even though column offsets are already indexed accordingly when
stored in positions, `destruct_range` adds 1 in order to
return an [inclusive, exclusive) span.
Thus, we subtract 1.
*)
let (_, col_start_plus1, _, _) = Pos.destruct_range definition.span in
let col_start = col_start_plus1 - 1 in
let (_ctx, entry) =
Provider_context.add_entry_if_missing ~ctx ~path:filename
in
let cst_node =
ServerSymbolDefinition.get_definition_cst_node_ctx
~ctx
~entry
~kind:definition.kind
~pos:definition.pos
in
cst_node >>= fun cst_node ->
begin
match syntax cst_node with
| MethodishDeclaration
{ methodish_function_decl_header = func_decl; _ } ->
let call_signature = get_call_signature_for_wrap func_decl in
let func_decl_text =
fixup_anonymous_variadic
func_decl
call_signature.has_anonymous_variadic
in
let func_ref =
if call_signature.is_static then
DeprecatedStaticMethodRef
else
DeprecatedNonStaticMethodRef
in
(match definition.class_name with
| Some name when classish_is_interface ctx name ->
(* We can't add a stub that calls the new name in
interfaces, as methods can't have bodies there. *)
None
| _ ->
Some
( func_decl_text,
call_signature.params_text_list,
call_signature.returns_void,
call_signature.is_async,
func_ref ))
| FunctionDeclaration { function_declaration_header = func_decl; _ }
->
let call_signature = get_call_signature_for_wrap func_decl in
let func_decl_text =
fixup_anonymous_variadic
func_decl
call_signature.has_anonymous_variadic
in
let func_ref = DeprecatedFunctionRef in
Some
( func_decl_text,
call_signature.params_text_list,
call_signature.returns_void,
call_signature.is_async,
func_ref )
| _ -> None
end
>>| fun ( func_decl_text,
params_text_list,
returns_void,
is_async,
func_ref ) ->
let deprecated_wrapper_stub =
construct_deprecated_wrapper_stub
~func_decl_text
~params_text_list
~col_start
~returns_void
~is_async
~func_ref
new_name
in
let filename = find_def_filename filename definition in
let deprecated_wrapper_pos =
get_pos_before_docblock_from_cst_node filename cst_node
in
let patch =
{
pos = Pos.to_absolute deprecated_wrapper_pos;
text = deprecated_wrapper_stub;
}
in
Insert patch)))
let method_might_support_dynamic ctx ~class_name ~method_name =
let open Option.Monad_infix in
let module Class = Decl_provider.Class in
let sd_enabled =
TypecheckerOptions.enable_sound_dynamic @@ Provider_context.get_tcopt ctx
in
(not sd_enabled)
|| Decl_provider.get_class ctx @@ Utils.add_ns class_name
>>= (fun class_ ->
Option.first_some
(Class.get_smethod class_ method_name)
(Class.get_method class_ method_name))
>>| (fun elt ->
let flags = elt.Typing_defs.ce_flags in
Typing_defs_flags.ClassElt.(
is_dynamicallycallable flags || supports_dynamic_type flags))
|> Option.value ~default:true
let go
ctx
action
genv
env
~(definition_for_wrapper : Relative_path.t SymbolDefinition.t option) =
let module Types = ServerCommandTypes.Find_refs in
let (find_refs_action, new_name) =
match action with
| ClassRename (old_name, new_name) ->
(Types.ExplicitClass old_name, new_name)
| ClassConstRename (class_name, old_name, new_name) ->
(Types.Member (class_name, Types.Class_const old_name), new_name)
| MethodRename { class_name; old_name; new_name; _ } ->
(Types.Member (class_name, Types.Method old_name), new_name)
| FunctionRename { old_name; new_name; _ } ->
(Types.Function old_name, new_name)
| LocalVarRename { filename; file_content; line; char; new_name } ->
(Types.LocalVar { filename; file_content; line; char }, new_name)
in
let include_defs = true in
ServerFindRefs.go
ctx
find_refs_action
include_defs
~stream_file:None
~hints:[]
genv
env
|> ServerCommandTypes.Done_or_retry.map_env ~f:(fun refs ->
let changes =
let fold_to_positions_and_patches (positions, patches) (_, pos) =
if Pos.Set.mem pos positions then
(* Don't rename at the same position twice. Double-renames were happening (~~T157645473~~) because
* ServerRename uses ServerFindRefs which searches the tast, which thinks Self::TWhatever
* is a use of the current class at the position of the declaration of the current class.
* *)
(positions, patches)
else
let positions = Pos.Set.add pos positions in
let replacement =
{ pos = Pos.to_absolute pos; text = new_name }
in
let patch = Replace replacement in
let patches = patch :: patches in
(positions, patches)
in
refs
|> List.fold_left
~init:(Pos.Set.empty, [])
~f:fold_to_positions_and_patches
|> snd
in
let deprecated_wrapper_patch =
match action with
| FunctionRename _ ->
get_deprecated_wrapper_patch
~definition:definition_for_wrapper
~ctx
new_name
| MethodRename { class_name; old_name; _ } ->
if
method_might_support_dynamic
ctx
~class_name
~method_name:old_name
then
get_deprecated_wrapper_patch
~definition:definition_for_wrapper
~ctx
new_name
else
None
| ClassRename _
| ClassConstRename _
| LocalVarRename _ ->
None
in
Option.value_map
deprecated_wrapper_patch
~default:changes
~f:(fun patch -> patch :: changes))
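(* Renames a local variable: every reference reported by
   [ServerFindRefs.go_for_localvar] becomes a [Replace] patch whose text is
   [new_name] run through [maybe_add_dollar]. Any non-local-var action is
   returned as [Error]. *)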
let go_for_localvar ctx action new_name =
let open Result.Monad_infix in
let module Types = ServerCommandTypes.Find_refs in
match action with
| Types.LocalVar _ ->
let changes =
ServerFindRefs.go_for_localvar ctx action
>>| List.fold_left
~f:(fun acc x ->
let replacement =
{
pos = Pos.to_absolute (snd x);
text = maybe_add_dollar new_name;
}
in
let patch = Replace replacement in
patch :: acc)
~init:[]
>>| fun patches -> Some patches
in
changes
| _ -> Error action
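(* Like [go], but computes rename patches for references within a single
   [filename] against the given naming table. A deprecated-wrapper patch is
   only produced when [filename] is the file that actually defines the
   renamed symbol. *)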
let go_for_single_file
ctx ~find_refs_action ~new_name ~filename ~symbol_definition ~naming_table =
let action =
match find_refs_action with
| ServerCommandTypes.Find_refs.Class str ->
(* Note that [ServerRename.go] remaps ClassRename to Find_refs.ExplicitClass,
so we manually handle that here for parity.
*)
ServerCommandTypes.Find_refs.ExplicitClass str
| action -> action
in
( ServerFindRefs.go_for_single_file
~ctx
~action
~filename
~name:symbol_definition.SymbolDefinition.full_name
~naming_table
|> fun refs ->
let changes =
List.fold_left
refs
~f:
begin
fun acc x ->
let replacement =
{ pos = Pos.to_absolute (snd x); text = new_name }
in
let patch = Replace replacement in
patch :: acc
end
~init:[]
in
let should_write_deprecated_wrapper_patch =
(* Context: D46818062
When we rename symbols looking at one file at a time we always pass the [SymbolDefinition.t] along.
Generating a deprecated_wrapper_patch looks at the symbol definition pos, checks to see if there's a matching
         CST node and, if so, sends back a deprecated wrapper patch to write.
However, if we're renaming a symbol in a file where the (method or function) is NOT defined,
we should reject any deprecated wrapper patches. We only want a patch IFF the rename action is editing
the file matching [symbol_definition.pos.file]
*)
Relative_path.equal
filename
(Pos.filename symbol_definition.SymbolDefinition.pos)
in
let definition =
Option.some_if should_write_deprecated_wrapper_patch symbol_definition
in
let deprecated_wrapper_patch =
let open ServerCommandTypes.Find_refs in
match find_refs_action with
| Function _ -> get_deprecated_wrapper_patch ~definition ~ctx new_name
| Member (class_name, Method old_name) ->
if method_might_support_dynamic ctx ~class_name ~method_name:old_name
then
get_deprecated_wrapper_patch ~definition ~ctx new_name
else
None
| Class _
| Member _
| ExplicitClass _
| LocalVar _
| GConst _ ->
None
in
if should_write_deprecated_wrapper_patch then
Option.value_map
deprecated_wrapper_patch
~default:changes
~f:(fun patch -> patch :: changes)
else
changes )
|> fun rename_patches -> Ok rename_patches
(**
Like go_ide, but rather than looking up a symbolDefinition manually from a file and
converting a ServerRenameTypes.action to a Find_refs.action, we supply a Find_refs.action
directly.
*)
let go_ide_with_find_refs_action
ctx ~find_refs_action ~new_name ~symbol_definition genv env =
let include_defs = true in
ServerFindRefs.go
ctx
find_refs_action
include_defs
~stream_file:None
~hints:[]
genv
env
|> ServerCommandTypes.Done_or_retry.map_env ~f:(fun refs ->
let changes =
List.fold_left
refs
~f:
begin
fun acc x ->
let replacement =
{ pos = Pos.to_absolute (snd x); text = new_name }
in
let patch = Replace replacement in
patch :: acc
end
~init:[]
in
let deprecated_wrapper_patch =
let open ServerCommandTypes.Find_refs in
match find_refs_action with
| Function _ ->
get_deprecated_wrapper_patch
~definition:(Some symbol_definition)
~ctx
new_name
| Member (class_name, Method old_name) ->
if
method_might_support_dynamic
ctx
~class_name
~method_name:old_name
then
get_deprecated_wrapper_patch
~definition:(Some symbol_definition)
~ctx
new_name
else
None
| Class _
| Member _
| ExplicitClass _
| LocalVar _
| GConst _ ->
None
in
Option.value_map
deprecated_wrapper_patch
~default:changes
~f:(fun patch -> patch :: changes))
|> fun rename_patches -> Ok rename_patches |
OCaml Interface | hhvm/hphp/hack/src/server/serverRename.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val get_fixme_patches :
int list -> ServerEnv.env -> ServerRenameTypes.patch list
val get_dead_unsafe_cast_patches : ServerEnv.env -> ServerRenameTypes.patch list
val get_lambda_parameter_rewrite_patches :
Provider_context.t -> string list -> ServerRenameTypes.patch list
(** Does the rename. [definition_for_wrapper] is the definition where a deprecated-wrapper may
be generated, or None to suppress any possible generation. *)
val go :
Provider_context.t ->
ServerRenameTypes.action ->
ServerEnv.genv ->
ServerEnv.env ->
definition_for_wrapper:Relative_path.t SymbolDefinition.t option ->
ServerEnv.env
* ServerRenameTypes.patch list ServerCommandTypes.Done_or_retry.t
val go_for_single_file :
Provider_context.t ->
find_refs_action:ServerCommandTypes.Find_refs.action ->
new_name:string ->
filename:Relative_path.t ->
symbol_definition:Relative_path.t SymbolDefinition.t ->
naming_table:Naming_table.t ->
(ServerRenameTypes.patch list, 'a) result
val go_ide_with_find_refs_action :
Provider_context.t ->
find_refs_action:ServerCommandTypes.Find_refs.action ->
new_name:string ->
symbol_definition:Relative_path.t SymbolDefinition.t ->
ServerEnv.genv ->
ServerEnv.env ->
( ServerEnv.env
* ServerRenameTypes.patch list ServerCommandTypes.Done_or_retry.t,
string )
result
val go_for_localvar :
Provider_context.t ->
ServerCommandTypes.Find_refs.action ->
string ->
( ServerRenameTypes.patch list option,
ServerCommandTypes.Find_refs.action )
result |
OCaml | hhvm/hphp/hack/src/server/serverRenameTypes.ml | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type patch =
| Insert of insert_patch
| Remove of Pos.absolute
| Replace of insert_patch
and insert_patch = {
pos: Pos.absolute;
text: string;
}
type action =
| ClassRename of string * string (* old_name * new_name *)
| ClassConstRename of string * string * string
(* class_name * old_name * new_name *)
| MethodRename of {
class_name: string;
old_name: string;
new_name: string;
}
| FunctionRename of {
old_name: string;
new_name: string;
}
| LocalVarRename of {
filename: Relative_path.t;
file_content: string;
line: int;
char: int;
new_name: string;
}
[@@deriving show]
type deprecated_wrapper_function_ref =
| DeprecatedStaticMethodRef
| DeprecatedNonStaticMethodRef
| DeprecatedFunctionRef
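(* Compares two positions by raw character offsets: a position that ends before
   another starts is "smaller"; overlapping positions compare equal. *)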
let compare_pos pos1 pos2 =
let (char_start1, char_end1) = Pos.info_raw pos1 in
let (char_start2, char_end2) = Pos.info_raw pos2 in
if char_end1 <= char_start2 then
-1
else if char_end2 <= char_start1 then
1
else
0
let get_pos = function
| Insert patch
| Replace patch ->
patch.pos
| Remove p -> p
let compare_result res1 res2 = compare_pos (get_pos res1) (get_pos res2)
let write_string_to_file fn str =
let oc = Out_channel.create fn in
Out_channel.output_string oc str;
Out_channel.close oc
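(* Applies an already-sorted list of patches to [original_content], writing the
   result into [buf]: untouched slices of the original text are interleaved
   with the patch texts, and a [Remove] additionally swallows any whitespace
   that immediately follows the removed span. *)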
let write_patches_to_buffer buf original_content patch_list =
let i = ref 0 in
let trim_leading_whitespace = ref false in
let len = String.length original_content in
let is_whitespace c =
match c with
| '\n'
| ' '
| '\012'
| '\r'
| '\t' ->
true
| _ -> false
in
  (* Advances to the requested character, appending the original content from
     the current position up to that point to the buffer *)
let add_original_content j =
while
!trim_leading_whitespace
&& !i < len
&& is_whitespace original_content.[!i]
do
i := !i + 1
done;
if j <= !i then
()
else
let size = j - !i in
let size = min (- !i + len) size in
let str_to_write = String.sub original_content ~pos:!i ~len:size in
Buffer.add_string buf str_to_write;
i := !i + size
in
List.iter patch_list ~f:(fun res ->
let pos = get_pos res in
let (char_start, char_end) = Pos.info_raw pos in
add_original_content char_start;
trim_leading_whitespace := false;
match res with
| Insert patch -> Buffer.add_string buf patch.text
| Replace patch ->
Buffer.add_string buf patch.text;
i := char_end
| Remove _ ->
i := char_end;
trim_leading_whitespace := true);
add_original_content len
let map_patches_to_filename acc res =
let pos = get_pos res in
let fn = Pos.filename pos in
match SMap.find_opt fn acc with
| Some lst -> SMap.add fn (res :: lst) acc
| None -> SMap.add fn [res] acc
let apply_patches_to_string old_content patch_list =
let buf = Buffer.create (String.length old_content) in
let patch_list = List.sort ~compare:compare_result patch_list in
write_patches_to_buffer buf old_content patch_list;
Buffer.contents buf
let apply_patches_to_file fn patch_list =
let old_content = Sys_utils.cat fn in
let new_file_contents = apply_patches_to_string old_content patch_list in
write_string_to_file fn new_file_contents
let list_to_file_map =
List.fold_left ~f:map_patches_to_filename ~init:SMap.empty
let apply_patches_to_file_contents file_contents patches =
let file_map = list_to_file_map patches in
let apply fn old_contents =
match SMap.find_opt (Relative_path.to_absolute fn) file_map with
| Some patches -> apply_patches_to_string old_contents patches
| None -> old_contents
in
Relative_path.Map.mapi ~f:apply file_contents |
OCaml | hhvm/hphp/hack/src/server/serverRevisionTracker.ml | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*)
(** Note: the tracking in this module is best effort only;
* it's not guaranteed to always reflect accurate merge base transitions:
* - in some init types, initial merge base is not known so we will only notice
* the second transition
* - to avoid blocking rest of the system, mergebase queries time out after 30
* seconds and are not retried in case of errors
* - we only record "new" mergebases as we see them, not detecting transitions
* between already visited revisions
**)
open Hh_prelude
type watchman_event = {
timestamp: float;
source: string;
is_enter: bool;
}
(** A list of all unpaired "hg.update.enter", "hg.update.leave", "hg.transaction.enter",
"hg.transaction.leave" events that we have encountered, sorted by time, most recent first.
Because they are unpaired, the list will never contain both X.enter and X.leave for a given source.
    Whether we are consulting the list to see if we are in a state, or amending
    the list because we just received a state-transition event, the same rules apply:
    1. We are "in a state" if there exist any enter events.
    2. A new leave will be paired with the most recent enter, if present.
    3. A new enter will be paired with the most recent leave only if that leave is recent;
       otherwise we ditch the leave, and log.
    4. Note the consequence: the existence of "enter" events can only be discharged either by
       a subsequent leave, or by an outstanding one from the recent past.
*)
type event_list = watchman_event list
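(* For example, after seeing hg.update.enter, hg.transaction.enter and then
   hg.update.leave, the list still holds one unpaired hg.transaction.enter, so
   we consider ourselves "in a state" until the matching hg.transaction.leave
   arrives. *)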
type tracker_state = {
mutable is_enabled: bool;
mutable current_mergebase: Hg.global_rev option;
mutable did_change_mergebase: bool;
      (** Do we think that this server has processed a mergebase change? If we are
       * in this state and get notified about changes to a huge number of files (or
       * even a small number of files that fan out to a huge amount of work), we might
       * decide that restarting the server is a better option than going through with
       * incremental processing (see ServerLocalConfig.hg_aware_*_restart_threshold).
       * It is likely to be faster because:
       * - a better saved state might be available
       * - even with the same saved state, during init we can treat all those changes
       *   (if they were indeed due to non-local commits) as prechecked (see ServerPrecheckedFiles)
       *   and avoid processing them.
       * There is some room for false positives, when a small, inconsequential rebase is immediately
       * followed by a big local change, but that seems unlikely to happen often compared to
       * the hours we waste processing incremental rebases.
*)
pending_queries: Hg.hg_rev Queue.t;
      (** Keys from mergebase_queries whose futures have not been resolved yet *)
mergebase_queries: (Hg.hg_rev, Hg.global_rev Future.t) Caml.Hashtbl.t;
}
type state_handler = {
is_hg_updating: unit -> bool;
on_state_enter: string -> unit;
on_state_leave:
Path.t ->
(* project root *)
string ->
(* state name *)
Hh_json.json option ->
(* state metadata *)
unit;
}
type tracker_v1_event_state = {
mutable is_in_hg_update_state: bool;
mutable is_in_hg_transaction_state: bool;
}
type tracker_v2_event_state = { mutable outstanding_events: event_list }
let tracker_state =
{
is_enabled = false;
current_mergebase = None;
did_change_mergebase = false;
pending_queries = Queue.create ();
mergebase_queries = Caml.Hashtbl.create 200;
}
let initialize mergebase =
Hh_logger.log "ServerRevisionTracker: Initializing mergebase to r%d" mergebase;
tracker_state.is_enabled <- true;
tracker_state.current_mergebase <- Some mergebase
let add_query ~hg_rev root =
if Caml.Hashtbl.mem tracker_state.mergebase_queries hg_rev then
()
else (
Hh_logger.log "ServerRevisionTracker: Seen new HG revision: %s" hg_rev;
let future = Hg.get_closest_global_ancestor hg_rev (Path.to_string root) in
Caml.Hashtbl.add tracker_state.mergebase_queries hg_rev future;
Queue.enqueue tracker_state.pending_queries hg_rev
)
let v1_handler_fn () : state_handler =
let state =
{ is_in_hg_update_state = false; is_in_hg_transaction_state = false }
in
let on_state_enter state_name =
match state_name with
| "hg.update" -> state.is_in_hg_update_state <- true
| "hg.transaction" -> state.is_in_hg_transaction_state <- true
| _ -> ()
in
let on_state_leave root state_name state_metadata =
match state_name with
| "hg.update" ->
if not state.is_in_hg_update_state then
HackEventLogger.invalid_mercurial_state_transition ~state:state_name;
state.is_in_hg_update_state <- false;
Hh_logger.log "ServerRevisionTracker: leaving hg.update";
Option.Monad_infix.(
Option.iter
(state_metadata >>= Watchman_utils.rev_in_state_change)
~f:(fun hg_rev ->
match state_metadata >>= Watchman_utils.merge_in_state_change with
| Some true ->
Hh_logger.log
"ServerRevisionTracker: Ignoring merge rev %s"
hg_rev
| _ -> add_query ~hg_rev root))
| "hg.transaction" ->
if not state.is_in_hg_transaction_state then
HackEventLogger.invalid_mercurial_state_transition ~state:state_name;
state.is_in_hg_transaction_state <- false
| _ -> ()
in
let is_hg_updating () =
state.is_in_hg_transaction_state || state.is_in_hg_update_state
in
{ on_state_enter; on_state_leave; is_hg_updating }
let v2_handler_fn () =
let state = { outstanding_events = [] } in
let is_in_state (outstanding : event_list) : bool =
List.exists outstanding ~f:(fun e -> e.is_enter)
in
let transition (outstanding : event_list) (e : watchman_event) : event_list =
    (* Note: we are blindly trusting that e.timestamp is newer than anything in the list.
The consequence if not is mild; it just means the 10s criterion will be slightly off. *)
(* consider S301212, which manifests as the following
transaction/leave
transaction/leave
transaction/leave
transaction/enter
transaction/enter
all with the same timestamp. How do we resolve a stale leave at the bottom?
       Simply throw it away if another event comes in 10+ seconds afterwards.
*)
let outstanding =
if is_in_state outstanding then
outstanding
else
match List.filter outstanding ~f:(fun e -> not e.is_enter) with
| { timestamp; _ } :: _ when Float.(timestamp < e.timestamp -. 10.) ->
(* the most recent event (which must be a leave based on our if-branch)
is old. Throw away current state of events
*)
let telemetry =
Telemetry.create ()
|> Telemetry.string_opt ~key:"event_source" ~value:(Some e.source)
|> Telemetry.bool_ ~key:"flushed_by_new_event" ~value:true
in
HackEventLogger.server_revision_tracker_forced_reset ~telemetry;
[]
| _ -> outstanding
in
(* Strip the first pair, if there is one *)
let rec strip_first_match outstanding =
match outstanding with
(* if there is an event of matching source and opposite is_enter,
assume that's the pair to our incoming event and remove it *)
| { source; is_enter; _ } :: rest
when String.equal source e.source && Bool.(is_enter = not e.is_enter) ->
rest
| olde :: rest -> olde :: strip_first_match rest
| [] -> []
in
let new_outstanding = strip_first_match outstanding in
    (* Otherwise, prepend our new event.
       In other words, if we were able to remove a pair for the incoming event,
       then the new event list is new_outstanding.
       If we didn't find a pair, then the new event list is the old list plus our new one.
*)
if List.length new_outstanding < List.length outstanding then
new_outstanding
else
e :: outstanding
in
let on_state_enter state_name =
let event =
{ source = state_name; timestamp = Unix.gettimeofday (); is_enter = true }
in
match state_name with
| "hg.update"
| "hg.transaction" ->
state.outstanding_events <- transition state.outstanding_events event
| _ -> ()
in
let on_state_leave root state_name state_metadata =
let event =
{
source = state_name;
timestamp = Unix.gettimeofday ();
is_enter = false;
}
in
match state_name with
| "hg.update" ->
let _ =
state.outstanding_events <- transition state.outstanding_events event
in
Hh_logger.log "ServerRevisionTracker: leaving hg.update";
Option.Monad_infix.(
Option.iter
(state_metadata >>= Watchman_utils.rev_in_state_change)
~f:(fun hg_rev ->
match state_metadata >>= Watchman_utils.merge_in_state_change with
| Some true ->
Hh_logger.log
"ServerRevisionTracker: Ignoring merge rev %s"
hg_rev
| _ -> add_query ~hg_rev root))
| "hg.transaction" ->
state.outstanding_events <- transition state.outstanding_events event
| _ -> ()
in
let is_hg_updating () = is_in_state state.outstanding_events in
{ on_state_enter; on_state_leave; is_hg_updating }
let v1_handler = v1_handler_fn ()
let v2_handler = v2_handler_fn ()
let on_state_enter state_name use_tracker_v2 =
if use_tracker_v2 then
v2_handler.on_state_enter state_name
else
v1_handler.on_state_enter state_name
let on_state_leave root state_name state_metadata use_tracker_v2 =
if use_tracker_v2 then
v2_handler.on_state_leave root state_name state_metadata
else
v1_handler.on_state_leave root state_name state_metadata
let is_hg_updating use_tracker_v2 =
if use_tracker_v2 then
v2_handler.is_hg_updating ()
else
v1_handler.is_hg_updating ()
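(* Waits (up to [timeout] seconds) for a single mergebase query and, if the
   resulting global rev differs from the one currently tracked, records that
   the mergebase has changed (or initializes the tracker on the first
   successful query). *)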
let check_query future ~timeout ~current_t =
match Future.get ~timeout future with
| Error e ->
let e = Future.error_to_string e in
HackEventLogger.check_mergebase_failed current_t e;
Hh_logger.log "ServerRevisionTracker: %s" e
| Ok new_global_rev ->
HackEventLogger.check_mergebase_success current_t;
(match tracker_state.current_mergebase with
| Some global_rev when global_rev <> new_global_rev ->
tracker_state.current_mergebase <- Some new_global_rev;
tracker_state.did_change_mergebase <- true;
HackEventLogger.set_changed_mergebase true;
Hh_logger.log
"ServerRevisionTracker: Changing mergebase from r%d to r%d"
global_rev
new_global_rev;
()
| Some _ -> ()
| None -> initialize new_global_rev)
let check_blocking () =
if Queue.is_empty tracker_state.pending_queries then
()
else
let start_t = Unix.gettimeofday () in
Hh_logger.log "Querying Mercurial for mergebase changes";
Queue.iter
~f:
begin
fun hg_rev ->
let current_t = Unix.gettimeofday () in
let elapsed_t = current_t -. start_t in
let timeout = max 0 (int_of_float (30.0 -. elapsed_t)) in
let future =
Caml.Hashtbl.find tracker_state.mergebase_queries hg_rev
in
check_query future ~timeout ~current_t
end
tracker_state.pending_queries;
Queue.clear tracker_state.pending_queries;
let (_ : float) =
Hh_logger.log_duration "Finished querying Mercurial" start_t
in
()
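(* Drains only those pending queries whose futures are already ready, so this
   never blocks. Once the queue is empty and a full check has completed, the
   did_change_mergebase flag is cleared: we made it through the rebase, so no
   restart is needed. *)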
let rec check_non_blocking ~is_full_check_done =
if Queue.is_empty tracker_state.pending_queries then (
if is_full_check_done && tracker_state.did_change_mergebase then (
Hh_logger.log
"ServerRevisionTracker: Full check completed despite mergebase changes";
(* Clearing this flag because we somehow managed to get through this rebase,
* so no need to restart anymore *)
tracker_state.did_change_mergebase <- false;
HackEventLogger.set_changed_mergebase false
)
) else
let hg_rev = Queue.peek_exn tracker_state.pending_queries in
let future = Caml.Hashtbl.find tracker_state.mergebase_queries hg_rev in
if Future.is_ready future then (
let (_ : Hg.hg_rev) = Queue.dequeue_exn tracker_state.pending_queries in
check_query future ~timeout:30 ~current_t:(Unix.gettimeofday ());
check_non_blocking ~is_full_check_done
)
let make_decision threshold count name =
if threshold = 0 || count < threshold || not tracker_state.is_enabled then
()
else (
(* Enough files / declarations / typings have changed to possibly warrant
     * a restart. Let's wait for the Mercurial mergebase query to finish so we
     * can decide whether to restart before proceeding. *)
check_blocking ();
if tracker_state.did_change_mergebase then (
Hh_logger.log "Changed %d %s due to rebase. Restarting!" count name;
Exit.exit
~msg:
"Hh_server detected a large rebase. Its quickest option now is to restart."
Exit_status.Big_rebase_detected
)
)
let files_changed local_config count =
make_decision
local_config.ServerLocalConfig.hg_aware_parsing_restart_threshold
count
"files"
let decl_changed local_config count =
make_decision
local_config.ServerLocalConfig.hg_aware_redecl_restart_threshold
count
"declarations"
let typing_changed local_config count =
make_decision
local_config.ServerLocalConfig.hg_aware_recheck_restart_threshold
count
"file typings" |
OCaml Interface | hhvm/hphp/hack/src/server/serverRevisionTracker.mli | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*)
val initialize : Hg.global_rev -> unit
(* state_name -> use_tracker_v2 *)
val on_state_enter : string -> bool -> unit
val on_state_leave :
Path.t ->
(* project root *)
string ->
(* state name *)
Hh_json.json option ->
(* state metadata *)
bool ->
(* use_tracker_v2 *)
unit
val is_hg_updating : bool (* use_tracker_v2 *) -> bool
val check_blocking : unit -> unit
val check_non_blocking : is_full_check_done:bool -> unit
(* This module tracks changes to mergebase, and is also informed (by functions below)
* about the sizes of jobs that are being processed. If we are in "mergebase changed"
 * state AND a big job has occurred, it can infer that this job was DUE to the rebase,
 * and that restarting will be faster than going forward (due to the possibility of a better
 * saved state being available, or the possibility of treating some files as prechecked;
 * see ServerPrecheckedFiles.ml).
*
* If there is no better saved state available and the only reason we restart is due
* to handling of prechecked files, we could theoretically avoid the restart.
* But initialization and incremental mode are so divergent that this would require
 * implementing the entire thing twice (and was not done - we can only treat files
* as prechecked during init).
*
* Moreover:
*
* - attributing incremental file changes to rebases is based on timing and
* there is a risk that we would precheck a real local change
* - even if we just want to treat files as prechecked, without fully processing
 *   them, it can require quite a lot of work in incremental mode to invalidate all
* things that need to be invalidated (which are absent during init)
* *)
val files_changed : ServerLocalConfig.t -> int -> unit
val decl_changed : ServerLocalConfig.t -> int -> unit
val typing_changed : ServerLocalConfig.t -> int -> unit |
OCaml | hhvm/hphp/hack/src/server/serverRewriteLambdaParameters.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Syntax = Full_fidelity_editable_positioned_syntax
module Rewriter = Full_fidelity_rewriter.WithSyntax (Syntax)
module PositionedTree =
Full_fidelity_syntax_tree.WithSyntax (Full_fidelity_positioned_syntax)
open Syntax
let is_not_acceptable ty =
let finder =
object
inherit [_] Type_visitor.locl_type_visitor
method! on_tprim acc _ =
function
| Aast.Tnull
| Aast.Tvoid
| Aast.Tresource
| Aast.Tnoreturn ->
true
| _ -> acc
end
in
finder#on_type false ty
let print_ty ty =
if is_not_acceptable ty then
None
else
CodemodTypePrinter.print ty
let get_first_suggested_type_as_string file type_map node =
Option.Monad_infix.(
position file node >>= fun pos ->
Tast_type_collector.get_from_pos_map (Pos.to_absolute pos) type_map
>>= fun tys ->
List.find_map tys ~f:(fun (env, phase_ty) ->
match phase_ty with
| Typing_defs.LoclTy ty ->
let (env, ty) = Tast_env.simplify_unions env ty in
let (env, ty) = Tast_env.expand_type env ty in
begin
match Typing_defs.deref ty with
| (_, Typing_defs.Tnewtype ("HackSuggest", [ty], _)) ->
let (env, ty) = Tast_env.simplify_unions env ty in
let ty = Tast_env.fully_expand env ty in
begin
match print_ty ty with
| Some type_str -> Some type_str
| None ->
Hh_logger.log
"%s failed to rewrite lambda parameter %s: the suggested type %s is non-denotable"
(Pos.string (Pos.to_absolute pos))
(text node)
(Tast_env.print_ty
env
(Typing_defs.mk
(Typing_reason.Rnone, Typing_defs.get_node ty)));
None
end
| _ -> None
end
| Typing_defs.DeclTy _ -> None))
let get_patches ctx file =
let nast = Ast_provider.get_ast ~full:true ctx file in
let tast =
(* We don't need an accurate list of typing errors, so we can skip TAST
checks. *)
Typing_toplevel.nast_to_tast
~do_tast_checks:false
ctx
(Naming.program ctx nast)
in
let type_map =
Tast_type_collector.collect_types
ctx
tast.Tast_with_dynamic.under_normal_assumptions
in
let source_text = Full_fidelity_source_text.from_file file in
let positioned_tree = PositionedTree.make source_text in
let root =
Full_fidelity_editable_positioned_syntax.from_positioned_syntax
(PositionedTree.root positioned_tree)
in
let get_lambda_expression_patches node =
let get_lambda_parameter_patches node =
let patch =
Option.Monad_infix.(
match syntax node with
| Token _ ->
get_first_suggested_type_as_string file type_map node
>>= fun type_str ->
position_exclusive file node >>| fun pos ->
ServerRenameTypes.Replace
ServerRenameTypes.
{
pos = Pos.to_absolute pos;
text = Printf.sprintf "(%s %s)" type_str (text node);
}
| ListItem { list_item; _ } -> begin
match syntax list_item with
| ParameterDeclaration _ ->
get_first_suggested_type_as_string file type_map list_item
>>= fun type_str ->
position file list_item >>| fun pos ->
ServerRenameTypes.Insert
ServerRenameTypes.
{ pos = Pos.to_absolute pos; text = type_str ^ " " }
| _ -> None
end
| _ -> None)
in
Option.to_list patch
in
match syntax node with
| LambdaExpression { lambda_signature; _ } -> begin
match syntax lambda_signature with
| Token _ -> get_lambda_parameter_patches lambda_signature
| LambdaSignature { lambda_parameters; _ } ->
List.concat_map
(syntax_node_to_list lambda_parameters)
~f:get_lambda_parameter_patches
| _ -> []
end
| _ -> []
in
let (patches, _) =
Rewriter.aggregating_rewrite_post
(fun node patches ->
(get_lambda_expression_patches node @ patches, Rewriter.Result.Keep))
root
[]
in
patches |
OCaml | hhvm/hphp/hack/src/server/serverRpc.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerEnv
open ServerCommandTypes
let remove_dead_warning name =
"hh_server was started without '--no-load', which is required when removing dead "
^ name
^ "s.\n"
^ "Please run 'hh_client restart --no-load' to restart it."
let take_max_errors error_list max_errors =
match max_errors with
| Some max_errors ->
let (error_list, dropped_errors) = List.split_n error_list max_errors in
(error_list, List.length dropped_errors)
| None -> (error_list, 0)
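(* Builds a Provider_context from the server env and registers [path] with the
   given contents (read from disk for [FileName], taken verbatim for
   [FileContent]) as an entry, so the handlers below can see unsaved edits. *)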
let single_ctx env path file_input =
let contents =
match file_input with
| ServerCommandTypes.FileName path -> Sys_utils.cat path
| ServerCommandTypes.FileContent contents -> contents
in
let ctx = Provider_utils.ctx_from_server_env env in
Provider_context.add_or_overwrite_entry_contents ~ctx ~path ~contents
let single_ctx_path env path =
single_ctx
env
(Relative_path.create_detect_prefix path)
(ServerCommandTypes.FileName path)
(* Might raise {!Naming_table.File_info_not_found} *)
let handle : type a. genv -> env -> is_stale:bool -> a t -> env * a =
fun genv env ~is_stale -> function
| STATUS { max_errors; _ } ->
HackEventLogger.check_response
(Errors.get_error_list env.errorl |> List.map ~f:User_error.get_code);
let error_list = Errors.sort_and_finalize env.errorl in
let (error_list, dropped_count) = take_max_errors error_list max_errors in
let liveness =
if is_stale then
Stale_status
else
Live_status
in
let has_unsaved_changes = ServerFileSync.has_unsaved_changes env in
let last_recheck_stats =
Option.map
env.ServerEnv.last_recheck_loop_stats_for_actual_work
~f:ServerEnv.RecheckLoopStats.to_user_telemetry
in
( env,
{
Server_status.liveness;
has_unsaved_changes;
error_list;
dropped_count;
last_recheck_stats;
} )
| STATUS_SINGLE { file_names; max_errors } ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, take_max_errors (ServerStatusSingle.go file_names ctx) max_errors)
| INFER_TYPE (file_input, line, column) ->
let path =
match file_input with
| FileName fn -> Relative_path.create_detect_prefix fn
| FileContent _ -> Relative_path.create_detect_prefix ""
in
let (ctx, entry) = single_ctx env path file_input in
let result =
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
ServerInferType.go_ctx ~ctx ~entry ~line ~column)
in
(env, result)
| INFER_TYPE_BATCH positions ->
(env, ServerInferTypeBatch.go genv.workers positions env)
| IS_SUBTYPE stdin -> (env, ServerIsSubtype.check genv.workers stdin env)
| TAST_HOLES (file_input, hole_filter) ->
let path =
match file_input with
| FileName fn -> Relative_path.create_detect_prefix fn
| FileContent _ -> Relative_path.create_detect_prefix ""
in
let (ctx, entry) = single_ctx env path file_input in
let result =
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
ServerCollectTastHoles.go_ctx ~ctx ~entry ~hole_filter)
in
(env, result)
| TAST_HOLES_BATCH files ->
(env, ServerTastHolesBatch.go genv.workers files env)
| INFER_TYPE_ERROR (file_input, line, column) ->
let path =
match file_input with
| FileName fn -> Relative_path.create_detect_prefix fn
| FileContent _ -> Relative_path.create_detect_prefix ""
in
let (ctx, entry) = single_ctx env path file_input in
let result =
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
ServerInferTypeError.go_ctx ~ctx ~entry ~line ~column)
in
(env, result)
| IDE_HOVER (path, line, column) ->
let (ctx, entry) = single_ctx_path env path in
let result = ServerHover.go_quarantined ~ctx ~entry ~line ~column in
(env, result)
| DOCBLOCK_AT (path, line, column, _, kind) ->
let (ctx, entry) = single_ctx_path env path in
let r = ServerDocblockAt.go_docblock_ctx ~ctx ~entry ~line ~column ~kind in
(env, r)
| DOCBLOCK_FOR_SYMBOL (symbol, kind) ->
let ctx = Provider_utils.ctx_from_server_env env in
let r = ServerDocblockAt.go_docblock_for_symbol ~ctx ~symbol ~kind in
(env, r)
| IDE_SIGNATURE_HELP (path, line, column) ->
let (ctx, entry) = single_ctx_path env path in
(env, ServerSignatureHelp.go_quarantined ~ctx ~entry ~line ~column)
(* TODO: edit this to look for classname *)
| XHP_AUTOCOMPLETE_SNIPPET cls ->
let ctx = Provider_utils.ctx_from_server_env env in
let tast_env = Tast_env.empty ctx in
let cls = Utils.add_ns cls in
(env, AutocompleteService.get_snippet_for_xhp_classname cls ctx tast_env)
| IDENTIFY_SYMBOL arg ->
let module SO = SymbolOccurrence in
let ctx = Provider_utils.ctx_from_server_env env in
let get_def_opt type_ name =
ServerSymbolDefinition.go
ctx
None
SO.{ type_; name; is_declaration = false; pos = Pos.none }
|> Option.to_list
|> List.map ~f:SymbolDefinition.to_absolute
in
let arg = Str.split (Str.regexp "::") arg in
(* The following are all the different named entities I could think of in Hack. *)
let results =
match arg with
| [c_name; member] ->
let c_name = Utils.add_ns c_name in
List.concat
[
get_def_opt (SO.Method (SO.ClassName c_name, member)) "";
get_def_opt (SO.Property (SO.ClassName c_name, member)) "";
get_def_opt (SO.XhpLiteralAttr (c_name, member)) "";
get_def_opt (SO.ClassConst (SO.ClassName c_name, member)) "";
get_def_opt (SO.Typeconst (c_name, member)) "";
]
| [name] ->
let name = Utils.add_ns name in
List.concat
[
get_def_opt (SO.Class SO.ClassId) name;
(* SO.Record and Class find the same things *)
get_def_opt SO.Function name;
get_def_opt SO.GConst name;
]
| _ -> []
in
(env, results)
| IDENTIFY_FUNCTION (filename, file_input, line, column) ->
let (ctx, entry) =
single_ctx env (Relative_path.create_detect_prefix filename) file_input
in
let result =
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
ServerIdentifyFunction.go_quarantined_absolute
~ctx
~entry
~line
~column)
in
(env, result)
| METHOD_JUMP (class_, filter, find_children) ->
Printf.printf "%s" class_;
let ctx = Provider_utils.ctx_from_server_env env in
( env,
MethodJumps.get_inheritance
ctx
class_
~filter
~find_children
env.naming_table
genv.workers )
| METHOD_JUMP_BATCH (classes, filter) ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerMethodJumpsBatch.go ctx genv.workers classes filter)
| FIND_REFS find_refs_action ->
let ctx = Provider_utils.ctx_from_server_env env in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
let open Done_or_retry in
let include_defs = false in
ServerFindRefs.(
go
ctx
find_refs_action
include_defs
~stream_file:None
~hints:[]
genv
env
|> map_env ~f:to_absolute))
| GO_TO_IMPL go_to_impl_action ->
Done_or_retry.(
ServerGoToImpl.go ~action:go_to_impl_action ~genv ~env
|> map_env ~f:ServerFindRefs.to_absolute)
| IDE_FIND_REFS_BY_SYMBOL
{
FindRefsWireFormat.CliArgs.symbol_name = _;
action;
stream_file;
hint_suffixes;
} ->
let hints =
List.map hint_suffixes ~f:(fun suffix -> Relative_path.from_root ~suffix)
in
let ctx = Provider_utils.ctx_from_server_env env in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
let open Done_or_retry in
let include_defs = false in
map_env
~f:ServerFindRefs.to_absolute
(ServerFindRefs.go
ctx
action
include_defs
~stream_file
~hints
genv
env))
| IDE_GO_TO_IMPL_BY_SYMBOL
{ FindRefsWireFormat.CliArgs.symbol_name = _; action; _ } ->
let ctx = Provider_utils.ctx_from_server_env env in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
let open Done_or_retry in
map_env
~f:ServerFindRefs.to_absolute
(ServerGoToImpl.go ~action ~genv ~env))
| RENAME rename_action ->
let ctx = Provider_utils.ctx_from_server_env env in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
let definition_for_wrapper =
match rename_action with
| ServerRenameTypes.ClassRename _
| ServerRenameTypes.ClassConstRename _
| ServerRenameTypes.LocalVarRename _ ->
None
| ServerRenameTypes.MethodRename { class_name; old_name; _ } ->
ServerSymbolDefinition.go
ctx
None
{
SymbolOccurrence.name = "unused for lookup";
type_ =
SymbolOccurrence.Method
( SymbolOccurrence.ClassName (Utils.add_ns class_name),
old_name );
is_declaration = false;
pos = Pos.none;
}
| ServerRenameTypes.FunctionRename { old_name; _ } ->
ServerSymbolDefinition.go
ctx
None
{
SymbolOccurrence.name = Utils.add_ns old_name;
type_ = SymbolOccurrence.Function;
is_declaration = false;
pos = Pos.none;
}
in
ServerRename.go ctx rename_action genv env ~definition_for_wrapper)
| IDE_RENAME_BY_SYMBOL (action, new_name, symbol_definition) ->
let ctx = Provider_utils.ctx_from_server_env env in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
let open Done_or_retry in
match
ServerRename.go_ide_with_find_refs_action
ctx
~find_refs_action:action
~new_name
~symbol_definition
genv
env
with
| Error e -> (env, Done (Error e))
| Ok r -> map_env r ~f:(fun x -> Ok x))
| CODEMOD_SDT codemod_line ->
let patches = Sdt_analysis.patches_of_codemod_line codemod_line in
(env, patches)
| REMOVE_DEAD_FIXMES codes ->
if genv.ServerEnv.options |> ServerArgs.no_load then (
HackEventLogger.check_response
(Errors.get_error_list env.errorl |> List.map ~f:User_error.get_code);
(env, `Ok (ServerRename.get_fixme_patches codes env))
) else
(env, `Error (remove_dead_warning "fixme"))
| REMOVE_DEAD_UNSAFE_CASTS ->
if genv.ServerEnv.options |> ServerArgs.no_load then (
HackEventLogger.check_response
(Errors.get_error_list env.errorl |> List.map ~f:User_error.get_code);
(env, `Ok (ServerRename.get_dead_unsafe_cast_patches env))
) else
(env, `Error (remove_dead_warning "unsafe cast"))
| REWRITE_LAMBDA_PARAMETERS files ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerRename.get_lambda_parameter_rewrite_patches ctx files)
| DUMP_SYMBOL_INFO file_list ->
(env, SymbolInfoService.go genv.workers file_list env)
| IN_MEMORY_DEP_TABLE_SIZE ->
(* TODO(hverr): Clean up 32-bit/migrate *)
(env, Ok 0)
| SAVE_NAMING filename ->
(env, SaveStateService.go_naming env.naming_table filename)
| SAVE_STATE (filename, gen_saved_ignore_type_errors) ->
if Errors.is_empty env.errorl || gen_saved_ignore_type_errors then
(env, SaveStateService.go env filename)
else
(env, Error "There are typecheck errors; cannot generate saved state.")
| SEARCH (query, type_) ->
let ctx = Provider_utils.ctx_from_server_env env in
let sienv_ref = ref env.ServerEnv.local_symbol_table in
let r = ServerSearch.go ctx query ~kind_filter:type_ sienv_ref in
({ env with ServerEnv.local_symbol_table = !sienv_ref }, r)
| LINT fnl ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerLint.go genv ctx fnl)
| LINT_STDIN { filename; contents } ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerLint.go_stdin ctx ~filename ~contents)
| LINT_ALL code ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerLint.lint_all genv ctx code)
| CREATE_CHECKPOINT x -> (env, ServerCheckpoint.create_checkpoint x)
| RETRIEVE_CHECKPOINT x -> (env, ServerCheckpoint.retrieve_checkpoint x)
| DELETE_CHECKPOINT x -> (env, ServerCheckpoint.delete_checkpoint x)
| STATS -> (env, Stats.get_stats ())
| FORMAT (content, from, to_) ->
let legacy_format_options =
{ Lsp.DocumentFormatting.tabSize = 2; insertSpaces = true }
in
(env, ServerFormat.go ~content from to_ legacy_format_options)
| DUMP_FULL_FIDELITY_PARSE file -> (env, FullFidelityParseService.go file)
| OPEN_FILE (path, contents) ->
let predeclare = genv.local_config.ServerLocalConfig.predeclare_ide in
(ServerFileSync.open_file ~predeclare env path contents, ())
| CLOSE_FILE path -> (ServerFileSync.close_file env path, ())
| EDIT_FILE (path, edits) ->
let predeclare = genv.local_config.ServerLocalConfig.predeclare_ide in
let edits = List.map edits ~f:Ide_api_types.ide_text_edit_to_fc in
(ServerFileSync.edit_file ~predeclare env path edits, ())
| IDE_AUTOCOMPLETE (filename, pos, is_manually_invoked) ->
let pos = pos |> Ide_api_types.ide_pos_to_fc in
let contents =
ServerFileSync.get_file_content (ServerCommandTypes.FileName filename)
in
let (ctx, entry) =
Provider_context.add_or_overwrite_entry_contents
~ctx:(Provider_utils.ctx_from_server_env env)
~path:(Relative_path.create_detect_prefix filename)
~contents
in
let sienv_ref = ref env.ServerEnv.local_symbol_table in
let results =
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
ServerAutoComplete.go_ctx
~ctx
~entry
~sienv_ref
~is_manually_invoked
~line:pos.File_content.line
~column:pos.File_content.column
~naming_table:env.naming_table)
in
let env = { env with ServerEnv.local_symbol_table = !sienv_ref } in
(env, results)
| CODE_ACTION { path; range } ->
let (ctx, entry) = single_ctx_path env path in
let actions = Server_code_actions_services.go ~ctx ~entry ~range in
(env, actions)
| CODE_ACTION_RESOLVE { path; range; resolve_title; use_snippet_edits } ->
let (ctx, entry) = single_ctx_path env path in
let action =
Server_code_actions_services.resolve
~ctx
~entry
~range
~resolve_title
~use_snippet_edits
in
(env, action)
| DISCONNECT -> (ServerFileSync.clear_sync_data env, ())
| OUTLINE path ->
( env,
ServerCommandTypes.FileName path
|> ServerFileSync.get_file_content
|> FileOutline.outline env.popt )
| IDE_IDLE -> ({ env with ide_idle = true }, ())
| RAGE -> (env, ServerRage.go genv env)
| CST_SEARCH { sort_results; input; files_to_search } -> begin
try
(env, CstSearchService.go genv env ~sort_results ~files_to_search input)
with
| MultiThreadedCall.Coalesced_failures failures ->
let failures =
failures
|> List.map ~f:WorkerController.failure_to_string
|> String.concat ~sep:"\n"
in
( env,
Error
(Printf.sprintf
"Worker failures - check the logs for more details:\n%s\n"
failures) )
| exn ->
let e = Exception.wrap exn in
(env, Error (Exception.to_string e))
end
| NO_PRECHECKED_FILES -> (ServerPrecheckedFiles.expand_all env, ())
| POPULATE_REMOTE_DECLS files ->
(env, ServerPopulateRemoteDecls.go env genv genv.workers files)
| FUN_DEPS_BATCH positions ->
(env, ServerFunDepsBatch.go genv.workers positions env)
| LIST_FILES_WITH_ERRORS -> (env, ServerEnv.list_files env)
| FILE_DEPENDENTS filenames ->
let files = ServerFileDependents.go genv env filenames in
(env, files)
| IDENTIFY_TYPES (labelled_file, line, column) ->
let (path, file_input) =
ServerCommandTypesUtils.extract_labelled_file labelled_file
in
let (ctx, entry) = single_ctx env path file_input in
let result =
ServerTypeDefinition.go_quarantined ~ctx ~entry ~line ~column
in
(env, result)
| EXTRACT_STANDALONE target ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerExtractStandalone.go ctx target)
| CONCATENATE_ALL paths -> (env, ServerConcatenateAll.go genv env paths)
| GO_TO_DEFINITION (labelled_file, line, column) ->
let (path, file_input) =
ServerCommandTypesUtils.extract_labelled_file labelled_file
in
let (ctx, entry) = single_ctx env path file_input in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
(env, ServerGoToDefinition.go_quarantined ~ctx ~entry ~line ~column))
| PREPARE_CALL_HIERARCHY (labelled_file, line, column) ->
let (path, file_input) =
ServerCommandTypesUtils.extract_labelled_file labelled_file
in
let (ctx, entry) = single_ctx env path file_input in
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
( env,
ServerPrepareCallHierarchy.go_quarantined ~ctx ~entry ~line ~column ))
| CALL_HIERARCHY_INCOMING_CALLS call_item ->
let ctx = Provider_utils.ctx_from_server_env env in
let result =
ServerCallHierarchyIncomingCalls.go call_item ~ctx ~genv ~env
in
(env, result)
| CALL_HIERARCHY_OUTGOING_CALLS call_item ->
let ctx = Provider_utils.ctx_from_server_env env in
let result = ServerCallHierarchyOutgoingCalls.go call_item ~ctx in
(env, result)
| VERBOSE verbose ->
if verbose then
Hh_logger.Level.set_min_level Hh_logger.Level.Debug
else
Hh_logger.Level.set_min_level
genv.local_config.ServerLocalConfig.min_log_level;
(env, ())
| PAUSE pause ->
let env =
if pause then
{
env with
full_recheck_on_file_changes =
Paused { paused_recheck_id = env.init_env.recheck_id };
}
else
{ env with full_recheck_on_file_changes = Resumed }
in
(env, ())
| DEPS_OUT_BATCH positions ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerDepsOutBatch.go ctx positions)
| DEPS_IN_BATCH positions ->
let ctx = Provider_utils.ctx_from_server_env env in
(env, ServerDepsInBatch.go ~ctx ~genv ~env positions) |
OCaml | hhvm/hphp/hack/src/server/serverRxApiShared.ml | (*
* Copyright (c) 2017, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type pos = Relative_path.t * int * int
type spos = string * int * int [@@deriving eq, ord]
let pos_to_json fn line char =
Hh_json.(
JSON_Object
[
("file", JSON_String (Relative_path.to_absolute fn));
("line", int_ line);
("character", int_ char);
])
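(* Computes a TAST for each file mentioned in [pos_list]; consecutive duplicate
   paths are collapsed so a file appearing in adjacent positions is only
   checked once. *)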
let recheck_typing ctx (pos_list : pos list) =
let files_to_check =
pos_list
|> List.map ~f:(fun (path, _, _) -> path)
|> List.remove_consecutive_duplicates ~equal:Relative_path.equal
in
List.map files_to_check ~f:(fun path ->
let (_ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_unquarantined ~ctx ~entry
in
(path, tast))
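(* True when [pos] starts on [line] and the column [char] lies between the
   position's start column and one past its end column (inclusive). *)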
let pos_contains_line_char pos line char =
let (l, start, end_) = Pos.info_pos pos in
l = line && start <= char && char - 1 <= end_
type 'a walker = {
plus: 'a -> 'a -> 'a;
on_method: Tast_env.env -> Tast.method_ -> 'a;
on_fun_def: Tast_env.env -> Tast.fun_def -> 'a;
}
let find_in_tree (walker : 'a walker) line char =
object (self)
inherit [_] Tast_visitor.reduce
inherit [_] Visitors_runtime.option_monoid
method merge = walker.plus
method! on_method_ env m =
if pos_contains_line_char (fst m.Aast.m_name) line char then
Some (walker.on_method env m)
else
self#zero
method! on_fun_def env fd =
if pos_contains_line_char (fst fd.Aast.fd_name) line char then
Some (walker.on_fun_def env fd)
else
self#zero
end
type ('a, 'r, 's) handlers = {
result_to_string:
('r option, string) result -> Relative_path.t * int * int -> string;
walker: 'a walker;
get_state: Provider_context.t -> Relative_path.t -> 's;
map_result: Provider_context.t -> 's -> 'a -> 'r;
}
let prepare_pos_infos pos_list =
pos_list
(* Sort, so that many queries on the same file will (generally) be
* dispatched to the same worker. *)
|> List.sort ~compare:compare_spos
(* Dedup identical queries *)
|> List.remove_consecutive_duplicates ~equal:equal_spos
|> List.map ~f:(fun (path, line, char) ->
(Relative_path.create_detect_prefix path, line, char))
let helper h ctx acc pos_list =
let tasts =
List.fold
(recheck_typing ctx pos_list)
~init:Relative_path.Map.empty
~f:(fun map (key, data) -> Relative_path.Map.add map ~key ~data)
in
List.fold pos_list ~init:acc ~f:(fun acc pos ->
let (fn, line, char) = pos in
let s = h.get_state ctx fn in
let result =
Relative_path.Map.find_opt tasts fn
|> Result.of_option ~error:"No such file or directory"
|> Result.map ~f:(fun tast ->
(find_in_tree h.walker line char)#go
ctx
tast.Tast_with_dynamic.under_normal_assumptions
|> Option.map ~f:(h.map_result ctx s))
in
h.result_to_string result pos :: acc)
let parallel_helper h workers tcopt pos_list =
MultiWorker.call
workers
~job:(helper h tcopt)
~neutral:[]
~merge:List.rev_append
~next:(MultiWorker.next workers pos_list)
(* Entry Point *)
let go :
MultiWorker.worker list option ->
(string * int * int) list ->
ServerEnv.env ->
_ handlers ->
_ =
fun workers pos_list env h ->
let ctx = Provider_utils.ctx_from_server_env env in
let pos_list = prepare_pos_infos pos_list in
let results =
if List.length pos_list < 10 then
helper h ctx [] pos_list
else
parallel_helper h workers ctx pos_list
in
results |
OCaml Interface | hhvm/hphp/hack/src/server/serverRxApiShared.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type pos = Relative_path.t * int * int
val pos_to_json : Relative_path.t -> int -> int -> Hh_json.json
type 'a walker = {
plus: 'a -> 'a -> 'a;
on_method: Tast_env.env -> Tast.method_ -> 'a;
on_fun_def: Tast_env.env -> Tast.fun_def -> 'a;
}
type ('a, 'r, 's) handlers = {
result_to_string: ('r option, string) result -> pos -> string;
walker: 'a walker;
get_state: Provider_context.t -> Relative_path.t -> 's;
map_result: Provider_context.t -> 's -> 'a -> 'r;
}
val go :
MultiWorker.worker list option ->
(string * int * int) list ->
ServerEnv.env ->
('a, 'b, 'c) handlers ->
string list
(** For test: *)
val helper :
('a, 'b, 'c) handlers ->
Provider_context.t ->
string list ->
pos list ->
string list |
OCaml | hhvm/hphp/hack/src/server/serverSavedStateConfig.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude |
OCaml | hhvm/hphp/hack/src/server/serverSearch.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module SUtils = SearchUtils
let result_to_json res =
let desc_string = SearchUtils.kind_to_string res.SUtils.result_type in
let p = res.SUtils.pos in
let fn = Pos.filename p in
let (line, start, end_) = Pos.info_pos p in
Hh_json.JSON_Object
[
("name", Hh_json.JSON_String (Utils.strip_ns res.SUtils.name));
("filename", Hh_json.JSON_String fn);
("desc", Hh_json.JSON_String desc_string);
("line", Hh_json.int_ line);
("char_start", Hh_json.int_ start);
("char_end", Hh_json.int_ end_);
("scope", Hh_json.JSON_String "");
]
let re_colon_colon = Str.regexp "::"
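(* Runs a workspace symbol search. A query such as "Vector::add" is split on
   "::": the left half picks the best-matching class from the symbol index and
   the right half filters that class's methods; any other query is sent to the
   symbol index directly, capped at 100 results. *)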
let go
ctx query_text ~(kind_filter : string) (sienv_ref : SearchUtils.si_env ref)
: SearchUtils.result =
let max_results = 100 in
let start_time = Unix.gettimeofday () in
let kind_filter = SearchUtils.string_to_kind kind_filter in
let context = SearchTypes.Ac_workspace_symbol in
let results =
(* If query contains "::", search class methods instead of top level definitions *)
match Str.split_delim re_colon_colon query_text with
| [class_name_query; method_query] ->
(* Fixup the kind filter *)
let kind_filter = Some SearchTypes.SI_Class in
(* Get the class with the most similar name to `class_name_query` *)
let (candidates, _is_complete) =
SymbolIndex.find_matching_symbols
~query_text:class_name_query
~max_results:1
~kind_filter
~context
~sienv_ref
in
let class_ =
candidates |> List.hd |> Option.map ~f:(fun r -> r.SearchTypes.si_name)
in
begin
match class_ with
| Some name ->
ClassMethodSearch.query_class_methods
ctx
(Utils.add_ns name)
method_query
| None ->
(* When we can't find a class with a name similar to the given one,
just return no search results. *)
[]
end
| _ ->
let (temp_results, _is_complete) =
SymbolIndex.find_matching_symbols
~sienv_ref
~query_text
~max_results
~context
~kind_filter
in
AutocompleteService.add_position_to_results ctx temp_results
in
SymbolIndexCore.log_symbol_index_search
~sienv:!sienv_ref
~start_time
~query_text
~max_results
~kind_filter
~results:(List.length results)
~caller:"ServerSearch.go";
results |
OCaml | hhvm/hphp/hack/src/server/serverSignatureHelp.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Option.Monad_infix
module SourceText = Full_fidelity_source_text
module Syntax = Full_fidelity_positioned_syntax
(** Returns ((symbol_line, symbol_char), argument_idx) where:
- symbol_line: the line number of the function symbol
- symbol_char: the column number of the function symbol
- argument_idx: index of the function argument that contains the offset.
For example, given this line:
25: $myObject->foo(true, null);
offset: ^
We would return the following:
Some ((25, 16), 1)
Returns None if the given offset is not inside a function call.
*)
let get_positional_info (cst : Syntax.t) (file_offset : int) :
((int * int) * int) option =
Syntax.(
let parent_tree = parentage cst file_offset in
(* Search upwards through the parent tree.
* If we find a function call or constructor, signature help should appear.
* If we find a lambda first, don't offer help even if we are within a function call! *)
let within_lambda =
Option.value
~default:false
(List.find_map parent_tree ~f:(fun syntax ->
match syntax.syntax with
| LambdaExpression _ -> Some true
| ConstructorCall _
| FunctionCallExpression _ ->
Some false
| _ -> None))
in
if within_lambda then
None
else
parent_tree
|> List.find_map ~f:(fun syntax ->
match syntax.syntax with
| FunctionCallExpression children ->
Some
( children.function_call_receiver,
children.function_call_argument_list )
| ConstructorCall children ->
Some
( children.constructor_call_type,
children.constructor_call_argument_list )
| _ -> None)
>>= fun (callee_node, argument_list) ->
trailing_token callee_node >>= fun callee_trailing_token ->
let function_symbol_offset = Token.end_offset callee_trailing_token in
let pos =
SourceText.offset_to_position
callee_trailing_token.Token.source_text
function_symbol_offset
in
let arguments = children argument_list in
(* Add 1 to counteract the -1 in trailing_end_offset. *)
let in_args_area =
leading_start_offset argument_list <= file_offset
&& file_offset <= trailing_end_offset argument_list + 1
in
if not in_args_area then
None
else
match arguments with
| [] -> Some (pos, 0)
| arguments ->
arguments
|> List.mapi ~f:(fun idx elem -> (idx, elem))
|> List.find_map ~f:(fun (idx, child) ->
(* Don't bother range checking if we're in the final argument, since we
already checked that in in_args_area up above. *)
let matches_end =
idx = List.length arguments - 1
|| file_offset < trailing_end_offset child
in
if matches_end then
Some (pos, idx)
else
None))
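(* Resolves [occurrence] to a function declaration: methods (including
   __construct) are looked up on their class and repackaged as a fun_elt so the
   display code can treat methods and functions alike, while bare names are
   namespace-expanded and looked up as top-level functions. Also fetches the
   symbol definition, which is later used to find docblocks. *)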
let get_occurrence_info
(ctx : Provider_context.t)
(nast : Nast.program)
(occurrence : Relative_path.t SymbolOccurrence.t) =
let module SO = SymbolOccurrence in
let (ft_opt, full_occurrence) =
(* Handle static methods, instance methods, and constructors *)
match occurrence.SO.type_ with
| SO.Method (SO.ClassName classname, methodname) ->
let classname = Utils.add_ns classname in
let ft =
Decl_provider.get_class ctx classname
|> Option.bind ~f:(fun cls ->
if String.equal methodname "__construct" then
Decl_provider.Class.construct cls |> fst
else
Option.first_some
(Decl_provider.Class.get_method cls methodname)
(Decl_provider.Class.get_smethod cls methodname))
|> Option.map ~f:(fun class_elt ->
(* We'll convert class_elt to fun_decl here solely as a lazy
convenience, so that the "display" code below can display
                  both class_elt and fun_decl uniformly. *)
{
fe_module = None;
fe_internal = false;
Typing_defs.fe_pos = Lazy.force class_elt.Typing_defs.ce_pos;
fe_type = Lazy.force class_elt.Typing_defs.ce_type;
fe_deprecated = class_elt.Typing_defs.ce_deprecated;
fe_php_std_lib = false;
fe_support_dynamic_type =
Typing_defs.get_ce_support_dynamic_type class_elt;
fe_no_auto_dynamic = false;
fe_no_auto_likes = false;
})
in
(ft, occurrence)
| _ ->
let fun_name =
Utils.expand_namespace
(ParserOptions.auto_namespace_map (Provider_context.get_popt ctx))
occurrence.SO.name
in
let ft = Decl_provider.get_fun ctx fun_name in
let full_occurrence =
match occurrence.SO.type_ with
| SO.Function -> { occurrence with SO.name = fun_name }
| _ -> occurrence
in
(ft, full_occurrence)
in
let def_opt = ServerSymbolDefinition.go ctx (Some nast) full_occurrence in
match ft_opt with
| None -> None
| Some ft -> Some (occurrence, ft, def_opt)
let go_quarantined
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(line : int)
~(column : int) : Lsp.SignatureHelp.result =
let source_text = Ast_provider.compute_source_text ~entry in
let offset = SourceText.position_to_offset source_text (line, column) in
let cst = Ast_provider.compute_cst ~ctx ~entry in
match
get_positional_info (Provider_context.PositionedSyntaxTree.root cst) offset
with
| None -> None
| Some ((symbol_line, symbol_char), argument_idx) ->
let results =
IdentifySymbolService.go_quarantined
~ctx
~entry
~line:symbol_line
~column:symbol_char
in
let results =
List.filter results ~f:(fun r ->
match r.SymbolOccurrence.type_ with
| SymbolOccurrence.Method _
| SymbolOccurrence.Function ->
true
| _ -> false)
in
(match List.hd results with
| None -> None
| Some head_result ->
let ast =
Ast_provider.compute_ast ~popt:(Provider_context.get_popt ctx) ~entry
in
(match get_occurrence_info ctx ast head_result with
| None -> None
| Some (occurrence, fe, def_opt) ->
let open Typing_defs in
let open Lsp.SignatureHelp in
let tast_env = Tast_env.empty ctx in
let siginfo_label =
Tast_env.print_ty_with_identity
tast_env
(DeclTy fe.fe_type)
occurrence
def_opt
in
let siginfo_documentation =
let base_class_name = SymbolOccurrence.enclosing_class occurrence in
def_opt >>= fun def ->
let path = def.SymbolDefinition.pos |> Pos.filename in
let (ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
ServerDocblockAt.go_comments_for_symbol_ctx
~ctx
~entry
~def
~base_class_name
in
let param_docs =
match siginfo_documentation with
| Some siginfo_documentation ->
Some
(Docblock_parser.get_param_docs ~docblock:siginfo_documentation)
| None -> None
in
let ft_params =
match get_node fe.fe_type with
| Tfun ft -> ft.ft_params
| _ -> []
in
let params =
List.map ft_params ~f:(fun param ->
let parinfo_label =
match param.fp_name with
| Some s -> s
| None -> Tast_env.print_decl_ty tast_env fe.fe_type
in
let parinfo_documentation =
match param_docs with
| Some param_docs -> Map.find param_docs parinfo_label
| None -> None
in
{ parinfo_label; parinfo_documentation })
in
let signature_information =
{ siginfo_label; siginfo_documentation; parameters = params }
in
Some
{
signatures = [signature_information];
activeSignature = 0;
activeParameter = argument_idx;
})) |
OCaml Interface | hhvm/hphp/hack/src/server/serverSignatureHelp.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Returns signature help for the given location. *)
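(* Usage sketch (illustrative; the position, variable names, and printing below
   are assumptions, not the real LSP caller):

     match ServerSignatureHelp.go_quarantined ~ctx ~entry ~line:42 ~column:17 with
     | None -> ()
     | Some { Lsp.SignatureHelp.signatures; activeSignature = _; activeParameter = _ } ->
       List.iter signatures ~f:(fun s ->
           print_endline s.Lsp.SignatureHelp.siginfo_label)
*)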
val go_quarantined :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
line:int ->
column:int ->
Lsp.SignatureHelp.result |
OCaml | hhvm/hphp/hack/src/server/serverStamp.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* The contract of this stamp "/tmp/hh_server/stamp" file is:
* If someone wants to keep a cache of whether a given file typechecks
* cleanly, then they can look for changes to the file to know that
* their cache might have become invalid and should be re-checked.
*
 * The only known consumer is ext_hh_client, an HHVM extension which
* exposes some APIs in the namespace HH\Client for typechecking a file.
*
* Note that there's only one stamp file on a system, even though there
* might be several separate instances of hh_server.
*)
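(* A minimal sketch of the consumer side of this contract (hypothetical code;
 * ext_hh_client implements its own version): poll the stamp file's mtime and
 * invalidate any cached typecheck results whenever it changes.
 *
 *   let last_seen_mtime = ref 0.
 *
 *   let cache_still_valid stamp_path =
 *     match Unix.stat stamp_path with
 *     | { Unix.st_mtime; _ } when Float.equal st_mtime !last_seen_mtime -> true
 *     | { Unix.st_mtime; _ } ->
 *       last_seen_mtime := st_mtime;
 *       false
 *     | exception Unix.Unix_error _ -> false
 *)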
let stamp_file = Filename.concat GlobalConfig.tmp_dir "stamp"
let touch_stamp () =
Sys_utils.mkdir_no_fail (Filename.dirname stamp_file);
Sys_utils.with_umask 0o111 (fun () ->
(* Open and close the file to set its mtime. Don't use the Unix.utimes
* function since that will fail if the stamp file doesn't exist. *)
close_out (open_out stamp_file))
let touch_stamp_errors l1 l2 =
(* We don't want to needlessly touch the stamp file if the error list is
* the same and nothing has changed, but we also don't want to spend a ton
* of time comparing huge lists of errors over and over (i.e., grind to a
* halt in the cases when there are thousands of errors). So we cut off
* the comparison at an arbitrary point. *)
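  (* For example, two identical lists of 10,000 errors each still trigger a
   * touch: once either list has more than 5 entries we conservatively assume a
   * change rather than pay for a full comparison. *)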
let rec length_greater_than n = function
| [] -> false
| _ when n = 0 -> true
| _ :: l -> length_greater_than (n - 1) l
in
if length_greater_than 5 l1 || length_greater_than 5 l2 || l1 <> l2 then
touch_stamp () |
OCaml | hhvm/hphp/hack/src/server/serverStatusSingle.ml | (*
* Copyright (c) 2018, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerCommandTypes
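(* Usage sketch (illustrative; the path and the [file_contents] variable are
 * made up). [go] below accepts both on-disk files and unsaved buffer contents,
 * and returns the combined, sorted list of errors with absolute positions.
 *
 *   let errors =
 *     go [FileName "/data/www/foo.php"; FileContent file_contents] ctx
 *)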
let go file_inputs ctx =
let errors acc file_input =
match file_input with
| FileName file_name ->
let path = Relative_path.create_detect_prefix file_name in
let (ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
let { Tast_provider.Compute_tast_and_errors.errors; _ } =
Tast_provider.compute_tast_and_errors_unquarantined ~ctx ~entry
in
Errors.merge errors acc
| FileContent contents ->
let (ctx, entry) =
Provider_context.add_or_overwrite_entry_contents
~ctx
~path:Relative_path.default
~contents
in
let { Tast_provider.Compute_tast_and_errors.errors; _ } =
(* Explicitly put the contents of `ctx` in a quarantine, since they
may overwrite naming table entries. *)
Provider_utils.respect_but_quarantine_unsaved_changes ~ctx ~f:(fun () ->
Tast_provider.compute_tast_and_errors_unquarantined ~ctx ~entry)
in
Errors.merge errors acc
in
List.fold ~f:errors ~init:Errors.empty file_inputs
|> Errors.get_sorted_error_list
|> List.map ~f:User_error.to_absolute |
OCaml | hhvm/hphp/hack/src/server/serverSymbolDefinition.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open IdentifySymbolService
open Option.Monad_infix
open Typing_defs
module SourceText = Full_fidelity_source_text
module Syntax = Full_fidelity_positioned_syntax
module SyntaxKind = Full_fidelity_syntax_kind
module SyntaxTree = Provider_context.PositionedSyntaxTree
module Cls = Decl_provider.Class
open Aast
(* Element type, class name, element name. Class name refers to the "origin" class;
 * we expect to find said element in the AST/NAST of this class *)
type class_element = class_element_ * string * string
and class_element_ =
| Constructor
| Method
| Static_method
| Property
| Static_property
| Class_const
| Typeconst
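(* For example (illustrative values): (Method, "\\SomeClass", "getName") means
 * we expect to find a method named "getName" in the AST of the class SomeClass,
 * where SomeClass is the "origin" class that the decl heap reports for the
 * member, not necessarily the class named at the use site. *)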
let get_class_by_name ctx x =
Naming_provider.get_type_path ctx x >>= fun fn ->
Ide_parser_cache.with_ide_cache @@ fun () ->
Ast_provider.find_class_in_file ctx fn x ~full:false
let get_function_by_name ctx x =
Naming_provider.get_fun_path ctx x >>= fun fn ->
Ide_parser_cache.with_ide_cache @@ fun () ->
Ast_provider.find_fun_in_file ctx fn x ~full:false
let get_gconst_by_name ctx x =
Naming_provider.get_const_path ctx x >>= fun fn ->
Ide_parser_cache.with_ide_cache @@ fun () ->
Ast_provider.find_gconst_in_file ctx fn x ~full:false
let get_module_def_by_name ctx x =
Naming_provider.get_module_path ctx x >>= fun md ->
Ide_parser_cache.with_ide_cache @@ fun () ->
Ast_provider.find_module_in_file ctx md x ~full:false
(* Span information is stored only in parsing AST *)
let get_member_def (ctx : Provider_context.t) (x : class_element) =
let (type_, member_origin, member_name) = x in
get_class_by_name ctx member_origin >>= fun c ->
let member_origin = Utils.strip_ns member_origin in
match type_ with
| Constructor
| Method
| Static_method ->
List.find c.c_methods ~f:(fun m -> String.equal (snd m.m_name) member_name)
>>= fun m -> Some (FileOutline.summarize_method member_origin m)
| Property
| Static_property ->
let props =
c.c_vars @ List.map c.c_xhp_attrs ~f:(fun (_, var, _, _) -> var)
in
let get_prop_name { cv_id; _ } = snd cv_id in
List.find props ~f:(fun p -> String.equal (get_prop_name p) member_name)
>>= fun p -> Some (FileOutline.summarize_property member_origin p)
| Class_const ->
let (consts, abs_consts) =
List.partition_map c.c_consts ~f:(fun cc ->
match cc.cc_kind with
| CCConcrete _
| CCAbstract (Some _) ->
First cc
| CCAbstract None -> Second cc)
in
let name_matches cc = String.equal (snd cc.cc_id) member_name in
let res =
Option.first_some
(List.find consts ~f:name_matches)
(List.find abs_consts ~f:name_matches)
in
Option.map ~f:(FileOutline.summarize_class_const member_origin) res
| Typeconst ->
let tconsts = c.c_typeconsts in
List.find tconsts ~f:(fun t ->
String.equal (snd t.c_tconst_name) member_name)
>>= fun t -> Some (FileOutline.summarize_typeconst member_origin t)
let get_local_var_def ast name p =
let (line, char, _) = Pos.info_pos p in
let def = List.hd (ServerFindLocals.go_from_ast ~ast ~line ~char) in
Option.map def ~f:(FileOutline.summarize_local name)
(* summarize a class or typedef *)
let summarize_class_typedef ctx x =
Naming_provider.get_type_path_and_kind ctx x >>= fun (fn, ct) ->
match ct with
| Naming_types.TClass ->
Ast_provider.find_class_in_file ctx fn x ~full:false >>= fun c ->
Some (FileOutline.summarize_class c ~no_children:true)
| Naming_types.TTypedef ->
Ast_provider.find_typedef_in_file ctx fn x ~full:false >>= fun tdef ->
Some (FileOutline.summarize_typedef tdef)
let go ctx ast result =
let module SO = SymbolOccurrence in
match result.SO.type_ with
| SO.Attribute (Some { SO.class_name; method_name; is_static }) ->
let matching_method =
Decl_provider.get_overridden_method
ctx
~class_name
~method_name
~is_static
in
(match matching_method with
| Some meth -> get_member_def ctx (Method, meth.ce_origin, method_name)
| None -> None)
| SO.Attribute None -> summarize_class_typedef ctx result.SO.name
| SO.Method (SO.ClassName c_name, method_name) ->
(* Classes on typing heap have all the methods from inheritance hierarchy
* folded together, so we will correctly identify them even if method_name
* is not defined directly in class c_name *)
Decl_provider.get_class ctx c_name >>= fun class_ ->
if String.equal method_name Naming_special_names.Members.__construct then
match fst (Cls.construct class_) with
| Some m -> get_member_def ctx (Constructor, m.ce_origin, method_name)
| None ->
get_class_by_name ctx c_name >>= fun c ->
Some (FileOutline.summarize_class c ~no_children:true)
else (
match Cls.get_method class_ method_name with
| Some m -> get_member_def ctx (Method, m.ce_origin, method_name)
| None ->
Cls.get_smethod class_ method_name >>= fun m ->
get_member_def ctx (Static_method, m.ce_origin, method_name)
)
| SO.Method (SO.UnknownClass, _) -> None
| SO.Keyword _ -> None
| SO.PureFunctionContext -> None
| SO.BuiltInType _ -> None
| SO.BestEffortArgument _ -> None
| SO.Property (SO.ClassName c_name, property_name)
| SO.XhpLiteralAttr (c_name, property_name) ->
Decl_provider.get_class ctx c_name >>= fun class_ ->
let property_name = clean_member_name property_name in
begin
match Cls.get_prop class_ property_name with
| Some m -> get_member_def ctx (Property, m.ce_origin, property_name)
| None ->
Cls.get_sprop class_ ("$" ^ property_name) >>= fun m ->
get_member_def ctx (Static_property, m.ce_origin, property_name)
end
| SO.Property (SO.UnknownClass, _) -> None
| SO.ClassConst (SO.ClassName c_name, const_name) ->
Decl_provider.get_class ctx c_name >>= fun class_ ->
Cls.get_const class_ const_name >>= fun m ->
get_member_def ctx (Class_const, m.cc_origin, const_name)
| SO.ClassConst (SO.UnknownClass, _) -> None
| SO.EnumClassLabel (class_name, member_name) ->
(* An enum class is a classish with class constants. *)
Decl_provider.get_class ctx class_name >>= fun class_ ->
Cls.get_const class_ member_name >>= fun m ->
get_member_def ctx (Class_const, m.cc_origin, member_name)
| SO.Function ->
get_function_by_name ctx result.SO.name >>= fun f ->
Some (FileOutline.summarize_fun f)
| SO.GConst ->
get_gconst_by_name ctx result.SO.name >>= fun cst ->
Some (FileOutline.summarize_gconst cst)
| SO.Class _ -> summarize_class_typedef ctx result.SO.name
| SO.Typeconst (c_name, typeconst_name) ->
Decl_provider.get_class ctx c_name >>= fun class_ ->
Cls.get_typeconst class_ typeconst_name >>= fun m ->
get_member_def ctx (Typeconst, m.ttc_origin, typeconst_name)
| SO.LocalVar -> begin
match ast with
| None -> None
| Some ast -> get_local_var_def ast result.SO.name result.SO.pos
end
| SO.TypeVar ->
(match ast with
| None -> None
| Some ast -> ServerFindTypeVar.go ast result.SO.pos result.SO.name)
| SO.HhFixme -> None
| SO.Module ->
get_module_def_by_name ctx result.SO.name >>= fun md ->
Some (FileOutline.summarize_module_def md)
let get_definition_cst_node_from_pos ctx entry kind pos =
try
let source_text = Ast_provider.compute_source_text ~entry in
let tree =
if Ide_parser_cache.is_enabled () then
Ide_parser_cache.(with_ide_cache @@ fun () -> get_cst source_text)
else
Ast_provider.compute_cst ~ctx ~entry
in
let (line, start, _) = Pos.info_pos pos in
let offset = SourceText.position_to_offset source_text (line, start) in
let parents = Syntax.parentage (SyntaxTree.root tree) offset in
List.find parents ~f:(fun syntax ->
match (kind, Syntax.kind syntax) with
| (SymbolDefinition.Function, SyntaxKind.FunctionDeclaration)
| (SymbolDefinition.Class, SyntaxKind.ClassishDeclaration)
| (SymbolDefinition.Method, SyntaxKind.MethodishDeclaration)
| (SymbolDefinition.Property, SyntaxKind.PropertyDeclaration)
| (SymbolDefinition.Property, SyntaxKind.XHPClassAttribute)
| (SymbolDefinition.ClassConst, SyntaxKind.ConstDeclaration)
| (SymbolDefinition.GlobalConst, SyntaxKind.ConstDeclaration)
| (SymbolDefinition.ClassConst, SyntaxKind.EnumClassEnumerator)
| (SymbolDefinition.Enum, SyntaxKind.EnumDeclaration)
| (SymbolDefinition.Enum, SyntaxKind.EnumClassDeclaration)
| (SymbolDefinition.Interface, SyntaxKind.ClassishDeclaration)
| (SymbolDefinition.Trait, SyntaxKind.ClassishDeclaration)
| (SymbolDefinition.LocalVar, SyntaxKind.VariableExpression)
| (SymbolDefinition.Typeconst, SyntaxKind.TypeConstDeclaration)
| (SymbolDefinition.Param, SyntaxKind.ParameterDeclaration)
| (SymbolDefinition.Typedef, SyntaxKind.AliasDeclaration) ->
true
| _ -> false)
with
| _ -> None
let get_definition_cst_node_ctx
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(kind : SymbolDefinition.kind)
~(pos : 'a Pos.pos) : Full_fidelity_positioned_syntax.t option =
get_definition_cst_node_from_pos ctx entry kind pos |
OCaml Interface | hhvm/hphp/hack/src/server/serverSymbolDefinition.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val go :
Provider_context.t ->
Nast.program option ->
Relative_path.t SymbolOccurrence.t ->
Relative_path.t SymbolDefinition.t option
val get_definition_cst_node_ctx :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
kind:SymbolDefinition.kind ->
pos:'a Pos.pos ->
Full_fidelity_positioned_syntax.t option
val get_class_by_name : Provider_context.t -> string -> Nast.class_ option |
OCaml | hhvm/hphp/hack/src/server/serverTastHolesBatch.ml | (*
* Copyright (c) 2017, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let helper ctx acc path_list =
let (ctx, tasts) = ServerInferTypeBatch.get_tast_map ctx path_list in
let holes =
List.concat_map path_list ~f:(fun path ->
let tast = Relative_path.Map.find tasts path in
ServerCollectTastHoles.tast_holes
ctx
tast.Tast_with_dynamic.under_normal_assumptions
ServerCommandTypes.Tast_hole.Typing)
in
holes @ acc
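(* Usage sketch (illustrative; [genv] and [file_list] stand in for the caller's
 * values). [go] below deduplicates the given paths, resolves each one with
 * Relative_path.create_detect_prefix, and farms the work out to the worker pool
 * once there are 10 or more files:
 *
 *   let holes = go genv.ServerEnv.workers file_list env
 *)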
let go :
MultiWorker.worker list option ->
string list ->
ServerEnv.env ->
TastHolesService.result =
fun workers file_list env ->
let file_list =
file_list
|> List.dedup_and_sort ~compare:String.compare
|> List.map ~f:Relative_path.create_detect_prefix
in
let ctx = Provider_utils.ctx_from_server_env env in
if List.length file_list < 10 then
helper ctx [] file_list
else
MultiWorker.call
workers
~job:(fun acc file -> helper ctx acc file)
~neutral:[]
~merge:List.rev_append
(* constant stack space, though Base.List will call rev_append anyway past a threshold *)
~next:(MultiWorker.next workers file_list) |
OCaml | hhvm/hphp/hack/src/server/serverTypeCheck.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open SearchServiceRunner
open ServerEnv
open Reordered_argument_collections
module SLC = ServerLocalConfig
module CheckKind = struct
type t =
| Lazy
| Full
let to_string = function
| Full -> "Full_check"
| Lazy -> "Lazy_check"
let is_full_check = function
| Full -> true
| Lazy -> false
end
module CheckStats = struct
type t = {
reparse_count: int;
total_rechecked_count: int;
time_first_result: seconds_since_epoch option;
}
  (** Update field [time_first_result] if the given timestamp is the
      first sent result. *)
let record_result_sent_ts stats new_result_sent_ts =
{
stats with
time_first_result =
Option.first_some stats.time_first_result new_result_sent_ts;
}
end
(*****************************************************************************)
(* Debugging *)
(*****************************************************************************)
let print_defs prefix defs =
List.iter defs ~f:(fun (_, fname) -> Printf.printf " %s %s\n" prefix fname)
let print_defs_per_file_pos defs_per_file_pos =
SMap.iter defs_per_file_pos ~f:(fun x (funs, classes) ->
Printf.printf "File: %s\n" x;
print_defs "Fun" funs;
print_defs "Class" classes);
Printf.printf "\n";
Out_channel.flush stdout;
()
let print_fast defs_per_file =
SMap.iter defs_per_file ~f:(fun x (funs, classes) ->
Printf.printf "File: %s\n" x;
SSet.iter funs ~f:(Printf.printf " Fun %s\n");
SSet.iter classes ~f:(Printf.printf " Class %s\n"));
Printf.printf "\n";
Out_channel.flush stdout;
()
(*****************************************************************************)
(* We want to add all the declarations that were present in a file *before* the
* current modification. The scenario:
* File foo.php was defining the class A.
* The user gets rid of class A (in foo.php)
* In general, the type-checker determines what must be re-declared or
* re-typechecked, by comparing the old and the new type-definitions.
* That's why we are adding the 'old' definitions to the file.
* In this case, the redecl phase (typing/typing_redecl_service.ml) is going
* to compare the 'old' definition of A with the new one. It will realize that
* the new one is missing, and go ahead and retype everything that depends
* on A.
* Without a call to add_old_decls, the class A wouldn't appear anywhere,
* and we wouldn't realize that we have to re-check the types that depend
* on A.
*)
(*****************************************************************************)
let add_old_decls old_naming_table defs_per_file =
Relative_path.Map.fold
defs_per_file
~f:
begin
fun filename info_names acc ->
match Naming_table.get_file_info old_naming_table filename with
| None -> acc
| Some old_info ->
let old_info_names = FileInfo.simplify old_info in
let info_names = FileInfo.merge_names old_info_names info_names in
Relative_path.Map.add acc ~key:filename ~data:info_names
end
~init:defs_per_file
(*****************************************************************************)
(* Removes the names that were defined in the files *)
(*****************************************************************************)
let remove_decls env defs_per_file_parsed =
Relative_path.Map.iter defs_per_file_parsed ~f:(fun fn _ ->
match Naming_table.get_file_info env.naming_table fn with
| None -> ()
| Some
{
FileInfo.funs;
classes;
typedefs;
consts;
modules;
file_mode = _;
comments = _;
hash = _;
} ->
(* we use [snd] to strip away positions *)
let snd (_, x, _) = x in
Naming_global.remove_decls
~backend:(Provider_backend.get ())
~funs:(List.map funs ~f:snd)
~classes:(List.map classes ~f:snd)
~typedefs:(List.map typedefs ~f:snd)
~consts:(List.map consts ~f:snd)
~modules:(List.map modules ~f:snd))
(* If the only things that would change about a file's analysis are positions,
 * we're not going to recheck it, and positions in its error list might
 * become stale. Check whether any of those positions refer to files that have
 * actually changed, and add such files to the set to recheck. *)
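(* Illustration (hypothetical file names): if bar.php carries an error whose
 * reason points into foo.php, and foo.php was just reparsed, then bar.php is
 * added to the recheck set even though bar.php itself did not change. *)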
let get_files_with_stale_errors
~(* Set of files that were reparsed (so their ASTs and positions
     * in them could have changed). *)
reparsed
    ~(* A subset of files whose errors we want to update, or None if we want
     * to update the entire error list. *)
filter
~(* Current global error list *)
errors
~ctx =
let fold =
match filter with
| None ->
fun init f ->
(* Looking at global files *)
Errors.fold_errors errors ~init ~f:(fun source error acc ->
f source error acc)
| Some files ->
fun init f ->
(* Looking only at subset of files *)
Relative_path.Set.fold files ~init ~f:(fun file acc ->
Errors.fold_errors_in errors ~file ~init:acc ~f:(fun error acc ->
f file error acc))
in
fold Relative_path.Set.empty (fun source error acc ->
if
List.exists (User_error.to_list_ error) ~f:(fun e ->
Relative_path.Set.mem
reparsed
(fst e |> Naming_provider.resolve_position ctx |> Pos.filename))
then
Relative_path.Set.add acc source
else
acc)
(*****************************************************************************)
(* Parses the set of modified files *)
(*****************************************************************************)
(** This pushes all errors in [errors] that aren't in [files]
    to the errors-file. *)
let push_errors_outside_files_to_errors_file
(errors : Errors.t) ~(files : Relative_path.Set.t) : unit =
let typing_errors_not_in_files_to_check =
errors
|> Errors.fold_errors ~drop_fixmed:true ~init:[] ~f:(fun path error acc ->
if Relative_path.Set.mem files path then
acc
else
(path, error) :: acc)
|> Errors.from_file_error_list
in
ServerProgress.ErrorsWrite.report typing_errors_not_in_files_to_check;
()
let indexing genv env to_check cgroup_steps =
let (ide_files, disk_files) =
Relative_path.Set.partition
(Relative_path.Set.mem env.editor_open_files)
to_check
in
File_provider.remove_batch disk_files;
Ast_provider.remove_batch disk_files;
Fixme_provider.remove_batch disk_files;
(* Do not remove ide files from file heap *)
Ast_provider.remove_batch ide_files;
Fixme_provider.remove_batch ide_files;
let env =
{
env with
local_symbol_table =
SymbolIndexCore.remove_files
~sienv:env.local_symbol_table
~paths:to_check;
}
in
SharedMem.GC.collect `gentle;
let get_next =
MultiWorker.next genv.workers (Relative_path.Set.elements disk_files)
in
let ctx = Provider_utils.ctx_from_server_env env in
let defs_per_file =
CgroupProfiler.step_start_end cgroup_steps "parsing" @@ fun _cgroup_step ->
Direct_decl_service.go
ctx
genv.workers
~ide_files
~get_next
~trace:true
~cache_decls:
(* Not caching here, otherwise oldification done in redo_type_decl will
           oldify the new version (and override the real old versions). *)
false
in
SearchServiceRunner.update_fileinfo_map
(Naming_table.create defs_per_file)
~source:SearchUtils.TypeChecker;
(* During integration tests, we want to pretend that search is run
synchronously *)
let ctx = Provider_utils.ctx_from_server_env env in
let env =
{
env with
local_symbol_table =
(let sie = env.local_symbol_table in
if
SearchServiceRunner.should_run_completely
genv
sie.SearchUtils.sie_provider
then
SearchServiceRunner.run_completely ctx sie
else
sie);
}
in
(env, defs_per_file)
let get_interrupt_config genv env =
MultiThreadedCall.{ handlers = env.interrupt_handlers genv; env }
(*****************************************************************************)
(* Where the action is! *)
(*****************************************************************************)
module type CheckKindType = sig
(* Parsing treats files open in IDE and files coming from disk differently:
*
* - for IDE files, we need to look up their contents in the map in env,
* instead of reading from disk (duh)
* - we parse IDE files in master process (to avoid passing env to the
* workers)
*)
val get_files_to_parse : ServerEnv.env -> Relative_path.Set.t
(* Which files to typecheck, based on results of declaration phase *)
val get_defs_to_recheck :
reparsed:Relative_path.Set.t ->
defs_per_file:Naming_table.defs_per_file ->
to_recheck:Relative_path.Set.t ->
env:ServerEnv.env ->
ctx:Provider_context.t ->
enable_type_check_filter_files:bool ->
Relative_path.Set.t * Relative_path.Set.t
  (* Update the global state based on results of parsing, naming and decl *)
val get_env_after_decl :
old_env:ServerEnv.env ->
naming_table:Naming_table.t ->
failed_naming:Relative_path.Set.t ->
ServerEnv.env
  (* Update the global state based on results of typing *)
val get_env_after_typing :
old_env:ServerEnv.env ->
errorl:Errors.t ->
needs_recheck:Relative_path.Set.t ->
ServerEnv.env
val is_full : bool
end
module FullCheckKind : CheckKindType = struct
let get_files_to_parse env =
Relative_path.Set.(env.ide_needs_parsing |> union env.disk_needs_parsing)
let get_defs_to_recheck
~reparsed
~defs_per_file
~to_recheck
~env
~ctx
~enable_type_check_filter_files =
(* If the user has enabled a custom file filter, we want to only
* type check files that pass the filter *)
let to_recheck =
if enable_type_check_filter_files then
ServerCheckUtils.user_filter_type_check_files
~to_recheck
~reparsed
~is_ide_file:(Relative_path.Set.mem env.editor_open_files)
else
to_recheck
in
(* Besides the files that actually changed, we want to also recheck
* those that have typing errors referring to files that were
* reparsed, since positions in those errors can be now stale.
*)
let stale_errors =
get_files_with_stale_errors ~reparsed ~filter:None ~errors:env.errorl ~ctx
in
let to_recheck = Relative_path.Set.union stale_errors to_recheck in
let to_recheck = Relative_path.Set.union env.needs_recheck to_recheck in
let to_recheck =
Relative_path.Set.union
(Relative_path.Set.of_list (Relative_path.Map.keys defs_per_file))
to_recheck
in
(to_recheck, Relative_path.Set.empty)
let get_env_after_decl ~old_env ~naming_table ~failed_naming =
{
old_env with
naming_table;
failed_naming;
ide_needs_parsing = Relative_path.Set.empty;
disk_needs_parsing = Relative_path.Set.empty;
}
let get_env_after_typing ~old_env ~errorl ~needs_recheck =
let (full_check_status, remote) =
if Relative_path.Set.is_empty needs_recheck then
(Full_check_done, false)
else
(old_env.full_check_status, old_env.remote)
in
let why_needed_full_check =
match old_env.init_env.why_needed_full_check with
| Some why_needed_full_check
when not (is_full_check_done full_check_status) ->
Some why_needed_full_check
| _ -> None
in
{
old_env with
errorl;
needs_recheck;
full_check_status;
remote;
init_env = { old_env.init_env with why_needed_full_check };
}
let is_full = true
end
module LazyCheckKind : CheckKindType = struct
let get_files_to_parse env = env.ide_needs_parsing
let some_ide_diagnosed_files env =
Diagnostic_pusher.get_files_with_diagnostics env.diagnostic_pusher
|> fun l -> List.take l 10 |> Relative_path.Set.of_list
let is_ide_file env x =
Relative_path.Set.mem (some_ide_diagnosed_files env) x
|| Relative_path.Set.mem env.editor_open_files x
let get_defs_to_recheck
~reparsed
~defs_per_file
~to_recheck
~env
~ctx
~enable_type_check_filter_files =
(* If the user has enabled a custom file filter, we want to only
* type check files that pass the filter. As such, we don't want
* to add unwanted files to the "type check later"-queue *)
let to_recheck =
if enable_type_check_filter_files then
ServerCheckUtils.user_filter_type_check_files
~to_recheck
~reparsed
~is_ide_file:(is_ide_file env)
else
to_recheck
in
(* Same as FullCheckKind.get_defs_to_recheck, but we limit returned set only
* to files that are relevant to IDE *)
let stale_errors =
get_files_with_stale_errors
~ctx
~reparsed
~filter:(Some (some_ide_diagnosed_files env))
~errors:env.errorl
in
let to_recheck = Relative_path.Set.union to_recheck stale_errors in
let (to_recheck_now, to_recheck_later) =
Relative_path.Set.partition (is_ide_file env) to_recheck
in
let to_recheck_now =
Relative_path.Set.union
(Relative_path.Set.of_list (Relative_path.Map.keys defs_per_file))
to_recheck_now
in
(to_recheck_now, to_recheck_later)
let get_env_after_decl ~old_env ~naming_table ~failed_naming =
{
old_env with
naming_table;
failed_naming;
ide_needs_parsing = Relative_path.Set.empty;
}
let get_env_after_typing ~old_env ~errorl ~needs_recheck =
(* If it was started, it's still started, otherwise it needs starting *)
let full_check_status =
match old_env.full_check_status with
| Full_check_started -> Full_check_started
| _ -> Full_check_needed
in
{
old_env with
errorl;
ide_needs_parsing = Relative_path.Set.empty;
needs_recheck;
full_check_status;
}
let is_full = false
end
module Make : functor (_ : CheckKindType) -> sig
val type_check_core :
ServerEnv.genv ->
ServerEnv.env ->
float ->
check_reason:string ->
CgroupProfiler.step_group ->
ServerEnv.env
* CheckStats.t
* Telemetry.t
* MultiThreadedCall.cancel_reason option
end =
functor
(CheckKind : CheckKindType)
->
struct
let get_classes_from_old_and_new ~new_naming_table ~old_naming_table path =
let new_classes =
match Naming_table.get_file_info new_naming_table path with
| None -> SSet.empty
| Some info ->
List.fold
info.FileInfo.classes
~init:SSet.empty
~f:(fun acc (_, cid, _) -> SSet.add acc cid)
in
let old_classes =
match Naming_table.get_file_info old_naming_table path with
| None -> SSet.empty
| Some info ->
List.fold
info.FileInfo.classes
~init:SSet.empty
~f:(fun acc (_, cid, _) -> SSet.add acc cid)
in
SSet.union new_classes old_classes
type naming_result = {
failed_naming: Relative_path.Set.t;
naming_table: Naming_table.t;
telemetry: Telemetry.t;
}
(** Updates the naming_table, which is a map from filename to the names of
toplevel symbols declared in that file: at any given point in time, we want
        to know what each file defines. The updated naming table is part of the
        return value.
Also runs [Naming_global.ndecl_file_and_get_conflict_files] which updates
the global reverse naming table.
The "winner" in case of duplicate definitions? All filenames involved in any
duplicate definition were stored by the caller in [env.failed_parsing], and
        the caller includes them in [defs_per_file_parsed]. We iterate over them
        in alphabetical order. Thus, the winning definition will be the one from the
alphabetically first file. (Within a file, classes win over types, and
after that the lexically first definition wins).
Note that on the first typecheck after a duplicate definition has been introduced,
        [env.failed_parsing] doesn't yet contain all filenames involved, so the
        winner in this first typecheck is non-deterministic -- it's simply the previous
        definition. We only get the alphabetically first filename as winner on subsequent
typechecks. Yuck. *)
let do_naming
(env : env)
(ctx : Provider_context.t)
~(defs_per_file_parsed : FileInfo.t Relative_path.Map.t)
~(cgroup_steps : CgroupProfiler.step_group) : naming_result =
let telemetry = Telemetry.create () in
let start_t = Unix.gettimeofday () in
let count = Relative_path.Map.cardinal defs_per_file_parsed in
CgroupProfiler.step_start_end cgroup_steps "naming" @@ fun _cgroup_step ->
(* Update name->filename reverse naming table (global, mutable) *)
remove_decls env defs_per_file_parsed;
let failed_naming =
Relative_path.Map.fold
defs_per_file_parsed
~init:Relative_path.Set.empty
~f:(fun file fileinfo failed ->
let failed' =
Naming_global.ndecl_file_and_get_conflict_files ctx file fileinfo
in
Relative_path.Set.union failed' failed)
in
let t2 =
Hh_logger.log_duration "Declare_names (name->filename)" start_t
in
(* Update filename->FileInfo.t forward naming table (into this local variable) *)
let naming_table =
Naming_table.update_many env.naming_table defs_per_file_parsed
in
(* final telemetry *)
let t3 = Hh_logger.log_duration "Update_many (filename->names)" t2 in
let heap_size = SharedMem.SMTelemetry.heap_size () in
HackEventLogger.naming_end ~count start_t heap_size;
let telemetry =
telemetry
|> Telemetry.float_ ~key:"update_reverse_duration" ~value:(t2 -. start_t)
|> Telemetry.float_ ~key:"update_fwd_duration" ~value:(t3 -. t2)
|> Telemetry.int_ ~key:"end_heap_mb" ~value:heap_size
|> Telemetry.float_ ~key:"total_duration" ~value:(t3 -. start_t)
|> Telemetry.int_ ~key:"count" ~value:count
|> Telemetry.int_
~key:"failed_naming_count"
~value:(Relative_path.Set.cardinal failed_naming)
in
{ failed_naming; naming_table; telemetry }
type redecl_result = {
changed: Typing_deps.DepSet.t;
to_recheck: Relative_path.Set.t;
to_recheck_deps: Typing_deps.DepSet.t;
old_decl_missing_count: int;
}
let do_redecl
(genv : genv)
(env : env)
~(defs_per_file : FileInfo.names Relative_path.Map.t)
~(naming_table : Naming_table.t)
~(cgroup_steps : CgroupProfiler.step_group) : redecl_result =
let get_classes =
get_classes_from_old_and_new
~new_naming_table:naming_table
~old_naming_table:env.naming_table
in
let bucket_size = genv.local_config.SLC.type_decl_bucket_size in
let ctx = Provider_utils.ctx_from_server_env env in
let {
Decl_redecl_service.old_decl_missing_count;
fanout = { Fanout.changed; to_recheck = to_recheck_deps };
} =
CgroupProfiler.step_start_end cgroup_steps "redecl"
@@ fun _cgroup_step ->
Decl_redecl_service.redo_type_decl
~bucket_size
ctx
~during_init:false
genv.workers
get_classes
~previously_oldified_defs:FileInfo.empty_names
~defs:defs_per_file
in
ServerProgress.write "determining files";
let to_recheck = Naming_provider.get_files ctx to_recheck_deps in
{ changed; to_recheck; to_recheck_deps; old_decl_missing_count }
type type_checking_result = {
env: ServerEnv.env;
errors: Errors.t;
telemetry: Telemetry.t;
files_checked: Relative_path.Set.t;
full_check_done: bool;
needs_recheck: Relative_path.Set.t;
total_rechecked_count: int;
time_first_typing_error: seconds option;
cancel_reason: MultiThreadedCall.cancel_reason option;
}
let do_type_checking
(genv : genv)
(env : env)
~(errors : Errors.t)
~(files_to_check : Relative_path.Set.t)
~(lazy_check_later : Relative_path.Set.t)
~(check_reason : string)
~(cgroup_steps : CgroupProfiler.step_group)
~(files_with_naming_errors : Relative_path.Set.t) : type_checking_result
=
let telemetry = Telemetry.create () in
if Relative_path.(Set.mem files_to_check default) then
Hh_logger.log "WARNING: rechecking definition in a dummy file";
let interrupt = get_interrupt_config genv env in
let memory_cap =
genv.local_config.ServerLocalConfig.max_typechecker_worker_memory_mb
in
let longlived_workers =
genv.local_config.ServerLocalConfig.longlived_workers
in
let use_hh_distc_instead_of_hulk =
(* hh_distc and hh_server may behave inconsistently in the face of
duplicate name errors. Eventually we'll want to make duplicate
name errors a typing error and this check can go away. *)
phys_equal (Relative_path.Set.cardinal files_with_naming_errors) 0
&& genv.ServerEnv.local_config
.ServerLocalConfig.use_hh_distc_instead_of_hulk
in
let hh_distc_fanout_threshold =
Some
genv.ServerEnv.local_config
.ServerLocalConfig.hh_distc_fanout_threshold
in
let cgroup_typecheck_telemetry = ref None in
let ( errorl',
telemetry,
env,
unfinished_and_reason,
time_first_typing_error ) =
let ctx = Provider_utils.ctx_from_server_env env in
CgroupProfiler.step_start_end
cgroup_steps
~telemetry_ref:cgroup_typecheck_telemetry
"type check"
@@ fun () ->
let ( ( env,
{
Typing_check_service.errors = errorl;
telemetry;
diagnostic_pusher =
(diagnostic_pusher, time_first_typing_error);
} ),
cancelled ) =
let root = Some (ServerArgs.root genv.ServerEnv.options) in
Typing_check_service.go_with_interrupt
~diagnostic_pusher:env.ServerEnv.diagnostic_pusher
ctx
genv.workers
telemetry
(files_to_check |> Relative_path.Set.elements)
~root
~interrupt
~memory_cap
~longlived_workers
~use_hh_distc_instead_of_hulk
~hh_distc_fanout_threshold
~check_info:
(ServerCheckUtils.get_check_info
~check_reason
~log_errors:CheckKind.is_full
genv
env)
in
let env =
{
env with
diagnostic_pusher =
Option.value diagnostic_pusher ~default:env.diagnostic_pusher;
}
in
(errorl, telemetry, env, cancelled, time_first_typing_error)
in
let telemetry =
telemetry
|> Telemetry.object_opt ~key:"cgroup" ~value:!cgroup_typecheck_telemetry
|> Telemetry.object_ ~key:"gc" ~value:(Telemetry.quick_gc_stat ())
|> Telemetry.object_
~key:"proc"
~value:(ProcFS.telemetry_for_pid (Unix.getpid ()))
in
let files_checked = files_to_check in
(* Add new things that need to be rechecked *)
let needs_recheck =
Relative_path.Set.union env.needs_recheck lazy_check_later
in
(* Remove things that were cancelled from things we started rechecking... *)
let (cancelled, cancel_reason) =
match unfinished_and_reason with
| None -> ([], None)
| Some (unfinished, reason) -> (unfinished, Some reason)
in
let (files_checked, needs_recheck) =
List.fold
cancelled
~init:(files_checked, needs_recheck)
~f:(fun (files_checked, needs_recheck) path ->
( Relative_path.Set.remove files_checked path,
Relative_path.Set.add needs_recheck path ))
in
(* ... leaving only things that we actually checked, and which can be
* removed from needs_recheck *)
let needs_recheck = Relative_path.Set.diff needs_recheck files_checked in
(* Here we do errors paradigm (1) env.errorl: merge in typecheck results, to flow into [env.errorl].
As for paradigms (2) persistent-connection and (3) errors-file, they're handled
inside [Typing_check_service.go_with_interrupt] because they want to push errors
as soon as they're discovered. *)
let errors =
Errors.incremental_update
~old:errors
~new_:errorl'
~rechecked:files_checked
in
let full_check_done =
CheckKind.is_full && Relative_path.Set.is_empty needs_recheck
in
let total_rechecked_count = Relative_path.Set.cardinal files_checked in
(* TODO(ljw) I wish to prove the invariant (expressed in the type system) that
         either [cancel_reason=None] or [env.disk_needs_parsing] and [env.needs_recheck] are empty.
It's quite hard to reason about at the moment in the presence of lazy checks.
I'll revisit once they've been removed. *)
{
env;
errors;
telemetry;
files_checked;
full_check_done;
needs_recheck;
total_rechecked_count;
time_first_typing_error;
cancel_reason;
}
let quantile ~index ~count : Relative_path.Set.t -> Relative_path.Set.t =
fun files ->
let file_count_in_quantile = Relative_path.Set.cardinal files / count in
let (file_count_in_quantile, index) =
if Int.equal 0 file_count_in_quantile then
let count = Relative_path.Set.cardinal files in
let file_count_in_quantile = 1 in
let index =
if index >= count then
count - 1
else
index
in
(file_count_in_quantile, index)
else
(file_count_in_quantile, index)
in
(* Work with BigList-s the same way Typing_check_service does, to preserve
the same typechecking order within the quantile. *)
let files = files |> Relative_path.Set.elements |> BigList.create in
let rec pop_quantiles n files =
let (bucket, files) = BigList.split_n files file_count_in_quantile in
if n <= 0 then
bucket
else
pop_quantiles (n - 1) files
in
pop_quantiles index files |> Relative_path.Set.of_list
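    (* Worked example (assuming ~count:4 over 10 files): file_count_in_quantile
       is 10 / 4 = 2, so ~index:0 yields files 1-2, ~index:1 yields files 3-4,
       and so on up to ~index:3 which yields files 7-8. Because of the integer
       division, the trailing remainder (files 9-10) never lands in any
       quantile. *)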
let type_check_core genv env start_time ~check_reason cgroup_steps =
let t = Unix.gettimeofday () in
(* `start_time` is when the recheck_loop started and includes preliminaries like
* reading about file-change notifications and communicating with client.
       * We record all our telemetry uniformly with respect to this start.
       * `t` is legacy, used for ad-hoc duration reporting within this function.
       * For the following, env.init_env.why_needed_full_check is set to Some by
* ServerLazyInit, and we include it here, and then it's subsequently
* set to None at the end of this method by the call to [get_env_after_typing].
* Thus, if it's present here, it means the typecheck we're about to do is
* the initial one of a lazy init. *)
let telemetry =
Telemetry.create ()
|> Telemetry.object_opt
~key:"init"
~value:
(Option.map
env.ServerEnv.init_env.ServerEnv.why_needed_full_check
~f:ServerEnv.Init_telemetry.get)
in
let time_first_error = None in
let do_errors_file =
genv.local_config.ServerLocalConfig.produce_streaming_errors
&& CheckKind.is_full
in
let env =
if CheckKind.is_full then
{ env with full_check_status = Full_check_started }
else
env
in
let files_to_parse = CheckKind.get_files_to_parse env in
      (* We need to do the naming phase for files that failed naming in a previous cycle.
       * "Failed_naming" comes from duplicate name errors; the idea is that if a change
       * deletes one of the duplicates, that change should cause us to re-parse
* and re-process the remaining duplicate so it can be properly recorded in the
* forward and reverse naming tables (even though the remaining duplicate
* didn't itself change). *)
if not (Relative_path.Set.is_empty env.failed_naming) then
Hh_logger.log
"Also reparsing these files with failed naming: %s"
(Relative_path.Set.elements env.failed_naming
|> List.map ~f:Relative_path.suffix
|> String.concat ~sep:" ");
let files_to_parse =
Relative_path.Set.union files_to_parse env.failed_naming
in
let reparse_count = Relative_path.Set.cardinal files_to_parse in
if reparse_count = 1 then
files_to_parse
|> Relative_path.Set.choose
|> Relative_path.to_absolute
|> Hh_logger.log "Processing changes to 1 file: %s"
else
Hh_logger.log "Processing changes to %d files" reparse_count;
let telemetry =
if CheckKind.is_full then (
let check_count = Relative_path.Set.cardinal env.needs_recheck in
Hh_logger.log
"Processing deferred typechecking for %d file(s)"
check_count;
telemetry |> Telemetry.int_ ~key:"check_count" ~value:check_count
) else
telemetry
in
(* PARSING ***************************************************************)
ServerProgress.write
~include_in_logs:false
"parsing %d files"
reparse_count;
let logstring = Printf.sprintf "Parsing %d files" reparse_count in
Hh_logger.log "Begin %s" logstring;
(* Parse all changed files. This clears the file contents cache prior
to parsing. *)
let telemetry =
Telemetry.duration telemetry ~key:"parse_start" ~start_time
in
let errors = env.errorl in
let (env, defs_per_file_parsed) =
indexing genv env files_to_parse cgroup_steps
in
let hs = SharedMem.SMTelemetry.heap_size () in
let telemetry =
telemetry
|> Telemetry.duration ~key:"parse_end" ~start_time
|> Telemetry.int_ ~key:"parse_end_heap_size" ~value:hs
|> Telemetry.int_ ~key:"parse_count" ~value:reparse_count
in
HackEventLogger.parsing_end_for_typecheck t hs ~parsed_count:reparse_count;
let t = Hh_logger.log_duration logstring t in
(* UPDATE NAMING TABLES **************************************************)
ServerProgress.write ~include_in_logs:false "updating naming tables";
let logstring = "updating naming tables" in
Hh_logger.log "Begin %s" logstring;
let telemetry =
Telemetry.duration telemetry ~key:"naming_start" ~start_time
in
let ctx = Provider_utils.ctx_from_server_env env in
let { failed_naming; naming_table; telemetry = naming_telemetry } =
do_naming env ctx ~defs_per_file_parsed ~cgroup_steps
(* Note: although do_naming updates global reverse-naming-table maps,
the updated forward-naming-table "naming_table" only gets assigned
into env.naming_table later on, in get_env_after_decl. *)
in
let t = Hh_logger.log_duration logstring t in
let telemetry =
telemetry
|> Telemetry.duration ~key:"naming_end" ~start_time
|> Telemetry.object_ ~key:"naming" ~value:naming_telemetry
in
(* REDECL PHASE 1 ********************************************************)
      (* The things we redecl, `defs_per_file`, come from the current content of
         the changed files `defs_per_file_parsed`, plus their previous content via `add_old_decls`,
         plus the files that had duplicate names `failed_naming` *)
ServerProgress.write "determining changes";
let deptable_unlocked =
Typing_deps.allow_dependency_table_reads env.deps_mode true
in
Hh_logger.log "(Recomputing type declarations in relation to naming)";
(* failed_naming can be a superset of keys in defs_per_file - see comment in Naming_global.ndecl_file *)
      (* The name [defs_per_file] isn't very descriptive. It's the same as defs_per_file_parsed --
         a filename->FileInfo.t map of the files we just parsed --
         except the values are FileInfo.names, i.e. purely the names, without positions. *)
let defs_per_file =
Naming_table.to_defs_per_file (Naming_table.create defs_per_file_parsed)
in
let defs_per_file =
ServerCheckUtils.extend_defs_per_file
genv
defs_per_file
naming_table
failed_naming
in
let defs_per_file = add_old_decls env.naming_table defs_per_file in
let count = Relative_path.Map.cardinal defs_per_file in
let logstring = Printf.sprintf "Type declaration for %d files" count in
Hh_logger.log "Begin %s" logstring;
Hh_logger.log
"(Recomputing type declarations in changed files and determining immediate typechecking fanout)";
let telemetry =
telemetry
|> Telemetry.duration ~key:"redecl_start" ~start_time
|> Telemetry.int_ ~key:"redecl_file_count" ~value:count
in
(* Compute fanout. Here we compare the old and new versions of
the declarations defined in all changed files, and collect the set of
files which need to be re-typechecked as a consequence of those changes. *)
let { changed; to_recheck; to_recheck_deps; old_decl_missing_count } =
do_redecl genv env ~defs_per_file ~naming_table ~cgroup_steps
in
let telemetry =
telemetry
|> Telemetry.int_
~key:"old_decl_missing_count"
~value:old_decl_missing_count
in
let hs = SharedMem.SMTelemetry.heap_size () in
HackEventLogger.first_redecl_end t hs;
let t = Hh_logger.log_duration logstring t in
let telemetry =
telemetry
|> Telemetry.duration ~key:"redecl_end" ~start_time
|> Telemetry.int_ ~key:"redecl_end_heap_size" ~value:hs
in
let telemetry =
Telemetry.duration
telemetry
~key:"revtrack_decl_changed_end"
~start_time
in
ServerRevisionTracker.typing_changed
genv.local_config
(Relative_path.Set.cardinal to_recheck);
(* we use lazy here to avoid expensive string generation when logging
* is not enabled *)
Hh_logger.log_lazy ~category:"fanout_information"
@@ lazy
Hh_json.(
json_to_string
@@ JSON_Object
[
("tag", string_ "incremental_fanout");
( "hashes",
array_
string_
Typing_deps.(
List.map ~f:Dep.to_hex_string
@@ DepSet.elements to_recheck_deps) );
( "files",
array_
string_
Relative_path.(
List.map ~f:suffix @@ Set.elements to_recheck) );
]);
let env =
CheckKind.get_env_after_decl ~old_env:env ~naming_table ~failed_naming
in
(* HANDLE PRECHECKED FILES AFTER LOCAL CHANGES ***************************)
ServerProgress.write "determining trunk changes";
Hh_logger.log "Begin evaluating prechecked changes";
let telemetry =
Telemetry.duration telemetry ~key:"prechecked1_start" ~start_time
in
let (env, prechecked1_telemetry) =
ServerPrecheckedFiles.update_after_local_changes
genv
env
changed
~start_time
in
let t = Hh_logger.log_duration "Evaluating prechecked changes" t in
let telemetry =
telemetry
|> Telemetry.duration ~key:"prechecked1_end" ~start_time
|> Telemetry.object_ ~key:"prechecked1" ~value:prechecked1_telemetry
|> Telemetry.int_
~key:"prechecked1_changed"
~value:(Typing_deps.DepSet.cardinal changed)
in
let (_ : bool) =
Typing_deps.allow_dependency_table_reads env.deps_mode deptable_unlocked
in
      (* Checking this before starting typechecking because we want to attribute
* big rechecks to rebases, even when restarting is disabled *)
if
genv.local_config.ServerLocalConfig.hg_aware_recheck_restart_threshold
= 0
then
ServerRevisionTracker.check_blocking ();
let telemetry =
Telemetry.duration
telemetry
~key:"revtrack3_check_blocking_end"
~start_time
in
(* TYPE CHECKING *********************************************************)
      (* The things we recheck are those from the fanout computed by `do_redecl`, plus every
         file whose error reasons were in changed files, as determined by `get_defs_to_recheck`. *)
let type_check_start_t = Unix.gettimeofday () in
ServerProgress.write "typechecking";
(* For a full check, typecheck everything which may be affected by the
changes. For a lazy check, typecheck only the affected files which are
open in the IDE, leaving other affected files to be lazily checked later.
In either case, don't attempt to typecheck files with parse errors. *)
let (files_to_check, lazy_check_later) =
CheckKind.get_defs_to_recheck
~reparsed:files_to_parse
~defs_per_file
~to_recheck
~env
~ctx
~enable_type_check_filter_files:
genv.ServerEnv.local_config
.ServerLocalConfig.enable_type_check_filter_files
in
ServerProgress.write
"typechecking %d files"
(Relative_path.Set.cardinal files_to_check);
let files_to_check =
match genv.local_config.ServerLocalConfig.workload_quantile with
| None -> files_to_check
| Some { ServerLocalConfig.index; count } ->
let files_to_check = quantile ~index ~count files_to_check in
Hh_logger.log
"Will typecheck %d-th %d-quantile only, containing %d files."
index
count
(Relative_path.Set.cardinal files_to_check);
files_to_check
in
(* The errors file must accumulate ALL errors. The call below to [do_type_checking ~files_to_check]
will report all errors in [files_to_check].
         But there might be other errors in [env.errorl] from a previous round of typechecking
         which aren't in the current fanout, i.e. not in [files_to_check]. We must report those too.
It remains open for discussion whether the user-experience would be better to have these
not-in-fanout errors reported here before the typecheck starts, or later after the typecheck
has finished. We'll report them here for now. *)
if do_errors_file then begin
push_errors_outside_files_to_errors_file errors ~files:files_to_check
end;
(* And what about the files in [files_to_check] which we were going to typecheck but then
the typecheck got interrupted and they were returned from [do_typechecking] as [needs_recheck]?
Shouldn't we report those too into the errors-file? Well, there's no need to bother:
if there's anything in [needs_recheck] then the current errors-file will be marked as "incomplete"
and another round of ServerTypeCheck (hence another errors-file) will be created next. *)
let to_recheck_count = Relative_path.Set.cardinal files_to_check in
(* The intent of capturing the snapshot here is to increase the likelihood
of the state-on-disk being the same as what the parser saw *)
Hh_logger.log "Begin typechecking %d files." to_recheck_count;
if do_errors_file then
ServerProgress.ErrorsWrite.telemetry
(Telemetry.create ()
|> Telemetry.int_ ~key:"to_recheck_count" ~value:to_recheck_count);
ServerCheckpoint.process_updates files_to_check;
let telemetry =
Telemetry.duration telemetry ~key:"typecheck_start" ~start_time
in
(* Typecheck all of the files we determined might need rechecking as a
consequence of the changes (or, in a lazy check,
the subset of those
files which are open in an IDE buffer). *)
let {
env;
errors;
telemetry = typecheck_telemetry;
files_checked;
full_check_done = _;
needs_recheck;
total_rechecked_count;
time_first_typing_error;
cancel_reason;
} =
do_type_checking
genv
env
~errors
~files_to_check
~lazy_check_later
~check_reason
~cgroup_steps
~files_with_naming_errors:env.failed_naming
in
let time_first_error =
Option.first_some time_first_error time_first_typing_error
in
let heap_size = SharedMem.SMTelemetry.heap_size () in
ServerProgress.write "typecheck ending";
let logstring =
Printf.sprintf
"Typechecked %d files [%d errors]"
total_rechecked_count
(Errors.count errors)
in
let t = Hh_logger.log_duration logstring t in
Hh_logger.log "Total: %f\n%!" (t -. start_time);
let telemetry =
telemetry
|> Telemetry.duration ~key:"typecheck_end" ~start_time
|> Telemetry.object_ ~key:"typecheck" ~value:typecheck_telemetry
|> Telemetry.object_
~key:"hash"
~value:(ServerUtils.log_and_get_sharedmem_load_telemetry ())
|> Telemetry.int_opt
~key:"depgraph_delta_num_edges"
~value:
(Typing_deps.Telemetry.depgraph_delta_num_edges
(Provider_context.get_deps_mode ctx))
|> Telemetry.int_ ~key:"typecheck_heap_size" ~value:heap_size
|> Telemetry.int_
~key:"typecheck_to_recheck_count"
~value:to_recheck_count
|> Telemetry.int_
~key:"typecheck_total_rechecked_count"
~value:total_rechecked_count
|> Telemetry.int_
~key:"typecheck_files_checked"
~value:(Relative_path.Set.cardinal files_checked)
|> Telemetry.int_
~key:"typecheck_lazy_check_later_count"
~value:(Relative_path.Set.cardinal lazy_check_later)
|> Telemetry.int_opt
~key:"typecheck_mem_cap"
~value:
genv.local_config
.ServerLocalConfig.max_typechecker_worker_memory_mb
|> Telemetry.int_opt
~key:"typecheck_defer_decl_threshold"
~value:
genv.local_config
.ServerLocalConfig.defer_class_declaration_threshold
|> Telemetry.bool_
~key:"use_max_typechecker_worker_memory_for_decl_deferral"
~value:
genv.local_config
.ServerLocalConfig
.use_max_typechecker_worker_memory_for_decl_deferral
|> Telemetry.bool_
~key:"enable_type_check_filter_files"
~value:
genv.local_config
.ServerLocalConfig.enable_type_check_filter_files
|> Telemetry.bool_
~key:"typecheck_longlived_workers"
~value:genv.local_config.ServerLocalConfig.longlived_workers
|> Telemetry.string_opt
~key:"cancel_reason"
~value:
(Option.map cancel_reason ~f:(fun r ->
r.MultiThreadedCall.user_message))
|> Telemetry.string_opt
~key:"cancel_details"
~value:
(Option.map cancel_reason ~f:(fun r ->
r.MultiThreadedCall.log_message))
in
(* INVALIDATE FILES (EXPERIMENTAL TYPES IN CODEGEN) **********************)
ServerInvalidateUnits.go
genv
ctx
files_checked
defs_per_file_parsed
naming_table;
let telemetry =
Telemetry.duration telemetry ~key:"invalidate_end" ~start_time
in
(* WRAP-UP ***************************************************************)
let needs_recheck =
if Option.is_some genv.local_config.ServerLocalConfig.workload_quantile
then
(* If we were typechecking quantiles only, then artificially assume that everything
was typechecked. Otherwise the next recheck iteration will keep typechecking the other
quantiles. *)
Relative_path.Set.empty
else
needs_recheck
in
let env =
CheckKind.get_env_after_typing
~old_env:env
~errorl:errors
~needs_recheck
in
(* STATS LOGGING *********************************************************)
if SharedMem.SMTelemetry.hh_log_level () > 0 then begin
Measure.print_stats ();
Measure.print_distributions ()
end;
let telemetry =
if SharedMem.SMTelemetry.hh_log_level () > 0 then
Telemetry.object_
telemetry
~key:"shmem"
~value:(SharedMem.SMTelemetry.get_telemetry ())
else
telemetry
in
let telemetry =
telemetry
|> Telemetry.object_
~key:"errors"
~value:(Errors.as_telemetry env.errorl)
|> Telemetry.object_
~key:"repo_states"
~value:(Watchman.RepoStates.get_as_telemetry ())
in
(* HANDLE PRECHECKED FILES AFTER RECHECK *********************************)
let telemetry =
Telemetry.duration telemetry ~key:"prechecked2_start" ~start_time
in
let deptable_unlocked =
Typing_deps.allow_dependency_table_reads env.deps_mode true
in
let (env, prechecked2_telemetry) =
ServerPrecheckedFiles.update_after_recheck
genv
env
files_checked
~start_time
in
let (_ : bool) =
Typing_deps.allow_dependency_table_reads env.deps_mode deptable_unlocked
in
let telemetry =
telemetry
|> Telemetry.duration ~key:"prechecked2_end" ~start_time
|> Telemetry.object_ ~key:"prechecked2" ~value:prechecked2_telemetry
in
(* We might have completed a full check, which might mean that a rebase was
* successfully processed. *)
ServerRevisionTracker.check_non_blocking
~is_full_check_done:ServerEnv.(is_full_check_done env.full_check_status);
let telemetry =
Telemetry.duration
telemetry
~key:"revtrack4_check_non_blocking_end"
~start_time
in
let telemetry =
Telemetry.duration telemetry ~key:"stop_typing_service" ~start_time
in
(* CAUTION! Lots of alerts/dashboards depend on this event, particularly start_t *)
let should_log =
CheckKind.is_full || Float.(Unix.gettimeofday () -. start_time > 2.)
in
HackEventLogger.type_check_end
(Option.some_if should_log telemetry)
~heap_size
~started_count:to_recheck_count
~total_rechecked_count
~experiments:genv.local_config.ServerLocalConfig.experiments
~desc:"serverTypeCheck"
~start_t:type_check_start_t;
( env,
{
CheckStats.reparse_count;
total_rechecked_count;
time_first_result = time_first_error;
},
telemetry,
cancel_reason )
end
module FC = Make (FullCheckKind)
module LC = Make (LazyCheckKind)
let type_check_unsafe genv env kind start_time profiling =
let check_kind = CheckKind.to_string kind in
let check_reason =
match (kind, env.ServerEnv.init_env.ServerEnv.why_needed_full_check) with
| (CheckKind.Lazy, _) -> "keystroke"
| (CheckKind.Full, Some init_telemetry) ->
ServerEnv.Init_telemetry.get_reason init_telemetry
| (CheckKind.Full, None) -> "incremental"
in
let telemetry =
Telemetry.create ()
|> Telemetry.string_ ~key:"kind" ~value:check_kind
|> Telemetry.duration ~key:"start" ~start_time
in
(* CAUTION! Lots of alerts/dashboards depend on the exact string of check_kind and check_reason *)
HackEventLogger.with_check_kind ~check_kind ~check_reason @@ fun () ->
Hh_logger.log "******************************************";
match kind with
| CheckKind.Lazy ->
Hh_logger.log
"Check kind: will check only those files already open in IDE or with reported errors ('%s')"
check_kind;
let (_ : seconds option) =
ServerBusyStatus.send ServerCommandTypes.Doing_local_typecheck
in
let telemetry =
Telemetry.duration telemetry ~key:"core_start" ~start_time
in
let (env, stats, core_telemetry, cancel_reason) =
LC.type_check_core genv env start_time ~check_reason profiling
in
let telemetry =
telemetry
|> Telemetry.duration ~key:"core_end" ~start_time
|> Telemetry.object_ ~key:"core" ~value:core_telemetry
in
let t_sent_done =
ServerBusyStatus.send ServerCommandTypes.Done_local_typecheck
in
let stats = CheckStats.record_result_sent_ts stats t_sent_done in
let telemetry = Telemetry.duration telemetry ~key:"sent_done" ~start_time in
(env, stats, telemetry, cancel_reason)
| CheckKind.Full ->
Hh_logger.log
"Check kind: will bring hh_server to consistency with code changes, by checking whatever fanout is needed ('%s')"
check_kind;
let (_ : seconds option) =
ServerBusyStatus.send
(ServerCommandTypes.Doing_global_typecheck
(ServerCheckUtils.global_typecheck_kind genv env))
in
let telemetry =
Telemetry.duration telemetry ~key:"core_start" ~start_time
in
let (env, stats, core_telemetry, cancel_reason) =
FC.type_check_core genv env start_time ~check_reason profiling
in
let telemetry =
telemetry
|> Telemetry.duration ~key:"core_end" ~start_time
|> Telemetry.object_ ~key:"core" ~value:core_telemetry
in
let t_sent_done =
if is_full_check_done env.full_check_status then
ServerBusyStatus.send ServerCommandTypes.Done_global_typecheck
else
None
in
let stats = CheckStats.record_result_sent_ts stats t_sent_done in
let telemetry =
telemetry |> Telemetry.duration ~key:"sent_done" ~start_time
in
(env, stats, telemetry, cancel_reason)
let type_check :
genv ->
env ->
CheckKind.t ->
seconds ->
CgroupProfiler.step_group ->
env * CheckStats.t * Telemetry.t =
fun genv env kind start_time cgroup_steps ->
ServerUtils.with_exit_on_exception @@ fun () ->
(*
(1) THE ENV MODEL FOR ERRORS...
env.{errorl, needs_recheck, disk_needs_parsing} are all persistent values that
might be adjusted as we go:
* disk_needs_parsing gets initialized in serverLazyInit, augmented in serverMain both
at the start of the loop and during watchman interrupts, and in serverTypeCheck it
gets reset to empty once we have computed files-to-parse and decls-to-refresh from it.
(files-to-recheck is computed from these two).
* needs_recheck gets augmented in serverTypeCheck from fanout/stale computation,
and gets discharged by the files we end up typechecking
* errorl starts out empty and it grows+shrinks during serverTypeCheck
through calls to "errorl = Errors.incremental_update ~errorl ~new_errors ~phase ~files_examined".
     This will remove those errors that had been in errorl before, and were in files_examined,
     but are not in new_errors. It will replace some others, and add new ones.
And it will leave remaining in errorl anything that was not touched by files_examined
or which came from a different phase.
     To stress the point: say you have 10k errors and make a small change in one file;
     it is very possible that those 10k other files are not checked and the bulk of errorl
just continues through a recheck loop. (However, we do gather every single file
mentioned in any of the *reasons* of those 10k files, and where those reasons intersect
with changed-files then that causes need to redecl and compute fanout, and also need
to recheck.)
(2) THE DIAGNOSTICS_PUSHER MODEL FOR ERRORS...
This is used for the persistent connection. It maintains its belief of what the persistent
client already knows, and pushes deltas. The less said about it, the better.
(3) THE STREAMING MODEL FOR ERRORS...
     The errors-file grows monotonically: it has no "backsies", no way to remove an error
     from it, short of deciding that the current errors-file is wrong and a new errors-file
     must be started.
* Hh_server, upon startup, must eventually produce an errors-file.
* Right here at the start of the type check, we restart the errors-file (because of the
potential that in the current typecheck we discover that some files no longer have errors).
* The typecheck fulfills the contract that every single error must be reported to the errors-file.
Not just newly discovered errors. Every error, even those from files that do not get rechecked.
* Right here at the end of the type check, if the type check was complete then we need to report
this fact right away in the errors-file so that the client "hh check" can finish "No errors!".
But if it was not complete (e.g. it got interrupted by watchman) then there is no need to finish
for the sake of the client: there will be an immediate next round of
ServerMain.recheck_until_no_changes_left, and it will call us again, and the errors-file
will be restarted on that next round, and the act of restarting will close the current errors-file.
How do we guarantee that hh_server produces an errors-file upon startup?
It boils down to ServerInitCommon.defer_or_do_type_check, called as the final step of both
full init and saved-state init. Its design is to defer an initial typecheck
to the first round of [ServerTypeCheck.type_check] (i.e. us!) which it causes
to happen after its (synchronous, non-interruptible) init has finished. It does
this so that clients will be able to connect as soon as init has finished, and
observe/interrupt the deferred typecheck. It does this by setting
[env.full_check_status=Full_check_started], so that when ServerMain first enters
its main loop and calls [serve_one_iteration] for the first time, it will believe
that a full check is needed and hence call [ServerTypeCheck.type_check]. No matter
if we did a perfect saved-state-load so that [env.disk_needs_parsing] is empty,
no matter if we did a full init and [env.needs_recheck] contains every file in the
project, no matter what init path, the start of ServerMain main loop will always
start by calling [ServerTypeCheck.type_check]. And it's at this moment, right here,
that we'll lay down the first errors file.
*)
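  (* To make the streaming model concrete, a typical sequence of errors-file
     events across two recheck rounds looks like this:
       round N:   ErrorsWrite.new_empty_file   (* start a fresh errors-file *)
                  ... errors reported as they are found ...
                  (typecheck interrupted by watchman; file left unfinished)
       round N+1: ErrorsWrite.new_empty_file   (* writes a "restarted" sentinel
                                                  at the end of the old file and
                                                  starts a new one *)
                  ... errors reported ...
                  ErrorsWrite.complete         (* typecheck finished; anyone
                                                  tailing the file can stop *)
  *)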
let ignore_hh_version = ServerArgs.ignore_hh_version genv.ServerEnv.options in
(* Restart the errors-file at the start of type_check. *)
if CheckKind.is_full_check kind then
ServerProgress.ErrorsWrite.new_empty_file
~ignore_hh_version
~clock:env.clock
~cancel_reason:env.why_needs_server_type_check;
(* This is the main typecheck function. Its contract is to
     (1) tweak env.errorl as needed based on what was rechecked, (2) write every single error to the errors-file. *)
let (env, stats, telemetry, cancel_reason) =
type_check_unsafe genv env kind start_time cgroup_steps
in
  (* If the typecheck completed, then mark the errors-file as complete.
A "completed" typecheck means (1) all the [env.needs_recheck] files
were indeed typechecked, i.e. not interrupted and cancelled by an
interrupt handler like watchman; (2) watchman interrupt didn't
insert any [env.disk_needs_parsing] files.
Because we mark the errors-file as complete, anyone tailing it will
know that they can finish their tailing.
For incomplete typechecks, we don't do anything here. Necessarily ServerMain
will do another round of [ServerTypeCheck.type_check] (i.e. us) shortly,
and then next round will call [ServerProgress.ErrorsWrite.new_empty_file]
which will put a "restarted" sentinel at the end of the current file as
well as starting a new file. Indeed it's *better* to place the "restarted"
sentinel at that future time rather than now, because it'll have a more
up-to-date watchclock at that time. *)
let is_complete =
Relative_path.Set.is_empty env.needs_recheck
&& Relative_path.Set.is_empty env.disk_needs_parsing
in
if CheckKind.is_full_check kind && is_complete then
ServerProgress.ErrorsWrite.complete telemetry;
(* If this was a full check, store in [env] whether+why it got interrupted+cancelled. *)
let env =
if CheckKind.is_full_check kind then
match cancel_reason with
| Some { MultiThreadedCall.user_message; log_message; timestamp = _ } ->
{
env with
ServerEnv.why_needs_server_type_check = (user_message, log_message);
}
| None when not is_complete ->
(* The typecheck wasn't interrupted, but there are still items to check.
This is a weird situation, and one that hopefully won't exist.
Once lazy checks have been eliminated, we'll revisit the TODO
on this subject at the end of [do_type_checking], and see if we can
         eliminate this path through the typesystem.
Until that time, what now should we put as the value for [env.why_needs_server_type_check]?
Well, the existing value in [env] said why it needed a type check earlier, and it still needs
that same type check to be completed, so it is a plausible answer! *)
env
| None ->
{
env with
ServerEnv.why_needs_server_type_check = ("Type check is complete", "");
}
else
env
in
(env, stats, telemetry) |
OCaml Interface | hhvm/hphp/hack/src/server/serverTypeCheck.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module CheckKind : sig
type t =
| Lazy
(** Lazy check is a check limited to the files open in IDE. It:
- produces push diagnostics for those files
- updates their parsing / naming / decl definitions on heap
- updates their parsing level indexes, like SymbolIndex or
ServerEnv.naming_table
- invalidates their declaration dependencies, by removing them from the
heap and depending on lazy declaration to redeclare them on
as-needed basis later
- stores the information about what it skipped doing to be finished later
by Full
It does not do the "full" expensive fanout:
- does not re-declare dependencies ("phase 2 decl")
- does not fan out to all typing dependencies
- because of that, it does not update structures depending on global state,
like global error list, dependency table or the lists of files that
failed parsing / declaration / checking
      Any operation that needs the global state to be up to date and cannot get
      the data it needs through lazy decl must be preceded by a
      Full check. *)
| Full
(** Full check brings the global state of the server to consistency by
executing all the re-checks that lazy checks delayed. It processes the
disk updates and typechecks the full fanout of accumulated changes. *)
  (** Get the variant constructor name of a check kind. The names are used in
      a few places:
      - the [type_check_unsafe] function in serverTypeCheck.ml:
        - logs the names into the server log
        - uses HackEventLogger to log the names as the check_kind column value
          (lots of dashboards depend on it)
      - serverMain writes it into telemetry
   *)
val to_string : t -> string
val is_full_check : t -> bool
end
module CheckStats : sig
type t = {
reparse_count: int;
total_rechecked_count: int;
time_first_result: ServerEnv.seconds_since_epoch option;
        (** This is either the duration until we get the first error, if any,
            or until we get the "typecheck done" status message. *)
}
end
val type_check :
ServerEnv.genv ->
ServerEnv.env ->
CheckKind.t ->
float ->
CgroupProfiler.step_group ->
ServerEnv.env * CheckStats.t * Telemetry.t
(****************************************************************************)
(* Debugging: Declared here to stop ocamlc yelling at us for unused defs *)
(****************************************************************************)
val print_defs : string -> ('a * string) list -> unit
val print_defs_per_file_pos :
(('a * string) list * ('b * string) list) SMap.t -> unit
val print_fast : (SSet.t * SSet.t) SMap.t -> unit |
OCaml | hhvm/hphp/hack/src/server/serverTypeDefinition.ml | (**
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open Typing_defs
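(** Shared implementation for go-to-type-definition: infer the type at the
    given position and collect the definition positions of the classes that
    make it up, looking through nullable types, union members, and function
    return types. *)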
let go_common
(ctx : Provider_context.t)
(tast : Tast.program Tast_with_dynamic.t)
~(line : int)
~(column : int) : ServerCommandTypes.Go_to_type_definition.result =
let env_and_ty =
ServerInferType.human_friendly_type_at_pos
~under_dynamic:false
ctx
tast
line
column
in
match env_and_ty with
| None -> []
| Some (env, ty) ->
let rec handle_type acc ty =
match get_node ty with
| Tclass ((_, str), _, _) -> begin
match Naming_global.GEnv.type_pos ctx str with
| None -> acc
| Some pos -> (pos, str) :: acc
end
| Toption ty' -> handle_type acc ty'
| Tunion ty_lst ->
List.fold ty_lst ~init:acc ~f:(fun a y -> handle_type a y)
| Tfun fn_type ->
let ret_type = fn_type.ft_ret.et_type in
begin
match get_node ret_type with
| Tprim _ ->
(* default to function definition *)
( Naming_provider.resolve_position ctx @@ get_pos ty,
Tast_env.print_ty env ty )
:: acc
| _ -> handle_type acc ret_type
end
| _ -> acc
in
List.map (handle_type [] ty) ~f:(fun (pos, s) -> (Pos.to_absolute pos, s))
(* For serverless ide *)
let go_quarantined
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(line : int)
~(column : int) : ServerCommandTypes.Go_to_type_definition.result =
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
go_common ctx tast ~line ~column |
OCaml Interface | hhvm/hphp/hack/src/server/serverTypeDefinition.mli | (**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val go_quarantined :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
line:int ->
column:int ->
ServerCommandTypes.Go_to_type_definition.result |
OCaml | hhvm/hphp/hack/src/server/serverTypeHierarchy.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerTypeHierarchyTypes
open Typing_defs
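(** Render a one-line, human-readable signature for a class member: for
    function types, the localized parameter and return types; for anything
    else, [name: type]. *)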
let get_snippet ctx name ty =
let env = Typing_env_types.empty ctx Relative_path.default ~droot:None in
let (_, ty) = Typing_phase.localize_no_subst env ~ignore_errors:true ty in
let tast_env = Tast_env.empty ctx in
match get_node ty with
| Tfun ft ->
let params = ft.ft_params in
let n = List.length params in
let is_variadic = get_ft_variadic ft in
let param_snippet =
params
|> List.mapi ~f:(fun i param ->
let name =
match param.fp_name with
| Some pn -> pn
| None -> ""
in
let prefix =
if is_variadic && i + 1 = n then
"..."
else
""
in
let ty = Tast_env.print_ty tast_env param.fp_type.et_type in
Printf.sprintf "%s %s%s" ty prefix name)
|> String.concat ~sep:", "
in
let ret_type = Tast_env.print_ty tast_env ft.ft_ret.et_type in
Printf.sprintf "%s(%s): %s" name param_snippet ret_type
| _ -> Printf.sprintf "%s: %s" name (Tast_env.print_ty tast_env ty)
let get_members ctx class_ : memberEntry list =
let class_etl_to_member_entry kind (name, member) : memberEntry =
let snippet =
get_snippet ctx name (Lazy.force member.Typing_defs.ce_type)
in
let pos =
Lazy.force member.Typing_defs.ce_pos
|> Naming_provider.resolve_position ctx
|> Pos.to_absolute
in
let origin = Utils.strip_ns member.Typing_defs.ce_origin in
{ name; kind; snippet; pos; origin }
in
let class_const_to_member_entry
((name, const) : string * Typing_defs.class_const) : memberEntry =
let snippet = get_snippet ctx name const.Typing_defs.cc_type in
let pos =
const.Typing_defs.cc_pos
|> Naming_provider.resolve_position ctx
|> Pos.to_absolute
in
let origin = Utils.strip_ns const.Typing_defs.cc_origin in
{ name; kind = ServerTypeHierarchyTypes.Const; snippet; pos; origin }
in
(Decl_provider.Class.methods class_
|> List.map ~f:(class_etl_to_member_entry ServerTypeHierarchyTypes.Method))
@ (Decl_provider.Class.smethods class_
|> List.map ~f:(class_etl_to_member_entry ServerTypeHierarchyTypes.SMethod)
)
@ (Decl_provider.Class.props class_
|> List.map ~f:(class_etl_to_member_entry ServerTypeHierarchyTypes.Property)
)
@ (Decl_provider.Class.sprops class_
|> List.map
~f:(class_etl_to_member_entry ServerTypeHierarchyTypes.SProperty))
@ (Decl_provider.Class.consts class_
|> List.map ~f:class_const_to_member_entry)
let classish_kind_to_entryKind (kind : Ast_defs.classish_kind) : entryKind =
let open Ast_defs in
match kind with
| Cclass _ -> Class
| Cenum_class _ -> Enum
| Cinterface -> Interface
| Ctrait -> Trait
| Cenum -> Enum
let get_ancestor_entry ctx name : ancestorEntry =
let class_ = Decl_provider.get_class ctx name in
match class_ with
| None -> AncestorName (Utils.strip_ns name)
| Some class_ ->
AncestorDetails
{
name = Utils.strip_ns name;
kind = classish_kind_to_entryKind (Decl_provider.Class.kind class_);
pos =
Decl_provider.Class.pos class_
|> Naming_provider.resolve_position ctx
|> Pos.to_absolute;
}
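(** Build the hierarchy entry for a class decl: its kind, position, members,
    and ancestors. Interface ancestors are only kept when the entry itself is
    an interface; ancestors whose decl cannot be found are dropped. *)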
let decl_to_hierarchy ctx class_ : hierarchyEntry =
let name = Utils.strip_ns (Decl_provider.Class.name class_) in
let kind = classish_kind_to_entryKind (Decl_provider.Class.kind class_) in
let pos =
Decl_provider.Class.pos class_
|> Naming_provider.resolve_position ctx
|> Pos.to_absolute
in
let members = get_members ctx class_ in
let ancestors =
Decl_provider.Class.all_ancestor_names class_
|> List.map ~f:(get_ancestor_entry ctx)
|> List.filter ~f:(function
| AncestorDetails e ->
(not (phys_equal e.kind Interface)) || phys_equal kind Interface
| _ -> false)
in
{ name; kind; pos; ancestors; members }
let go_quarantined
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(line : int)
~(column : int) : ServerTypeHierarchyTypes.result =
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
let symbols =
IdentifySymbolService.go_quarantined ~ctx ~entry ~line ~column
in
let identity =
List.find symbols ~f:(fun v ->
match v.SymbolOccurrence.type_ with
| SymbolOccurrence.Class _ -> true
| _ -> false)
in
let env_and_ty =
ServerInferType.human_friendly_type_at_pos
~under_dynamic:false
ctx
tast
line
column
in
match (identity, env_and_ty) with
| (None, None) -> None
| (Some sym, _) ->
(* found a named entity, a class name *)
let class_ = Decl_provider.get_class ctx sym.SymbolOccurrence.name in
Option.map class_ ~f:(fun class_ -> decl_to_hierarchy ctx class_)
| (_, Some (_env, ty)) ->
(* type of an expression, look to see if we have a class to show here *)
(match get_node ty with
| Tclass ((_, c_name), _, _) ->
let class_ = Decl_provider.get_class ctx c_name in
Option.map class_ ~f:(fun class_ -> decl_to_hierarchy ctx class_)
| _ -> None)
let json_of_member_entry (entry : memberEntry) =
Hh_json.JSON_Object
[
("name", Hh_json.string_ entry.name);
("snippet", Hh_json.string_ entry.snippet);
( "kind",
Hh_json.string_ (ServerTypeHierarchyTypes.show_memberKind entry.kind) );
("pos", Hh_json.string_ (Pos.string_no_file entry.pos));
("origin", Hh_json.string_ entry.origin);
]
let json_of_ancestor_entry (entry : ancestorEntry) =
match entry with
| AncestorName name -> Hh_json.string_ name
| AncestorDetails entry ->
Hh_json.JSON_Object
[
("name", Hh_json.string_ entry.name);
("kind", Hh_json.string_ (show_entryKind entry.kind));
("pos", Hh_json.string_ (Pos.string_no_file entry.pos));
]
let json_of_hierarchy_entry (entry : hierarchyEntry) =
Hh_json.JSON_Object
[
("name", Hh_json.string_ entry.name);
("kind", Hh_json.string_ (show_entryKind entry.kind));
("pos", Hh_json.string_ (Pos.string_no_file entry.pos));
("ancestors", Hh_json.array_ json_of_ancestor_entry entry.ancestors);
("members", Hh_json.array_ json_of_member_entry entry.members);
]
let json_of_results ~(results : result) =
match results with
| None -> Hh_json.JSON_Object []
| Some entry -> json_of_hierarchy_entry entry |
OCaml | hhvm/hphp/hack/src/server/serverTypeHierarchyTypes.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type memberKind =
| Method [@value 1]
| SMethod [@value 2]
| Property [@value 3]
| SProperty [@value 4]
| Const [@value 5]
[@@deriving show]
type memberEntry = {
name: string;
snippet: string;
kind: memberKind;
pos: Pos.absolute;
origin: string;
}
type entryKind =
| Class [@value 1]
| Interface [@value 2]
| Enum [@value 3]
| Trait [@value 4]
[@@deriving show]
type ancestorEntry =
| AncestorName of string
| AncestorDetails of {
name: string;
kind: entryKind;
pos: Pos.absolute;
}
type hierarchyEntry = {
name: string;
kind: entryKind;
pos: Pos.absolute;
ancestors: ancestorEntry list;
members: memberEntry list;
}
type result = hierarchyEntry option |
OCaml | hhvm/hphp/hack/src/server/serverUtils.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module SN = Naming_special_names
type 'env handle_command_result =
| Done of 'env
| Needs_full_recheck of {
env: 'env;
finish_command_handling: 'env -> 'env;
reason: string;
}
| Needs_writes of {
env: 'env;
finish_command_handling: 'env -> 'env;
recheck_restart_is_needed: bool;
reason: string;
}
let wrap ~try_ f env = try_ env (fun () -> f env)
let wrap ~try_ = function
| Done env -> Done env
| Needs_full_recheck cont ->
Needs_full_recheck
{
cont with
finish_command_handling = wrap ~try_ cont.finish_command_handling;
}
| Needs_writes cont ->
Needs_writes
{
cont with
finish_command_handling = wrap ~try_ cont.finish_command_handling;
}
let shutdown_client (_ic, oc) =
let cli = Unix.descr_of_out_channel oc in
try
Unix.shutdown cli Unix.SHUTDOWN_ALL;
Out_channel.close oc
with
| _ -> ()
let log_and_get_sharedmem_load_telemetry () : Telemetry.t =
let telemetry = Telemetry.create () in
let unwrap (result : (Telemetry.t, Telemetry.t) result) : Telemetry.t =
match result with
| Ok telemetry -> telemetry
| Error telemetry -> telemetry
in
Utils.try_with_stack SharedMem.SMTelemetry.hash_stats
|> Result.map_error ~f:(fun e ->
Hh_logger.exception_ e;
Telemetry.string_
telemetry
~key:"hashtable_stats_error"
~value:(Exception.get_ctor_string e))
|> Result.map
~f:(fun { SharedMem.SMTelemetry.used_slots; slots; nonempty_slots } ->
let load_factor = float_of_int used_slots /. float_of_int slots in
Hh_logger.log
"Hashtable load factor: %d / %d (%.02f) with %d nonempty slots"
used_slots
slots
load_factor
nonempty_slots;
Telemetry.float_
telemetry
~key:"hashtable_load_factor"
~value:load_factor)
|> unwrap
let exit_on_exception (e : Exception.t) =
match Exception.to_exn e with
| SharedMem.Out_of_shared_memory ->
ignore (log_and_get_sharedmem_load_telemetry () : Telemetry.t);
Printf.eprintf "Error: failed to allocate in the shared heap.\n%!";
Exit.exit Exit_status.Out_of_shared_memory
| SharedMem.Hash_table_full ->
ignore (log_and_get_sharedmem_load_telemetry () : Telemetry.t);
Printf.eprintf "Error: failed to allocate in the shared hashtable.\n%!";
Exit.exit Exit_status.Hash_table_full
| Watchman.Watchman_error s ->
Hh_logger.exception_ e;
Hh_logger.log "Exiting. Failed due to watchman error: %s" s;
Exit.exit Exit_status.Watchman_failed
| MultiThreadedCall.Coalesced_failures failures ->
Hh_logger.exception_ e;
let failure_msg = MultiThreadedCall.coalesced_failures_to_string failures in
Hh_logger.log "%s" failure_msg;
let is_oom_failure f =
match f with
| WorkerController.Worker_oomed -> true
| _ -> false
in
let has_oom_failure = List.exists ~f:is_oom_failure failures in
if has_oom_failure then
let () = Hh_logger.log "Worker oomed. Exiting" in
Exit.exit Exit_status.Worker_oomed
else
(* We attempt to exit with the same code as a worker by folding over
* all the failures and looking for a WEXITED. *)
let worker_exit f =
match f with
| WorkerController.Worker_quit (Unix.WEXITED i) -> Some i
| _ -> None
in
let exit_code =
List.fold_left
~f:(fun acc f ->
if Option.is_some acc then
acc
else
worker_exit f)
~init:None
failures
in
(match exit_code with
| Some i ->
(* Exit with same code. *)
exit i
| None -> failwith failure_msg)
(* In single-threaded mode, WorkerController exceptions are raised directly
   * instead of being grouped into MultiThreadedCall.Coalesced_failures *)
| WorkerController.(Worker_failed (_, Worker_oomed)) ->
Hh_logger.exception_ e;
Exit.exit Exit_status.Worker_oomed
| WorkerController.Worker_busy ->
Hh_logger.exception_ e;
Exit.exit Exit_status.Worker_busy
| WorkerController.(Worker_failed (_, Worker_quit (Unix.WEXITED i))) ->
Hh_logger.exception_ e;
(* Exit with the same exit code that that worker used. *)
exit i
| WorkerController.Worker_failed_to_send_job _ ->
Hh_logger.exception_ e;
Exit.exit Exit_status.Worker_failed_to_send_job
| File_provider.File_provider_stale ->
Exit.exit Exit_status.File_provider_stale
| Decl_class.Decl_heap_elems_bug _ ->
Exit.exit Exit_status.Decl_heap_elems_bug
| Decl_defs.Decl_not_found _ -> Exit.exit Exit_status.Decl_not_found
| SharedMem.C_assertion_failure _ ->
Hh_logger.exception_ e;
Exit.exit Exit_status.Shared_mem_assertion_failure
| SharedMem.Sql_assertion_failure err_num ->
Hh_logger.exception_ e;
let exit_code =
match err_num with
| 11 -> Exit_status.Sql_corrupt
| 14 -> Exit_status.Sql_cantopen
| 21 -> Exit_status.Sql_misuse
| _ -> Exit_status.Sql_assertion_failure
in
Exit.exit exit_code
| Exit_status.Exit_with ec -> Exit.exit ec
| _ ->
Hh_logger.exception_ e;
Exit.exit (Exit_status.Uncaught_exception e)
let with_exit_on_exception f =
try f () with
| exn ->
let e = Exception.wrap exn in
exit_on_exception e
let make_next
?(hhi_filter = FindUtils.is_hack)
~(indexer : unit -> string list)
~(extra_roots : Path.t list) : Relative_path.t list Bucket.next =
let next_files_root =
Utils.compose (List.map ~f:Relative_path.(create Root)) indexer
in
let hhi_root = Hhi.get_hhi_root () in
let next_files_hhi =
Utils.compose
(List.map ~f:Relative_path.(create Hhi))
(Find.make_next_files ~name:"hhi" ~filter:hhi_filter hhi_root)
in
let rec concat_next_files l () =
match l with
| [] -> []
| hd :: tl -> begin
match hd () with
| [] -> concat_next_files tl ()
| x -> x
end
in
let next_files_extra =
List.map
~f:(fun root ->
Utils.compose
(List.map ~f:Relative_path.create_detect_prefix)
(Find.make_next_files ~filter:FindUtils.file_filter root))
extra_roots
|> concat_next_files
in
fun () ->
let next =
concat_next_files [next_files_hhi; next_files_extra; next_files_root] ()
in
Bucket.of_list next
(* During naming, we desugar:
*
* invariant(foo(), "oh dear");
*
* To:
*
* if (!foo()) {
* invariant_violation("oh dear");
* }
*
* If [cond] and [then_body] look like desugared syntax, return the
* equivalent expression that calls invariant().
*)
let resugar_invariant_call env (cond : Tast.expr) (then_body : Tast.block) :
Tast.expr option =
(* If a user has actually written
*
* if (!foo()) {
* invariant_violation("oh dear");
* }
*
* then the position of the if statement and the call will be different. If
* the positions are the same, we know that we desugared a call to invariant().
*)
let has_same_start pos1 pos2 =
let pos1_start = Pos.start_offset pos1 in
let pos2_start = Pos.start_offset pos2 in
pos1_start = pos2_start
in
match (cond, then_body) with
| ( (_, _, Aast.Unop (Ast_defs.Unot, invariant_cond)),
[
( stmt_pos,
Aast.Expr
( call_ty,
call_pos,
Aast.(
Call
{ func = (recv_ty, recv_pos, Id (name_pos, name)); args; _ })
) );
] )
when String.equal name SN.AutoimportedFunctions.invariant_violation
&& has_same_start stmt_pos call_pos ->
let recv_ty_invariant =
match
Decl_provider.get_fun
(Tast_env.get_ctx env)
SN.AutoimportedFunctions.invariant
with
| Some fun_decl ->
let (_, f_locl_ty) =
Tast_env.localize_no_subst
env
~ignore_errors:true
fun_decl.Typing_defs.fe_type
in
f_locl_ty
| None -> recv_ty
in
Some
( call_ty,
call_pos,
Aast.(
Call
{
func =
( recv_ty_invariant,
recv_pos,
Id (name_pos, SN.AutoimportedFunctions.invariant) );
targs = [];
args = (Ast_defs.Pnormal, invariant_cond) :: args;
unpacked_arg = None;
}) )
| _ -> None |
OCaml Interface | hhvm/hphp/hack/src/server/serverUtils.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** The result of handling a command message from a client. *)
type 'env handle_command_result =
| Done of 'env
(** The command was fully executed, and this is the new environment. *)
| Needs_full_recheck of {
env: 'env;
finish_command_handling: 'env -> 'env;
          (** This continuation needs to be run after the full check has
              finished, to complete the handling of the command. *)
reason: string;
(** Reason why this command needs full recheck (for logging/debugging purposes) *)
}
(** The command needs a full recheck to complete before it can finish being executed. *)
| Needs_writes of {
env: 'env;
finish_command_handling: 'env -> 'env;
recheck_restart_is_needed: bool;
(** Whether current recheck should be automatically
restarted after applying the writes. *)
reason: string;
(** Reason why this command needs writes (for logging/debugging purposes). *)
}
(** The command wants to modify global state, by modifying file contents. *)
(** Wrap all the continuations inside the result in the provided try_ function. *)
val wrap :
try_:('env -> (unit -> 'env) -> 'env) ->
'env handle_command_result ->
'env handle_command_result
(** Shutdown given sockets. *)
val shutdown_client : 'in_channel * out_channel -> unit
val log_and_get_sharedmem_load_telemetry : unit -> Telemetry.t
(** Exit with the exit code corresponding to the given exception.
    Perform any necessary cleanups. *)
val exit_on_exception : Exception.t -> 'result
(** Execute the given function. If the function raises an exception,
    exit with the exit code corresponding to that exception.
    Perform any necessary cleanups. *)
val with_exit_on_exception : (unit -> 'result) -> 'result
(** Return all the files that we need to typecheck *)
val make_next :
?hhi_filter:(string -> bool) ->
indexer:(unit -> string list) ->
extra_roots:Path.t list ->
Relative_path.t list Bucket.next
(** If the components of this if statement look like they were
desugared from an invariant() call, return the equivalent
invariant() expression. *)
val resugar_invariant_call :
Tast_env.env -> Tast.expr -> Tast.block -> Tast.expr option |
OCaml | hhvm/hphp/hack/src/server/serverWorker.ml | (*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
(* As for [Daemon.register_entry_point], this should stay
at toplevel, in order to be executed before
[Daemon.check_entry_point]. *)
let entry =
WorkerControllerEntryPoint.register ~restore:ServerGlobalState.restore
(** We use the call_wrapper to classify some exceptions in all calls in the
* same way. *)
let catch_and_classify_exceptions : 'x 'b. ('x -> 'b) -> 'x -> 'b =
fun f x ->
try f x with
| Decl_class.Decl_heap_elems_bug _ ->
Exit.exit Exit_status.Decl_heap_elems_bug
| File_provider.File_provider_stale ->
Exit.exit Exit_status.File_provider_stale
| Decl_defs.Decl_not_found x ->
Hh_logger.log "Decl_not_found %s" x;
Exit.exit Exit_status.Decl_not_found
| Not_found_s _
| Caml.Not_found ->
Exit.exit Exit_status.Worker_not_found_exception
let make ~longlived_workers ~nbr_procs gc_control heap_handle ~logging_init =
MultiWorker.make
~call_wrapper:{ WorkerController.wrap = catch_and_classify_exceptions }
~saved_state:(ServerGlobalState.save ~logging_init)
~entry
~longlived_workers
nbr_procs
~gc_control
~heap_handle |
OCaml Interface | hhvm/hphp/hack/src/server/serverWorker.mli | (*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
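(** Create the server's pool of MultiWorker workers. Each job run on these
    workers is wrapped so that known fatal exceptions (e.g. missing decls or a
    stale file provider) are converted into specific exit codes. *)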
val make :
longlived_workers:bool ->
nbr_procs:int ->
Gc.control ->
SharedMem.handle ->
logging_init:(unit -> unit) ->
MultiWorker.worker list |
OCaml | hhvm/hphp/hack/src/server/symbolFunCallService.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module SN = Naming_special_names
open ServerCommandTypes.Symbol_info_service
module Result_set = Caml.Set.Make (struct
type t = ServerCommandTypes.Symbol_info_service.symbol_fun_call
let compare a b =
(* Descending order, since SymbolInfoService.format_result uses rev_append
and will reverse our sorted result list. *)
let r = Pos.compare_pos String.compare b.pos a.pos in
if r <> 0 then
r
else
let r = String.compare b.name a.name in
if r <> 0 then
r
else
let r = compare_target_type b.type_ a.type_ in
if r <> 0 then
r
else
String.compare b.caller a.caller
end)
let combine_name cur_class cur_caller =
match (cur_class, cur_caller) with
| (_, None) -> "" (* Top-level function call *)
| (None, Some f) -> f
| (Some c, Some f) -> c ^ "::" ^ f
let is_pseudofunction name =
List.mem ~equal:String.equal SN.PseudoFunctions.[isset; unset] name
class visitor =
object (self)
inherit [_] Tast_visitor.reduce as super
method zero = Result_set.empty
method plus = Result_set.union
val mutable cur_caller = None
method fun_call env target_type name pos =
if is_pseudofunction name then
self#zero
else
let name = Utils.strip_ns name in
if String.equal name SN.SpecialFunctions.echo then
self#zero
else
let cur_class =
Tast_env.get_self_id env |> Option.map ~f:Utils.strip_ns
in
Result_set.singleton
{
name;
type_ = target_type;
pos = Pos.to_relative_string pos;
caller = combine_name cur_class cur_caller;
}
method method_call env target_type class_name method_id =
let (pos, method_name) = method_id in
let method_fullname = combine_name (Some class_name) (Some method_name) in
self#fun_call env target_type method_fullname pos
method! on_fun_def env fd =
let name = snd fd.Aast.fd_name in
cur_caller <- Some (Utils.strip_ns name);
let acc = super#on_fun_def env fd in
cur_caller <- None;
acc
method! on_method_ env m =
cur_caller <- Some (snd m.Aast.m_name);
let acc = super#on_method_ env m in
cur_caller <- None;
acc
method! on_expr env ((ty, pos, expr_) as expr) =
let acc =
match expr_ with
| Aast.New _ ->
let mid = (pos, SN.Members.__construct) in
Tast_env.get_class_ids env ty
|> List.map ~f:(fun cid -> self#method_call env Constructor cid mid)
|> List.fold ~init:self#zero ~f:self#plus
| Aast.Method_caller ((_, cid), mid) ->
self#method_call env Method cid mid
| _ -> self#zero
in
let special_fun_acc =
let special_fun id = self#fun_call env Function id pos in
let module SF = SN.AutoimportedFunctions in
match expr_ with
| Aast.Method_caller _ -> special_fun SF.meth_caller
| _ -> self#zero
in
let ( + ) = self#plus in
special_fun_acc + acc + super#on_expr env expr
method! on_Call env call =
let Aast.{ func = (_, _, expr_); _ } = call in
let acc =
match expr_ with
| Aast.Id (pos, name) -> self#fun_call env Function name pos
| Aast.Class_const ((ty, _, _), mid)
| Aast.Obj_get ((ty, _, _), (_, _, Aast.Id mid), _, _) ->
let target_type =
if String.equal (snd mid) SN.Members.__construct then
Constructor
else
Method
in
Tast_env.get_class_ids env ty
|> List.map ~f:(fun cid -> self#method_call env target_type cid mid)
|> List.fold ~init:self#zero ~f:self#plus
| _ -> self#zero
in
self#plus acc (super#on_Call env call)
end
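(* For each TAST, collect the function, method, and constructor calls it
   contains (de-duplicated per file) and concatenate the results. *)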
let find_fun_calls ctx tasts =
List.concat_map tasts ~f:(fun x ->
(new visitor)#go ctx x |> Result_set.elements) |
OCaml | hhvm/hphp/hack/src/server/symbolInfoService.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open SymbolInfoServiceUtils
(* This module dumps all the symbol info (like fun-calls) in the input files *)
let parallel_helper workers filename_l tcopt =
MultiWorker.call
workers
~job:(helper tcopt)
~neutral:[]
~merge:List.rev_append
~next:(MultiWorker.next workers filename_l)
(* Entry Point *)
let go workers file_list env =
let filename_l =
file_list
|> List.filter ~f:FindUtils.file_filter
|> List.map ~f:(Relative_path.create Relative_path.Root)
in
let ctx = Provider_utils.ctx_from_server_env env in
let raw_result =
if List.length filename_l < 10 then
helper ctx [] filename_l
else
parallel_helper workers filename_l ctx
in
format_result raw_result |
OCaml | hhvm/hphp/hack/src/server/symbolInfoServiceUtils.ml | (*
* Copyright (c) 2021, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerCommandTypes.Symbol_info_service
let recheck_naming ctx filename_l =
List.iter filename_l ~f:(fun file ->
Errors.ignore_ (fun () ->
(* We only need to name to find references to locals *)
List.iter (Ast_provider.get_ast ctx file ~full:false) ~f:(function
| Aast.Fun f ->
let _ = Naming.fun_def ctx f in
()
| Aast.Class c ->
let _ = Naming.class_ ctx c in
()
| _ -> ())))
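(* Process one bucket of files: re-run naming on them, compute their TASTs,
   and cons the extracted (fun_calls, symbol_types) pair onto [acc]. *)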
let helper ctx acc filename_l =
let filename_l = List.rev filename_l in
recheck_naming ctx filename_l;
let tasts =
List.map filename_l ~f:(fun path ->
let (ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_unquarantined ~ctx ~entry
in
tast.Tast_with_dynamic.under_normal_assumptions)
in
let fun_calls = SymbolFunCallService.find_fun_calls ctx tasts in
let symbol_types = SymbolTypeService.generate_types ctx tasts in
(fun_calls, symbol_types) :: acc
(* Format the raw '(fun_calls * symbol_types) list' result into
   'fun_calls list, symbol_types list' and store it in SymbolInfoService.result *)
let format_result raw_result =
let result_list =
List.fold_left
raw_result
~f:
begin
fun acc bucket ->
let (result1, result2) = acc in
let (part1, part2) = bucket in
(List.rev_append part1 result1, List.rev_append part2 result2)
end
~init:([], [])
in
{ fun_calls = fst result_list; symbol_types = snd result_list } |
OCaml | hhvm/hphp/hack/src/server/symbolTypeService.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Set = Stdlib.Set
open Aast
open ServerCommandTypes.Symbol_type
module Result_set = Set.Make (struct
type t = ServerCommandTypes.Symbol_type.t
let compare a b = Pos.compare_pos String.compare a.pos b.pos
end)
let visitor =
object (self)
inherit [_] Tast_visitor.reduce as super
method zero = Result_set.empty
method plus = Result_set.union
method! on_expr env ((ty, pos, expr_) as expr) =
let acc =
match expr_ with
| Lvar (_, id)
| Dollardollar (_, id) ->
Result_set.singleton
{
pos = Pos.to_relative_string pos;
type_ = Tast_env.print_ty env ty;
ident_ = Local_id.to_int id;
}
| _ -> self#zero
in
self#plus acc @@ super#on_expr env expr
method! on_fun_param env param =
let acc =
let ty = param.param_annotation in
Result_set.singleton
{
pos = Pos.to_relative_string param.param_pos;
type_ = Tast_env.print_ty env ty;
ident_ = Local_id.to_int (Local_id.make_unscoped param.param_name);
}
in
self#plus acc @@ super#on_fun_param env param
end
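(* Collect the printed type of every local variable occurrence and every
   function parameter in the given TASTs, de-duplicated by position. *)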
let generate_types ctx tasts =
tasts
|> List.map ~f:(visitor#go ctx)
|> List.fold ~init:Result_set.empty ~f:Result_set.union
|> Result_set.elements |
OCaml | hhvm/hphp/hack/src/server/tastHolesService.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module X = struct
type t = {
actual_ty_string: string;
actual_ty_json: string;
expected_ty_string: string;
expected_ty_json: string;
pos: Pos.t;
}
[@@deriving ord]
end
include X
module Set = Set.Make (X)
type result = t list |
OCaml Interface | hhvm/hphp/hack/src/server/tastHolesService.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t = {
actual_ty_string: string;
actual_ty_json: string;
expected_ty_string: string;
expected_ty_json: string;
pos: Pos.t;
}
type result = t list
val compare : t -> t -> int
module Set : Set.S with type elt := t |
OCaml | hhvm/hphp/hack/src/server/testClientProvider.ml | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
open ServerCommandTypes
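(* Mock client provider used by unit tests: nothing is read from or written to
   real channels; requests and responses are instead recorded in, and served
   from, the mutable references in [Refs] below. *)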
module type RefsType = sig
val clear : unit -> unit
val set_new_client_type : connection_type option -> unit
val set_client_request : 'a ServerCommandTypes.t option -> unit
val set_client_response : 'a option -> unit
val set_unclean_disconnect : bool -> unit
val set_persistent_client_request : 'a ServerCommandTypes.t option -> unit
val set_persistent_client_response : 'a option -> unit
val push_message : ServerCommandTypes.push -> unit
val get_new_client_type : unit -> connection_type option
val get_client_request : unit -> 'a ServerCommandTypes.t option
val get_client_response : unit -> 'a option
val get_unclean_disconnect : unit -> bool
val get_persistent_client_request : unit -> 'b
val get_persistent_client_response : unit -> 'a option
val get_push_messages : unit -> ServerCommandTypes.push list
end
module Refs : RefsType = struct
let new_client_type = ref None
(* Those references are used for mocking the results of Marshal.from_channel
* function, which is untypeable. Hence, Obj.magic *)
let client_request = Obj.magic (ref None)
let client_response = Obj.magic (ref None)
let unclean_disconnect = ref false
let persistent_client_request = Obj.magic (ref None)
let persistent_client_response = Obj.magic (ref None)
let push_messages : ServerCommandTypes.push list ref = ref []
let set_new_client_type x = new_client_type := x
let set_client_request x = client_request := x
let set_client_response x = client_response := x
let set_unclean_disconnect x = unclean_disconnect := x
let set_persistent_client_request x = persistent_client_request := x
let set_persistent_client_response x = persistent_client_response := x
let clear_push_messages () = push_messages := []
let push_message x = push_messages := x :: !push_messages
let get_new_client_type () = !new_client_type
let get_client_response () = !client_response
let get_unclean_disconnect () = !unclean_disconnect
let get_client_request () = !client_request
let get_persistent_client_request () = !persistent_client_request
let get_persistent_client_response () = !persistent_client_response
let get_push_messages () =
let push_messages = !push_messages in
clear_push_messages ();
push_messages
let clear () =
set_new_client_type None;
set_client_request None;
set_client_response None;
set_unclean_disconnect false;
set_persistent_client_request None;
    set_persistent_client_response None;
clear_push_messages ();
()
end
let clear = Refs.clear
let mock_new_client_type x = Refs.set_new_client_type (Some x)
let mock_client_request x = Refs.set_client_request (Some x)
let mock_unclean_disconnect () = Refs.set_unclean_disconnect true
let mock_persistent_client_request x =
Refs.set_persistent_client_request (Some x)
let get_mocked_new_client_type () = Refs.get_new_client_type ()
let get_mocked_client_request = function
| Non_persistent -> Refs.get_client_request ()
| Persistent -> Refs.get_persistent_client_request ()
let get_mocked_unclean_disconnect = function
| Non_persistent -> false
| Persistent -> Refs.get_unclean_disconnect ()
let record_client_response x = function
| Non_persistent -> Refs.set_client_response (Some x)
| Persistent -> Refs.set_persistent_client_response (Some x)
let get_client_response = function
| Non_persistent -> Refs.get_client_response ()
| Persistent -> Refs.get_persistent_client_response ()
let push_message x = Refs.push_message x
let get_push_messages = Refs.get_push_messages
type t = unit
type client = connection_type
type handoff = {
client: client;
m2s_sequence_number: int;
}
type select_outcome =
| Select_persistent
| Select_new of handoff
| Select_nothing
| Select_exception of Exception.t
| Not_selecting_hg_updating
exception Client_went_away
let provider_from_file_descriptors _ = ()
let provider_for_test _ = ()
let sleep_and_check _ _ ~ide_idle:_ ~idle_gc_slice:_ _ =
let client_opt = get_mocked_new_client_type () in
let is_persistent = Option.is_some (get_mocked_client_request Persistent) in
match (is_persistent, client_opt) with
| (true, _) -> Select_persistent
| (false, Some client) -> Select_new { client; m2s_sequence_number = 0 }
| (false, None) -> Select_nothing
let has_persistent_connection_request _ =
Option.is_some (get_mocked_client_request Persistent)
let priority_fd _ = None
let not_implemented () = failwith "not implemented"
let get_client_fd _ = not_implemented ()
let track ~key:_ ?time:_ ?log:_ ?msg:_ ?long_delay_okay:_ _ = ()
let accept_client _ = Non_persistent
let read_connection_type _ = Utils.unsafe_opt (get_mocked_new_client_type ())
let send_response_to_client c x =
if get_mocked_unclean_disconnect c then
raise Client_went_away
else
record_client_response x c
let send_push_message_to_client _ x = push_message x
let client_has_message _ = Option.is_some (get_mocked_client_request Persistent)
let read_client_msg c =
let metadata = { ServerCommandTypes.from = "test"; desc = "cmd" } in
Rpc (metadata, Utils.unsafe_opt (get_mocked_client_request c))
let get_channels _ = not_implemented ()
let is_persistent = function
| Persistent -> true
| Non_persistent -> false
let priority_to_string (_client : client) : string = "mock"
let make_persistent _ = ServerCommandTypes.Persistent
let shutdown_client _ = ()
let ping _ = () |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/add_local_type_hint.ml | open Hh_prelude
type candidate = {
lhs_var: string;
lhs_type: string;
lhs_pos: Pos.t;
}
let should_offer_refactor ~(selection : Pos.t) ~lhs_pos ~rhs_pos =
let contains_full_assignment =
Pos.contains selection rhs_pos && Pos.contains selection lhs_pos
in
contains_full_assignment || Pos.contains lhs_pos selection
let find_candidate ~(selection : Pos.t) ~entry ctx : candidate option =
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
let visitor =
object
inherit [candidate option] Tast_visitor.reduce as super
method zero = None
method plus = Option.first_some
method! on_class_ env class_ =
let pos = class_.Aast_defs.c_span in
if Pos.contains pos selection then
super#on_class_ env class_
else
None
method! on_method_ env meth =
let pos = Aast_defs.(meth.m_span) in
if Pos.contains pos selection then
super#on_method_ env meth
else
None
method! on_fun_def env fd =
let pos = Aast_defs.(fd.fd_fun.f_span) in
if Pos.contains pos selection then
super#on_fun_def env fd
else
None
method! on_stmt env stmt =
let (pos, stmt_) = stmt in
if Pos.contains pos selection then
let open Aast in
match stmt_ with
| Expr
( _,
_,
Binop
{
bop = Ast_defs.Eq None;
lhs = (lvar_ty, lhs_pos, Lvar (lid_pos, lid));
rhs = (_, rhs_pos, _);
} )
when should_offer_refactor ~selection ~lhs_pos ~rhs_pos ->
let tenv = Tast_env.tast_env_as_typing_env env in
Some
{
lhs_var = Local_id.get_name lid;
lhs_type = Typing_print.full_strip_ns tenv lvar_ty;
lhs_pos = lid_pos;
}
| _ -> super#on_stmt env stmt
else
None
end
in
visitor#go ctx tast.Tast_with_dynamic.under_normal_assumptions
let edit_of_candidate ~path { lhs_var; lhs_type; lhs_pos } : Lsp.WorkspaceEdit.t
=
let edit =
let range =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal lhs_pos
in
let text = Printf.sprintf "let %s : %s " lhs_var lhs_type in
Lsp.TextEdit.{ range; newText = text }
in
let changes = SMap.singleton (Relative_path.to_absolute path) [edit] in
Lsp.WorkspaceEdit.{ changes }
let to_refactor ~path candidate =
let edit = lazy (edit_of_candidate ~path candidate) in
let title = Printf.sprintf "Add local type hint for %s" candidate.lhs_var in
Code_action_types.Refactor.{ title; edit }
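(** Walk the CST looking for a file attribute that enables the unstable
    typed_local_variables feature, without descending into class, method, or
    function declarations (file attributes can only occur at the top level). *)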
let has_typed_local_variables_enabled root_node =
let open Full_fidelity_positioned_syntax in
let skip_traversal n =
Full_fidelity_positioned_syntax.(
is_classish_declaration n
|| is_classish_body n
|| is_methodish_declaration n
|| is_methodish_trait_resolution n
|| is_function_declaration n
|| is_function_declaration_header n)
in
let has_file_attr kwrd attrs =
String.equal kwrd "file"
&& String.is_substring attrs ~substring:"EnableUnstableFeatures"
&& String.is_substring attrs ~substring:"typed_local_variables"
in
let rec aux nodes =
match nodes with
| [] -> false
| [] :: nss -> aux nss
| (n :: ns) :: nss ->
(match n.syntax with
| FileAttributeSpecification r ->
if
has_file_attr
(text r.file_attribute_specification_keyword)
(text r.file_attribute_specification_attributes)
then
true
else
aux (ns :: nss)
| _ ->
if skip_traversal n then
aux (ns :: nss)
else
aux (children n :: ns :: nss))
in
aux [[root_node]]
let find ~entry ~(range : Lsp.range) ctx =
let source_text = Ast_provider.compute_source_text ~entry in
let cst = Ast_provider.compute_cst ~ctx ~entry in
let root_node = Provider_context.PositionedSyntaxTree.root cst in
if has_typed_local_variables_enabled root_node then
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let path = entry.Provider_context.path in
let selection = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
find_candidate ~selection ~entry ctx
|> Option.map ~f:(to_refactor ~path)
|> Option.to_list
else
[] |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/add_local_type_hint.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
hhvm/hphp/hack/src/server/server_code_actions_services/dune | (library
(name server_code_actions_services)
(wrapped true)
(libraries
annotated_ast
ast
code_action_types
collections
decl_provider
errors
extract_classish
extract_method
inline_method
full_fidelity
logging
lsp
pos
provider_context
tast_env
tast_provider
typing_skeleton
utils_core)
(preprocess
(pps lwt_ppx ppx_deriving.std))) |
|
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_shape_type.ml | open Hh_prelude
(** Don't truncate types in printing unless they are really big,
so we almost always generate valid code.
The number is somewhat arbitrary: it's the smallest power of 2
that could print without truncation for
extract_shape_type_13.php in our test suite.
    We *do* want to truncate at some finite size so that editors are not
    asked to render an unbounded amount of text. *)
let lots_of_typing_print_fuel = 2048
type candidate =
| Of_expr of {
tast_env: Tast_env.t;
(* the function or class containing the shape expression *)
expr_container_pos: Pos.t;
shape_ty: Typing_defs.locl_ty;
}
| Of_hint of {
hint_container_pos: Pos.t;
hint_pos: Pos.t;
}
(** We use distinct titles so we can tell the refactors apart in analytics *)
let title_of_candidate = function
| Of_expr _ -> "Extract shape type"
| Of_hint _ -> "Extract shape type to alias"
type state =
| Searching of (Pos.t * candidate) option
| Selected_non_shape_type of Pos.t
(** When searching for an expression of shape type,
we don't want to provide the refactor for larger expressions
that happen to contain an expression of shape type.
For example, we don't want to provide a refactor for this selection range:
$x = /*range-start*/(() ==> shape('a' => 2, 'b' => $a)['a'])()/*range-end*/;
*)
let plus_state (a : state) (b : state) : state =
match (a, b) with
| (Searching (Some _), Searching (Some (pos_b, _))) ->
HackEventLogger.invariant_violation_bug
~path:(Pos.filename pos_b)
~pos:(Pos.string @@ Pos.to_absolute pos_b)
"expected only one candidate to be found, since we select the largest shape-typed expression containing the selection";
(* Safe to continue in spite of being in an unexpected situation:
We still provide *a* reasonable refactoring by picking arbitrarily
*)
a
| (Searching c1, Searching c2) -> Searching (Option.first_some c1 c2)
| (Searching (Some (pos1, _)), Selected_non_shape_type pos2)
when Pos.contains pos1 pos2 ->
(* A shape can contain a non-shape *)
a
| (Selected_non_shape_type pos1, Selected_non_shape_type pos2) ->
Selected_non_shape_type (Pos.merge pos1 pos2)
| (Selected_non_shape_type _, _) -> a
| (_, Selected_non_shape_type _) -> b
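(** Find the shape-typed expression or the shape hint selected by the user,
    together with the span of the enclosing class or function; the new type
    alias is inserted just before that container. *)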
let find_candidate ~(selection : Pos.t) ~entry ctx : candidate option =
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
let visitor =
let container_pos = ref None in
object
inherit [state] Tast_visitor.reduce as super
method zero = Searching None
method plus = plus_state
method! on_class_ env class_ =
let pos = class_.Aast_defs.c_span in
if Pos.contains pos selection then (
container_pos := Some pos;
super#on_class_ env class_
) else
Searching None
method! on_type_hint_ env hint_ =
match Option.both hint_ !container_pos with
| Some ((hint_pos, Aast_defs.Hshape _), hint_container_pos)
when Pos.contains selection hint_pos ->
Searching (Some (hint_pos, Of_hint { hint_container_pos; hint_pos }))
| _ -> super#on_type_hint_ env hint_
method! on_fun_def env fd =
let pos = Aast_defs.(fd.fd_fun.f_span) in
if Pos.contains pos selection then (
container_pos := Some pos;
super#on_fun_def env fd
) else
Searching None
method! on_stmt env stmt =
let stmt_pos = fst stmt in
if Pos.contains selection stmt_pos then
Selected_non_shape_type stmt_pos
else
super#on_stmt env stmt
method! on_expr env expr =
let (ty, expr_pos, _) = expr in
if Pos.contains selection expr_pos then
let ty_ = Typing_defs_core.get_node ty in
match (ty_, !container_pos) with
| (Typing_defs_core.Tshape _, Some expr_container_pos) ->
Searching
(Some
( expr_pos,
Of_expr { tast_env = env; expr_container_pos; shape_ty = ty }
))
| _ -> Selected_non_shape_type expr_pos
else
super#on_expr env expr
end
in
match visitor#go ctx tast.Tast_with_dynamic.under_normal_assumptions with
| Searching (Some (_, candidate)) -> Some candidate
| Searching _
| Selected_non_shape_type _ ->
None
let snippet_for_decl_of : string -> string =
Printf.sprintf "type T${0:placeholder_} = %s;\n\n"
let snippet_for_use = "T${0:placeholder_}"
let range_of_container_pos container_pos : Lsp.range =
let pos = Pos.shrink_to_start container_pos in
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal pos
let with_typing_print_fuel (tenv : Typing_env_types.env) ~(fuel : int) :
Typing_env_types.env =
(* let genv = tenv.Typing_env_types.genv in *)
Typing_env.map_tcopt tenv ~f:(fun tcopt ->
{ tcopt with GlobalOptions.tco_type_printer_fuel = fuel })
let edit_of_candidate source_text ~path candidate : Lsp.WorkspaceEdit.t =
let sub_of_pos = Full_fidelity_source_text.sub_of_pos source_text in
let edits =
match candidate with
| Of_expr { shape_ty; expr_container_pos; tast_env } ->
let range = range_of_container_pos expr_container_pos in
let text =
let ty_text =
let tenv =
tast_env
|> Tast_env.tast_env_as_typing_env
|> with_typing_print_fuel ~fuel:lots_of_typing_print_fuel
in
Typing_print.full_strip_ns tenv shape_ty
in
snippet_for_decl_of ty_text
in
[Lsp.TextEdit.{ range; newText = text }]
| Of_hint { hint_container_pos; hint_pos } ->
let decl_edit =
let range = range_of_container_pos hint_container_pos in
let text =
let ty_text = sub_of_pos hint_pos in
snippet_for_decl_of ty_text
in
Lsp.TextEdit.{ range; newText = text }
in
let use_edit =
let range =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal hint_pos
in
Lsp.TextEdit.{ range; newText = snippet_for_use }
in
[decl_edit; use_edit]
in
let changes = SMap.singleton (Relative_path.to_absolute path) edits in
Lsp.WorkspaceEdit.{ changes }
let to_refactor source_text ~path candidate =
let edit = lazy (edit_of_candidate source_text ~path candidate) in
Code_action_types.Refactor.{ title = title_of_candidate candidate; edit }
let find ~entry ~(range : Lsp.range) ctx =
let source_text = Ast_provider.compute_source_text ~entry in
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let path = entry.Provider_context.path in
let selection = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
find_candidate ~selection ~entry ctx
|> Option.map ~f:(to_refactor source_text ~path)
|> Option.to_list |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_shape_type.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_variable.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let placeholder_regexp = Str.regexp {|\$placeholder\([0-9]+\)|}
type candidate = {
stmt_pos: Pos.t;
pos: Pos.t;
placeholder_n: int;
}
(**
We don't want to extract variables for lambdas like this: `() ==> 200`.
The AST of such a lambda is indistinguishable from `() ==> { return 200; }`
so we peek at the source *)
let might_be_expression_lambda ~f_body:Aast.{ fb_ast } ~pos ~source_text =
match fb_ast with
| [(stmt_pos, _)] ->
let length = Pos.start_offset stmt_pos - Pos.start_offset pos in
if length > 0 then
let src = Full_fidelity_source_text.sub_of_pos source_text pos ~length in
not @@ String.is_substring ~substring:"{" src
else
      (* length can be negative due to curlies in default params: `(($a = () ==> {}) ==> ...` *)
true
| _ -> false
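(** Reduce visitor that returns the outermost expression contained in the
    selection, together with the position of its enclosing statement (where
    the new assignment will be inserted) and the next free placeholder number.
    Candidates inside an assignment left-hand side or inside an
    expression-bodied lambda are rejected. *)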
let positions_visitor (selection : Pos.t) ~source_text =
let stmt_pos = ref Pos.none in
let expression_lambda_pos = ref None in
let placeholder_n = ref 0 in
let expr_positions_overlapping_selection = ref [] in
let ensure_selection_common_root =
(* filter out invalid selection like this:
(1 + 2) + 3
^-----^ selection
*)
Option.filter ~f:(fun candidate ->
List.for_all !expr_positions_overlapping_selection ~f:(fun p ->
Pos.(contains candidate.pos p || contains p candidate.pos)))
in
object
inherit [candidate option] Tast_visitor.reduce as super
method zero = None
method plus = Option.first_some
method! on_method_ env meth =
ensure_selection_common_root @@ super#on_method_ env meth
method! on_fun_def env fd =
ensure_selection_common_root @@ super#on_fun_def env fd
method! on_lid env lid =
let name = Local_id.get_name @@ snd lid in
if Str.string_match placeholder_regexp name 0 then
Str.matched_group 1 name
|> int_of_string_opt
|> Option.iter ~f:(fun n -> placeholder_n := max (n + 1) !placeholder_n);
super#on_lid env lid
method! on_func_body env fb =
let acc = super#on_func_body env fb in
(match List.hd fb.Aast.fb_ast with
| Some (pos, _) -> stmt_pos := pos
| _ -> ());
match acc with
| Some acc -> Some { acc with placeholder_n = !placeholder_n }
| None -> None
method! on_stmt env stmt =
stmt_pos := fst stmt;
super#on_stmt env stmt
method! on_expr env expr =
let (_, pos, expr_) = expr in
if Pos.overlaps selection pos then
expr_positions_overlapping_selection :=
pos :: !expr_positions_overlapping_selection;
match expr_ with
| Aast.(Binop { bop = Ast_defs.Eq _; lhs = (_, lhs_pos, _); rhs = _ }) ->
let acc = super#on_expr env expr in
Option.filter acc ~f:(fun candidate ->
not @@ Pos.contains lhs_pos candidate.pos)
| Aast.Lfun (Aast.{ f_body; _ }, _) ->
expression_lambda_pos :=
Option.some_if
(might_be_expression_lambda ~f_body ~pos ~source_text)
pos;
super#on_expr env expr
| Aast.Efun _ ->
expression_lambda_pos := None;
super#on_expr env expr
| _ ->
if
Pos.contains selection pos
&& (not @@ Pos.equal !stmt_pos Pos.none)
&& not
(Option.map !expression_lambda_pos ~f:(fun lpos ->
Pos.contains lpos pos)
|> Option.value ~default:false)
then
Some
{
stmt_pos = !stmt_pos;
pos;
placeholder_n = 0 (* will be adjusted on the way up *);
}
else
super#on_expr env expr
end
(** Ensures that `positions_visitor` only traverses
functions and methods whose spans contain
the selected range *)
let top_visitor (selection : Pos.t) ~source_text =
let should_traverse outer = Pos.contains outer selection in
object
inherit [candidate option] Tast_visitor.reduce
method zero = None
method plus = Option.first_some
method! on_method_ env meth =
if should_traverse meth.Aast.m_span then
(positions_visitor selection ~source_text)#on_method_ env meth
else
None
method! on_fun_def env fun_def =
if should_traverse Aast.(fun_def.fd_fun.f_span) then
(positions_visitor selection ~source_text)#on_fun_def env fun_def
else
None
end
(** Generate a snippet from the placeholder number.
This relies on a nonstandard LSP extension recognized by the client:
https://fburl.com/code/0vzkqds8. We can implement non-hackily if LSP is updated:
https://github.com/microsoft/language-server-protocol/issues/592 *)
let placeholder_name_of_n (n : int) = Format.sprintf "$${0:placeholder%d}" n
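(* Illustrative example (added for clarity; not part of the original module):
for placeholder number 2 this yields "$${0:placeholder2}", i.e. the Hack
variable $placeholder2 wrapped in the client's snippet syntax. *)
let _example_placeholder_name : string = placeholder_name_of_n 2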
let refactor_of_candidate ~source_text ~path { stmt_pos; pos; placeholder_n } =
let placeholder = placeholder_name_of_n placeholder_n in
let exp_string = Full_fidelity_source_text.sub_of_pos source_text pos in
let change_expression =
Lsp.TextEdit.
{
range = Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal pos;
newText = placeholder;
}
in
let change_add_assignment =
let (line, character) =
Pos.line_column stmt_pos |> Tuple2.map_fst ~f:(( + ) (-1))
in
let indent = String.make character ' ' in
Lsp.
{
TextEdit.range =
{ start = { line; character }; end_ = { line; character } };
newText = Printf.sprintf "%s = %s;\n%s" placeholder exp_string indent;
}
in
let edit =
lazy
(let changes =
SMap.singleton
(Relative_path.to_absolute path)
[change_add_assignment; change_expression]
in
Lsp.WorkspaceEdit.{ changes })
in
Code_action_types.Refactor.{ title = "Extract into variable"; edit }
let find ~entry ~(range : Lsp.range) ctx =
let path = entry.Provider_context.path in
let source_text = Ast_provider.compute_source_text ~entry in
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let selection = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
(top_visitor selection ~source_text)#go
ctx
tast.Tast_with_dynamic.under_normal_assumptions
|> Option.map ~f:(refactor_of_candidate ~source_text ~path)
|> Option.to_list |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_variable.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/flip_around_comma.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Candidate : sig
(**
In `foo(param_a, param_b, param_c)`
^
|
selection
`positions` holds the position of each of the params
`insertion_index` is 2 (in bounds by construction)
`pos` is the span from the start of `param_a` to the end of `param_c`
*)
type t = private {
positions: Pos.t list;
insertion_index: int;
pos: Pos.t;
}
val create_exn : positions:Pos.t list -> insertion_index:int -> t
end = struct
type t = {
positions: Pos.t list;
insertion_index: int;
pos: Pos.t;
}
let create_exn ~positions ~insertion_index =
if insertion_index >= List.length positions then begin
HackEventLogger.invariant_violation_bug
~data:
(Printf.sprintf
"insertion index: %d positions length: %d"
insertion_index
(List.length positions))
"flip around comma: insertion index out of bounds";
failwith "flip around comma: insertion index out of bounds"
end else
let pos =
let first = List.hd_exn positions in
let last = List.last_exn positions in
Pos.merge first last
in
{ insertion_index; positions; pos }
end
let list_flip ~insertion_index l =
let rec aux i = function
| h1 :: h2 :: tail when i = insertion_index - 1 -> h2 :: h1 :: tail
| h :: tail -> h :: aux (i + 1) tail
| [] -> []
in
aux 0 l
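(* Illustrative example (added for clarity; not part of the original module):
with insertion_index = 2, the element just before the insertion point is
swapped with the one just after it, so the result is ["a"; "c"; "b"]. *)
let _example_list_flip : string list =
list_flip ~insertion_index:2 ["a"; "b"; "c"]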
let find_insertion_index ~(cursor : Pos.t) (positions : Pos.t list) : int option
=
let is_after_cursor pos = Pos.start_offset pos > Pos.start_offset cursor in
let is_before_cursor pos = Pos.start_offset pos < Pos.start_offset cursor in
if positions |> List.exists ~f:is_before_cursor then
positions |> List.findi ~f:(fun _i -> is_after_cursor) |> Option.map ~f:fst
else
None
let find_candidate ~(cursor : Pos.t) (positions : Pos.t list) :
Candidate.t option =
find_insertion_index ~cursor positions
|> Option.map ~f:(fun insertion_index ->
Candidate.create_exn ~insertion_index ~positions)
let pos_of_expr = Tuple3.get2
let pos_of_shape_field_name =
Ast_defs.(
function
| SFlit_int (pos, _) -> pos
| SFlit_str (pos, _) -> pos
| SFclass_const (_, (pos, _)) -> pos)
let pos_of_type_hint : 'a Aast_defs.type_hint -> Pos.t = function
| (_, Some (pos, _)) -> pos
| (_, None) -> Pos.none
let option_or_thunk opt ~f =
match opt with
| Some _ -> opt
| None -> f ()
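(* Illustrative example (added for clarity; not part of the original module):
`option_or_thunk` is a lazy "or else" -- the fallback thunk runs only when the
first option is None, so this evaluates to Some 1. *)
let _example_option_or_thunk : int option =
option_or_thunk None ~f:(fun () -> Some 1)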
let visitor ~(cursor : Pos.t) =
let find_in_positions = find_candidate ~cursor in
let find_in_positions_params params =
let is_easy_to_flip param =
Aast_defs.(
(not param.param_is_variadic)
&& Option.is_none param.param_readonly
&& Option.is_none param.param_visibility
&& (match param.param_callconv with
| Ast_defs.Pnormal -> true
| Ast_defs.Pinout _ -> false)
&& List.is_empty param.param_user_attributes)
in
let pos_of_expr_opt = function
| Some (_, pos, _) -> pos
| None -> Pos.none
in
if List.for_all params ~f:is_easy_to_flip then
params
|> List.map
~f:
Aast_defs.(
fun param ->
List.fold
~init:param.param_pos
~f:Pos.merge
[
pos_of_expr_opt param.param_expr;
pos_of_type_hint param.param_type_hint;
])
|> find_in_positions
else
None
in
object
inherit [Candidate.t option] Tast_visitor.reduce as super
method zero = None
method plus = Option.first_some
method! on_method_ env meth =
if Pos.contains meth.Aast_defs.m_span cursor then
option_or_thunk (super#on_method_ env meth) ~f:(fun () ->
find_in_positions_params meth.Aast_defs.m_params)
else
None
method! on_class_ env class_ =
if Pos.contains class_.Aast_defs.c_span cursor then
super#on_class_ env class_
else
None
method! on_fun_ env fun_ =
if Pos.contains fun_.Aast_defs.f_span cursor then
option_or_thunk (super#on_fun_ env fun_) ~f:(fun () ->
find_in_positions_params fun_.Aast_defs.f_params)
else
None
method! on_expr_ env expr =
option_or_thunk (super#on_expr_ env expr) ~f:(fun () ->
match expr with
| Aast_defs.(Call { args; _ }) ->
args
|> List.map
~f:
Ast_defs.(
function
| (Pinout inout_pos, expr) ->
Pos.merge inout_pos (pos_of_expr expr)
| (Pnormal, expr) -> pos_of_expr expr)
|> find_in_positions
| Aast_defs.ValCollection (_, _, exprs)
| Aast_defs.List exprs
| Aast_defs.Tuple exprs ->
exprs |> List.map ~f:pos_of_expr |> find_in_positions
| Aast_defs.KeyValCollection (_, _, fields) ->
fields
|> List.map ~f:(fun (e1, e2) ->
Pos.merge (pos_of_expr e1) (pos_of_expr e2))
|> find_in_positions
| Aast_defs.Shape fields ->
fields
|> List.map ~f:(fun (field_name, expr) ->
Pos.merge
(pos_of_shape_field_name field_name)
(pos_of_expr expr))
|> find_in_positions
| _ -> None)
end
let edit_of_candidate
~path ~source_text Candidate.{ insertion_index; positions; pos } =
let text =
positions
|> List.map ~f:(Full_fidelity_source_text.sub_of_pos source_text)
|> list_flip ~insertion_index
|> String.concat ~sep:", "
in
let change =
Lsp.
{
TextEdit.range =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal pos;
newText = text;
}
in
let changes = SMap.singleton (Relative_path.to_absolute path) [change] in
Lsp.WorkspaceEdit.{ changes }
let refactor_of_candidate ~path ~source_text candidate =
let edit = lazy (edit_of_candidate ~path ~source_text candidate) in
Code_action_types.Refactor.{ title = "Flip around comma"; edit }
let find ~entry ~(range : Lsp.range) ctx =
if not (Lsp_helpers.lsp_range_is_selection range) then
let source_text = Ast_provider.compute_source_text ~entry in
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
let path = entry.Provider_context.path in
let cursor = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
(visitor ~cursor)#go ctx tast.Tast_with_dynamic.under_normal_assumptions
|> Option.map ~f:(refactor_of_candidate ~path ~source_text)
|> Option.to_list
else
[] |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/flip_around_comma.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/inline_variable.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type def = {
def_pos: Pos.t;
def_rhs_pos: Pos.t;
def_needs_grouping: bool;
(** If `def_needs_grouping` is set, we need to parenthesize the definition's right-hand side when inlining:
```
$x = 3 + 4;
foo($x * 5); --inline--> foo((3+4) * 5)
```
*)
def_deps: String.Set.t; (** In `$x = 0 + $y`, `$x` depends on `$y` *)
}
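(* Hypothetical value (added for illustration; positions are dummies): the
[def] one might record for the statement `$x = 3 + $y;`. The right-hand side
is a binop, so inlining must parenthesize it, and `$x` depends on `$y`. *)
let _example_def : def =
{
def_pos = Pos.none;
def_rhs_pos = Pos.none;
def_needs_grouping = true;
def_deps = String.Set.of_list ["$y"];
}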
type candidate = {
name: string;
def: def;
use_pos: Pos.t;
}
let remove_leading_whitespace ~source_text pos : Pos.t =
let rec calc_strip_amount offset =
let ch = Full_fidelity_source_text.get source_text offset in
if offset = 0 || Char.equal ch '\n' then
0
else if Char.is_whitespace ch then
1 + calc_strip_amount (offset - 1)
else
0
in
let orig_col_start = snd @@ Pos.line_column pos in
let strip_amount = calc_strip_amount (Pos.start_offset pos - 1) in
Pos.set_col_start (orig_col_start - strip_amount) pos
let next_char_is_newline ~source_text pos : bool =
let offset =
let (line, col) = Pos.end_line_column pos in
Full_fidelity_source_text.position_to_offset source_text (line, col + 1)
in
let ch = Full_fidelity_source_text.get source_text offset in
Char.(ch = '\n')
module Var_info : sig
type t
val empty : t
(** Useful for calculating the dependencies of a variable. In `$x = 0 + $y`, `$x` depends on `$y`. *)
val referenced : t -> String.Set.t
val to_candidate_opt : t -> selection:Pos.t -> candidate option
val add_def : t -> name:string -> def:def -> t
val add_use : t -> name:string -> use_pos:Pos.t -> t
val mark_ineligible : t -> name:string -> t
val merge : t -> t -> t
end = struct
(** The state of a variable: updated as we traverse the tast *)
type info =
| Used_never of def
| Used_once of {
use_pos: Pos.t;
def: def;
}
(** `Used_once` is the "good" state: we only offer to inline variables that are used exactly once *)
| Used_undefined of Pos.t
(**
`Used_undefined` indicates a use of a variable that is not defined.
We do *not* traverse the AST in the same order as variable bindings,
so we may discover later that the variable is indeed defined.
See [merge] for an example of how `Used_undefined` is used.
*)
| Ineligible
let use_pos_of_info = function
| Used_never _ -> None
| Used_once { use_pos; _ } -> Some use_pos
| Used_undefined use_pos -> Some use_pos
| Ineligible -> None
type t = {
referenced: String.Set.t;
infos: info String.Map.t;
}
let referenced { referenced; _ } = referenced
let empty = { infos = String.Map.empty; referenced = String.Set.empty }
(**
A variable is a candidate iff:
- The variable is not `Ineligible`
- It's the only variable used in the selection
- None of the variable's deps is redefined between the definition and use of the variable.
Here is a case where we cannot inline `$x` because a dep (`$y`) changes:
```
$y = 3;
$x = $y; // $x is defined, depends on $y
$y = 6;
// inlining $x on the next line would change behavior because $y was redefined between use and def of $x
foo( $x );
// ^-^ selection
```
*)
let to_candidate_opt { infos; _ } ~selection =
begin
let vars_in_selection =
infos
|> String.Map.filter ~f:(fun info ->
match use_pos_of_info info with
| Some use_pos -> Pos.contains use_pos selection
| None -> false)
|> String.Map.to_alist
in
match vars_in_selection with
| [(name, Used_once { def; use_pos })] ->
let dep_may_have_changed =
def.def_deps
|> String.Set.exists ~f:(fun dep ->
String.Map.find infos dep
|> Option.exists ~f:(fun dep_info ->
match use_pos_of_info dep_info with
| Some use_pos ->
Pos.start_offset use_pos
> Pos.end_offset def.def_rhs_pos
| None -> true))
in
if not dep_may_have_changed then
Some { name; def; use_pos }
else
None
| _ -> None
end
let add_def { referenced; infos } ~name ~def =
let infos = String.Map.set infos ~key:name ~data:(Used_never def) in
{ referenced; infos }
let add_use { referenced; infos } ~name ~use_pos =
let infos =
String.Map.update infos name ~f:(function
| None -> Used_undefined use_pos
| Some (Used_never def) -> Used_once { use_pos; def }
| Some (Used_once _ | Used_undefined _ | Ineligible) -> Ineligible)
in
let referenced = String.Set.add referenced name in
{ referenced; infos }
let mark_ineligible { referenced; infos } ~name =
let infos = String.Map.set infos ~key:name ~data:Ineligible in
{ referenced; infos }
let merge
{ referenced = _; infos = infos1 } { referenced = _; infos = infos2 } =
let infos =
String.Map.merge_skewed infos1 infos2 ~combine:(fun ~key:_ v1 v2 ->
match (v1, v2) with
| (Used_never def, Used_undefined use_pos) ->
(*
This branch is hit for code like the following:
$x = 0; // $x is `Used_never` here
$_ = $x + 1; // $x is `Used_undefined` here
*)
Used_once { def; use_pos }
| (_, Used_never _) ->
(* redefined *)
v2
| ( (Used_never _ | Used_once _ | Used_undefined _ | Ineligible),
(Used_once _ | Used_undefined _ | Ineligible) ) ->
Ineligible)
in
{ referenced = String.Set.empty; infos }
end
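(* Illustrative sketch (added for clarity; thunked so nothing runs at module
load time, and positions are dummies): recording a definition of `$x` followed
by a single use of it puts `$x` in the `Used_once` state, the only state in
which inlining is offered. *)
let _example_var_info () : Var_info.t =
let def =
{
def_pos = Pos.none;
def_rhs_pos = Pos.none;
def_needs_grouping = false;
def_deps = String.Set.empty;
}
in
Var_info.empty
|> Var_info.add_def ~name:"$x" ~def
|> Var_info.add_use ~name:"$x" ~use_pos:Pos.none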
let visitor ~(selection : Pos.t) =
let in_lvalue = ref false in
let with_in_lvalue f =
let orig_in_lvalue = !in_lvalue in
in_lvalue := true;
let res = f () in
in_lvalue := orig_in_lvalue;
res
in
object (self)
inherit [Var_info.t] Tast_visitor.reduce as super
method zero = Var_info.empty
method plus = Var_info.merge
method! on_class_ env class_ =
if Pos.contains class_.Aast.c_span selection then
super#on_class_ env class_
else
Var_info.empty
method! on_fun_ env fun_ =
if Pos.contains fun_.Aast.f_span selection then
super#on_fun_ env fun_
else
Var_info.empty
method! on_fun_param env param =
super#on_fun_param env param
|> Var_info.mark_ineligible ~name:param.Aast.param_name
method! on_stmt env stmt =
let (pos, stmt_) = stmt in
let open Aast in
match stmt_ with
| Expr
( _,
_,
Binop
{ bop = Ast_defs.Eq _; lhs = (_, _, Lvar (_, lid)) as lhs; rhs }
) ->
let name = Local_id.get_name lid in
let def_needs_grouping =
match rhs with
| (_, _, (Binop _ | Pipe _)) -> true
| _ -> false
in
let rhs_acc = self#on_expr env rhs in
let def =
{
def_pos = pos;
def_rhs_pos = Tuple3.get2 rhs;
def_needs_grouping;
def_deps = Var_info.referenced rhs_acc;
}
in
let acc = Var_info.add_def ~name ~def rhs_acc in
self#plus acc (with_in_lvalue (fun () -> self#on_expr env lhs))
| _ -> super#on_stmt env stmt
method! on_expr env expr =
let open Aast in
match Tuple3.get3 expr with
| Lvar (use_pos, lid) when not !in_lvalue ->
let name = Local_id.get_name lid in
Var_info.add_use Var_info.empty ~name ~use_pos
| _ -> super#on_expr env expr
end
let edit_of_candidate ~path ~source_text { def; use_pos; _ } :
Lsp.WorkspaceEdit.t =
let change_replace_def =
let pos = remove_leading_whitespace ~source_text def.def_pos in
let range =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal pos
in
let range =
if next_char_is_newline ~source_text def.def_pos then
Lsp.{ range with end_ = { line = range.end_.line + 1; character = 0 } }
else
range
in
Lsp.{ TextEdit.range; newText = "" }
in
let change_replace_use =
let text =
Full_fidelity_source_text.sub_of_pos source_text def.def_rhs_pos
in
let text =
if def.def_needs_grouping then
Printf.sprintf "(%s)" text
else
text
in
Lsp.
{
TextEdit.range =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal use_pos;
newText = text;
}
in
let changes =
SMap.singleton
(Relative_path.to_absolute path)
[change_replace_def; change_replace_use]
in
Lsp.WorkspaceEdit.{ changes }
let refactor_of_candidate ~path ~source_text candidate =
let edit = lazy (edit_of_candidate ~path ~source_text candidate) in
Code_action_types.Refactor.
{ title = Printf.sprintf "Inline variable %s" candidate.name; edit }
let find ~entry ~(range : Lsp.range) ctx =
let source_text = Ast_provider.compute_source_text ~entry in
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
let path = entry.Provider_context.path in
let selection = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
(visitor ~selection)#go ctx tast.Tast_with_dynamic.under_normal_assumptions
|> Var_info.to_candidate_opt ~selection
|> Option.map ~f:(refactor_of_candidate ~path ~source_text)
|> Option.to_list |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/inline_variable.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/override_method.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let to_range (pos : Pos.t) : Lsp.range =
let (first_line, first_col) = Pos.line_column pos in
let (last_line, last_col) = Pos.end_line_column pos in
{
Lsp.start = { Lsp.line = first_line - 1; character = first_col };
end_ = { Lsp.line = last_line - 1; character = last_col };
}
let stub_method_action
~(is_static : bool)
(class_name : string)
(parent_name : string)
((meth_name, meth) : string * Typing_defs.class_elt) : Pos.t Quickfix.t =
let new_text =
Typing_skeleton.of_method ~is_static ~is_override:true meth_name meth ^ "\n"
in
let title =
Printf.sprintf
"Add override for %s::%s"
(Utils.strip_ns parent_name)
meth_name
in
Quickfix.make_classish ~title ~new_text ~classish_name:class_name
(* Return a list of quickfixes for [cls] which add a method that
overrides one in [parent_name]. *)
let override_method_quickfixes
(env : Tast_env.env) (cls : Tast.class_) (parent_name : string) :
Pos.t Quickfix.t list =
let (_, class_name) = cls.Aast.c_name in
let existing_methods =
SSet.of_list (List.map cls.Aast.c_methods ~f:(fun m -> snd m.Aast.m_name))
in
match Decl_provider.get_class (Tast_env.get_ctx env) parent_name with
| Some decl ->
(* Offer an override action for any inherited method which isn't
final and that the current class hasn't already overridden. *)
let actions_for_methods ~is_static methods =
methods
|> List.filter ~f:(fun (name, meth) ->
(not (SSet.mem name existing_methods))
&& not (Typing_defs.get_ce_final meth))
|> List.map ~f:(stub_method_action ~is_static class_name parent_name)
in
actions_for_methods ~is_static:false (Decl_provider.Class.methods decl)
@ actions_for_methods ~is_static:true (Decl_provider.Class.smethods decl)
| None -> []
(* Quickfixes available at cursor position [start_line] and
[start_col]. These aren't associated with errors; rather, they
transform code from one valid state to another. *)
let override_method_refactorings_at ~start_line ~start_col =
object
inherit [_] Tast_visitor.reduce as super
method zero = []
method plus = ( @ )
method! on_class_ env c =
let acc = super#on_class_ env c in
let meth_actions =
match c.Aast.c_kind with
| Ast_defs.Cclass _ ->
List.map c.Aast.c_extends ~f:(fun (parent_id_pos, parent_hint) ->
if Pos.inside parent_id_pos start_line start_col then
match parent_hint with
| Aast.Happly ((_, parent_name), _) ->
override_method_quickfixes env c parent_name
| _ -> []
else
[])
| _ -> []
in
List.concat meth_actions @ acc
end
let text_edits (classish_starts : Pos.t SMap.t) (quickfix : Pos.t Quickfix.t) :
Lsp.TextEdit.t list =
let edits = Quickfix.get_edits ~classish_starts quickfix in
List.map edits ~f:(fun (new_text, pos) ->
{ Lsp.TextEdit.range = to_range pos; newText = new_text })
let refactor_action
path (classish_starts : Pos.t SMap.t) (quickfix : Pos.t Quickfix.t) :
Code_action_types.Refactor.t =
let edit =
lazy
(let changes =
SMap.singleton
(Relative_path.to_absolute path)
(text_edits classish_starts quickfix)
in
Lsp.WorkspaceEdit.{ changes })
in
Code_action_types.Refactor.{ title = Quickfix.get_title quickfix; edit }
let find ~entry ~(range : Lsp.range) ctx =
let Lsp.{ start = { line = start_line; character = start_col }; _ } = range in
let cst = Ast_provider.compute_cst ~ctx ~entry in
let tree = Provider_context.PositionedSyntaxTree.root cst in
let path = entry.Provider_context.path in
let source_text = Ast_provider.compute_source_text ~entry in
let classish_starts = Quickfix_ffp.classish_starts tree source_text path in
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
let override_method_refactorings =
(override_method_refactorings_at ~start_line ~start_col)#go
ctx
tast.Tast_with_dynamic.under_normal_assumptions
in
List.map
override_method_refactorings
~f:(refactor_action path classish_starts) |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/override_method.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/quickfixes.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let to_range (pos : Pos.t) : Lsp.range =
let (first_line, first_col) = Pos.line_column pos in
let (last_line, last_col) = Pos.end_line_column pos in
{
Lsp.start = { Lsp.line = first_line - 1; character = first_col };
end_ = { Lsp.line = last_line - 1; character = last_col };
}
let text_edits (classish_starts : Pos.t SMap.t) (quickfix : Pos.t Quickfix.t) :
Lsp.TextEdit.t list =
let edits = Quickfix.get_edits ~classish_starts quickfix in
List.map edits ~f:(fun (new_text, pos) ->
{ Lsp.TextEdit.range = to_range pos; newText = new_text })
let fix_action
path (classish_starts : Pos.t SMap.t) (quickfix : Pos.t Quickfix.t) :
Code_action_types.Quickfix.t =
let open Lsp in
let edit =
lazy
(let changes =
SMap.singleton
(Relative_path.to_absolute path)
(text_edits classish_starts quickfix)
in
WorkspaceEdit.{ changes })
in
Code_action_types.Quickfix.{ title = Quickfix.get_title quickfix; edit }
let actions_for_errors
(errors : Errors.t)
(path : Relative_path.t)
(classish_starts : Pos.t SMap.t)
~(start_line : int)
~(start_col : int) : Code_action_types.Quickfix.t list =
let errors = Errors.get_error_list ~drop_fixmed:false errors in
let errors_here =
List.filter errors ~f:(fun e ->
let e_pos = User_error.get_pos e in
Pos.inside e_pos start_line start_col)
in
let quickfixes =
List.map ~f:User_error.quickfixes errors_here |> List.concat
in
List.map quickfixes ~f:(fun qf -> fix_action path classish_starts qf)
let find ~ctx ~entry ~(range : Lsp.range) : Code_action_types.Quickfix.t list =
let Lsp.{ start = { line = start_line; character = start_col }; _ } = range in
let cst = Ast_provider.compute_cst ~ctx ~entry in
let tree = Provider_context.PositionedSyntaxTree.root cst in
let classish_starts =
match entry.Provider_context.source_text with
| Some source_text ->
Quickfix_ffp.classish_starts tree source_text entry.Provider_context.path
| None -> SMap.empty
in
let { Tast_provider.Compute_tast_and_errors.errors; _ } =
Tast_provider.compute_tast_and_errors_quarantined ~ctx ~entry
in
let path = entry.Provider_context.path in
actions_for_errors errors path classish_starts ~start_line ~start_col |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/quickfixes.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
range:Lsp.range ->
Code_action_types.Quickfix.t list |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/refactors.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
let find ~entry ~(range : Lsp.range) ctx =
let variable_actions =
match Inline_variable.find ~entry ~range ctx with
| [] -> Extract_variable.find ~entry ~range ctx
| actions -> actions
in
Override_method.find ~entry ~range ctx
@ variable_actions
@ Inline_method.find ~entry ~range ctx
@ Extract_method.find ~entry ~range ctx
@ Extract_classish.find ~entry ~range ctx
@ Extract_shape_type.find ~entry ~range ctx
@ Flip_around_comma.find ~entry ~range ctx
@ Add_local_type_hint.find ~entry ~range ctx |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/refactors.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/server_code_actions_services.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
(**
For LSP "textDocument/codeAction" response, we do not compute the edit for the action.
For LSP "codeAction/resolve" response, we compute the edit.
We never use commands on the server side of the code action flow: afaict that's a legacy technique
from before "codeAction/resolve" was introduced.
See [CodeAction.edit_or_command] in lsp.ml for more on the code action flow.
*)
type resolvable_command_or_action =
Lsp.WorkspaceEdit.t Lazy.t Lsp.CodeAction.command_or_action_
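(* Minimal sketch (added for illustration; the title and the empty edit are
made up): a resolvable action carries its edit as a thunk, so the edit is only
computed if the client later picks this action via codeAction/resolve. *)
let _example_resolvable_action : resolvable_command_or_action =
Lsp.CodeAction.Action
{
Lsp.CodeAction.title = "Example action";
kind = Lsp.CodeActionKind.refactor;
diagnostics = [];
action =
Lsp.CodeAction.UnresolvedEdit (lazy Lsp.WorkspaceEdit.{ changes = SMap.empty });
}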
let lsp_range_of_ide_range (ide_range : Ide_api_types.range) : Lsp.range =
let lsp_pos_of_ide_pos ide_pos =
Lsp.
{
line = ide_pos.Ide_api_types.line;
character = ide_pos.Ide_api_types.column;
}
in
Lsp.
{
start = lsp_pos_of_ide_pos ide_range.Ide_api_types.st;
end_ = lsp_pos_of_ide_pos ide_range.Ide_api_types.ed;
}
(* Example: from "${0:placeholder}" this extracts "placeholder" *)
let snippet_regexp = Str.regexp {|\${[0-9]+:\([^}]+\)}|}
let remove_snippets Lsp.WorkspaceEdit.{ changes } =
let un_snippet_string = Str.global_replace snippet_regexp {|\1|} in
let un_snippet_text_edit text_edit =
Lsp.TextEdit.
{ text_edit with newText = un_snippet_string text_edit.newText }
in
let changes = SMap.map (List.map ~f:un_snippet_text_edit) changes in
Lsp.WorkspaceEdit.{ changes }
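(* Illustrative example (added for clarity; not part of the original module):
for a client without snippet support, the snippet syntax collapses to its
default text, e.g. "$x = ${0:placeholder0};" becomes "$x = placeholder0;". *)
let _example_unsnippeted : string =
Str.global_replace snippet_regexp {|\1|} "$x = ${0:placeholder0};"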
let find
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(range : Lsp.range) : resolvable_command_or_action list =
let to_action ~title ~edit ~kind =
Lsp.CodeAction.Action
{
Lsp.CodeAction.title;
kind;
diagnostics = [];
action = Lsp.CodeAction.UnresolvedEdit edit;
}
in
let quickfixes =
Quickfixes.find ~ctx ~entry ~range
|> List.map ~f:(fun Code_action_types.Quickfix.{ title; edit } ->
to_action ~title ~edit ~kind:Lsp.CodeActionKind.quickfix)
in
let refactors =
Refactors.find ~entry ~range ctx
|> List.map ~f:(fun Code_action_types.Refactor.{ title; edit } ->
to_action ~title ~edit ~kind:Lsp.CodeActionKind.refactor)
in
quickfixes @ refactors
let update_edit ~f =
Lsp.CodeAction.(
function
| Command _ as c -> c
| Action ({ action; _ } as a) ->
let action =
match action with
(* Currently only [UnresolvedEdit] is used, since all code actions involve lazy edits *)
| UnresolvedEdit lazy_edit -> f lazy_edit
| EditOnly e -> EditOnly e
| CommandOnly c -> CommandOnly c
| BothEditThenCommand ca -> BothEditThenCommand ca
in
Action { a with action })
let go
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(range : Ide_api_types.range) =
let strip = update_edit ~f:(fun _ -> Lsp.CodeAction.UnresolvedEdit ()) in
find ~ctx ~entry ~range:(lsp_range_of_ide_range range) |> List.map ~f:strip
let content_modified =
Lsp.Error.
{
code = ContentModified;
message =
{|Expected the code action requested with codeAction/resolve to be findable.
Note: This error message may be caused by the source text changing between
when the code action menu pops up and when the user selects the code action.
In such cases we may not be able to find a code action at the same location with
the same title, so cannot resolve the code action.
|};
data = None;
}
let resolve
~(ctx : Provider_context.t)
~(entry : Provider_context.entry)
~(range : Ide_api_types.range)
~(resolve_title : string)
~(use_snippet_edits : bool) : Lsp.CodeActionResolve.result =
let transform_command_or_action :
Lsp.WorkspaceEdit.t Lazy.t Lsp.CodeAction.command_or_action_ ->
Lsp.CodeAction.resolved_command_or_action =
update_edit ~f:(fun lazy_edit ->
let edit = Lazy.force lazy_edit in
let edit =
if use_snippet_edits then
edit
else
remove_snippets edit
in
Lsp.CodeAction.EditOnly edit)
in
find ~ctx ~entry ~range:(lsp_range_of_ide_range range)
|> List.find ~f:(fun command_or_action ->
let title = Lsp_helpers.title_of_command_or_action command_or_action in
String.equal title resolve_title)
(* When we can't find a matching code action, ContentModified is the right error
per https://github.com/microsoft/language-server-protocol/issues/1738 *)
|> Result.of_option ~error:content_modified
|> Result.map ~f:transform_command_or_action |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/server_code_actions_services.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val go :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
range:Ide_api_types.range ->
Lsp.CodeAction.result
val resolve :
ctx:Provider_context.t ->
entry:Provider_context.entry ->
range:Ide_api_types.range ->
resolve_title:string ->
use_snippet_edits:bool ->
Lsp.CodeActionResolve.result |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/code_actions_cli_lib/code_actions_cli_lib.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module CodeAction = Lsp.CodeAction
let apply_patches_to_string old_content (patches : ServerRenameTypes.patch list)
: string =
let buf = Buffer.create (String.length old_content) in
let patch_list =
List.sort ~compare:ServerRenameTypes.compare_result patches
in
ServerRenameTypes.write_patches_to_buffer buf old_content patch_list;
Buffer.contents buf
let lsp_range_to_pos ~source_text path range =
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
(* Correct for inconsistencies in 0-vs-1-indexing elsewhere.
TODO(T154897774): fix off-by-ones in conversion functions and remove this adaptor
*)
let range =
Lsp.
{
start =
{ line = range.start.line + 1; character = range.start.character + 1 };
end_ =
{ line = range.end_.line + 1; character = range.end_.character + 1 };
}
in
Lsp_helpers.lsp_range_to_pos ~line_to_offset path range |> Pos.to_absolute
let patched_text_of_command_or_action ~source_text path = function
| CodeAction.Action
CodeAction.{ action = EditOnly Lsp.WorkspaceEdit.{ changes }; _ } ->
let to_patch Lsp.TextEdit.{ range; newText = text } =
let pos = lsp_range_to_pos ~source_text path range in
ServerRenameTypes.Replace ServerRenameTypes.{ pos; text }
in
let patches = SMap.values changes |> List.concat |> List.map ~f:to_patch in
let source_text = Sys_utils.cat @@ Relative_path.to_absolute path in
let rewritten_contents = apply_patches_to_string source_text patches in
Some rewritten_contents
| CodeAction.Action _
| CodeAction.Command _ ->
None
let run_exn ctx entry range ~title_prefix ~use_snippet_edits =
let commands_or_actions =
Server_code_actions_services.go ~ctx ~entry ~range
in
if List.is_empty commands_or_actions then
Printf.printf "No commands or actions found\n"
else begin
let description_triples =
commands_or_actions
|> List.map ~f:(function
| CodeAction.Action CodeAction.{ title; kind; _ } ->
let kind_str =
Printf.sprintf "CodeActionKind: %s"
@@ Lsp.CodeActionKind.string_of_kind kind
in
let is_selected = String.is_prefix ~prefix:title_prefix title in
(title, kind_str, is_selected)
| CodeAction.Command Lsp.Command.{ title; _ } ->
let is_selected = String.is_prefix ~prefix:title_prefix title in
(title, "Command", is_selected))
in
let separator = "\n------------------------------------------\n" in
Printf.printf "Code actions available:%s" separator;
description_triples
|> List.iter ~f:(fun (title, kind_str, is_selected) ->
let selected_str =
if is_selected then
" SELECTED"
else
""
in
Printf.printf "%s (%s)%s\n" title kind_str selected_str);
let selected_titles =
description_triples
|> List.filter_map ~f:(fun (title, _, is_selected) ->
Option.some_if is_selected title)
in
match selected_titles with
| [selected_title] ->
let resolved =
Server_code_actions_services.resolve
~ctx
~entry
~range
~resolve_title:selected_title
~use_snippet_edits
|> Result.map_error ~f:(fun e ->
Hh_json.json_to_string ~sort_keys:true ~pretty:true
@@ Lsp_fmt.print_error e)
|> Result.ok_or_failwith
in
let hermeticize_paths =
Str.global_replace (Str.regexp "\".+?.php\"") "\"FILE.php\""
in
let source_text =
entry.Provider_context.source_text |> Option.value_exn
in
let text_of_selected =
let patched_opt =
patched_text_of_command_or_action
~source_text
entry.Provider_context.path
resolved
in
match patched_opt with
| Some patched ->
Printf.sprintf "\nApplied edit for code action:%s%s" separator patched
| None -> "\nThe command_or_action cannot be converted into patches.\n"
in
Printf.printf "\nJSON for selected code action:%s" separator;
resolved
|> Result.return
|> Lsp_fmt.print_codeActionResolveResult
|> Hh_json.json_to_string ~sort_keys:true ~pretty:true
|> hermeticize_paths
|> Printf.printf "%s\n";
Printf.printf "%s\n" text_of_selected
| _ :: _ ->
Printf.printf
"\nMultiple code action titles match prefix: %s\n"
title_prefix
| [] ->
Printf.printf "\nNo code action titles match prefix: %s\n" title_prefix
end
let run ctx entry range ~title_prefix ~use_snippet_edits =
match run_exn ctx entry range ~title_prefix ~use_snippet_edits with
| exception exn -> print_endline @@ Exn.to_string exn
| () -> () |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/code_actions_cli_lib/code_actions_cli_lib.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
- Enables previewing code actions from the command line sans language server.
- Currently used only by hh_single_type_check
*)
val run :
Provider_context.t ->
Provider_context.entry ->
Ide_api_types.range ->
title_prefix:string ->
use_snippet_edits:bool ->
(* Corresponds to the experimental snippetTextEdit LSP client capability. *)
unit |
hhvm/hphp/hack/src/server/server_code_actions_services/code_actions_cli_lib/dune | (library
(name code_actions_cli_lib)
(wrapped true)
(libraries
full_fidelity
lsp
pos
server_code_actions_services
utils_core)
(preprocess
(pps lwt_ppx ppx_deriving.std))) |
|
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/code_action_types/code_action_types.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module Refactor = struct
type t = {
title: string;
edit: Lsp.WorkspaceEdit.t Lazy.t;
}
type find =
entry:Provider_context.entry ->
range:Lsp.range ->
Provider_context.t ->
t list
end
module Quickfix = struct
type t = {
title: string;
edit: Lsp.WorkspaceEdit.t Lazy.t;
}
end |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/code_action_types/code_action_types.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Internal representation of code actions for refactoring *)
module Refactor : sig
type t = {
title: string;
edit: Lsp.WorkspaceEdit.t Lazy.t;
}
type find =
entry:Provider_context.entry ->
range:Lsp.range ->
Provider_context.t ->
t list
end
(** Internal representation of code actions for quickfixes.
* Note that we do not include diagnostics.
* We can tell LSP which error this fixed, but we'd have to
* recompute the diagnostic from the error and there's no clear benefit *)
module Quickfix : sig
type t = {
title: string;
edit: Lsp.WorkspaceEdit.t Lazy.t;
}
end |
hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/dune | (library
(name extract_classish)
(wrapped true)
(libraries
annotated_ast
ast
code_action_types
full_fidelity
lsp
pos
provider_context
tast_env
tast_provider
utils_core)) |
|
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let find ~entry ~(range : Lsp.range) ctx =
if Lsp_helpers.lsp_range_is_selection range then
let source_text = Ast_provider.compute_source_text ~entry in
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
let path = entry.Provider_context.path in
let selection = Lsp_helpers.lsp_range_to_pos ~line_to_offset path range in
Extract_classish_find_candidate.find_candidate ~selection entry ctx
|> Option.map
~f:(Extract_classish_to_refactors.to_refactors source_text path)
|> Option.value ~default:[]
else
[] |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Provide refactors for when the user selects class members,
such as "Extract interface" *)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish_find_candidate.ml | open Hh_prelude
module T = Extract_classish_types
let class_kind_supports_extraction =
Ast_defs.(
function
| Cclass Concrete -> true
| Cclass Abstract -> false (* could handle with more logic *)
| Cinterface
| Ctrait
| Cenum
| Cenum_class _ ->
false)
let find_candidate ~(selection : Pos.t) (entry : Provider_context.entry) ctx :
T.candidate option =
let { Tast_provider.Compute_tast.tast; _ } =
Tast_provider.compute_tast_quarantined ~ctx ~entry
in
List.find_map
tast.Tast_with_dynamic.under_normal_assumptions
~f:
Aast_defs.(
function
| Class class_
when Pos.contains class_.c_span selection
&& class_kind_supports_extraction class_.c_kind ->
let selected_methods =
class_.c_methods
|> List.filter ~f:(fun meth -> Pos.contains selection meth.m_span)
in
if List.is_empty selected_methods then
None
else
Some T.{ class_; selected_methods }
| _ -> None) |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish_find_candidate.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find_candidate :
selection:Pos.t ->
Provider_context.entry ->
Provider_context.t ->
Extract_classish_types.candidate option |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish_to_refactors.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module PositionedTree =
Full_fidelity_syntax_tree.WithSyntax (Full_fidelity_positioned_syntax)
module T = Extract_classish_types
let placeholder_name = "Placeholder_"
let interface_body_of_methods source_text T.{ selected_methods; _ } : string =
let open Aast_defs in
let abstractify_one meth =
let stmts = meth.m_body.fb_ast in
let remove_async_modifier : string -> string =
match meth.m_fun_kind with
| Ast_defs.FSync
| Ast_defs.FGenerator ->
Fn.id
| Ast_defs.FAsync
| Ast_defs.FAsyncGenerator ->
String.substr_replace_first ~pos:0 ~pattern:"async " ~with_:""
in
match List.hd stmts with
| Some (stmt_pos, _) when not (Pos.equal stmt_pos Pos.none) ->
let body_until_first_statement_length =
Pos.start_offset stmt_pos - Pos.start_offset meth.m_span
in
Full_fidelity_source_text.sub_of_pos
source_text
~length:body_until_first_statement_length
meth.m_span
|> String.rstrip ~drop:(fun ch ->
Char.is_whitespace ch || Char.equal ch '{')
|> fun x -> x ^ ";" |> remove_async_modifier
| Some _
| None ->
Full_fidelity_source_text.sub_of_pos source_text meth.m_span
|> String.rstrip ~drop:(fun ch ->
Char.is_whitespace ch || Char.equal ch '}')
|> String.rstrip ~drop:(fun ch ->
Char.is_whitespace ch || Char.equal ch '{')
|> fun x -> x ^ ";" |> remove_async_modifier
in
selected_methods |> List.map ~f:abstractify_one |> String.concat ~sep:"\n"
let format_classish path ~(body : string) : string =
let classish = Printf.sprintf "interface %s {\n%s\n}" placeholder_name body in
let prefixed = "<?hh\n" ^ classish in
let strip_prefix s =
s
|> String.split_lines
|> (fun lines -> List.drop lines 1)
|> String.concat ~sep:"\n"
in
prefixed
|> Full_fidelity_source_text.make path
|> PositionedTree.make
|> Libhackfmt.format_tree
|> strip_prefix
|> fun x -> x ^ "\n\n"
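(* Illustrative call (added for clarity; thunked so the parser and formatter do
not run at module load time, and the body is made up): renders a
hackfmt-formatted `interface Placeholder_ { ... }` declaration by temporarily
prefixing "<?hh\n" so the fragment parses as a full file. *)
let _example_format_classish () : string =
format_classish Relative_path.default ~body:"public function foo(): void;"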
(** Create text edit for "interface Placeholder_ { .... }" *)
let extracted_classish_text_edit source_text path candidate : Lsp.TextEdit.t =
let range_of_extracted =
Pos.shrink_to_start candidate.T.class_.Aast_defs.c_span
|> Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal
in
let body = interface_body_of_methods source_text candidate in
let text = format_classish path ~body in
Lsp.TextEdit.{ range = range_of_extracted; newText = text }
(** Generate text edit like: "extends Placeholder_" *)
let update_implements_text_edit class_ : Lsp.TextEdit.t =
match List.last class_.Aast.c_implements with
| Some (pos, _) ->
let range =
pos
|> Pos.shrink_to_end
|> Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal
in
let text = Printf.sprintf ", %s" placeholder_name in
Lsp.TextEdit.{ range; newText = text }
| None ->
let range =
let extends_pos_opt =
class_.Aast.c_extends |> List.hd |> Option.map ~f:fst
in
let c_name_pos = class_.Aast.c_name |> fst in
extends_pos_opt
|> Option.value ~default:c_name_pos
|> Pos.shrink_to_end
|> Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal
in
let text = Printf.sprintf "\n implements %s" placeholder_name in
Lsp.TextEdit.{ range; newText = text }
let edit_of_candidate source_text path candidate : Lsp.WorkspaceEdit.t =
let edits =
let extracted_edit =
extracted_classish_text_edit source_text path candidate
in
let reference_edit = update_implements_text_edit candidate.T.class_ in
[reference_edit; extracted_edit]
in
let changes = SMap.singleton (Relative_path.to_absolute path) edits in
Lsp.WorkspaceEdit.{ changes }
let to_refactor source_text path candidate : Code_action_types.Refactor.t =
let edit = lazy (edit_of_candidate source_text path candidate) in
Code_action_types.Refactor.{ title = "Extract interface"; edit }
let to_refactors (source_text : Full_fidelity_source_text.t) path candidate :
Code_action_types.Refactor.t list =
[to_refactor source_text path candidate] |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish_to_refactors.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val to_refactors :
Full_fidelity_source_text.t ->
Relative_path.t ->
Extract_classish_types.candidate ->
Code_action_types.Refactor.t list |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish_types.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type candidate = {
class_: Tast.class_;
selected_methods: Tast.method_ list;
} |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_classish/extract_classish_types.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type candidate = {
class_: Tast.class_;
selected_methods: Tast.method_ list;
(** The user triggers the refactor by selecting
a range within a class body. We ignore any
properties, constants, attributes, etc. in this range.
*)
} |
hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/dune | (library
(name extract_method)
(wrapped true)
(libraries
annotated_ast
ast
extract_method_plugins
extract_method_types
full_fidelity
lsp
pos
provider_context
code_action_types
tast_env
tast_provider
utils_core)) |
|
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let find ~entry ~(range : Lsp.range) ctx : Code_action_types.Refactor.t list =
if Lsp_helpers.lsp_range_is_selection range then
let source_text = Ast_provider.compute_source_text ~entry in
let path = entry.Provider_context.path in
let selection =
let line_to_offset line =
Full_fidelity_source_text.position_to_offset source_text (line, 0)
in
Lsp_helpers.lsp_range_to_pos ~line_to_offset path range
in
match Extract_method_traverse.find_candidate ~selection ~entry ctx with
| Some candidate ->
let refactor =
Extract_method_to_refactor.of_candidate ~source_text ~path candidate
in
let refactors_from_plugins : Code_action_types.Refactor.t list =
Extract_method_plugins.find ~selection ~entry ctx candidate
in
refactors_from_plugins @ [refactor]
| None -> []
else
[] |
OCaml Interface | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val find : Code_action_types.Refactor.find |
OCaml | hhvm/hphp/hack/src/server/server_code_actions_services/extract_method/extract_method_to_refactor.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module T = Extract_method_types
module SyntaxTree =
Full_fidelity_syntax_tree.WithSyntax (Full_fidelity_positioned_syntax)
let tree_from_string s =
let source_text = Full_fidelity_source_text.make Relative_path.default s in
let env = Full_fidelity_parser_env.make ~mode:FileInfo.Mstrict () in
let tree = SyntaxTree.make ~env source_text in
if List.is_empty (SyntaxTree.all_errors tree) then
Some tree
else
None
let hackfmt src =
let prefix = "<?hh\n" in
match tree_from_string (prefix ^ src) with
| Some tree ->
tree |> Libhackfmt.format_tree |> String.chop_prefix_if_exists ~prefix
| None -> src
let indent ~(indent_amount : int) (s : string) : string =
let indentation = String.make indent_amount ' ' in
s
|> String.split_lines
|> List.map ~f:(fun line -> indentation ^ line)
|> String.concat ~sep:"\n"
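(* Illustrative example (added for clarity; not part of the original module):
every line of the rendered method is shifted right by [indent_amount] spaces,
so indent ~indent_amount:2 "a\nb" = "  a\n  b". *)
let _example_indent : string = indent ~indent_amount:2 "a\nb"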
let return_type_string_of_candidate
~(return : (string * T.ty_string) list)
T.{ selection_kind; is_async; iterator_kind; _ } =
let wrap_return_type =
match (is_async, iterator_kind) with
| (true, Some T.Iterator) -> Fn.const "AsyncIterator<_, _, _>"
| (true, Some T.KeyedIterator) -> Fn.const "AsyncKeyedIterator<_, _, _>"
| (false, Some T.Iterator) -> Fn.const "Iterator<_>"
| (false, Some T.KeyedIterator) -> Fn.const "KeyedIterator<_>"
| (true, None) -> Format.sprintf "Awaitable<%s>"
| (false, None) -> Fn.id
in
match selection_kind with
| T.SelectionKindExpression (T.Ty return_type_string) ->
wrap_return_type return_type_string
| T.SelectionKindStatement ->
wrap_return_type
@@
(match return with
| [] -> "void"
| [(_, T.Ty s)] -> s
| _ ->
return
|> List.map ~f:(fun (_, T.Ty s) -> s)
|> String.concat ~sep:", "
|> Format.sprintf "(%s)")
let body_string_of_candidate
~source_text
~(return : (string * T.ty_string) list)
T.{ selection_kind; pos; iterator_kind; method_pos; _ } =
let raw_body_string =
let (first_line, first_col) = Pos.line_column pos in
let exp_offset =
Full_fidelity_source_text.position_to_offset
source_text
(first_line, first_col + 1)
in
Full_fidelity_source_text.sub source_text exp_offset (Pos.length pos)
in
match selection_kind with
| T.SelectionKindExpression _ -> Format.sprintf "return %s;" raw_body_string
| T.SelectionKindStatement ->
if Option.is_some iterator_kind then
raw_body_string
else
let method_indent_amount = snd @@ Pos.line_column method_pos in
let format_as_return : string -> string =
let whitespace = String.make (2 * method_indent_amount) ' ' in
Format.sprintf "\n%sreturn %s;" whitespace
in
let return_string =
match return with
| [] -> ""
| [(var_name, _)] -> format_as_return var_name
| _ ->
return
|> List.map ~f:fst
|> String.concat ~sep:", "
|> Format.sprintf "tuple(%s)"
|> format_as_return
in
raw_body_string ^ return_string
let method_string_of_candidate
~source_text
~(params : (string * T.ty_string) list)
~(return : (string * T.ty_string) list)
~(snippet : string)
(T.{ method_is_static; is_async; method_pos; _ } as candidate) =
let return_type_string = return_type_string_of_candidate ~return candidate in
let body_string = body_string_of_candidate ~source_text ~return candidate in
let add_modifiers : string -> string =
let static_string =
if method_is_static then
(*
The extracted function is static iff the function we are extracting from is static.
We could "Principle of Least Privilege" and default to `static` if `this` isn't used *but*
that would make things harder to mock and some people like mocking.
*)
"static "
else
""
in
let function_kind_string =
if is_async then
"async "
else
""
in
Format.sprintf "private %s%s%s" static_string function_kind_string
in
let params_string =
params
|> List.map ~f:(fun (name, T.Ty shown_ty) ->
Format.sprintf "%s %s" shown_ty name)
|> String.concat ~sep:", "
in
(* we format as a function before adding modifiers, since a function is hackfmt-able (a valid top-level form) *)
let placeholder_to_replace_with_snippet = "the_function_name" in
let raw_function_string =
Format.sprintf
"function %s(%s): %s {\n%s\n}"
placeholder_to_replace_with_snippet
params_string
return_type_string
body_string
in
let indent_amount = snd @@ Pos.line_column method_pos in
let add_suffix s = s ^ "\n\n" in
raw_function_string
|> hackfmt
|> String.substr_replace_first
~pattern:placeholder_to_replace_with_snippet
~with_:snippet
|> add_modifiers
|> indent ~indent_amount
|> add_suffix
let method_call_string_of_candidate
~(params : (string * T.ty_string) list)
~(return : (string * T.ty_string) list)
~(snippet : string)
T.
{
method_is_static;
selection_kind;
is_async;
iterator_kind;
pos;
method_pos;
_;
} =
let args_string = params |> List.map ~f:fst |> String.concat ~sep:", " in
let receiver_string =
if method_is_static then
"self::"
else
"$this->"
in
let call_expr =
Format.sprintf "%s%s(%s)" receiver_string snippet args_string
in
match iterator_kind with
| None ->
(* examples:
- `foo($arg1)`
- `await foo($arg1, $arg2)`
*)
let call_expr =
if is_async then
Format.sprintf "await %s" call_expr
else
call_expr
in
(match selection_kind with
| T.SelectionKindExpression _ -> call_expr
| T.SelectionKindStatement ->
let fmt_assignment lhs_string =
Format.sprintf "%s = %s;" lhs_string call_expr
in
(match return with
| [] -> call_expr ^ ";"
| [(var_name, _)] -> fmt_assignment var_name
| _ ->
return
|> List.map ~f:fst
|> String.concat ~sep:", "
|> Format.sprintf "list(%s)"
|> fmt_assignment))
| Some iterator_kind ->
(* example:
foreach(self::foo() as $value__) {
}
*)
let await_string =
if is_async then
"await "
else
""
in
let as_string =
match iterator_kind with
| T.Iterator -> "$value__"
| T.KeyedIterator -> "$key__ => $value__"
in
let comment_and_whitespace =
(* generate comments like: "/* TODO: assign to $x, $y */"
TODO(T152359779): do more work for the user to handle assignments
*)
let indent_amount = snd @@ Pos.line_column method_pos in
let call_site_indent_amount = snd @@ Pos.line_column pos in
let outer_indent = String.make call_site_indent_amount ' ' in
let inner_indent =
String.make (call_site_indent_amount + indent_amount) ' '
in
let of_var_name_string var_names_string =
Format.sprintf
"\n%s/* TODO: assign to %s */\n%s\n%s"
inner_indent
var_names_string
inner_indent
outer_indent
in
match return with
| [] -> Format.sprintf "\n%s\n%s" inner_indent outer_indent
| [(var_name, _)] -> of_var_name_string var_name
| _ ->
return
|> List.map ~f:fst
|> String.concat ~sep:", "
|> of_var_name_string
in
Format.sprintf
"foreach (%s %sas %s) {%s}"
call_expr
await_string
as_string
comment_and_whitespace
let edit_of_candidate
~source_text
~path
(T.{ method_pos; params; return; pos; placeholder_name; _ } as candidate) :
Lsp.WorkspaceEdit.t =
let type_assoc_list_of map =
map
|> String.Map.to_alist ~key_order:`Increasing
|> List.dedup_and_sort ~compare:(fun (s1, _) (s2, _) ->
String.compare s1 s2)
in
let params = type_assoc_list_of params in
let return = type_assoc_list_of return in
let snippet = Format.sprintf "${0:%s}" placeholder_name in
let change_add_call =
let call_string =
method_call_string_of_candidate ~params ~return ~snippet candidate
in
{
Lsp.TextEdit.range =
Lsp_helpers.hack_pos_to_lsp_range ~equal:Relative_path.equal pos;
newText = call_string;
}
in
let change_add_method =
let line = (fst @@ Pos.line_column method_pos) - 1 in
let character = 0 in
let method_string =
method_string_of_candidate ~source_text ~params ~return ~snippet candidate
in
Lsp.
{
Lsp.TextEdit.range =
{ start = { line; character }; end_ = { line; character } };
newText = method_string;
}
in
let changes =
SMap.singleton
(Relative_path.to_absolute path)
[change_add_method; change_add_call]
in
Lsp.WorkspaceEdit.{ changes }
let of_candidate ~source_text ~path candidate =
let edit = lazy (edit_of_candidate ~source_text ~path candidate) in
Code_action_types.Refactor.{ title = "Extract into method"; edit } |