language
stringlengths 0
24
| filename
stringlengths 9
214
| code
stringlengths 99
9.93M
|
---|---|---|
OCaml Interface | hhvm/hphp/hack/src/utils/core/prim_defs.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
**
*
* "Primitive" definitions; fighting the dependency web, this module is a leaf
* on the dependency tree. It may only depend on external libraries and not on
* a single module inside the repository.
*
*)
type comment =
| CmtLine of string
| CmtBlock of string
[@@deriving eq, show]
val is_line_comment : comment -> bool
val string_of_comment : comment -> string |
OCaml | hhvm/hphp/hack/src/utils/core/random_id.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* Tracks whether [Random] has been seeded by this module; seeding happens
   lazily on first use rather than at module-load time. *)
let initialized = ref false
(* Do not use / in random ids as they appear in filenames. *)
(* 62-character alphabet (A-Z, a-z, 0-9). *)
let alphanumeric_alphabet =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
(** [short_string_with_alphabet alphabet] generates a short pseudo-random
    string drawn from [alphabet].
    Implementation notes (from the code below):
    - Seeds [Random] lazily via [self_init] on first call.
    - Builds a ~60-bit value from two 30-bit [Random.bits] calls.
    - Consumes 6 bits per character ([lsr 6]) but indexes with
      [mod String.length alphabet]; with the 62-character alphabet this is
      slightly biased and successive characters are not independent —
      fine for ids, not for anything security-sensitive.
    - Leading zero bits shorten the output; a zero value yields "". *)
let short_string_with_alphabet alphabet =
(* If we haven't seeded random then do it now *)
if not !initialized then (
initialized := true;
Random.self_init ()
);
let r = ref ((Random.bits () lsl 30) lor Random.bits ()) in
let cs = ref [] in
while !r > 0 do
let c = alphabet.[!r mod String.length alphabet] in
cs := String.make 1 c :: !cs;
r := !r lsr 6
done;
String.concat "" !cs
let short_string () = short_string_with_alphabet alphanumeric_alphabet |
OCaml Interface | hhvm/hphp/hack/src/utils/core/random_id.mli | (*
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Generates a short string with alphanumeric alphabet *)
val short_string : unit -> string |
OCaml | hhvm/hphp/hack/src/utils/core/stats.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* Not all stats are worth logging for every user. Things like the initial heap
* size are pretty deterministic if you know the input (i.e. the files being
* checked). In fact, it's *only* useful information if you know the input.
* This file is for storing these types of stats: Things that would be useful
* for a benchmark script to know, so it can say "for these inputs, under these
* conditions, here's how hh_server behaves".
*)
(** Mutable record of benchmark-style server stats; [gc_stat] is a snapshot
    taken when the record is built. *)
type t = {
mutable init_parsing_heap_size: int;
mutable init_heap_size: int;
mutable max_heap_size: int;
gc_stat: Gc.stat;
}
(* Single global stats record, mutated in place via the setters below. *)
let stats : t =
{
init_parsing_heap_size = 0;
init_heap_size = 0;
max_heap_size = 0;
gc_stat = Gc.quick_stat ();
}
(* Copy of the globals with a fresh GC snapshot taken now. *)
let get_stats () = { stats with gc_stat = Gc.quick_stat () }
(* Record a new high-water mark for the heap if [x] exceeds the current one. *)
let update_max_heap_size x = stats.max_heap_size <- max stats.max_heap_size x
(* Serializes a stats record to JSON. Note: the JSON keys intentionally
   differ from the field names ([init_heap_size] -> "init_shared_heap_size",
   [max_heap_size] -> "max_shared_heap_size"). *)
let to_json stats =
Hh_json.JSON_Object
[
("init_parsing_heap_size", Hh_json.int_ stats.init_parsing_heap_size);
("init_shared_heap_size", Hh_json.int_ stats.init_heap_size);
("max_shared_heap_size", Hh_json.int_ stats.max_heap_size);
("master_heap_words", Hh_json.int_ stats.gc_stat.Gc.heap_words);
("master_top_heap_words", Hh_json.int_ stats.gc_stat.Gc.top_heap_words);
] |
OCaml Interface | hhvm/hphp/hack/src/utils/core/stats.mli | (*
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* This `.mli` file was generated automatically. It may include extra
definitions that should not actually be exposed to the caller. If you notice
that this interface file is a poor interface, please take a few minutes to
clean it up manually, and then delete this comment once the interface is in
shape. *)
type t = {
mutable init_parsing_heap_size: int;
mutable init_heap_size: int;
mutable max_heap_size: int;
gc_stat: Gc.stat;
}
val stats : t
val get_stats : unit -> t
val update_max_heap_size : int -> unit
val to_json : t -> Hh_json.json |
OCaml | hhvm/hphp/hack/src/utils/core/telemetry.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
(* A single telemetry entry: key plus an arbitrary JSON value. *)
type key_value_pair = string * Hh_json.json [@@deriving show]
(** This list is in reverse order (i.e. most recent first) *)
type t = key_value_pair list [@@deriving show]
(* Ignore - we only use the generated `pp_key_value_pair` in deriving `show` for t *)
let _ = show_key_value_pair
(* Orders entries by key only; used to sort both sides before the
   key-by-key merges in [diff] and [add]. *)
let compare (left : key_value_pair) (right : key_value_pair) : int =
String.compare (fst left) (fst right)
let empty = []
let create () : t = empty
(* The list is most-recent-first, so reverse to serialize in insertion
   order. *)
let to_json (telemetry : t) : Hh_json.json =
Hh_json.JSON_Object (List.rev telemetry)
let to_string ?(pretty = false) (telemetry : t) : string =
to_json telemetry |> Hh_json.json_to_string ~pretty
(* Records [value] as a JSON string under [key], optionally truncated to
   [truncate] characters. *)
let string_
?(truncate : int option) ~(key : string) ~(value : string) (telemetry : t) :
t =
let value =
match truncate with
| None -> value
| Some truncate -> String_utils.truncate truncate value
in
(key, Hh_json.JSON_String value) :: telemetry
(* As [string_], but [None] records JSON null rather than omitting the key. *)
let string_opt
?(truncate : int option)
~(key : string)
~(value : string option)
(telemetry : t) : t =
match value with
| None -> (key, Hh_json.JSON_Null) :: telemetry
| Some value -> string_ ?truncate telemetry ~key ~value
(* Records [value] as a JSON array of strings, optionally truncating the
   list length and/or each element. *)
let string_list
?(truncate_list : int option)
?(truncate_each_string : int option)
~(key : string)
~(value : string list)
(telemetry : t) : t =
let value =
match truncate_list with
| None -> value
| Some truncate_list -> List.take value truncate_list
in
let value =
match truncate_each_string with
| None -> value
| Some truncate_each_string ->
List.map ~f:(fun s -> String_utils.truncate truncate_each_string s) value
in
let value = List.map ~f:(fun s -> Hh_json.JSON_String s) value in
(key, Hh_json.JSON_Array value) :: telemetry
(* As [string_list], but [None] records JSON null. *)
let string_list_opt
?(truncate_list : int option)
?(truncate_each_string : int option)
~(key : string)
~(value : string list option)
(telemetry : t) : t =
match value with
| None -> (key, Hh_json.JSON_Null) :: telemetry
| Some value ->
string_list ?truncate_list ?truncate_each_string telemetry ~key ~value
(* Records each sub-telemetry as a JSON object in an array under [key]. *)
let object_list ~(key : string) ~(value : t list) (telemetry : t) : t =
let value = List.map ~f:to_json value in
(key, Hh_json.JSON_Array value) :: telemetry
let bool_ ~(key : string) ~(value : bool) (telemetry : t) : t =
(key, Hh_json.JSON_Bool value) :: telemetry
let int_ ~(key : string) ~(value : int) (telemetry : t) : t =
(key, Hh_json.int_ value) :: telemetry
(* [None] records JSON null rather than omitting the key. *)
let int_opt ~(key : string) ~(value : int option) (telemetry : t) : t =
match value with
| None -> (key, Hh_json.JSON_Null) :: telemetry
| Some value -> int_ telemetry ~key ~value
let int_list
?(truncate_list : int option)
~(key : string)
~(value : int list)
(telemetry : t) : t =
let value =
match truncate_list with
| None -> value
| Some truncate_list -> List.take value truncate_list
in
let value = List.map ~f:(fun i -> Hh_json.int_ i) value in
(key, Hh_json.JSON_Array value) :: telemetry
(* Records an arbitrary pre-built JSON value under [key]. *)
let json_ ~(key : string) ~(value : Hh_json.json) (telemetry : t) : t =
(key, value) :: telemetry
(* Nests [value] as a JSON object; reversed to serialize insertion order. *)
let object_ ~(key : string) ~(value : t) (telemetry : t) : t =
(key, Hh_json.JSON_Object (List.rev value)) :: telemetry
let object_opt ~(key : string) ~(value : t option) (telemetry : t) : t =
match value with
| None -> (key, Hh_json.JSON_Null) :: telemetry
| Some value -> object_ ~key ~value telemetry
(* Records elapsed milliseconds under [key] (default "duration"); if
   [end_time] is omitted, "now" is used. *)
let duration
?(key : string = "duration")
~(start_time : float)
?(end_time : float option)
(telemetry : t) : t =
let end_time = Option.value end_time ~default:(Unix.gettimeofday ()) in
let seconds = end_time -. start_time in
let ms = int_of_float (1000.0 *. seconds) in
(key, Hh_json.int_ ms) :: telemetry
let float_ ~(key : string) ~(value : float) (telemetry : t) : t =
(key, Hh_json.float_ value) :: telemetry
(* [None] records JSON null rather than omitting the key. *)
let float_opt ~(key : string) ~(value : float option) (telemetry : t) : t =
match value with
| None -> (key, Hh_json.JSON_Null) :: telemetry
| Some value -> float_ telemetry ~key ~value
(* Builds an ("error", {message; stack?}) pair. Deliberately shadowed
   below by the telemetry-accumulating [error]; this pair-producing
   version remains reachable through the shadowing wrappers. *)
let error ~(stack : string option) (e : string) : key_value_pair =
let vals = [("message", Hh_json.JSON_String e)] in
let vals =
match stack with
| None -> vals
| Some stack -> ("stack", Hh_json.JSON_String stack) :: vals
in
("error", Hh_json.JSON_Object vals)
(* Pair-producing form; shadowed by the telemetry version below. *)
let exception_ (e : Exception.t) : key_value_pair =
error
~stack:(Some (Exception.get_backtrace_string e))
(Exception.get_ctor_string e)
let error_with_stack ~(stack : string) ~(e : string) (telemetry : t) : t =
let stack = Exception.clean_stack stack in
error ~stack:(Some stack) e :: telemetry
(* Shadows the pair-producing [error] above with the public accumulator. *)
let error ~(e : string) (telemetry : t) : t = error ~stack:None e :: telemetry
(* Shadows the pair-producing [exception_] above with the public accumulator. *)
let exception_ ~(e : Exception.t) (telemetry : t) : t =
exception_ e :: telemetry
(* Snapshot of the current process's GC counters as telemetry, with word
   counts converted to bytes using the host word size. *)
let quick_gc_stat () : t =
let stat = Gc.quick_stat () in
let bytes_per_word = Stdlib.Sys.word_size / 8 in
let bytes_per_wordf = bytes_per_word |> float_of_int in
let open Gc.Stat in
create ()
|> float_ ~key:"minor_bytes" ~value:(stat.minor_words *. bytes_per_wordf)
|> float_ ~key:"promoted_bytes" ~value:(stat.promoted_words *. bytes_per_wordf)
|> float_ ~key:"major_bytes" ~value:(stat.major_words *. bytes_per_wordf)
|> int_ ~key:"minor_collections" ~value:stat.minor_collections
|> int_ ~key:"major_collections" ~value:stat.major_collections
|> int_ ~key:"heap_bytes" ~value:(stat.heap_words * bytes_per_word)
|> int_ ~key:"compactions" ~value:stat.compactions
|> int_ ~key:"top_heap_bytes" ~value:(stat.top_heap_words * bytes_per_word)
(** [diff ~all current ~prev] produces a hybrid object: per key, equal
    values appear once (only when [all] is true); differing values appear
    as [key]/[key__prev] pairs; numeric pairs additionally get a
    [key__diff] entry. [~suffix_keys:false] suppresses the suffixes.
    Fixes relative to the previous revision:
    - [diff_no_prev] used a hard-coded ["__prev"] suffix instead of
      [prev_suffix], inconsistent with [diff_no_current] when
      [suffix_keys:false]; it now honors [prev_suffix].
    - [diff_both] recomputed [diff elems_c ~prev:elems_p] after already
      binding the result; the bound value is now reused. *)
let diff ~(all : bool) ?(suffix_keys = true) (telemetry : t) ~(prev : t) : t =
  let (prev_suffix, diff_suffix) =
    if suffix_keys then
      ("__prev", "__diff")
    else
      ("", "")
  in
  (* Sort both sides by key, then walk them in lockstep. *)
  let rec diff (telemetry : t) ~(prev : t) : t =
    let telemetry = List.sort telemetry ~compare in
    let prev = List.sort prev ~compare in
    let acc = [] in
    diff_already_sorted telemetry ~prev acc
  and diff_already_sorted (current : t) ~(prev : t) (acc : t) : t =
    match (current, prev, all) with
    | ([], [], _) -> acc
    | (c :: cs, [], true) ->
      acc |> diff_no_prev c |> diff_already_sorted cs ~prev:[]
    | (_, [], false) -> acc
    | ([], p :: ps, true) ->
      acc |> diff_no_current p |> diff_already_sorted [] ~prev:ps
    | ([], _, false) -> acc
    | (c :: cs, p :: ps, true) when compare c p < 0 ->
      acc |> diff_no_prev c |> diff_already_sorted cs ~prev:(p :: ps)
    | (c :: cs, p :: ps, false) when compare c p > 0 ->
      acc |> diff_no_current p |> diff_already_sorted (c :: cs) ~prev:ps
    | (c :: cs, p :: ps, _) ->
      acc |> diff_both c p |> diff_already_sorted cs ~prev:ps
  and diff_no_prev ((key, val_c) : key_value_pair) (acc : t) : t =
    (* Fix: honor [prev_suffix] (was a hard-coded "__prev"). *)
    (key, val_c) :: (key ^ prev_suffix, Hh_json.JSON_Null) :: acc
  and diff_no_current ((key, val_p) : key_value_pair) (acc : t) : t =
    let open Hh_json in
    match val_p with
    | JSON_Object elems ->
      let elems =
        elems |> List.fold ~init:[] ~f:(fun acc e -> diff_no_current e acc)
      in
      (key, JSON_Null) :: (key ^ prev_suffix, JSON_Object elems) :: acc
    | _ -> (key, Hh_json.JSON_Null) :: (key ^ prev_suffix, val_p) :: acc
  and acc_if b elem acc =
    if b then
      elem :: acc
    else
      acc
  and diff_both
      ((key, val_c) : key_value_pair) ((_key, val_p) : key_value_pair) (acc : t)
      : t =
    let open Hh_json in
    match (val_c, val_p) with
    | (JSON_Object elems_c, JSON_Object elems_p) ->
      (* Fix: reuse the recursive diff computed here instead of
         recomputing it in the accumulated pair. *)
      let elems = diff elems_c ~prev:elems_p in
      acc_if (all || not (List.is_empty elems)) (key, JSON_Object elems) acc
    | (JSON_Object _, _)
    | (_, JSON_Object _)
    | (JSON_Array _, _)
    | (_, JSON_Array _) ->
      (* Structural mismatch, or arrays: keep only the current value. *)
      acc_if all (key, val_c) acc
    | (JSON_Bool val_c, JSON_Bool val_p) when Bool.equal val_c val_p ->
      acc_if all (key, JSON_Bool val_c) acc
    | (JSON_String val_c, JSON_String val_p) when String.equal val_c val_p ->
      acc_if all (key, JSON_String val_c) acc
    | (JSON_Number val_c, JSON_Number val_p) when String.equal val_c val_p ->
      acc_if all (key, JSON_Number val_c) acc
    | (JSON_Null, JSON_Null) -> acc_if all (key, JSON_Null) acc
    | (JSON_Number c, JSON_Number p) -> begin
      (* JSON_Numbers are strings - maybe ints, maybe floats, maybe we
         can't parse them or they're outside ocaml maximum range *)
      try
        let (c, p) = (int_of_string c, int_of_string p) in
        (key ^ diff_suffix, int_ (c - p)) :: acc_if all (key, int_ c) acc
      with
      | _ -> begin
        try
          let (c, p) = (float_of_string c, float_of_string p) in
          (key ^ diff_suffix, float_ (c -. p)) :: acc_if all (key, float_ c) acc
        with
        | _ -> (key, JSON_Number c) :: (key ^ prev_suffix, JSON_Number p) :: acc
      end
    end
    | (_, _) -> (key, val_c) :: (key ^ prev_suffix, val_p) :: acc
  in
  diff telemetry ~prev
(* [merge] simply concatenates; no key de-duplication is performed. *)
let merge (telemetry1 : t) (telemetry2 : t) : t = telemetry2 @ telemetry1
(** [add t1 t2] adds values key-by-key. Only numbers and (recursively)
    objects containing numbers are retained; arrays, bools, nulls and
    strings are dropped. *)
let rec add (telemetry1 : t) (telemetry2 : t) : t =
let telemetry1 = List.sort telemetry1 ~compare in
let telemetry2 = List.sort telemetry2 ~compare in
add_already_sorted telemetry1 telemetry2 []
(* Merge two key-sorted lists, pairing equal keys via [add_elems] and
   filtering singletons through [add_single]. *)
and add_already_sorted (telemetry1 : t) (telemetry2 : t) (acc : t) : t =
match (telemetry1, telemetry2) with
| ([], []) -> acc
| (t :: telemetry, [])
| ([], t :: telemetry) ->
let acc = add_single t acc in
add_already_sorted telemetry [] acc
| (t1 :: telemetry1, t2 :: _) when compare t1 t2 < 0 ->
let acc = add_single t1 acc in
add_already_sorted telemetry1 telemetry2 acc
| (t1 :: _, t2 :: telemetry2) when compare t1 t2 > 0 ->
let acc = add_single t2 acc in
add_already_sorted telemetry1 telemetry2 acc
| (t1 :: telemetry1, t2 :: telemetry2) ->
let acc = add_elems t1 t2 acc in
add_already_sorted telemetry1 telemetry2 acc
(* Keep an unpaired entry only if it is numeric, or an object that still
   contains numerics after recursive filtering. *)
and add_single ((key, value) : key_value_pair) (acc : t) : t =
let open Hh_json in
match value with
| JSON_Number _ -> (key, value) :: acc
| JSON_Object elems ->
let elems = add elems [] in
if not (List.is_empty elems) then
(key, JSON_Object elems) :: acc
else
acc
| JSON_Array _
| JSON_Bool _
| JSON_Null
| JSON_String _ ->
acc
(* Combine two entries that share a key: numbers are summed (int first,
   then float); objects are added recursively; everything else is dropped. *)
and add_elems
((key, val1) : key_value_pair) ((_key, val2) : key_value_pair) (acc : t) : t
=
let open Hh_json in
match (val1, val2) with
| (JSON_Number n1, JSON_Number n2) ->
(try
let n1 = int_of_string n1 in
let n2 = int_of_string n2 in
(key, int_ (n1 + n2)) :: acc
with
| _ ->
let n1 = float_of_string n1 in
let n2 = float_of_string n2 in
(key, float_ (n1 +. n2)) :: acc)
| (JSON_Object elems1, JSON_Object elems2) ->
let elems = add elems1 elems2 in
if not @@ List.is_empty elems then
(key, JSON_Object elems) :: acc
else
acc
| ( ( JSON_Number _ | JSON_Object _ | JSON_Array _ | JSON_Bool _ | JSON_Null
| JSON_String _ ),
_ ) ->
acc |
OCaml Interface | hhvm/hphp/hack/src/utils/core/telemetry.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type t [@@deriving show]
val create : unit -> t
val to_string : ?pretty:bool -> t -> string
val to_json : t -> Hh_json.json
(** `diff ~all current ~prev` is for when `current` and `prev` have the same structure.
It produces a hybrid telemetry object where, element by element, if they're the same
then we only see the current element, but if they're different then we see both.
(If you pass ~all:true then it hides elements that have remained the same.)
It works with nested telemetry objects. In places where the structure differs,
only `current` is kept. *)
val diff : all:bool -> ?suffix_keys:bool -> t -> prev:t -> t
(** [add t1 t2] does a key-by-key numeric add, for instance [add {a->1,b->1} {a->3}] will produce [{a->4,b->1}].
It retains only numerics (int, float) and hierarchical objects that contain numerics.
It doesn't retain int lists. *)
val add : t -> t -> t
(** [merge t1 t2] appends two telemetries, for instance [merge {a->1,b->true} {c->2}]
will produce [{a->1,b->true,c->2}]. No matching is done: if an item with the same [key]
appears in both [t1] and [t2] then it will be listed twice in the output, similar
to if you'd done [t |> int_ ~key ~value |> int_ ~key ~value] twice using the same
key. (so don't do it!) *)
val merge : t -> t -> t
val string_ : ?truncate:int -> key:string -> value:string -> t -> t
val string_opt : ?truncate:int -> key:string -> value:string option -> t -> t
val string_list :
?truncate_list:int ->
?truncate_each_string:int ->
key:string ->
value:string list ->
t ->
t
val string_list_opt :
?truncate_list:int ->
?truncate_each_string:int ->
key:string ->
value:string list option ->
t ->
t
val object_list : key:string -> value:t list -> t -> t
val bool_ : key:string -> value:bool -> t -> t
val int_ : key:string -> value:int -> t -> t
val int_opt : key:string -> value:int option -> t -> t
val int_list : ?truncate_list:int -> key:string -> value:int list -> t -> t
val json_ : key:string -> value:Hh_json.json -> t -> t
val object_ : key:string -> value:t -> t -> t
val object_opt : key:string -> value:t option -> t -> t
val duration : ?key:string -> start_time:float -> ?end_time:float -> t -> t
val float_ : key:string -> value:float -> t -> t
val float_opt : key:string -> value:float option -> t -> t
val error : e:string -> t -> t
val error_with_stack : stack:string -> e:string -> t -> t
val exception_ : e:Exception.t -> t -> t
val quick_gc_stat : unit -> t |
OCaml | hhvm/hphp/hack/src/utils/core/utils.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Printexc = Stdlib.Printexc
(** Callstack is simply a typed way to indicate that a string is a callstack *)
type callstack = Callstack of string [@@deriving show]
(* Seed the PRNG once at module load; [~allow_in_tests] opts in to seeding
   even under test runners. *)
let () = Random.self_init ~allow_in_tests:true ()
(* NOTE(review): empty module, presumably shadowing [Map] on purpose to
   discourage its use here — confirm intent before removing. *)
module Map = struct end
let spf = Printf.sprintf
let print_endlinef fmt = Printf.ksprintf Stdio.print_endline fmt
let prerr_endlinef fmt = Printf.ksprintf Stdio.prerr_endline fmt
(* Pretty-prints [xs]; past [max_items] elements, prints only a prefix plus
   a "<only showing first ...>" summary to keep output bounded. *)
let pp_large_list ?(pp_sep = None) ?(max_items = 5) pp_elt fmt xs =
let list_len = List.length xs in
if list_len <= max_items then
Format.pp_print_list ?pp_sep pp_elt fmt xs
else
let xs = List.take xs max_items in
Format.fprintf
fmt
"<only showing first %d of %d elems: %a>"
max_items
list_len
(Format.pp_print_list ?pp_sep pp_elt)
xs
(* Formats a Unix timestamp as "[YYYY-MM-DD hh:mm:ss.mmm]" in local time. *)
let timestring (time : float) : string =
let tm = Unix.localtime time in
Printf.sprintf
"[%d-%02d-%02d %02d:%02d:%02d.%03d]"
(tm.Unix.tm_year + 1900)
(tm.Unix.tm_mon + 1)
tm.Unix.tm_mday
tm.Unix.tm_hour
tm.Unix.tm_min
tm.Unix.tm_sec
(Int.of_float (time *. 1000.) % 1000)
(* Inverse of [timestring]: parses the bracketed local-time format back to
   an epoch float. [tm_isdst] is set true; [mktime] normalizes it. *)
let time (timestring : string) : float =
let to_float year mon tm_mday tm_hour tm_min tm_sec millsec =
let tm =
Unix.
{
tm_sec;
tm_min;
tm_hour;
tm_mday;
tm_mon = mon - 1;
tm_year = year - 1900;
tm_wday = 0;
tm_yday = 0;
tm_isdst = true;
}
in
fst (Unix.mktime tm) +. (Float.of_int millsec *. 0.001)
in
Scanf.sscanf timestring "[%d-%02d-%02d %02d:%02d:%02d.%03d]" to_float
(* Threads an environment through an optional value: applies [f env] under
   [Some], passing [env] through unchanged for [None]. *)
let opt f env = function
| None -> (env, None)
| Some x ->
let (env, x) = f env x in
(env, Some x)
(* [singleton_if cond x] is [[x]] when [cond] holds, otherwise []. *)
let singleton_if cond x =
  match cond with
  | true -> [x]
  | false -> []
(* Folds [f] pairwise over [l1] and [l2], stopping silently at the end of
   the shorter list (unlike a strict fold2, which would fail). *)
let rec wfold_left2 f env l1 l2 =
  match (l1, l2) with
  | (x1 :: rest1, x2 :: rest2) -> wfold_left2 f (f env x1 x2) rest1 rest2
  | ([], _)
  | (_, []) ->
    env
let sl l = List.fold_right l ~f:( ^ ) ~init:""
(* Applies [f env] to the payload of [Some]; does nothing for [None]. *)
let maybe f env opt =
  match opt with
  | Some x -> f env x
  | None -> ()
(* Since OCaml usually runs w/o backtraces enabled, the note makes errors
 * easier to debug. *)
(* Extracts the payload of an option, raising [Invalid_argument note] on
   [None]. *)
let unsafe_opt_note note opt =
  match opt with
  | Some x -> x
  | None -> raise (Invalid_argument note)

(* [unsafe_opt_note] with a generic note. *)
let unsafe_opt x = unsafe_opt_note "unsafe_opt got None" x
(* Runs [f], capturing any exception (with backtrace, via [Exception.wrap])
   as [Error] rather than letting it propagate. *)
let try_with_stack (f : unit -> 'a) : ('a, Exception.t) result =
try Ok (f ()) with
| exn ->
let e = Exception.wrap exn in
Error e
(* Builds an SSet from a list of strings. *)
let set_of_list l = List.fold_right l ~f:SSet.add ~init:SSet.empty
(* \A\B\C -> A\B\C *)
let strip_ns s = String.chop_prefix_if_exists s ~prefix:"\\"
(* :x:y -> x:y (strips a leading XHP colon only) *)
let strip_xhp_ns s = String.chop_prefix_if_exists s ~prefix:":"
(* Strips a leading backslash, then a leading colon. *)
let strip_both_ns s = s |> strip_ns |> strip_xhp_ns
(* \HH\C -> C
 * \HH\Lib\C -> C
 * \A\B\C -> A\B\C
 *)
let strip_hh_lib_ns s =
s
|> String.chop_prefix_if_exists ~prefix:"\\HH\\Lib\\"
|> String.chop_prefix_if_exists ~prefix:"\\HH\\"
|> strip_ns
(* A\B\C -> \A\B\C *)
(* Idempotent: already-rooted names are returned unchanged. *)
let add_ns s =
if String.is_prefix s ~prefix:"\\" then
s
else
"\\" ^ s
(* A:B:C -> :A:B:C *)
(* Idempotent, as [add_ns] but for the XHP ':' sigil. *)
let add_xhp_ns s =
if String.is_prefix s ~prefix:":" then
s
else
":" ^ s
(* \A\B\C -> C *)
(* Keeps only the text after the last backslash; colons are untouched. *)
let strip_all_ns s =
match String.rindex s '\\' with
| Some pos ->
let base_name_start = pos + 1 in
String.sub s ~pos:base_name_start ~len:(String.length s - base_name_start)
| None -> s
(* "\\A\\B\\C" -> ("\\A\\B\\" * "C") *)
(* Splits after the last backslash; names with no namespace get "\\" as
   their namespace part. *)
let split_ns_from_name (s : string) : string * string =
match String.rindex s '\\' with
| Some pos ->
let base_name_start = pos + 1 in
let name_part =
String.sub s ~pos:base_name_start ~len:(String.length s - base_name_start)
in
let namespace_part = String.sub s ~pos:0 ~len:base_name_start in
(namespace_part, name_part)
| None -> ("\\", s)
(* Expands a namespace using the namespace map, a list of (string, string) tuples
 * Ensures the beginning backslash is present
 *
 * "Str\\join" -> "\\HH\\Lib\\Str\\join" (when "Str", "HH\\Lib\\Str" is present in map)
 * "HH\\Lib\\Str\\Join" -> "\\HH\\Lib\\Str\\join"
 * "\\HH\\Lib\\Str\\Join" -> "\\HH\\Lib\\Str\\join"
 * "just_plain_func" -> "\\just_plain_func"
 *)
let expand_namespace (ns_map : (string * string) list) (s : string) : string =
let (raw_ns, name) = split_ns_from_name s in
(* Might need left backslash *)
let ns = add_ns raw_ns in
(* First alias whose rooted form matches the entire namespace part wins. *)
let matching_alias =
List.find ns_map ~f:(fun (alias, _) ->
let fixup = add_ns alias ^ "\\" in
String.equal fixup ns)
in
match matching_alias with
| None -> add_ns s
| Some (_, expanded) -> add_ns (expanded ^ "\\" ^ name)
(*****************************************************************************)
(* Same as List.iter2, except that we only iterate as far as the shortest
 * of both lists.
 *)
(*****************************************************************************)
let rec iter2_shortest f l1 l2 =
  match (l1, l2) with
  | (x1 :: rest1, x2 :: rest2) ->
    f x1 x2;
    iter2_shortest f rest1 rest2
  | ([], _)
  | (_, []) ->
    ()
let compose f g x = f (g x)
(* Wrapper marking whether [value] is a complete result or a partial one. *)
module With_complete_flag = struct
type 'a t = {
is_complete: bool;
value: 'a;
}
end
(* Runs [f], guaranteeing [finally] runs exactly once whether [f] returns
   normally or raises; exceptions are re-raised with their original
   backtrace via [Exception]. *)
let try_finally ~f ~(finally : unit -> unit) =
let res =
try f () with
| exn ->
let e = Exception.wrap exn in
finally ();
Exception.reraise e
in
finally ();
res
(* Brackets [do_] between [enter] and [exit]; [exit] runs exactly once even
   if [do_] raises (the exception is then re-raised). Note: if [enter]
   itself raises, [exit] does not run. *)
let with_context
~(enter : unit -> unit) ~(exit : unit -> unit) ~(do_ : unit -> 'a) : 'a =
enter ();
let result =
try do_ () with
| exn ->
let e = Exception.wrap exn in
exit ();
Exception.reraise e
in
exit ();
result
(* We run with exception backtraces turned off for performance reasons. But for
 * some kinds of catastrophic exceptions, which we never recover from (so the
 * performance doesn't matter) we do want the backtrace. "assert false" is one
 * of such conditions.
 *)
(* Prints [msg] (if any) and the current callstack (up to 100 frames) to
   stderr, then fails with [assert false]. Never returns normally. *)
let assert_false_log_backtrace msg =
Printf.eprintf "assert false with backtrace:\n";
Option.iter msg ~f:(Printf.eprintf "%s\n");
Printf.eprintf
"%s"
(Printexc.raw_backtrace_to_string (Printexc.get_callstack 100));
assert false
(* Returns the index of the largest element in [arr] strictly less than
   [bound], or [None] if no element qualifies. (Despite the original
   comment, the result is an index, not the element itself.)
   Requires [arr] to be sorted ascending w.r.t. [compare]. *)
let infimum (arr : 'a array) (bound : 'b) (compare : 'a -> 'b -> int) :
int option =
  let rec binary_search low high =
    if low = high then
      (* Bug fix: the search can narrow to index 0 without ever testing it
         (only [mid] positions are tested), so the original returned
         [Some 0] even when [arr.(0) >= bound], e.g.
         [infimum [|1;3;5|] 1 compare] returned [Some 0]. Verify the
         candidate before accepting it. *)
      if compare arr.(low) bound < 0 then
        Some low
      else
        None
    else if low > high then
      (* Empty array: initial call is [binary_search 0 (-1)]. *)
      None
    else
      (* Upper-mid split keeps the loop making progress when [mid]
         satisfies the predicate and becomes the new lower bound. *)
      let mid = (low + high + 1) / 2 in
      let test = arr.(mid) in
      if compare test bound < 0 then
        binary_search mid high
      else
        binary_search low (mid - 1)
  in
  binary_search 0 (Array.length arr - 1)
(* Hoists the option out of the pair's second component:
   [(a, Some b)] becomes [Some (a, b)]; [(a, None)] becomes [None]. *)
let unwrap_snd (a, b_opt) =
match b_opt with
| None -> None
| Some b -> Some (a, b) |
OCaml Interface | hhvm/hphp/hack/src/utils/core/utils.mli | (*
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* This `.mli` file was generated automatically. It may include extra
definitions that should not actually be exposed to the caller. If you notice
that this interface file is a poor interface, please take a few minutes to
clean it up manually, and then delete this comment once the interface is in
shape. *)
type callstack = Callstack of string [@@deriving show]
module Map : sig end
val spf : ('a, unit, string) format -> 'a
val print_endlinef : ('a, unit, string, unit) format4 -> 'a
val prerr_endlinef : ('a, unit, string, unit) format4 -> 'a
val pp_large_list :
?pp_sep:(Format.formatter -> unit -> unit) option ->
?max_items:int ->
(Format.formatter -> 'a -> unit) ->
Format.formatter ->
'a list ->
unit
val timestring : float -> string
val time : string -> float
val opt : ('a -> 'b -> 'a * 'c) -> 'a -> 'b option -> 'a * 'c option
val singleton_if : bool -> 'a -> 'a list
val wfold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b list -> 'c list -> 'a
val sl : string list -> string
val maybe : ('a -> 'b -> unit) -> 'a -> 'b option -> unit
val unsafe_opt_note : string -> 'a option -> 'a
val unsafe_opt : 'a option -> 'a
val try_with_stack : (unit -> 'a) -> ('a, Exception.t) result
val set_of_list : SSet.elt list -> SSet.t
(* Strip NS removes only the leading backslash *)
val strip_ns : string -> string
(* Strip XHP removes only the leading colon *)
val strip_xhp_ns : string -> string
(* Strip Both removes either leading backslash and colon, or both *)
val strip_both_ns : string -> string
val strip_hh_lib_ns : string -> string
(* Strip All removes all backslash-based namespaces, but does nothing to XHP *)
val strip_all_ns : string -> string
(** A\B\C -> \A\B\C *)
val add_ns : string -> string
(** A:B:C -> :A:B:C *)
val add_xhp_ns : string -> string
val split_ns_from_name : string -> string * string
val expand_namespace : (string * string) list -> string -> string
val iter2_shortest : ('a -> 'b -> 'c) -> 'a list -> 'b list -> unit
val compose : ('a -> 'b) -> ('c -> 'a) -> 'c -> 'b
module With_complete_flag : sig
type 'a t = {
is_complete: bool;
value: 'a;
}
end
val try_finally : f:(unit -> 'a) -> finally:(unit -> unit) -> 'a
val with_context :
enter:(unit -> unit) -> exit:(unit -> unit) -> do_:(unit -> 'a) -> 'a
val assert_false_log_backtrace : string option -> 'a
val infimum : 'a array -> 'b -> ('a -> 'b -> int) -> int option
val unwrap_snd : 'a * 'b option -> ('a * 'b) option |
Rust | hhvm/hphp/hack/src/utils/core/utils.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::borrow::Cow;
/// A\B\C -> \A\B\C
///
/// Borrows the input unchanged when it already starts with a backslash;
/// otherwise allocates a new string with one prepended.
pub fn add_ns(s: &str) -> Cow<'_, str> {
    if s.starts_with('\\') {
        Cow::Borrowed(s)
    } else {
        Cow::Owned(format!("\\{}", s))
    }
}
/// A\B\C -> \A\B\C
///
/// Byte-slice analogue of `add_ns`: borrows when the slice already begins
/// with `b'\\'`, otherwise allocates with a leading backslash byte.
pub fn add_ns_bstr(s: &[u8]) -> Cow<'_, [u8]> {
    match s.first() {
        Some(b'\\') => Cow::Borrowed(s),
        _ => {
            let mut rooted = Vec::with_capacity(s.len() + 1);
            rooted.push(b'\\');
            rooted.extend_from_slice(s);
            Cow::Owned(rooted)
        }
    }
}
/// \A\B\C -> A\B\C
///
/// Removes a single leading backslash, if present. The previous version
/// special-cased the empty string, but `str::strip_prefix` already returns
/// `None` for it, so the check was redundant; this is the idiomatic form.
pub fn strip_ns(s: &str) -> &str {
    s.strip_prefix('\\').unwrap_or(s)
}
/// A\B\C -> C (drops everything up to and including the last backslash).
pub fn strip_all_ns(s: &str) -> &str {
    match s.rfind('\\') {
        Some(pos) => &s[pos + 1..],
        None => s,
    }
}
/// \A\B\C -> (\A\B\, C)
/// A -> (\, A)
///
/// Splits just after the last backslash; names without one get "\\" as
/// their namespace part.
pub fn split_ns_from_name(s: &str) -> (&str, &str) {
    s.rfind('\\').map_or(("\\", s), |pos| s.split_at(pos + 1))
}
#[cfg(test)]
mod utils_tests {
use pretty_assertions::assert_eq;
#[test]
fn add_ns_test() {
let test_string = "\\MyTestClass";
assert_eq!(super::add_ns(test_string), "\\MyTestClass");
let test_string2 = "MyTestClass";
assert_eq!(super::add_ns(test_string2), "\\MyTestClass");
let test_string3 = "SubNamespace\\MyTestClass";
assert_eq!(super::add_ns(test_string3), "\\SubNamespace\\MyTestClass");
let test_string4 = "\\SubNamespace\\MyTestClass";
assert_eq!(super::add_ns(test_string4), "\\SubNamespace\\MyTestClass");
let test_string5 = "";
assert_eq!(super::add_ns(test_string5), "\\");
}
#[test]
fn strip_ns_test() {
let test_string = "\\MyTestClass";
assert_eq!(super::strip_ns(test_string), "MyTestClass");
let test_string2 = "MyTestClass";
assert_eq!(super::strip_ns(test_string2), "MyTestClass");
let test_string3 = "SubNamespace\\MyTestClass";
assert_eq!(super::strip_ns(test_string3), "SubNamespace\\MyTestClass");
let test_string4 = "\\SubNamespace\\MyTestClass";
assert_eq!(super::strip_ns(test_string4), "SubNamespace\\MyTestClass");
let test_string5 = "";
assert_eq!(super::strip_ns(test_string5), "");
}
#[test]
fn split_ns_from_name_test() {
let f = super::split_ns_from_name;
assert_eq!(f("\\A\\B"), ("\\A\\", "B"));
assert_eq!(f("\\A\\"), ("\\A\\", ""));
assert_eq!(f(""), ("\\", ""));
assert_eq!(f("\\"), ("\\", ""));
assert_eq!(f("A\\B"), ("A\\", "B"));
}
} |
OCaml | hhvm/hphp/hack/src/utils/core/approx_set/approxSet.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* How two atomic domain elements relate, as far as the domain can tell;
   [Unknown] forces the conservative (Unsat) answer in [disjoint]. *)
module Set_relation = struct
type t =
| Equal
| Subset
| Superset
| Disjoint
| Unknown
end
(* The domain of atomic elements, plus a context-sensitive relation used to
   decide how two atoms overlap. *)
module type DomainType = sig
type t
type ctx
val relation : t -> ctx:ctx -> t -> Set_relation.t
end
(* Public interface of the approximate set; the accompanying .mli carries
   the full per-operation documentation. *)
module type S = sig
module Domain : DomainType
type t
val singleton : Domain.t -> t
val union : t -> t -> t
val inter : t -> t -> t
val diff : t -> t -> t
type disjoint =
| Sat
| Unsat of Domain.t * Domain.t
val disjoint : Domain.ctx -> t -> t -> disjoint
end
(* To keep the logic simple we do not perform any simplification during
construction of the set. Instead specific simplification rules are
applied when computing [disjoint] *)
module Make (Domain : DomainType) : S with module Domain := Domain = struct
(* Sets are unevaluated syntax trees over the domain's atoms; all
   reasoning happens lazily inside [disjoint]. *)
type t =
| Set of Domain.t
| Union of t * t
| Inter of t * t
| Compl of t
type disjoint =
| Sat
| Unsat of Domain.t * Domain.t
let singleton elt = Set elt
(* Rewrites the pair of trees (double-complement elimination, De Morgan,
   distributing over union/intersection) down to atom-level cases that
   [Domain.relation] can decide. Approximate: [Unknown] relations and
   complement-vs-complement both default to [Unsat]. *)
let rec disjoint ctx set1 set2 =
let open Set_relation in
match (set1, set2) with
(* (L ∪ R) disj S if (L disj S) && (R disj S) *)
| (Union (l, r), set) ->
let result =
match disjoint ctx l set with
| Sat -> disjoint ctx r set
| Unsat _ as unsat -> unsat
in
result
(* (L ∩ R) disj S if (L disj S) || (R disj S) *)
| (Inter (l, r), set) ->
let result =
match disjoint ctx l set with
| Sat -> Sat
| Unsat _ -> disjoint ctx r set
in
result
(* !(!A) = A *)
| (Compl (Compl a), b) -> disjoint ctx a b
(* De Morgan's Law: !(A ∪ B) = !A ∩ !B *)
| (Compl (Union (a, b)), set) -> disjoint ctx (Inter (Compl a, Compl b)) set
(* De Morgan's Law: !(A ∩ B) = !A ∪ !B *)
| (Compl (Inter (a, b)), set) -> disjoint ctx (Union (Compl a, Compl b)) set
| (Set elt1, Set elt2) ->
let result =
match Domain.relation ~ctx elt1 elt2 with
| Disjoint -> Sat
| Equal
| Subset
| Superset
| Unknown ->
Unsat (elt1, elt2)
in
result
| (Set a, Compl (Set b)) ->
(* (A disj !B) if A ⊆ B *)
let result =
match Domain.relation ~ctx a b with
| Equal
| Subset ->
Sat
| Superset
| Unknown
| Disjoint ->
Unsat (a, b)
in
result
| (Compl (Set set1), Compl (Set set2)) ->
(* Approximation:
(!A disj !B) iff (A ∪ B) = U && A = !B
where U := Universal Set (Set Containing All Elements in the Domain)
There is no way in our model to determine if (A ∪ B) = U holds
so we are forced to approximate the result. The safest approximation
is to assume the sets are not disjoint *)
Unsat (set1, set2)
(* Remaining shapes: disjointness is symmetric, so swap the arguments to
   reach one of the cases handled above. *)
| (Compl (Set _), (Union _ | Inter _ | Compl _ | Set _))
| (Set _, (Union _ | Inter _ | Compl _)) ->
disjoint ctx set2 set1
let union (l : t) (r : t) : t = Union (l, r)
let inter (l : t) (r : t) : t = Inter (l, r)
(* A ∖ B = A ∩ !B *)
let diff (a : t) (b : t) : t = inter a (Compl b)
end |
OCaml Interface | hhvm/hphp/hack/src/utils/core/approx_set/approxSet.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** The relationship between atomic elements in the set *)
module Set_relation : sig
type t =
| Equal
| Subset
| Superset
| Disjoint
| Unknown
end
(** Specifies the domain that the set will range over. Individual
members of the domain can be related to one another via the [relation]
function. *)
module type DomainType = sig
(** Type of atomic elements of the domain *)
type t
(** Contextual data that will be provided to [relation] when determining
[set_relation] between atomic elements of the domain *)
type ctx
(** Determines the relation between atomic elements of the domain, based on the given context *)
val relation : t -> ctx:ctx -> t -> Set_relation.t
end
(** An abstract representation of a set, designed specifically to determine if two
sets are disjoint. Sets consist of atomic elements ([elt]) that can be joined
using basic set operations like union, intersection and set difference. This is
only an approximate set, because there are ways to construct sets where it isn't
possible to determine definitively whether or not they are disjoint. In those cases
the implementation will default to returning [Unsat]*)
module type S = sig
(** The domain the set ranges over *)
module Domain : DomainType
(** Type of an instance of the approximate set *)
type t
(** Create an approximate set from an atomic element in the domain *)
val singleton : Domain.t -> t
(** Set union *)
val union : t -> t -> t
(** Set intersection *)
val inter : t -> t -> t
(** Set difference. Note that in some cases we cannot determine precisely if
two sets involving [diff] are disjoint. In these cases the result will
be approximated to [Unsat] *)
val diff : t -> t -> t
(** The result of testing two sets for disjointness *)
type disjoint =
| Sat (** The two sets are definitely disjoint *)
| Unsat of Domain.t * Domain.t
(** The two sets are not disjoint because of the relation between
the given pair of [Domain.t]s *)
(** Determines if a pair of sets are disjoint in the given [ctx].
If the sets cannot definitively be proven to be disjoint, will return
[Unsat] *)
val disjoint : Domain.ctx -> t -> t -> disjoint
end
(** Constructs an approximate set representation over the given [Domain] *)
module Make (Domain : DomainType) : S with module Domain := Domain |
OCaml | hhvm/hphp/hack/src/utils/core/bloom_filter/bloomFilter.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* A fixed-size bit set backed by a [bytes] buffer; bit [i] lives in byte
   [i / 8] at bit position [i mod 8]. *)
module BitSet = struct
  type t = bytes

  (* Decomposed position of a bit: which byte it lives in, and which bit
     within that byte. *)
  type idx = {
    bit_idx: int;
    byte_idx: int;
  }

  let zero = Char.unsafe_chr 0

  (* [create number_of_bits] allocates a zeroed bit set holding at least
     [number_of_bits] bits (rounded up to a whole byte). *)
  let create number_of_bits =
    if number_of_bits = 0 then
      Bytes.empty
    else
      (* Round up to nearest byte *)
      let number_of_bytes = (number_of_bits + 7) / 8 in
      Bytes.make number_of_bytes zero

  (* Split a bit index into its byte/bit coordinates. *)
  let index i =
    {
      (* i / 8 *)
      byte_idx = Int.shift_right_logical i 3;
      (* i % 8 *)
      bit_idx = Int.logand i 7;
    }

  (* test if the ith bit is set in the byte *)
  let is_set byte i = byte land (1 lsl i) <> 0

  (* sets the ith bit in the byte *)
  let set byte i = byte lor (1 lsl i)

  (* checks if the ith bit is set in the bitset *)
  let mem b i =
    let i = index i in
    let byte = Bytes.get_uint8 b i.byte_idx in
    is_set byte i.bit_idx

  (* Sets the ith bit in the bitset, in place. NOTE: this [set] shadows the
     byte-level [set] above for later code, but the call to [set] inside the
     body still resolves to the byte-level helper because this binding is
     not recursive. *)
  let set (b : t) i =
    let i = index i in
    let byte1 = Bytes.get_uint8 b i.byte_idx in
    let byte = set byte1 i.bit_idx in
    Bytes.set_uint8 b i.byte_idx byte

  (* Total capacity of the bit set, in bits. *)
  let length b = Bytes.length b * 8
end
(* A Bloom filter is just its underlying bit set. *)
type t = BitSet.t

(* The seven precomputed hashes of an element (see [hash]); these are
   independent of any particular filter's capacity, so one [elt] can be
   used against several filters. *)
type elt = {
  h1: int;
  h2: int;
  h3: int;
  h4: int;
  h5: int;
  h6: int;
  h7: int;
}
(* Builds a Bloom filter sized to keep the false-positive rate near ~1%
   for [capacity] insertions. *)
let create ~(capacity : int) : t =
  let number_of_bits =
    match capacity with
    | 0 -> 0
    | c when c < 7 ->
      (* The smallest filter used is 64 bits, which stays under a 1%
         false-positive rate for up to 6 items. *)
      64
    | _ ->
      (* Optimal sizing, see:
         https://en.wikipedia.org/wiki/Bloom_filter#Optimal_number_of_hash_functions
         number of bits = -n * ln error_rate / (ln 2)^2 *)
      let cap = float_of_int capacity in
      let error_rate = 0.01 in
      let n = ~-.cap *. (log error_rate /. (log 2. *. log 2.)) in
      int_of_float @@ ceil n
  in
  BitSet.create number_of_bits
(* Precompute the seven seeded hashes of [s]. Seven hash functions is the
   space-optimal count for a ~1% false-positive filter, and keeping them in
   a record lets the same element be examined against several filters
   without rehashing. *)
let hash (s : string) : elt =
  let h seed = Hashtbl.seeded_hash seed s in
  { h1 = h 1; h2 = h 2; h3 = h 3; h4 = h 4; h5 = h 5; h6 = h 6; h7 = h 7 }
(* Marks the seven bits selected by [hashes] in [bf]. A zero-capacity
   filter has no bits, so adding to it is a no-op. *)
let add (bf : t) (hashes : elt) : unit =
  let len = BitSet.length bf in
  if len > 0 then
    List.iter
      (fun h -> BitSet.set bf (h mod len))
      [hashes.h1; hashes.h2; hashes.h3; hashes.h4; hashes.h5; hashes.h6; hashes.h7]
(* Membership test: true only when all seven bits selected by [hashes]
   are set. May report false positives, never false negatives. The empty
   (zero-capacity) filter contains nothing. *)
let mem (bf : t) (hashes : elt) : bool =
  let len = BitSet.length bf in
  len > 0
  && List.for_all
       (fun h -> BitSet.mem bf (h mod len))
       [hashes.h1; hashes.h2; hashes.h3; hashes.h4; hashes.h5; hashes.h6; hashes.h7]
OCaml Interface | hhvm/hphp/hack/src/utils/core/bloom_filter/bloomFilter.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
 * A Bloom Filter is a probabilistic data structure representing a set.
* Elements can be added to the set and supports membership queries.
* Membership will never produce false negative (saying an element
* is not a member of the set when it is), but does allow false positives
* (saying an element is a member of the set when it is not). The false
* positive rate will increase as members are added so it is important
* to choose the initial capacity to match the number of elements you
* expect to add
**)
type t
type elt
(**
 * Creates a Bloom Filter of sufficient size to answer membership queries
* with ~1% false positive rates if `capacity` or fewer elements are
* added
**)
val create : capacity:int -> t
(**
 * Transforms a string to a set of hashes, representing a potential element
* of a Bloom Filter. The hashes are independent of the capacity of the
* Bloom Filter, so the same `elt` can be used for multiple filters
**)
val hash : string -> elt
val add : t -> elt -> unit
val mem : t -> elt -> bool |
hhvm/hphp/hack/src/utils/core/config/dune | (rule
(targets build-timestamp-opt)
(action
(with-stdout-to
build-timestamp-opt
(bash "echo -DHH_BUILD_TIMESTAMP=$(date +%s)ul")))) |
|
OCaml | hhvm/hphp/hack/src/utils/disk/disk.ml | include
(val if Injector_config.use_test_stubbing then
(module TestDisk : Disk_sig.S)
else
(module RealDisk : Disk_sig.S)) |
OCaml | hhvm/hphp/hack/src/utils/disk/disk_sig.ml | module Types = struct
exception NotADirectory of string
exception No_such_file_or_directory of string
exception Rename_target_already_exists of string
exception Rename_target_dir_not_empty of string
end
module type S = sig
include module type of Types
val cat : string -> string
val write_file : file:string -> contents:string -> unit
val file_exists : string -> bool
  (* Delete the given path - if it is a directory, delete recursively. *)
val rm_dir_tree : string -> unit
val is_directory : string -> bool
val getcwd : unit -> string
val chdir : string -> unit
(* Changes the permissions on a path, which could be a file or a directory.
The integer value is the permissions mode. If `recursive` is true, then
applies the permissions mode to the directory and all of its contents,
recursively.
See the Unix documentation for the related `chmod` command. *)
val chmod : recursive:bool -> string -> int -> unit
val mkdir : string -> int -> unit
(* Equivalent to `mkdir -p` - creates subdirectories recursively if they
don't exist. *)
val mkdir_p : string -> unit
(* Return the paths of all the regular files present in the given directory
and its subdirectories. *)
val readpath : string -> string list
(* Return the names of all files present in the given directory. *)
val readdir : string -> string array
(* Rename from old path to new path. *)
val rename : string -> string -> unit
(* Size in bytes of a file or a directory and its contents, recursively *)
val treesize : string -> int
(* Modified time of a given file *)
val filemtime : string -> float
end |
OCaml | hhvm/hphp/hack/src/utils/disk/realDisk.ml | include Disk_sig.Types
let cat (filename : string) : string =
Counters.count Counters.Category.Disk_cat @@ fun () ->
let ic = open_in_bin filename in
let len =
try in_channel_length ic with
| Sys_error _ -> 0
in
(* in_channel_length returns 0 for non-regular files; try reading it
using a fixed-sized buffer if it appears to be empty.
NOTE: JaneStreet's Core Sys module defines a function is_file which
does a proper check on whether the file exists and is regular. *)
if len > 0 then (
let buf = Buffer.create len in
Buffer.add_channel buf ic len;
close_in ic;
Buffer.contents buf
) else
let len = 1024 in
(* for Buffer, that's the initial size of the internal byte sequence *)
let buf = Buffer.create len in
let bytes = Bytes.create len in
let rec read_bytes () : unit =
try
let n = input ic bytes 0 len in
if n = 0 then
()
else (
Buffer.add_subbytes buf bytes 0 n;
(* 0 is offset *)
read_bytes ()
)
with
| End_of_file -> ()
in
read_bytes ();
close_in ic;
Buffer.contents buf
(* A [Sys_error] carries only its message string, so recognize the
   "missing file" failure by comparing against the exact message the OCaml
   runtime produces for [file]. *)
let is_file_not_exist_error ~file ~err_msg =
  String.equal err_msg (file ^ ": No such file or directory")
(* Writes [contents] to [file], creating the file or truncating any
   existing contents.
   @raise No_such_file_or_directory if the parent directory is missing
   (translated from the [Sys_error] that [open_out] raises). *)
let write_file ~file ~contents =
  let chan =
    try open_out file with
    | Sys_error err_msg when is_file_not_exist_error ~file ~err_msg ->
      raise (No_such_file_or_directory file)
  in
  output_string chan contents;
  close_out chan
(* Recursively creates [d] and any missing ancestors (like [mkdir -p]).
   Directories are created with mode 0o777; the umask is cleared for the
   duration so that mode applies literally, and restored in [finally] even
   if creation raises.
   @raise NotADirectory if an existing non-directory blocks the path. *)
let rec mkdir_p = function
  | "" -> failwith "Unexpected empty directory, should never happen"
  | d when not (Sys.file_exists d) ->
    mkdir_p (Filename.dirname d);
    let old_mask = Unix.umask 0 in
    Utils.try_finally
      ~f:
        begin
          fun () ->
            (* A concurrent creator may win the race; EEXIST is benign. *)
            try Unix.mkdir d 0o777 with
            | Unix.Unix_error (Unix.EEXIST, _, _) -> ()
        end
      ~finally:(fun () -> ignore (Unix.umask old_mask))
  | d when Sys.is_directory d -> ()
  | d -> raise (NotADirectory d)
(* Recursively deletes [path]: a directory has its entries removed first,
   then [rmdir]; every other node kind (regular file, symlink, device,
   fifo, socket) is [unlink]ed. Uses [lstat], so symlinks are removed, not
   followed. A path that disappears mid-deletion is silently ignored. *)
let rec rm_dir_tree path =
  try
    let stats = Unix.lstat path in
    match stats.Unix.st_kind with
    | Unix.S_DIR ->
      let contents = Sys.readdir path in
      List.iter
        (fun name ->
          let name = Filename.concat path name in
          rm_dir_tree name)
        (Array.to_list contents);
      Unix.rmdir path
    | Unix.S_LNK
    | Unix.S_REG
    | Unix.S_CHR
    | Unix.S_BLK
    | Unix.S_FIFO
    | Unix.S_SOCK ->
      Unix.unlink path
  with
  (* Path has been deleted out from under us - can ignore it. *)
  | Sys_error s when s = Printf.sprintf "%s: No such file or directory" path ->
    ()
  | Unix.Unix_error (Unix.ENOENT, _, _) -> ()
(* True iff [x] names an existing directory; any [Sys_error] (for example
   a nonexistent path) is treated as "not a directory". *)
let is_directory x =
  match Sys.is_directory x with
  | b -> b
  | exception Sys_error _ -> false
let file_exists = Sys.file_exists
let getcwd = Sys.getcwd
let chdir = Sys.chdir
let rec chmod ~(recursive : bool) (path : string) (mode : int) : unit =
let stats = Unix.lstat path in
match stats.Unix.st_kind with
| Unix.S_DIR ->
Unix.chmod path mode;
if recursive then
let contents = Sys.readdir path in
Core.List.iter
~f:
begin
fun name ->
let name = Filename.concat path name in
chmod ~recursive name mode
end
(Array.to_list contents)
| _ -> Unix.chmod path mode
let mkdir = Unix.mkdir
let rec readpath (path : string) : string list =
let open Unix in
let stats = lstat path in
match stats.st_kind with
| S_DIR ->
let contents = Sys.readdir path in
Core.List.fold
~init:[]
~f:
begin
fun acc name ->
let name = Filename.concat path name in
List.rev_append acc (readpath name)
end
(Array.to_list contents)
| S_REG -> [path]
| _ -> []
let readdir = Sys.readdir
(* Renames [old] to [target], checking preconditions up front so callers
   get the typed exceptions from [Disk_sig.Types] rather than raw
   [Sys_error]s. NOTE(review): the existence checks are racy (TOCTOU) --
   the filesystem can change between the checks and [Sys.rename].
   @raise No_such_file_or_directory if [old] or [target]'s parent is missing.
   @raise Rename_target_dir_not_empty if [target] is a non-empty directory. *)
let rename old target =
  if not (file_exists old) then
    raise (No_such_file_or_directory old)
  else if not (file_exists (Filename.dirname target)) then
    raise (No_such_file_or_directory (Filename.dirname target))
  else
    try Sys.rename old target with
    | Sys_error s when s = "Directory not empty" ->
      raise (Rename_target_dir_not_empty target)
(* Total size in bytes of [path]: for a directory, its own size plus the
   recursive sizes of all entries; for a regular file, its size; 0 for
   anything else. Classification uses [lstat] (symlinks are not recursed
   into) while the size itself uses [stat].
   NOTE(review): [stat] follows symlinks, so a symlink contributes its
   target's size even though it is not classified as a file here --
   confirm this lstat/stat asymmetry is intended. *)
let rec treesize path : int =
  let open Unix in
  let stats = lstat path in
  let size = (stat path).st_size in
  match stats.st_kind with
  | S_DIR ->
    let contents = Sys.readdir path in
    size
    + Core.List.fold
        ~init:0
        ~f:
          begin
            fun acc name ->
              let name = Filename.concat path name in
              acc + treesize name
          end
        (Array.to_list contents)
  | S_REG -> size
  | _ -> 0

(* Last-modification time (seconds since the epoch) of [file]; follows
   symlinks. *)
let filemtime file = (Unix.stat file).Unix.st_mtime
OCaml | hhvm/hphp/hack/src/utils/disk/testDisk.ml | include Disk_sig.Types
module Hashtbl_base = Hashtbl

(* Stdlib [Hashtbl] extended with an emptiness predicate. NOTE: [empty]
   tests whether a table has no bindings; it does not construct a table. *)
module Hashtbl = struct
  include Hashtbl_base

  let empty t = length t = 0
end
| Actual_file_with_contents of string
| Directory of (string, file) Hashtbl.t
module Helpers = struct
exception Relative_parent_not_supported
exception Is_not_actual_file
exception Cannot_overwrite_existing_directory_with_actual_file
end
(** Directory for "/" *)
let root = Hashtbl.create 10
let mtimes = Hashtbl.create 10
let sizes = Hashtbl.create 10
(** We avoid using Unix.getcwd () in TestDisk because:
* 1) Getting global state from this clean test environment is gross
* 2) Because of 1, CWD should actually be tracked inside this virtual FS.
* 3) Javascript doesn't support Unix.getcwd anyway, so we can't transpile
* As such, we make a fake cwd *)
let cwd = ref "/fake/initial_cwd"
let getcwd () = !cwd
let chdir s = cwd := s
let chmod ~recursive path mode = ignore (recursive, path, mode)
let rec mkdir_p path root =
match Filename.dirname path with
| "." -> mkdir_p (getcwd ()) root
| "/" -> root
| parent ->
let parent = mkdir_p parent root in
if Filename.basename path = "." then
parent
else if Filename.basename path = ".." then
raise Helpers.Relative_parent_not_supported
else (
match Hashtbl.find_opt parent (Filename.basename path) with
| None ->
let dir = Hashtbl.create 10 in
let () = Hashtbl.add parent (Filename.basename path) (Directory dir) in
dir
| Some (Directory table) -> table
| Some (Actual_file_with_contents _) -> raise (NotADirectory path)
)
and get_dir path root =
match Filename.dirname path with
| "." -> get_dir (getcwd ()) root
| "/" -> root
| parent ->
let parent = get_dir parent root in
if Filename.basename path = "." then
parent
else if Filename.basename path = ".." then
raise Helpers.Relative_parent_not_supported
else (
match Hashtbl.find_opt parent (Filename.basename path) with
| None -> raise (No_such_file_or_directory path)
| Some (Directory table) -> table
| Some (Actual_file_with_contents _) -> raise (NotADirectory path)
)
(** Returns file at path (may be an actual file or a directory). *)
and get_file path root =
let parent = get_dir (Filename.dirname path) root in
let basename = Filename.basename path in
if basename = "." then
Directory parent
else
match Hashtbl.find_opt parent basename with
| Some path -> path
| None -> raise (No_such_file_or_directory path)
(** Initialize creation of CWD. *)
let () = ignore (mkdir_p "." root)
let get x =
match get_file x root with
| Actual_file_with_contents contents -> contents
| Directory _ -> raise Helpers.Is_not_actual_file
(** Set the contents "y" for file "x". Has an option to create all parent
* directories automatically. *)
let set ?(create_parent_dirs = true) x y =
let parent =
if create_parent_dirs then
mkdir_p (Filename.dirname x) root
else
get_dir (Filename.dirname x) root
in
match Hashtbl.find_opt parent (Filename.basename x) with
| None ->
let new_file = Actual_file_with_contents y in
Hashtbl.add parent (Filename.basename x) new_file
| Some (Actual_file_with_contents _) ->
let new_file = Actual_file_with_contents y in
Hashtbl.replace parent (Filename.basename x) new_file
| Some (Directory _) ->
raise Helpers.Cannot_overwrite_existing_directory_with_actual_file
(* True iff [x] resolves to a [Directory] node in the virtual tree; a
   missing path is simply "not a directory". *)
let is_directory x =
  match get_file x root with
  | Directory _ -> true
  | Actual_file_with_contents _ -> false
  | exception No_such_file_or_directory _ -> false
let cat x = Counters.count Counters.Category.Disk_cat @@ fun () -> get x
let file_exists x =
try
match get_file x root with
| Actual_file_with_contents _
| Directory _ ->
true
with
| No_such_file_or_directory _ -> false
let write_file ~file ~contents = set ~create_parent_dirs:false file contents
(* Creates the single directory [path]; the permission argument is ignored
   in this virtual filesystem. Unlike [mkdir_p], the parent must already
   exist. Idempotent if the directory is already present.
   @raise No_such_file_or_directory if the parent directory is missing.
   @raise NotADirectory if an actual file already occupies [path].
   Bug fix: this used to call [mkdir_p (Filename.basename path) parent],
   but [Filename.dirname] of a bare basename is always ".", so mkdir_p's
   cwd branch fired and created the fake-cwd hierarchy under [parent]
   instead of ever creating [Filename.basename path] itself. *)
let mkdir path _perm =
  let parent = get_dir (Filename.dirname path) root in
  let base = Filename.basename path in
  match Hashtbl.find_opt parent base with
  | None -> Hashtbl.add parent base (Directory (Hashtbl.create 10))
  | Some (Directory _) -> ()
  | Some (Actual_file_with_contents _) -> raise (NotADirectory path)
let rm_dir_tree path =
if path = "/" then
Hashtbl.clear root
else
try
let dir = get_dir (Filename.dirname path) root in
Hashtbl.remove dir (Filename.basename path)
with
| No_such_file_or_directory _ ->
(* File already doesn't exist; ignore. *)
()
let rec readpath (path : string) : string list =
match get_file path root with
| Actual_file_with_contents _ -> [path]
| Directory directory ->
Hashtbl.fold
(fun name _v acc ->
let name = Filename.concat path name in
List.rev_append acc (readpath name))
directory
[]
let readdir (x : string) : string array =
match get_file x root with
| Actual_file_with_contents _ -> raise (NotADirectory x)
| Directory directory ->
let names = Hashtbl.fold (fun k _v acc -> k :: acc) directory [] in
Array.of_list names
let rename old target =
if not (file_exists old) then
raise (No_such_file_or_directory old)
else if not (file_exists (Filename.dirname target)) then
raise (No_such_file_or_directory (Filename.dirname target))
else
let old_parent = get_dir (Filename.dirname old) root in
let old_file = get_file old root in
(* What if the last character in target is a "/"? What to do? *)
let target_parent = get_dir (Filename.dirname target) root in
match
(old_file, Hashtbl.find_opt target_parent (Filename.basename target))
with
| (Directory _, Some (Directory target_files))
when not (Hashtbl.empty target_files) ->
raise (Rename_target_dir_not_empty target)
| (Directory _, Some (Directory _))
| (_, None) ->
(* Rename one directory to the other. *)
Hashtbl.replace target_parent (Filename.basename target) old_file;
Hashtbl.remove old_parent (Filename.basename old)
| (_, _) -> failwith "Not sure what to do here"
let treesize (path : string) : int =
match Hashtbl.find_opt sizes path with
| None -> 0
| Some size -> size
let filemtime (file : string) : float =
match Hashtbl.find_opt mtimes file with
| None -> 0.0
| Some mtime -> mtime |
TOML | hhvm/hphp/hack/src/utils/eq_modulo_pos/Cargo.toml | # @generated by autocargo
[package]
name = "eq_modulo_pos"
version = "0.0.0"
edition = "2021"
[lib]
path = "eq_modulo_pos.rs"
[dependencies]
arena_collections = { version = "0.0.0", path = "../../arena_collections" }
bstr = { version = "1.4.0", features = ["serde", "std", "unicode"] }
eq_modulo_pos_derive = { version = "0.0.0", path = "../eq_modulo_pos_derive" }
hcons = { version = "0.0.0", path = "../../hcons" }
indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] }
ocamlrep_caml_builtins = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" } |
Rust | hhvm/hphp/hack/src/utils/eq_modulo_pos/eq_modulo_pos.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::rc::Rc;
use std::sync::Arc;
pub use eq_modulo_pos_derive::EqModuloPos;
use ocamlrep_caml_builtins::Int64;
/// An implementation of `Eq` which is insensitive to positions
/// (e.g., `pos::BPos`) and reasons (e.g., `ty::reason::BReason`).
///
/// If `PartialOrd` or `Ord` are also implemented for `Self`, their methods must
/// be consistent with `EqModuloPos`. For any two values for which
/// `eq_modulo_pos` or `eq_modulo_pos_and_reason` returns `false`, it must be
/// the case that their ordering cannot be changed by modifying positions or
/// reasons inside them.
pub trait EqModuloPos {
fn eq_modulo_pos(&self, rhs: &Self) -> bool;
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool;
}
/// Two options are equal modulo positions iff both are `None`, or both
/// are `Some` of values that are themselves equal modulo positions.
impl<T: EqModuloPos> EqModuloPos for Option<T> {
    fn eq_modulo_pos(&self, rhs: &Self) -> bool {
        match (self, rhs) {
            (None, None) => true,
            (Some(a), Some(b)) => a.eq_modulo_pos(b),
            (Some(_), None) | (None, Some(_)) => false,
        }
    }

    fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
        match (self, rhs) {
            (None, None) => true,
            (Some(a), Some(b)) => a.eq_modulo_pos_and_reason(b),
            (Some(_), None) | (None, Some(_)) => false,
        }
    }
}
/// Slices are compared element-wise; lengths must match, and comparison
/// short-circuits on the first mismatching pair.
impl<T: EqModuloPos> EqModuloPos for [T] {
    fn eq_modulo_pos(&self, rhs: &Self) -> bool {
        self.len() == rhs.len()
            && self
                .iter()
                .zip(rhs.iter())
                .all(|(lhs, rhs)| lhs.eq_modulo_pos(rhs))
    }

    fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
        self.len() == rhs.len()
            && self
                .iter()
                .zip(rhs.iter())
                .all(|(lhs, rhs)| lhs.eq_modulo_pos_and_reason(rhs))
    }
}
impl<T: EqModuloPos> EqModuloPos for hcons::Hc<T> {
fn eq_modulo_pos(&self, rhs: &Self) -> bool {
(**self).eq_modulo_pos(&**rhs)
}
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
(**self).eq_modulo_pos_and_reason(&**rhs)
}
}
macro_rules! impl_with_equal {
($($ty:ty,)*) => {$(
impl EqModuloPos for $ty {
fn eq_modulo_pos(&self, rhs: &Self) -> bool {
self == rhs
}
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
self == rhs
}
}
)*}
}
impl_with_equal! {
u8,
u16,
u32,
u64,
usize,
i8,
i16,
i32,
i64,
isize,
u128,
i128,
bool,
char,
str,
String,
std::path::Path,
std::path::PathBuf,
bstr::BStr,
bstr::BString,
Int64,
}
macro_rules! impl_deref {
($ty:ty) => {
impl<T: EqModuloPos + ?Sized> EqModuloPos for $ty {
fn eq_modulo_pos(&self, rhs: &Self) -> bool {
(**self).eq_modulo_pos(&**rhs)
}
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
(**self).eq_modulo_pos_and_reason(&**rhs)
}
}
};
}
impl_deref! { &T }
impl_deref! { &mut T }
impl_deref! { Box<T> }
impl_deref! { Rc<T> }
impl_deref! { Arc<T> }
macro_rules! impl_tuple {
() => (
impl EqModuloPos for () {
fn eq_modulo_pos(&self, _rhs: &Self) -> bool { true }
fn eq_modulo_pos_and_reason(&self, _rhs: &Self) -> bool { true }
}
);
( $(($name:ident, $lhs:ident, $rhs:ident))+) => (
impl< $($name: EqModuloPos),+ > EqModuloPos for ($($name,)+) {
fn eq_modulo_pos(&self, rhs: &Self) -> bool {
let ($(ref $lhs,)+) = self;
let ($(ref $rhs,)+) = rhs;
true
$(&& $lhs.eq_modulo_pos($rhs))+
}
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
let ($(ref $lhs,)+) = self;
let ($(ref $rhs,)+) = rhs;
true
$(&& $lhs.eq_modulo_pos_and_reason($rhs))+
}
}
);
}
impl_tuple! { (A, a1, a2) }
impl_tuple! { (A, a1, a2) (B, b1, b2) }
impl_tuple! { (A, a1, a2) (B, b1, b2) (C, c1, c2) }
impl_tuple! { (A, a1, a2) (B, b1, b2) (C, c1, c2) (D, d1, d2) }
macro_rules! impl_with_iter {
(<$($gen:ident),* $(,)?> <$($unbounded:ident),*> $ty:ty , $size:ident) => {
impl<$($gen: EqModuloPos,)* $($unbounded,)*> EqModuloPos for $ty {
fn eq_modulo_pos(&self, rhs: &Self) -> bool {
if self.$size() != rhs.$size() {
false
} else {
let mut res = true;
for (lhs, rhs) in self.iter().zip(rhs.iter()) {
res = res && lhs.eq_modulo_pos(&rhs);
}
res
}
}
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
if self.$size() != rhs.$size() {
false
} else {
let mut res = true;
for (lhs, rhs) in self.iter().zip(rhs.iter()) {
res = res && lhs.eq_modulo_pos_and_reason(&rhs);
}
res
}
}
}
};
(<$($gen:ident),* $(,)?> $ty:ty , $size:ident) => {
impl_with_iter! { <$($gen,)*> <> $ty , $size }
}
}
impl_with_iter! { <T> Vec<T>, len }
impl_with_iter! {
<K, V> arena_collections::SortedAssocList<'_, K, V>, len
}
// The arena_collections Set and Map are ordered collections, and rely on the
// invariant that the impl of `Ord` is consistent with the impl of
// `EqModuloPos`.
impl_with_iter! {
<T> arena_collections::set::Set<'_, T>, count
}
impl_with_iter! {
<K, V> arena_collections::map::Map<'_, K, V>, count
}
// `BTreeSet` and `BTreeMap` also rely on the invariant that the impl of `Ord`
// is consistent with the impl of `EqModuloPos`. We can iterate over both
// collections and expect their keys to be in the same order, even if they
// differ in positions.
impl_with_iter! {
<T> std::collections::BTreeSet<T>, len
}
impl_with_iter! {
<K, V> std::collections::BTreeMap<K, V>, len
}
impl<K, V, S> EqModuloPos for std::collections::HashMap<K, V, S>
where
K: Eq + std::hash::Hash,
V: EqModuloPos,
S: std::hash::BuildHasher,
// This impl (and the impls for IndexMap, {Hash,Index}Set below) is
// restricted to collections whose keys implement AsRef<str>. The intent is
// to exclude maps and sets whose key types contain positions or reasons,
// since this implementation does not compare keys modulo pos. In practice,
// we only use maps and sets with string keys in types which need to
// implement EqModuloPos.
K: AsRef<str>,
{
fn eq_modulo_pos(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
// Since we've checked that both collections have the same number of
// elements, and all keys are distinct, iterating over one set is
// sufficient. That is to say, if all members of `self` are in `other`,
// we can assume that all members of `other` are in `self`.
// c.f. the impl of PartialEq: https://github.com/rust-lang/rust/blob/1.63.0/library/std/src/collections/hash/map.rs#L1275
self.iter()
.all(|(key, value)| other.get(key).map_or(false, |v| value.eq_modulo_pos(v)))
}
fn eq_modulo_pos_and_reason(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().all(|(key, value)| {
other
.get(key)
.map_or(false, |v| value.eq_modulo_pos_and_reason(v))
})
}
}
impl<K, V, S> EqModuloPos for indexmap::IndexMap<K, V, S>
where
K: Eq + std::hash::Hash + AsRef<str>,
V: EqModuloPos,
S: std::hash::BuildHasher,
{
fn eq_modulo_pos(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
self.iter()
.all(|(key, value)| other.get(key).map_or(false, |v| value.eq_modulo_pos(v)))
}
fn eq_modulo_pos_and_reason(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().all(|(key, value)| {
other
.get(key)
.map_or(false, |v| value.eq_modulo_pos_and_reason(v))
})
}
}
impl<T, S> EqModuloPos for std::collections::HashSet<T, S>
where
T: Eq + std::hash::Hash + AsRef<str>,
S: std::hash::BuildHasher,
{
fn eq_modulo_pos(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().all(|key| other.contains(key))
}
fn eq_modulo_pos_and_reason(&self, other: &Self) -> bool {
self.eq_modulo_pos(other)
}
}
impl<T, S> EqModuloPos for indexmap::IndexSet<T, S>
where
T: Eq + std::hash::Hash + AsRef<str>,
S: std::hash::BuildHasher,
{
fn eq_modulo_pos(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().all(|key| other.contains(key))
}
fn eq_modulo_pos_and_reason(&self, other: &Self) -> bool {
self.eq_modulo_pos(other)
}
} |
TOML | hhvm/hphp/hack/src/utils/eq_modulo_pos_derive/Cargo.toml | # @generated by autocargo
[package]
name = "eq_modulo_pos_derive"
version = "0.0.0"
edition = "2021"
[lib]
path = "eq_modulo_pos_derive.rs"
test = false
doctest = false
proc-macro = true
[dependencies]
proc-macro2 = { version = "1.0.64", features = ["span-locations"] }
quote = "1.0.29"
synstructure = "0.12" |
Rust | hhvm/hphp/hack/src/utils/eq_modulo_pos_derive/eq_modulo_pos_derive.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
use synstructure::decl_derive;
use synstructure::Structure;
decl_derive!([EqModuloPos] => derive_eq_modulo_pos);
fn derive_eq_modulo_pos(mut s: Structure<'_>) -> TokenStream {
// By default, if you are deriving an impl of trait Foo for generic type
// X<T>, synstructure will add Foo as a bound not only for the type
// parameter T, but also for every type which appears as a field in X. This
// is not necessary for our use case--we can just require that the type
// parameters implement our trait.
s.add_bounds(synstructure::AddBounds::Generics);
let eq_modulo_pos = derive_eq_modulo_pos_body(&s);
let eq_modulo_pos_and_reason = derive_eq_modulo_pos_and_reason_body(&s);
s.gen_impl(quote! {
gen impl EqModuloPos for @Self {
fn eq_modulo_pos(&self, rhs: &Self) -> bool {
match self { #eq_modulo_pos }
}
fn eq_modulo_pos_and_reason(&self, rhs: &Self) -> bool {
match self { #eq_modulo_pos_and_reason }
}
}
})
}
fn derive_eq_modulo_pos_body(s: &Structure<'_>) -> TokenStream {
s.each_variant(|v| {
let mut s_rhs = s.clone();
let v_rhs = s_rhs
.variants_mut()
.iter_mut()
.find(|v2| v2.ast().ident == v.ast().ident)
.unwrap();
for (i, binding) in v_rhs.bindings_mut().iter_mut().enumerate() {
let name = format!("rhs{}", i);
binding.binding = proc_macro2::Ident::new(&name, binding.binding.span());
}
let arm = v_rhs.pat();
let mut inner = quote! {true};
for (bi, bi_rhs) in v.bindings().iter().zip(v_rhs.bindings().iter()) {
inner = quote! { #inner && #bi.eq_modulo_pos(#bi_rhs) }
}
quote!(
match rhs {
#arm => { #inner }
_ => false,
}
)
})
}
fn derive_eq_modulo_pos_and_reason_body(s: &Structure<'_>) -> TokenStream {
s.each_variant(|v| {
let mut s_rhs = s.clone();
let v_rhs = s_rhs
.variants_mut()
.iter_mut()
.find(|v2| v2.ast().ident == v.ast().ident)
.unwrap();
for (i, binding) in v_rhs.bindings_mut().iter_mut().enumerate() {
let name = format!("rhs{}", i);
binding.binding = proc_macro2::Ident::new(&name, binding.binding.span());
}
let arm = v_rhs.pat();
let mut inner = quote! {true};
for (bi, bi_rhs) in v.bindings().iter().zip(v_rhs.bindings().iter()) {
inner = quote! { #inner && #bi.eq_modulo_pos_and_reason(#bi_rhs) }
}
quote!(
match rhs {
#arm => { #inner }
_ => false,
}
)
})
} |
TOML | hhvm/hphp/hack/src/utils/escaper/Cargo.toml | # @generated by autocargo
[package]
name = "escaper"
version = "0.0.0"
edition = "2021"
[lib]
path = "../escaper.rs"
[dependencies]
bstr = { version = "1.4.0", features = ["serde", "std", "unicode"] }
bumpalo = { version = "3.11.1", features = ["collections"] }
[dev-dependencies]
pretty_assertions = { version = "1.2", features = ["alloc"], default-features = false } |
hhvm/hphp/hack/src/utils/exec_command/dune | (* -*- tuareg -*- *)
let library_entry name suffix =
Printf.sprintf
"(library
(name %s)
(wrapped false)
(modules)
(libraries %s_%s))" name name suffix
let fb_entry name =
library_entry name "fb"
let stubs_entry name =
library_entry name "stubs"
let entry is_fb name =
if is_fb then
fb_entry name
else
stubs_entry name
let () =
(* test presence of fb subfolder *)
let current_dir = Sys.getcwd () in
(* we are in src/utils/exec_command, locate src/facebook *)
let src_dir = Filename.dirname @@ Filename.dirname current_dir in
let fb_dir = Filename.concat src_dir "facebook" in
(* locate src/facebook/dune *)
let fb_dune = Filename.concat fb_dir "dune" in
let is_fb = Sys.file_exists fb_dune in
let lib_entry = entry is_fb "exec_command" in
Jbuild_plugin.V1.send lib_entry |
|
hhvm/hphp/hack/src/utils/fetcher/dune | (* -*- tuareg -*- *)
let library_entry name suffix =
Printf.sprintf
"(library
(name %s)
(wrapped false)
(modules)
(libraries %s_%s))" name name suffix
let fb_entry name =
library_entry name "fb"
let stubs_entry name =
library_entry name "stubs"
let entry is_fb name =
if is_fb then
fb_entry name
else
stubs_entry name
let () =
(* test presence of fb subfolder *)
let current_dir = Sys.getcwd () in
(* we are in src/utils/fetcher, locate src *)
let src_dir = Filename.dirname @@ Filename.dirname current_dir in
let fb_dir = Filename.concat src_dir "facebook" in
(* locate src/facebook/dune *)
let fb_dune = Filename.concat fb_dir "dune" in
let is_fb = Sys.file_exists fb_dune in
let fetcher = entry is_fb "fetcher" in
Jbuild_plugin.V1.send fetcher |
|
TOML | hhvm/hphp/hack/src/utils/ffi/Cargo.toml | # @generated by autocargo
[package]
name = "ffi"
version = "0.0.0"
edition = "2021"
[lib]
path = "lib.rs"
[dependencies]
bstr = { version = "1.4.0", features = ["serde", "std", "unicode"] }
bumpalo = { version = "3.11.1", features = ["collections"] }
serde = { version = "1.0.176", features = ["derive", "rc"] }
write_bytes = { version = "0.0.0", path = "../write_bytes/write_bytes" } |
Rust | hhvm/hphp/hack/src/utils/ffi/ffi.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::cmp::Ordering;
use std::fmt;
use std::hash::Hash;
use std::hash::Hasher;
use std::slice::from_raw_parts;
use bstr::BStr;
use serde::Serialize;
use serde::Serializer;
/// Maybe<T> is similar to C++ `std::optional`. It is just like Rust
/// `Option<T>` but has repr(C) for use with cbindgen.
#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash, Serialize)]
#[repr(C)]
pub enum Maybe<T> {
    Just(T),
    Nothing,
}

pub use self::Maybe::*;

impl<T> Default for Maybe<T> {
    /// The default is `Nothing`, mirroring `Option::default()`.
    #[inline]
    fn default() -> Self {
        Nothing
    }
}

impl<T: Clone> Clone for Maybe<T> {
    #[inline]
    fn clone(&self) -> Self {
        match self {
            Just(x) => Just(x.clone()),
            Nothing => Nothing,
        }
    }

    /// Clone-from that reuses the destination's storage when both sides
    /// are `Just`, like `Option::clone_from`.
    #[inline]
    fn clone_from(&mut self, source: &Self) {
        match (self, source) {
            (Just(to), Just(from)) => to.clone_from(from),
            (to, from) => *to = from.clone(),
        }
    }
}

impl<U> Maybe<U> {
    /// Converts from `&Maybe<U>` to `Maybe<&U>`.
    #[inline]
    pub const fn as_ref(&self) -> Maybe<&U> {
        match self {
            Just(x) => Just(x),
            Nothing => Nothing,
        }
    }

    /// Converts from `&mut Maybe<U>` to `Maybe<&mut U>`.
    #[inline]
    pub fn as_mut(&mut self) -> Maybe<&mut U> {
        match self {
            Just(x) => Just(x),
            Nothing => Nothing,
        }
    }

    /// Returns `true` if the value is a `Just`.
    #[inline]
    pub const fn is_just(&self) -> bool {
        matches!(self, Just(_))
    }

    /// Converts to the equivalent `Option`.
    pub fn into_option(self) -> Option<U> {
        match self {
            Just(t) => Some(t),
            Nothing => None,
        }
    }

    /// Returns `true` if the value is `Nothing`.
    #[inline]
    pub const fn is_nothing(&self) -> bool {
        matches!(self, Nothing)
    }

    /// Maps a `Maybe<U>` to `Maybe<T>` by applying `f` to a contained value.
    #[inline]
    pub fn map<T, F: FnOnce(U) -> T>(self, f: F) -> Maybe<T> {
        match self {
            Just(x) => Just(f(x)),
            Nothing => Nothing,
        }
    }

    /// Applies `f` to a contained value, or returns `default` (eagerly
    /// evaluated) for `Nothing`.
    #[inline]
    pub fn map_or<T, F: FnOnce(U) -> T>(self, default: T, f: F) -> T {
        match self {
            Just(t) => f(t),
            Nothing => default,
        }
    }

    /// Applies `f` to a contained value, or computes `default()` for
    /// `Nothing` (lazily evaluated).
    pub fn map_or_else<T, D, F>(self, default: D, f: F) -> T
    where
        F: FnOnce(U) -> T,
        D: FnOnce() -> T,
    {
        match self {
            Just(t) => f(t),
            Nothing => default(),
        }
    }

    /// Returns the contained value.
    ///
    /// # Panics
    /// Panics if the value is `Nothing`.
    pub fn unwrap(self) -> U {
        match self {
            Just(t) => t,
            Nothing => panic!("Expected Just(_)"),
        }
    }

    /// Returns the contained value or `default` (eagerly evaluated).
    pub fn unwrap_or(self, default: U) -> U {
        match self {
            Just(t) => t,
            Nothing => default,
        }
    }
}
impl<U: Default> Maybe<U> {
pub fn unwrap_or_default(self) -> U {
match self {
Just(t) => t,
Nothing => Default::default(),
}
}
}
impl<U> std::convert::From<Option<U>> for Maybe<U> {
    /// Converts `Some`/`None` to `Just`/`Nothing`.
    fn from(o: Option<U>) -> Self {
        match o {
            Some(x) => Just(x),
            None => Nothing,
        }
    }
}

impl<U> std::convert::From<Maybe<U>> for Option<U> {
    /// Converts `Just`/`Nothing` to `Some`/`None`.
    fn from(o: Maybe<U>) -> Self {
        match o {
            Just(x) => Some(x),
            Nothing => None,
        }
    }
}

#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash, Serialize)]
#[repr(C)]
/// A tuple of two elements.
pub struct Pair<U, V>(pub U, pub V);

impl<U, V> std::convert::From<(U, V)> for Pair<U, V> {
    fn from((u, v): (U, V)) -> Self {
        Pair(u, v)
    }
}

#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash, Serialize)]
#[repr(C)]
/// A tuple of three elements.
pub struct Triple<U, V, W>(pub U, pub V, pub W);

impl<U, V, W> std::convert::From<(U, V, W)> for Triple<U, V, W> {
    fn from((u, v, w): (U, V, W)) -> Self {
        Triple(u, v, w)
    }
}

// NOTE(review): unlike Pair/Triple, Quadruple does not derive Serialize -
// presumably no caller needs it; confirm before relying on it.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
#[repr(C)]
/// A tuple of four elements.
pub struct Quadruple<U, V, W, X>(pub U, pub V, pub W, pub X);

impl<U, V, W, X> std::convert::From<(U, V, W, X)> for Quadruple<U, V, W, X> {
    fn from((u, v, w, x): (U, V, W, X)) -> Self {
        Quadruple(u, v, w, x)
    }
}
// [Note: `BumpSliceMut<'a, T>` and `Slice<'a, T>` safety]
// -------------------------------------------------------
// If we assume construction via the factory functions
// `BumpSliceMut<'a, T>::new()` and `Slice<'a, T>::new()` then we know
// that the contained members are safe to use with
// `from_raw_parts_mut`/`from_raw_parts`. We rely on this in the
// implementation of traits such as `Eq` and friends.
#[repr(C)]
/// A type to substitute for `&'a [T]`.
// Safety: Must be initialized from an `&[T]`. Use `Slice<'a,
// T>::new()`.
pub struct Slice<'a, T> {
    data: *const T,
    len: usize,
    // Ties the raw pointer to the source slice's lifetime 'a.
    marker: std::marker::PhantomData<&'a ()>,
}

// A Slice can be cloned even if the underlying data is non-clonable:
// only the pointer and length are copied, never the elements.
impl<'a, T> Clone for Slice<'a, T> {
    fn clone(&self) -> Slice<'a, T> {
        Slice {
            data: self.data,
            len: self.len,
            marker: self.marker,
        }
    }
}

impl<'a, T> Copy for Slice<'a, T> {}

impl<'a, T: serde::Serialize> Serialize for Slice<'a, T> {
    // Serializes exactly like the underlying `&[T]`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.as_arena_ref().serialize(serializer)
    }
}

// Send+Sync Safety: Slice is no more mutable than T
unsafe impl<'a, T: Sync> Sync for Slice<'a, T> {}
unsafe impl<'a, T: Send> Send for Slice<'a, T> {}

impl<'a, T: fmt::Debug> Slice<'a, T> {
    // Shared Debug body: prints "Slice" followed by the element list.
    fn generic_debug_fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Slice")?;
        f.debug_list().entries(self.as_ref().iter()).finish()
    }
}

impl<'a, T: fmt::Debug> fmt::Debug for Slice<'a, T> {
    // Under UNSTABLE_DEBUG_SLICE this is a `default` (specializable) impl,
    // allowing the `Slice<'a, u8>` impl below to override it.
    #[cfg(UNSTABLE_DEBUG_SLICE)]
    default fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.generic_debug_fmt(f)
    }

    #[cfg(not(UNSTABLE_DEBUG_SLICE))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.generic_debug_fmt(f)
    }
}

#[cfg(UNSTABLE_DEBUG_SLICE)]
impl<'a> fmt::Debug for Slice<'a, u8> {
    // Byte slices print as a Rust-style byte-string literal, escaping
    // quotes, backslashes, and non-printable bytes as 3-digit octal.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = String::new();
        for &ch in self.as_ref() {
            match ch {
                b'\"' => {
                    s.push('\\');
                    s.push('\"');
                }
                b'\\' => {
                    s.push('\\');
                    s.push('\\');
                }
                ch => {
                    if !ch.is_ascii_graphic() {
                        s.push_str(&format!("\\{ch:03o}"));
                    } else {
                        s.push(ch as char);
                    }
                }
            }
        }
        f.write_str("Str(b\"")?;
        f.write_str(&s)?;
        f.write_str("\")")
    }
}

impl<'a, T: 'a> Default for Slice<'a, T> {
    /// The default is the empty slice.
    fn default() -> Self {
        Slice::empty()
    }
}
impl<'a, T> AsRef<[T]> for Slice<'a, T> {
    fn as_ref(&self) -> &[T] {
        self.as_arena_ref()
    }
}

impl<'a, T> std::ops::Deref for Slice<'a, T> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}

impl<'a, T: 'a> Slice<'a, T> {
    /// Wraps a borrowed slice. This is the only sound way to build a `Slice`.
    pub const fn new(t: &'a [T]) -> Self {
        Slice {
            data: t.as_ptr(),
            len: t.len(),
            marker: std::marker::PhantomData,
        }
    }

    /// Like `as_ref()` but reflects the fact that the underlying ref has a
    /// lifetime of `'a` (the arena) and not the same lifetime as `self`.
    pub fn as_arena_ref(&self) -> &'a [T] {
        // Safety: Assumes `self` has been constructed via `Slice<'a,
        // T>::new()` from some `&'a [T]` and so the call to
        // `from_raw_parts` is valid.
        unsafe { std::slice::from_raw_parts(self.data, self.len) }
    }

    /// Copies the items of `iter` into `alloc` and wraps them as a `Slice`.
    pub fn fill_iter<I>(alloc: &'a bumpalo::Bump, iter: I) -> Slice<'a, T>
    where
        I: IntoIterator<Item = T>,
        I::IntoIter: ExactSizeIterator,
    {
        Slice::new(alloc.alloc_slice_fill_iter(iter))
    }

    /// The empty slice; the dangling pointer is never dereferenced
    /// because `len` is 0.
    pub fn empty() -> Self {
        Slice {
            data: std::ptr::NonNull::dangling().as_ptr(),
            len: 0,
            marker: std::marker::PhantomData,
        }
    }

    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    pub fn len(&self) -> usize {
        self.len
    }
}

impl<'a, T: 'a> Slice<'a, T> {
    /// Moves the elements of `xs` into `alloc` and wraps them as a `Slice`.
    pub fn from_vec(alloc: &'a bumpalo::Bump, xs: Vec<T>) -> Self {
        alloc.alloc_slice_fill_iter(xs.into_iter()).into()
    }
}

impl<'a, T: 'a> IntoIterator for &'a Slice<'a, T> {
    type Item = &'a T;
    type IntoIter = std::slice::Iter<'a, T>;

    fn into_iter(self) -> std::slice::Iter<'a, T> {
        self.iter()
    }
}
impl<'a, T> std::convert::From<&'a [T]> for Slice<'a, T> {
    fn from(x: &'a [T]) -> Self {
        Self::new(x)
    }
}

impl<'a, T> std::convert::From<&'a mut [T]> for Slice<'a, T> {
    fn from(x: &'a mut [T]) -> Self {
        Self::new(x)
    }
}

// Comparison and hashing all delegate to the corresponding `&[T]`
// behavior (length first, then element-wise).
impl<'a, T: PartialEq> PartialEq for Slice<'a, T> {
    fn eq(&self, other: &Self) -> bool {
        // Safety: See [Note: `BumpSliceMut<'a, T>` and `Slice<'a, T>`
        // safety].
        let left = unsafe { from_raw_parts(self.data, self.len) };
        let right = unsafe { from_raw_parts(other.data, other.len) };
        left.eq(right)
    }
}

impl<'a, T: Eq> Eq for Slice<'a, T> {}

impl<'a, T: Hash> Hash for Slice<'a, T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Safety: See [Note: `BumpSliceMut<'a, T>` and `Slice<'a, T>`
        // safety].
        let me = unsafe { from_raw_parts(self.data, self.len) };
        me.hash(state);
    }
}

impl<'a, T: Ord> Ord for Slice<'a, T> {
    fn cmp(&self, other: &Self) -> Ordering {
        // Safety: See [Note: `BumpSliceMut<'a, T>` and `Slice<'a, T>`
        // safety].
        let left = unsafe { from_raw_parts(self.data, self.len) };
        let right = unsafe { from_raw_parts(other.data, other.len) };
        left.cmp(right)
    }
}

impl<'a, T: PartialOrd> PartialOrd for Slice<'a, T> {
    // Safety: See [Note: `BumpSliceMut<'a, T>` and `Slice<'a, T>`
    // safety].
    fn partial_cmp(&self, other: &Self) -> std::option::Option<Ordering> {
        let left = unsafe { from_raw_parts(self.data, self.len) };
        let right = unsafe { from_raw_parts(other.data, other.len) };
        left.partial_cmp(right)
    }
}
/// An alias for a type that substitutes for `&'a str`.
pub type Str<'a> = Slice<'a, u8>;

// C++:
// std::string slice_to_string(Str s) {
//    return std::string{s.data, s.data + s.len};
// }
impl<'a> Str<'a> {
    /// Make a copy of a `&str` in an `'a Bump` and return it as a `Str<'a>`.
    // Don't use this if you have an `&'a str` already, prefer
    // `Str::from` in that case and avoid a copy.
    pub fn new_str(alloc: &'a bumpalo::Bump, src: &str) -> Str<'a> {
        Slice::new(alloc.alloc_str(src.as_ref()).as_bytes())
    }

    /// Make a copy of a slice of bytes in an `'a Bump` and return it as a
    /// `Str<'a>`.
    pub fn new_slice(alloc: &'a bumpalo::Bump, src: &[u8]) -> Str<'a> {
        Slice::new(alloc.alloc_slice_copy(src))
    }

    /// Cast a `Str<'a>` back into a `&'a str`.
    pub fn unsafe_as_str(&self) -> &'a str {
        // Safety: Assumes `self` has been constructed via `Slice<'a,
        // T>::new()` from some `&'a str` and so the calls to
        // `from_raw_parts` and `from_utf8_unchecked` are valid.
        unsafe { std::str::from_utf8_unchecked(std::slice::from_raw_parts(self.data, self.len)) }
    }

    /// Cast a `Str<'a>` back into a `&'a BStr` (no UTF-8 requirement).
    pub fn as_bstr(&self) -> &'a BStr {
        // Safety: Assumes `self` has been constructed via `Slice<'a,
        // T>::new()` from some `&'a BStr` and so the call to
        // `from_raw_parts` is valid.
        unsafe { std::slice::from_raw_parts(self.data, self.len).into() }
    }
}

impl std::borrow::Borrow<[u8]> for Str<'_> {
    fn borrow(&self) -> &[u8] {
        self.as_ref()
    }
}

impl<'a> write_bytes::DisplayBytes for Str<'a> {
    // Writes the raw bytes with no escaping or encoding.
    fn fmt(&self, f: &mut write_bytes::BytesFormatter<'_>) -> std::io::Result<()> {
        use std::io::Write;
        f.write_all(self.as_ref())
    }
}

impl<'a> std::convert::From<&'a String> for Slice<'a, u8> {
    fn from(s: &'a String) -> Self {
        Self::new(s.as_bytes())
    }
}

impl<'a> std::convert::From<&'a str> for Slice<'a, u8> {
    fn from(s: &'a str) -> Self {
        Self::new(s.as_bytes())
    }
}

impl<'a> std::convert::From<&'a mut str> for Slice<'a, u8> {
    fn from(s: &'a mut str) -> Self {
        Self::new(s.as_bytes())
    }
}
}
#[derive(Debug)]
#[repr(C)]
/// A type for an arena backed `&'a mut[T]`. Similar to `Slice<'a, T>`
/// but with mutable contents and an allocator reference (enabling
/// `Clone` support).
// Safety: Initialize from an `&'arena [T]` where the memory is owned
// by `alloc`. Use `BumpSliceMut<'a, T>::new()`.
pub struct BumpSliceMut<'a, T> {
    data: *mut T,
    len: usize,
    // Kept so that `Clone` can allocate the copy in the same arena.
    alloc: &'a bumpalo::Bump,
    marker: std::marker::PhantomData<&'a ()>,
}

impl<'a, T> BumpSliceMut<'a, T> {
    // Safety: `t` must be owned by `alloc`.
    pub fn new(alloc: &'a bumpalo::Bump, t: &'a mut [T]) -> Self {
        BumpSliceMut {
            data: t.as_mut_ptr(),
            len: t.len(),
            alloc,
            marker: std::marker::PhantomData,
        }
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.as_ref().is_empty()
    }

    /// The arena this slice's storage lives in.
    pub fn alloc(&self) -> &'a bumpalo::Bump {
        self.alloc
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.as_ref().len()
    }

    #[inline]
    pub fn iter(&self) -> std::slice::Iter<'_, T> {
        self.as_ref().iter()
    }

    #[inline]
    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
        self.as_mut().iter_mut()
    }

    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        self.as_ref().get(index)
    }
}

impl<'a, T> std::ops::Index<usize> for BumpSliceMut<'a, T> {
    type Output = T;

    // Panics on out-of-bounds `i`, like slice indexing.
    #[inline]
    fn index(&self, i: usize) -> &T {
        &self.as_ref()[i]
    }
}

impl<'a, T> std::ops::IndexMut<usize> for BumpSliceMut<'a, T> {
    #[inline]
    fn index_mut(&mut self, i: usize) -> &mut T {
        &mut self.as_mut()[i]
    }
}
impl<'a, T: PartialEq> PartialEq for BumpSliceMut<'a, T> {
    /// Full equality: same length AND all corresponding elements equal.
    ///
    /// Bug fix: the previous implementation compared only the zipped
    /// prefix (`self.iter().zip(other.iter()).all(..)`), so a slice
    /// compared equal to any longer slice it was a prefix of — and it
    /// disagreed with `Slice`'s `PartialEq`. Delegating to `&[T]`
    /// equality checks the lengths first, then the elements.
    fn eq(&self, other: &Self) -> bool {
        self.as_ref() == other.as_ref()
    }
}
impl<'a, T: Eq> Eq for BumpSliceMut<'a, T> {}

impl<'a, T: Hash> Hash for BumpSliceMut<'a, T> {
    // Hashes the elements only (not the length), which still satisfies
    // the `Hash`/`Eq` contract: equal slices hash equally.
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        for i in self.iter() {
            i.hash(hasher);
        }
    }
}

impl<'a, T> AsRef<[T]> for BumpSliceMut<'a, T> {
    fn as_ref<'r>(&'r self) -> &'r [T] {
        // Safety:
        // - We assume 'a: 'r
        // - Assumes `self` has been constructed via
        //   `BumpSliceMut<'a, T>::new()` from some `&'a[T]` and so the
        //   call to `from_raw_parts` is valid.
        unsafe { std::slice::from_raw_parts(self.data, self.len) }
    }
}

impl<'a, T> AsMut<[T]> for BumpSliceMut<'a, T> {
    fn as_mut<'r>(&'r mut self) -> &'r mut [T] {
        // Safety:
        // - We assume 'a: 'r
        // - Assumes `self` has been constructed via
        //   `BumpSliceMut<'a, T>::new()` from some `&'a[T]` and so the
        //   call to `from_raw_parts_mut` is valid.
        unsafe { std::slice::from_raw_parts_mut(self.data, self.len) }
    }
}

impl<'arena, T: 'arena + Clone> Clone for BumpSliceMut<'arena, T> {
    /// Deep-clones the elements into a fresh allocation in the same arena.
    fn clone(&self) -> Self {
        let alloc = self.alloc();
        BumpSliceMut::new(alloc, alloc.alloc_slice_clone(self.as_ref()))
    }
}
/// A ReprC view of Vec<u8>. The underlying Vec is owned by this object.
#[repr(C)]
pub struct Bytes {
    pub data: *mut u8,
    pub len: usize,
    pub cap: usize,
}

impl From<Vec<u8>> for Bytes {
    /// Takes ownership of `bytes` without copying: the Vec's buffer is
    /// leaked here (via `ManuallyDrop`) and reconstituted - and freed -
    /// in `Drop`.
    fn from(bytes: Vec<u8>) -> Self {
        let mut leaked_bytes = std::mem::ManuallyDrop::new(bytes);
        Self {
            data: leaked_bytes.as_mut_ptr(),
            len: leaked_bytes.len(),
            cap: leaked_bytes.capacity(),
        }
    }
}

impl Bytes {
    /// View the bytes as a `&[u8]`.
    ///
    /// # Safety
    /// `self` must have been constructed from a valid `Vec<u8>` (e.g. via
    /// `From<Vec<u8>>`) and not yet dropped, and the returned slice must
    /// not outlive `self`.
    pub unsafe fn as_slice(&self) -> &[u8] {
        std::slice::from_raw_parts(self.data, self.len)
    }
}

impl std::ops::Drop for Bytes {
    fn drop(&mut self) {
        // Rebuild the original Vec so its buffer is deallocated exactly once.
        let _ = unsafe { Vec::from_raw_parts(self.data, self.len, self.cap) };
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_00() {
let foo = Just(2);
match foo {
Just(i) => assert_eq!(i, 2),
Nothing => {}
}
}
#[test]
fn test_01() {
let Pair(u, v) = Pair::from((2, "foo"));
assert_eq!(u, 2);
assert_eq!(v, "foo")
}
#[test]
fn test_02() {
let alloc: bumpalo::Bump = bumpalo::Bump::new();
let mut buf = bumpalo::vec![in &alloc; 1, 2, 3];
let _s = BumpSliceMut::new(&alloc, buf.as_mut_slice());
}
#[test]
fn test_03() {
let alloc: bumpalo::Bump = bumpalo::Bump::new();
let data = bumpalo::vec![in &alloc; 1, 2, 3].into_bump_slice();
let s = Slice::new(data);
let t = Slice::new(data);
assert_eq!(s, t)
}
#[test]
fn test_04() {
let Triple(u, v, w) = Triple::from((2, "foo", 1.0e-2));
assert_eq!(u, 2);
assert_eq!(v, "foo");
assert_eq!(w, 1.0e-2);
}
} |
C/C++ | hhvm/hphp/hack/src/utils/ffi/ffi_extra.h | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#pragma once
// Forward declare bumpalo::Bump
struct Bump; |
Rust | hhvm/hphp/hack/src/utils/ffi/ffi_ffi_cbindgen.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
/// This definition exists for ffi_cbindgen C++ header generation. Any
/// attempt to call this function will result in an intentional
/// unresolved symbol at link time.
//
// NOTE(review): presumably listing these generic instantiations as
// parameters forces cbindgen to emit concrete declarations for each of
// them - confirm against the header-generation build rules.
#[no_mangle]
pub extern "C" fn no_call_compile_only_USED_TYPES_ffi<'arena>(
    _: Str<'arena>,
    _: Maybe<i32>,
    _: Pair<i32, i32>,
    _: Triple<i32, i32, i32>,
    _: Quadruple<i32, i32, i32, i32>,
    _: BumpSliceMut<'arena, i32>,
) {
    unimplemented!()
}
Rust | hhvm/hphp/hack/src/utils/ffi/lib.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
//! There are basically two kinds of types can't be made `repr(C)`:
//! * builtin types making no guarantees about binary layout (e.g.
//! wide pointers, tuples)
//! * types with definitions that are beyond our reach to annotate
//! with `repr(C)` (e.g. standard or vendored types).
//!
//! The types in this crate are C friendly substitutions for a
//! handful of these types.
#![cfg_attr(UNSTABLE_DEBUG_SLICE, feature(min_specialization))]
pub mod ffi;
pub use crate::ffi::BumpSliceMut;
pub use crate::ffi::Bytes;
pub use crate::ffi::Maybe;
pub use crate::ffi::Maybe::*;
pub use crate::ffi::Pair;
pub use crate::ffi::Quadruple;
pub use crate::ffi::Slice;
pub use crate::ffi::Str;
pub use crate::ffi::Triple; |
TOML | hhvm/hphp/hack/src/utils/ffi_cbindgen/Cargo.toml | # @generated by autocargo
[package]
name = "ffi_cbindgen"
version = "0.0.0"
edition = "2021"
[[bin]]
name = "ffi_cbindgen"
path = "ffi_cbindgen.rs"
[dependencies]
anyhow = "1.0.71"
cbindgen = "0.22.0"
clap = { version = "3.2.25", features = ["derive", "env", "regex", "unicode", "wrap_help"] } |
Rust | hhvm/hphp/hack/src/utils/ffi_cbindgen/ffi_cbindgen.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::PathBuf;
use anyhow::Result;
use cbindgen::Builder;
use cbindgen::Config;
use cbindgen::EnumConfig;
use cbindgen::Language;
use cbindgen::MacroExpansionConfig;
use cbindgen::RenameRule;
use clap::Parser;
#[derive(Debug, Parser)]
#[clap(
    name = "ffi_cbindgen",
    about = r#"
Generate a cbindgen style C++ header for a list of .rs sources.

Example invocation:

  $ ffi_cbindgen --header foo-bar.h \
       --namespaces HPHP,hackc \
       --includes hphp/hack/src/baz/baz.h \
    hphp/hack/src/foo/foo.rs \
    hphp/hack/src/bar/bar.rs
"#
)]
struct Opt {
    /// Input files
    srcs: Vec<PathBuf>,

    /// The header file to write
    #[clap(long = "header")]
    header: PathBuf,

    /// Any namespaces to wrap the generated code in
    // Comma-separated on the command line (use_value_delimiter).
    #[clap(long = "namespaces", use_value_delimiter = true)]
    namespaces: Vec<String>,

    /// Any headers to add to the top of the header
    // Comma-separated on the command line (use_value_delimiter).
    #[clap(long = "includes", use_value_delimiter = true)]
    includes: Vec<String>,
}
/// Builds the cbindgen `Builder` from the command-line options: C++ output,
/// bitflags macro expansion, variant field names left untouched, a license
/// header, an autogen warning, and `#pragma once`; then registers every
/// source, every non-empty include, and the namespace chain (first entry is
/// the root namespace, the rest are nested).
fn builder(opts: &Opt) -> Builder {
    let mut builder = Builder::new()
        .with_config(Config {
            language: Language::Cxx,
            macro_expansion: MacroExpansionConfig { bitflags: true },
            enumeration: EnumConfig {
                rename_variant_name_fields: RenameRule::None,
                ..Default::default()
            },
            ..Default::default()
        })
        .with_header(
            "\
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the \"hack\" directory of this source tree.
",
        )
        .with_autogen_warning("// Warning, this file is autogenerated. Don't modify it manually!")
        .with_pragma_once(true);
    for src in opts.srcs.iter() {
        builder = builder.with_src(src)
    }
    // Empty strings can show up from trailing commas on the CLI; skip them.
    for inc in opts.includes.iter().filter(|f| !f.is_empty()) {
        builder = builder.with_include(inc)
    }
    match &opts.namespaces[..] {
        [namespace, namespaces @ ..] => builder
            .with_namespace(namespace)
            .with_namespaces(namespaces),
        [] => builder,
    }
}
fn main() -> Result<()> {
let opts = Opt::parse();
let header = &opts.header;
builder(&opts).generate()?.write_to_file(header);
Ok(())
} |
TOML | hhvm/hphp/hack/src/utils/files_to_ignore/Cargo.toml | # @generated by autocargo
[package]
name = "files_to_ignore"
version = "0.0.0"
edition = "2021"
[lib]
path = "../files_to_ignore.rs"
[dependencies]
regex = "1.9.2" |
OCaml | hhvm/hphp/hack/src/utils/file_content/file_content.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type position = {
line: int;
(* 1-based *)
column: int; (* 1-based *)
}
type range = {
st: position;
ed: position;
}
type text_edit = {
range: range option;
text: string;
}
(* UTF-8 encoding character lengths.
*
* NOTE: at the moment, edit commands are the only place where we count
* UTF-8 encoded characters as opposed to ASCII bytes - in all of the other
* places (column numbers in errors, positions in IDE commands) we still use the
* latter.
*
* We make an exception here because that's the way Nuclide counts characters,
* and the consequences of mishandling it are much more dire than in other
* places - we'll not only fail the current single request, but diverge
* the synchronized state forever.
*)
(* [get_char_length c] returns the byte length (1-4) of the UTF-8 encoded
   character whose leading byte is [c], determined from the byte's high
   bits. Raises [Failure] if [c] is not a valid UTF-8 leading byte (e.g. a
   continuation byte of the form 0b10xxxxxx). *)
let get_char_length c =
  let c = Char.to_int c in
  if c lsr 7 = 0b0 then
    (* 0xxxxxxx: 7-bit ASCII *)
    1
  else if c lsr 5 = 0b110 then
    (* 110xxxxx: 2-byte sequence *)
    2
  else if c lsr 4 = 0b1110 then
    (* 1110xxxx: 3-byte sequence *)
    3
  else if c lsr 3 = 0b11110 then
    (* 11110xxx: 4-byte sequence *)
    4
  else
    raise (Failure (Printf.sprintf "Invalid UTF-8 leading byte: %d" c))

(* Whether position [t] is exactly at [line]/[column] (both 1-based). *)
let is_target t line column = t.line = line && t.column = column

(* [get_char content offset] returns the byte at [offset], treating the
   position one past the end of [content] as a sentinel newline. Offsets
   strictly beyond that still raise via the out-of-bounds string access. *)
let get_char content offset =
  (* sentinel newline to make things easier *)
  if offset = String.length content then
    '\n'
  else
    content.[offset]
(* Scans [content] forward from [line]/[column]/[offset] (UTF-8 aware via
   [get_char_length]), recording the byte offset of each queried position
   as it is reached. [queries] must be in document order (fst at or before
   snd). Terminates when both offsets are found; if a query position never
   occurs, the scan eventually raises from an out-of-bounds [get_char]. *)
let rec get_offsets content queries line column offset acc =
  match acc with
  | (Some _, Some _) -> acc
  | (None, r2) when is_target (fst queries) line column ->
    get_offsets content queries line column offset (Some offset, r2)
  | ((Some _ as r1), None) when is_target (snd queries) line column ->
    get_offsets content queries line column offset (r1, Some offset)
  | acc ->
    let (line, column, offset) =
      match get_char content offset with
      | '\n' -> (line + 1, 1, offset + 1)
      | c -> (line, column + 1, offset + get_char_length c)
    in
    get_offsets content queries line column offset acc

(* Raises [Failure] describing the out-of-range position [p]. *)
let invalid_position p =
  raise
    (Failure
       (Printf.sprintf
          "Invalid position: {line: %d; column: %d}"
          p.line
          p.column))

(* this returns 0-based offsets *)
let get_offsets (content : string) (queries : position * position) : int * int =
  match get_offsets content queries 1 1 0 (None, None) with
  | (Some r1, Some r2) -> (r1, r2)
  | (None, _) -> invalid_position (fst queries)
  | (_, None) -> invalid_position (snd queries)

(* This returns a 0-based offset. If you need to get two offsets, use
   `get_offsets` instead. *)
let get_offset (content : string) (position : position) : int =
  fst (get_offsets content (position, position))
(* This takes 0-based offsets and returns 1-based positions. *)
(* It gives the position of the character *immediately after* this offset, *)
(* e.g. "offset_to_position s 0" gives the 1-based position {line=1,col=1}. *)
(* It sounds confusing but is natural when you work with half-open ranges! *)
(* It is okay to ask for the position of the offset of the end of the file. *)
(* In case of multi-byte characters, if you give an offset inside a character,*)
(* it still gives the position immediately after. *)
let offset_to_position (content : string) (offset : int) : position =
  (* Scan from the start of [content], advancing one UTF-8 character at a
     time, until the scan index reaches or passes [offset]. *)
  let rec helper ~(line : int) ~(column : int) ~(index : int) =
    if index >= offset then
      { line; column }
    else
      let c = get_char content index in
      let clen = get_char_length c in
      if Char.equal c '\n' then
        helper ~line:(line + 1) ~column:1 ~index:(index + clen)
      else
        helper ~line ~column:(column + 1) ~index:(index + clen)
  in
  if offset > String.length content then
    raise (Failure (Printf.sprintf "Invalid offset: %d" offset))
  else
    helper ~line:1 ~column:1 ~index:0
(* Applies a single [text_edit] to [content]: replaces the byte range
   denoted by [range] (offsets computed via [get_offsets]) with [text].
   A missing range replaces the entire content. *)
let apply_edit content { range; text } =
  match range with
  | None -> text
  | Some { st; ed } ->
    let (start_offset, end_offset) = get_offsets content (st, ed) in
    let prefix = Str.string_before content start_offset in
    let suffix = Str.string_after content end_offset in
    prefix ^ text ^ suffix

(* Appends a human-readable description of [edit] to buffer [b]; used to
   build the diagnostic message in [edit_file]. *)
let print_edit b edit =
  let range =
    match edit.range with
    | None -> "None"
    | Some range ->
      Printf.sprintf
        "%d:%d - %d:%d"
        range.st.line
        range.st.column
        range.ed.line
        range.ed.column
  in
  Printf.bprintf b "range = %s\n text = \n%s\n" range edit.text

(* Applies [edits] left-to-right; each edit's positions are interpreted
   relative to the result of the previous edit. On any exception (e.g. an
   out-of-range position) returns [Error] carrying a diagnostic message
   (original content plus all edits) and the wrapped exception. *)
let edit_file content (edits : text_edit list) :
    (string, string * Exception.t) result =
  try Ok (List.fold ~init:content ~f:apply_edit edits) with
  | exn ->
    let e = Exception.wrap exn in
    let b = Buffer.create 1024 in
    Printf.bprintf b "Invalid edit: %s\n" (Exception.get_ctor_string e);
    Printf.bprintf b "Original content:\n%s\n" content;
    Printf.bprintf b "Edits:\n";
    List.iter edits ~f:(print_edit b);
    Error (Buffer.contents b, e)
let edit_file_unsafe fc edits =
match edit_file fc edits with
| Ok r -> r
| Error (e, _stack) ->
Printf.eprintf "%s" e;
failwith e |
OCaml Interface | hhvm/hphp/hack/src/utils/file_content/file_content.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type position = {
line: int;
(* 1-based *)
column: int; (* 1-based *)
}
type range = {
st: position;
ed: position;
}
type text_edit = {
range: range option;
text: string;
}
val edit_file :
string -> text_edit list -> (string, string * Exception.t) result
val edit_file_unsafe : string -> text_edit list -> string
(* NOTE: If you need two offsets, use `get_offsets` below instead. *)
val get_offset : string -> position -> int
(* May raise [Failure] if a queried position is out of bounds. *)
val get_offsets : string -> position * position -> int * int
val offset_to_position : string -> int -> position
val get_char : string -> int -> char |
OCaml | hhvm/hphp/hack/src/utils/file_url/file_url.ml | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
(* File urls: https://tools.ietf.org/html/rfc8089 *)
(* Related definitions: https://tools.ietf.org/html/rfc3986 *)
(* Notes on UNC file urls and edge-cases: *)
(* https://foswiki.org/Support/Faq72 *)
(* https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/ *)
(* https://msdn.microsoft.com/en-us/library/windows/desktop/ff819129(v=vs.85).aspx *)
(* Matches a %-escape: '%' followed by up to two hex digits. The digits are
   optional so that malformed escapes are captured and can be rejected with
   a clear error in [decode] rather than silently skipped. *)
let percent_re = Str.regexp {|%\([0-9a-fA-F]?[0-9a-fA-F]?\)|}

let slash_re = Str.regexp {|/|} (* matches a single slash *)

let dos_url_re =
  (* e.g. c:\ or z|/ *)
  Str.regexp {|^\([a-zA-Z]\)[:|]\([/\].*\)$|}

(* Matches file://<host>/<path> with optional trailing query/fragment. *)
let url_re = Str.regexp {|^file://\([^/?#]*\)/\([^?#]*\)\(.*\)$|}

let dos_re =
  (* e.g. c:\ or z:/ *)
  Str.regexp {|^\([a-zA-Z]\):\([/\].*\)$|}

(* Characters allowed un-escaped in the path component of a file URL. *)
let path_safe_chars =
  "/-._~!$&'()*+,;=@0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
(**
 * Unescapes %-escapes like "foo%4Abar", and normalizes path separators to `\`
 * on Windows.
 *
 * Throws if there are incorrect %-escapes (not followed by two hex digits)
 * and for %-escapes that are outside 7-bit printable ascii.
 *)
let decode s =
  let subst _ =
    (* [percent_re] deliberately matches 0-2 hex digits, so short escapes
       land here and are rejected with a clear error. *)
    let hex = Str.matched_group 1 s in
    if String.length hex <> 2 then failwith ("incorrect %-escape in " ^ s);
    let code = int_of_string ("0x" ^ hex) in
    if code < 32 || code > 127 then failwith ("only 7bit ascii allowed in " ^ s);
    String.make 1 (Char.of_int_exn code)
  in
  let s = Str.global_substitute percent_re subst s in
  if Sys.win32 then
    Str.global_replace slash_re {|\\|} s
  else
    s
(**
 * Escapes characters that are not allowed in URIs using %-escaping, and
 * converts path separators to `/`.
 *
 * Throws if asked to escape something outside 7-bit printable ascii.
 *)
let encode ~(safe_chars : string) (s : string) : string =
  (* 2x is only a starting capacity hint; [Buffer] grows as needed even
     though a %XX escape is 3 bytes. *)
  let buf = Buffer.create (String.length s * 2) in
  let f (c : char) : unit =
    if Sys.win32 && Char.equal c '\\' then
      Buffer.add_char buf '/'
    else if String.contains safe_chars c then
      Buffer.add_char buf c
    else
      let code = Char.to_int c in
      if code < 32 || code > 127 then
        failwith ("only 7bit ascii allowed in " ^ s);
      Buffer.add_string buf (Printf.sprintf "%%%02X" code)
  in
  String.iter ~f s;
  Buffer.contents buf
(**
 * Turns a file url into an absolute path.
 *
 * - It will turn a unix-style url "file://localhost/path" into "/path", and will
 *   turn a dos-style url "file://localhost/C|/path" into "C:/path".
 * - It rejects unc urls that use five-slash encoding "file://///server/path".
 * - The host can be either "localhost" or empty.
 * - It will unescape %-encoding, but throws if that was used to encode something
 *   outside 7-bit ascii.
 * - It doesn't attempt to validate the escaping of the url:
 *   doesn't complain if the uri has %-encoding where it wasn't needed, nor if
 *   the uri fails to %-encode where it should.
 *)
let parse uri =
  if not (Str.string_match url_re uri 0) then
    failwith ("not a file url - " ^ uri);
  let host = Str.matched_group 1 uri in
  let path = Str.matched_group 2 uri in
  let query_fragment = Str.matched_group 3 uri in
  let path = decode path in
  let ( <> ) = String.( <> ) in
  (* this uses regexp internally *)
  if host <> "" && host <> "localhost" then failwith ("not localhost - " ^ uri);
  if query_fragment <> "" then
    failwith ("file url can't have query/fragment - " ^ uri);
  if Str.string_match dos_url_re path 0 then
    let drive_letter = Str.matched_group 1 path in
    let rest = Str.matched_group 2 path in
    drive_letter ^ ":" ^ rest
  else if String.length path > 0 && Char.equal path.[0] '/' then
    (* A slash at the start of the captured path means the uri had extra
       slashes after "file://", i.e. a UNC-style url. *)
    failwith ("UNC file urls not supported - " ^ uri)
  else
    "/" ^ path
(**
* Turns an absolute path into a file uri
*
* The absolute path must be either unix-style absolute path "/path" or
* dos-style "c:\path" (in which case it treats both forward- and back-slashes
* as path separators, and converts both to forward-slash, as per URL rules).
*
* It rejects unc-style paths "\\?\c:\path" or "\\server\share\path".
* This function merely guarantees that valid absolute paths will give valid
* file URLs; it doesn't also validate that what's been given is a perfectly
* well-formed path. For instance, if the path has ? or * or : characters,
* it will accept them and render them as %3F and * and :. This function also
* doesn't do escaping - e.g. if given "he\\o" it treats this as a filename
* with two backslashes rather than one. It's therefore impossible to create
* a file url which has forward slashes as part of a file/directory-name; all
* slashes will be interpreted as path separators.
*)
let create path =
let absolute_path =
if Str.string_match dos_re path 0 then
let drive_letter = Str.matched_group 1 path in
let rest = Str.matched_group 2 path in
Printf.sprintf "%s:%s" drive_letter rest
else if String.is_prefix path ~prefix:"/" then
String_utils.lstrip path "/"
else
failwith ("Not an absolute filepath - " ^ path)
in
"file:///" ^ encode ~safe_chars:path_safe_chars absolute_path |
OCaml Interface | hhvm/hphp/hack/src/utils/file_url/file_url.mli | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val parse : string -> string
val create : string -> string |
TOML | hhvm/hphp/hack/src/utils/find_utils/Cargo.toml | # @generated by autocargo
[package]
name = "find_utils"
version = "0.0.0"
edition = "2021"
[lib]
path = "../find_utils.rs"
[dependencies]
anyhow = "1.0.71"
files_to_ignore = { version = "0.0.0", path = "../files_to_ignore" }
jwalk = "0.6"
relative_path = { version = "0.0.0", path = "../rust/relative_path" }
[dev-dependencies]
pretty_assertions = { version = "1.2", features = ["alloc"], default-features = false } |
hhvm/hphp/hack/src/utils/full_fidelity_refactor/dune | (library
(name full_fidelity_refactor)
(wrapped true)
(flags
(:standard -linkall))
(libraries
full_fidelity
server_command_types
relative_path
)
(preprocess
(pps visitors.ppx ppx_deriving.std ppx_sexp_conv ppx_hash))) |
|
OCaml | hhvm/hphp/hack/src/utils/full_fidelity_refactor/full_fidelity_refactor.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Syn = Full_fidelity_editable_positioned_syntax
module SyntaxKind = Full_fidelity_syntax_kind
module Trivia = Full_fidelity_positioned_trivia
module TriviaKind = Full_fidelity_trivia_kind
module RT = ServerRenameTypes
(* [find_width_hh_fixme token] returns the width (in characters) of the run
   of leading trivia on [token] that reaches back to the farthest
   FixMe/IgnoreError trivium. The leading trivia are scanned from the one
   nearest the token backwards: [acc] accumulates total width scanned,
   [last] remembers [acc] at the most recent fixme seen, and the scan stops
   early at a single-line or delimited comment (fixmes separated from the
   token by a comment are not counted). Returns 0 for non-Original tokens
   or when no fixme is present. *)
let find_width_hh_fixme token =
  let rec find_width_hh_fixme_rec acc last sd =
    match sd with
    | sde :: sd ->
      (match Trivia.kind sde with
      | TriviaKind.FixMe
      | TriviaKind.IgnoreError ->
        let new_width = Trivia.width sde + acc in
        find_width_hh_fixme_rec new_width new_width sd
      | TriviaKind.SingleLineComment
      | TriviaKind.DelimitedComment ->
        last
      | _ -> find_width_hh_fixme_rec (Trivia.width sde + acc) last sd)
    | [] -> last
  in
  match token.Syn.Token.token_data with
  | Syn.Token.Original sd ->
    find_width_hh_fixme_rec 0 0 (List.rev sd.Syn.SourceData.leading)
  | _ -> 0
(* Builds an [Insert] patch that places [text] immediately before
   [keyword]'s token, shifted back over any leading HH_FIXME /
   HH_IGNORE_ERROR trivia (as measured by [find_width_hh_fixme]), so the
   inserted text ends up before the fixme rather than between the fixme and
   the token it suppresses. Returns [None] for synthetic tokens or
   non-token syntax, which carry no source position. *)
let insert_before_leading_fixme file ~keyword ~text =
  match Syn.syntax keyword with
  | Syn.Token token ->
    Syn.Value.(
      let value = from_token token in
      (match value with
      | Positioned source_data ->
        let source_text = Syn.SourceData.source_text source_data in
        let start_offset = Syn.SourceData.start_offset source_data in
        let end_offset = Syn.SourceData.end_offset source_data in
        let leading_text_width = find_width_hh_fixme token in
        (* Move the insertion point back over the fixme trivia. *)
        let pos =
          Syn.SourceText.relative_pos
            file
            source_text
            (start_offset - leading_text_width)
            end_offset
        in
        Some RT.(Insert { pos = Pos.to_absolute pos; text })
      | Synthetic -> None))
  | _ -> None
(** [insert_attribute file ~attribute ~enclosing_node ~attributes_node]
    builds a patch that adds [attribute] to a declaration.

    - If the declaration already has an attribute list, [attribute] is
      prepended to it — unless it is already present, in which case the
      result is [None].
    - Otherwise a fresh [<<attribute>>] specification is inserted before
      the declaration's first modifier or keyword, taking care to land
      before any leading HH_FIXME trivia.

    Returns [None] when no source position is available for the chosen
    insertion point. *)
let insert_attribute file ~attribute ~enclosing_node ~attributes_node =
  let open Syn in
  match syntax attributes_node with
  | OldAttributeSpecification { old_attribute_specification_attributes; _ } ->
    (* if other attributes are already present, add to that list *)
    let attributes =
      syntax_node_to_list old_attribute_specification_attributes
    in
    if
      List.exists
        ~f:(fun node ->
          match syntax node with
          | ListItem { list_item; _ } -> String.equal (text list_item) attribute
          | _ -> false)
        attributes
    then
      None
    else
      Option.Monad_infix.(
        position_exclusive file old_attribute_specification_attributes
        >>| fun pos ->
        RT.(Insert { pos = Pos.to_absolute pos; text = attribute ^ ", " }))
  | _ ->
    (* there are no other attributes, but we must distinguish
     * if there is a leading string (eg HHFIXME) or not *)
    let default_patch inline node =
      Option.Monad_infix.(
        position_exclusive file node >>| fun pos ->
        RT.(
          Insert
            {
              pos = Pos.to_absolute pos;
              text =
                (if inline then
                  " <<" ^ attribute ^ ">> "
                else
                  "<<" ^ attribute ^ ">>\n");
            }))
    in
    (* Prefer inserting before the FIXME; fall back to inserting directly
       before [keyword]. *)
    let insert_attribute_before_leading_fixme keyword =
      Option.first_some
        (insert_before_leading_fixme
           file
           ~keyword
           ~text:("<<" ^ attribute ^ ">>\n"))
        (default_patch false keyword)
    in
    (match enclosing_node with
    | Some enclosing_node ->
      (* if there is a leading string (eg. HHFIXME) and no other attributes,
       * put the attribute before the leading string *)
      (match syntax enclosing_node with
      | FunctionDeclaration { function_declaration_header; _ } ->
        (match syntax function_declaration_header with
        | FunctionDeclarationHeader { function_modifiers; function_keyword; _ }
          ->
          (match Syntax.kind function_modifiers with
          | SyntaxKind.Missing ->
            (* there are no modifiers, so analyse the "function" keyword *)
            insert_attribute_before_leading_fixme function_keyword
          | _ ->
            let modifiers = syntax_node_to_list function_modifiers in
            insert_attribute_before_leading_fixme (List.hd_exn modifiers))
        | _ -> default_patch false function_declaration_header)
      | ClassishDeclaration
          { classish_modifiers; classish_xhp; classish_keyword; _ } ->
        (match Syntax.kind classish_modifiers with
        | SyntaxKind.Missing ->
          (match Syntax.kind classish_xhp with
          | SyntaxKind.Missing ->
            (* there are no modifiers, so analyse the "class" keyword *)
            insert_attribute_before_leading_fixme classish_keyword
          | _ -> insert_attribute_before_leading_fixme classish_xhp)
        | _ ->
          let modifiers = syntax_node_to_list classish_modifiers in
          insert_attribute_before_leading_fixme (List.hd_exn modifiers))
      | _ -> default_patch false enclosing_node)
    | None -> default_patch true attributes_node)
OCaml Interface | hhvm/hphp/hack/src/utils/full_fidelity_refactor/full_fidelity_refactor.mli | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(*
* Utilities for generating `ServerRenameTypes.patch`s
* from `full_fidelity` parse trees.
*)
(** [insert_attribute file ~attribute ~enclosing_node ~attributes_node]
    builds a patch adding [attribute] to a declaration: appended to an
    existing attribute list when one is present, otherwise inserted as a
    new [<<...>>] specification before the declaration's first
    modifier/keyword (and before any leading HH_FIXME trivia).  Returns
    [None] when the attribute is already present or no source position is
    available. *)
val insert_attribute :
  Relative_path.t ->
  attribute:string ->
  enclosing_node:Full_fidelity_editable_positioned_syntax.t option ->
  attributes_node:Full_fidelity_editable_positioned_syntax.t ->
  ServerRenameTypes.patch option

(** [insert_before_leading_fixme file ~keyword ~text] builds a patch that
    inserts [text] before any leading HH_FIXME trivia attached to
    [keyword]; [None] when [keyword] carries no source position. *)
val insert_before_leading_fixme :
  Relative_path.t ->
  keyword:Full_fidelity_editable_positioned_syntax.t ->
  text:string ->
  ServerRenameTypes.patch option
Rust | hhvm/hphp/hack/src/utils/hack_macros/ast_writer.rs | // Copyright (c) Facebook, Inc. and its affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::HashMap;
use std::fmt;
use std::rc::Rc;
use proc_macro2::Delimiter;
use proc_macro2::Group;
use proc_macro2::Ident;
use proc_macro2::Literal;
use proc_macro2::Punct;
use proc_macro2::Spacing;
use proc_macro2::Span;
use proc_macro2::TokenStream;
use proc_macro2::TokenTree;
use quote::quote;
use quote::quote_spanned;
use quote::ToTokens;
use quote::TokenStreamExt;
use serde::ser;
use serde::ser::Serialize;
use thiserror::Error;
type Result<T = AstValue, E = AstError> = std::result::Result<T, E>;
/// Given an AST write out a TokenStream that builds it. Processes variable
/// replacements in the AST while traversing.
///
/// We use Serialize to drive the conversion. This causes a couple of oddities:
///
/// 1. Box<T> is transparent to serde - `F(Box<T>)` is serialized as `F(T)`
/// so we need to synthetically add `Box` where we know it's needed.
///
/// 2. We can't "look down" at a tree during serialization (we only know
/// something is a 'T' and can't downcast_ref it). So if a node is a possible
/// substitution ref we need to keep it in a form that preserves that
/// information and pass it up until we determine that it is or isn't
/// actually part of a substitution.
///
pub(crate) fn write_ast<T: Serialize + fmt::Debug>(
    exports: syn::Path,
    span: Span,
    replacements: HashMap<String, Replacement>,
    pos_src: TokenStream,
    pos_id: TokenStream,
    t: T,
) -> Result<TokenStream, AstError> {
    // Shared context (exports path, pos identifier, replacement table)
    // consulted by every nested AstWriter during serialization.
    let state = Rc::new(AstState::new(exports, pos_id, replacements));
    let exports = &state.exports;
    // Serde drives the traversal: the result is a token stream that, when
    // compiled, rebuilds `t` at runtime with replacements spliced in.
    let t = t.serialize(AstWriter::new(state.clone()))?.into_tokens()?;
    // If this fires it means that we tried to swap out a replacement
    // but the emitter didn't convert it properly.
    debug_assert!(!t.to_string().contains("__hack_repl_"));
    let pos_id = &state.pos;
    // We assign to a temp and then return so that we can silence some
    // clippy lints (you can't use an attribute on an expression).
    let tmp = Ident::new("__hygienic_tmp", span);
    Ok(quote!({
        use #exports::ast::*;
        let #pos_id: Pos = #pos_src;
        #[allow(clippy::redundant_clone)]
        let #tmp = #t;
        #tmp
    }))
}
/// The identifier, as a token stream, that generated code uses to refer to
/// the user-supplied `Pos` binding; `span` controls its hygiene.
pub(crate) fn hygienic_pos(span: Span) -> TokenStream {
    let pos = Ident::new("__hygienic_pos", span);
    pos.into_token_stream()
}
/// Describes replacements that should occur while we build the AST.
// In every variant, `pat` is the caller's expression providing the value to
// splice in, `span` is where it appeared (used so errors and generated code
// point at the caller), and `pos`, where present, is the position override
// tokens for the constructed node.
#[derive(Debug, Clone)]
pub(crate) enum Replacement {
    // #foo
    Simple {
        pat: TokenStream,
        span: Span,
    },
    // #{as_expr(foo)}
    AsExpr {
        pat: TokenStream,
        span: Span,
    },
    // #{id(foo)}
    Id {
        pat: TokenStream,
        pos: TokenStream,
        span: Span,
    },
    // #{lvar(foo)}
    Lvar {
        pat: TokenStream,
        pos: TokenStream,
        span: Span,
    },
    // #{foo*}
    Repeat {
        pat: TokenStream,
        span: Span,
    },
    // #{str(foo)}
    Str {
        pat: TokenStream,
        pos: TokenStream,
        span: Span,
    },
}
impl Replacement {
    /// Render this replacement as tokens that evaluate to an `Expr`.
    /// `Repeat` has no single-`Expr` form and yields an error instead.
    fn into_expr(self) -> Result<TokenStream> {
        Ok(match self {
            Replacement::Simple { pat, span } => {
                // Variable should already be an Expr.
                let tmp = Ident::new("tmp", span);
                quote_spanned!(span=> {
                    let #tmp: Expr = #pat;
                    #tmp
                })
            }
            Replacement::AsExpr { .. } => todo!(),
            Replacement::Id {
                ref pat,
                ref pos,
                span,
            } => {
                // Variable should be a String and we want to build an
                // Expr_::Ident from it.
                let tmp = Ident::new("tmp", span);
                quote_spanned!(span=> {
                    let #tmp: String = #pat;
                    Expr((), #pos.clone(), Expr_::Id(Box::new(Id(#pos.clone(), #tmp))))
                })
            }
            Replacement::Lvar {
                ref pat,
                ref pos,
                span,
            } => {
                // Variable should be a LocalId and we want to build an
                // Expr_::Lvar from it.
                let tmp = Ident::new("tmp", span);
                quote_spanned!(span=> {
                    let #tmp: LocalId = #pat;
                    Expr((), #pos.clone(), Expr_::Lvar(Box::new(Lid(#pos.clone(), #tmp))))
                })
            }
            Replacement::Str {
                ref pat,
                ref pos,
                span,
            } => {
                // Variable should be a String and we want to build an
                // Expr_::String from it.
                let tmp = Ident::new("tmp", span);
                quote_spanned!(span=> {
                    let #tmp: String = #pat;
                    Expr((), #pos.clone(), Expr_::String(#tmp.into()))
                })
            }
            Replacement::Repeat { span, .. } => {
                // This can't be turned into a simple Expr.
                return Err(
                    syn::Error::new(span, "Unable to expand repeat in Expr position").into(),
                );
            }
        })
    }
}
/// Intermediate result passed up the serializer: either finished tokens or
/// an in-flight replacement whose final shape depends on enclosing nodes.
#[derive(Clone, Debug)]
enum AstValue {
    // This node produced raw tokens.
    Tokens(TokenStream),
    // This node represents a replacement state.
    Replace(ReplaceState),
}
// Because of the way serialization works we need to view replacements as a
// little state machine where we use the state to determine if we have a
// replacement or not.
//
// For example - for the expression:
//
// #a + $b
//
// We get the AST:
//
// Expr((), Pos, Binop(Binop { bop: Plus, lhs: Expr((), Pos, Lvar(Lid(Pos, (0, "$__hack_repl_0")))), rhs: Expr((), Pos, Lvar(Lid(Pos, (0, "$b")))) }))
//
// As we serialize we see:
//
// serialize_str("$__hack_repl_0")
//
// so we return a `Str` indicating that we've seen some replacement. Later when
// we get the tuple:
//
// serialize_tuple((0, Str))
//
// we turn that into a Tuple. This continues until we either see:
//
// serialize_tuple_struct("Expr", ((), _, Lvar))
//
// and we replace it with the desired tokens or we unroll it back into the
// original token stream (other states are possible as well).
//
//
/// State machine tracking how far up the AST a pending replacement has
/// propagated; see the worked example in the long comment above.
#[derive(Clone, Debug)]
enum ReplaceState {
    /// AsExpr::AsV(Expr)
    AsV(Replacement),
    /// Expr(_, _, Lvar)
    Expr(Replacement),
    /// Stmt_::Expr(Expr)
    ExprStmt(Replacement),
    /// Lid(_, Tuple)
    Lid(Replacement),
    /// Expr_::Lvar(Lid)
    Lvar(Replacement),
    /// Stmt(_, ExprStmt)
    Stmt(Replacement),
    /// "$__hack_repl_0"
    Str(Replacement),
    /// "(_, Str)
    Tuple(Replacement),
    /// (_, Expr)
    Tuple2(Box<(AstValue, Replacement)>),
}
impl AstValue {
fn into_tokens(self) -> Result<TokenStream> {
Ok(match self {
AstValue::Tokens(s) => s,
AstValue::Replace(ReplaceState::AsV(repl)) => match repl {
Replacement::AsExpr { pat, span } => {
let tmp = Ident::new("tmp", Span::mixed_site());
quote_spanned!(span=> {
let #tmp: AsExpr = #pat;
#tmp
})
}
Replacement::Id { .. } => todo!(),
Replacement::Lvar { .. } => todo!(),
Replacement::Repeat { .. } => todo!(),
Replacement::Simple { pat, span } => {
// This is something like: `foreach ($x as <replacement>)`
// - they probably meant to replace the AsV expr rather than
// the binding.
let tmp = Ident::new("tmp", Span::mixed_site());
quote_spanned!(span=> {
let #tmp: Expr = #pat;
AsExpr::AsV(tmp)
})
}
Replacement::Str { .. } => todo!(),
},
AstValue::Replace(ReplaceState::Str(_)) => todo!(),
AstValue::Replace(ReplaceState::Tuple(_)) => todo!(),
AstValue::Replace(ReplaceState::Lid(repl)) => match repl {
Replacement::AsExpr { .. } => todo!(),
Replacement::Id { .. } => todo!(),
Replacement::Lvar { pat, pos, span } => {
let tmp = Ident::new("tmp", Span::mixed_site());
quote_spanned!(span=> {
let #tmp: LocalId = #pat;
Lid(#pos.clone(), #tmp)
})
}
Replacement::Repeat { .. } => todo!(),
Replacement::Simple { .. } => todo!(),
Replacement::Str { .. } => todo!(),
},
AstValue::Replace(ReplaceState::Lvar(_)) => todo!(),
AstValue::Replace(ReplaceState::Expr(repl)) => repl.into_expr()?,
AstValue::Replace(ReplaceState::ExprStmt(_)) => todo!(),
AstValue::Replace(ReplaceState::Stmt(repl)) => match repl {
Replacement::AsExpr { .. } => todo!(),
Replacement::Id { .. } => todo!(),
Replacement::Lvar { .. } => todo!(),
Replacement::Repeat { .. } => todo!(),
Replacement::Simple { pat, span } => {
let tmp = Ident::new("tmp", span);
quote_spanned!(span=> {
let #tmp: Stmt = #pat;
#tmp
})
}
Replacement::Str { .. } => todo!(),
},
AstValue::Replace(ReplaceState::Tuple2(_)) => todo!(),
})
}
}
/// Immutable context shared (via `Rc`) by every `AstWriter` spawned during
/// one `write_ast` call.
struct AstState {
    exports: syn::Path,
    pos: TokenStream,
    replacements: HashMap<String, Replacement>,
}
impl AstState {
    fn new(
        exports: syn::Path,
        pos: TokenStream,
        replacements: HashMap<String, Replacement>,
    ) -> Self {
        Self {
            exports,
            pos,
            replacements,
        }
    }
    /// Look up a replacement by its placeholder name
    /// (e.g. "$__hack_repl_0"); `None` for ordinary strings.
    fn lookup_replacement(&self, name: &str) -> Option<&Replacement> {
        self.replacements.get(name)
    }
}
/// The serde `Serializer` that converts AST nodes into token streams; a
/// fresh one is created (sharing `state`) for each nested node.
struct AstWriter {
    state: Rc<AstState>,
}
impl AstWriter {
    fn new(state: Rc<AstState>) -> Self {
        Self { state }
    }
}
impl ser::Serializer for AstWriter {
    type Ok = AstValue;
    type Error = AstError;
    type SerializeSeq = SerializeSeq;
    type SerializeTuple = SerializeTuple;
    type SerializeTupleStruct = SerializeTupleStruct;
    type SerializeTupleVariant = SerializeTupleVariant;
    type SerializeMap = SerializeMap;
    type SerializeStruct = SerializeStruct;
    type SerializeStructVariant = SerializeStructVariant;
    fn serialize_bool(self, value: bool) -> Result {
        let span = Span::call_site();
        let mut s = TokenStream::new();
        s.push_ident(span, if value { "true" } else { "false" });
        Ok(AstValue::Tokens(s))
    }
    // The scalar types below are left unimplemented: they do not appear to
    // occur in the AST nodes this writer is used on (only bool/i64/str do).
    fn serialize_i8(self, _: i8) -> Result {
        todo!()
    }
    fn serialize_i16(self, _: i16) -> Result {
        todo!()
    }
    fn serialize_i32(self, _: i32) -> Result {
        todo!()
    }
    fn serialize_i64(self, i: i64) -> Result {
        // Emits an `isize`-suffixed literal. NOTE(review): `i as isize`
        // would truncate on a 32-bit target — presumed fine for the values
        // appearing in ASTs; confirm if 32-bit builds matter.
        Ok(AstValue::Tokens(
            TokenTree::Literal(Literal::isize_suffixed(i as isize)).into(),
        ))
    }
    fn serialize_u8(self, _: u8) -> Result {
        todo!()
    }
    fn serialize_u16(self, _: u16) -> Result {
        todo!()
    }
    fn serialize_u32(self, _: u32) -> Result {
        todo!()
    }
    fn serialize_u64(self, _: u64) -> Result {
        todo!()
    }
    fn serialize_f32(self, _: f32) -> Result {
        todo!()
    }
    fn serialize_f64(self, _: f64) -> Result {
        todo!()
    }
    fn serialize_char(self, _: char) -> Result {
        todo!()
    }
    fn serialize_str(self, value: &str) -> Result {
        // A string may be a replacement placeholder ("$__hack_repl_N");
        // if so, start the replacement state machine instead of emitting
        // a literal.
        if let Some(repl) = self.state.lookup_replacement(value) {
            return Ok(AstValue::Replace(ReplaceState::Str(repl.clone())));
        }
        // Otherwise emit `"value".to_owned()`.
        let mut s = TokenStream::new();
        let span = Span::call_site();
        s.append(TokenTree::Literal(Literal::string(value)));
        s.push_dot();
        s.push_ident(span, "to_owned");
        s.push_paren_group(TokenStream::new());
        Ok(AstValue::Tokens(s))
    }
    fn serialize_bytes(self, _: &[u8]) -> Result {
        todo!()
    }
    fn serialize_none(self) -> Result {
        Ok(AstValue::Tokens(quote!(None)))
    }
    fn serialize_some<T: Serialize + ?Sized>(self, value: &T) -> Result {
        // Emit `Some(<inner>)`.
        let inner = value.serialize(AstWriter::new(self.state))?.into_tokens()?;
        let mut s = TokenStream::new();
        let span = Span::call_site();
        s.push_ident(span, "Some");
        s.push_paren_group(inner);
        Ok(AstValue::Tokens(s))
    }
    fn serialize_unit(self) -> Result {
        // Emit `()`.
        let mut s = TokenStream::new();
        s.push_paren_group(TokenStream::new());
        Ok(AstValue::Tokens(s))
    }
    fn serialize_unit_struct(self, _: &'static str) -> Result {
        todo!()
    }
    // Emit `Name::Variant` for data-less enum variants.
    fn serialize_unit_variant(
        self,
        name: &'static str,
        _index: u32,
        variant: &'static str,
    ) -> Result {
        let mut s = TokenStream::new();
        let span = Span::call_site();
        s.push_ident(span, name)_
        s.push_colon2();
        s.push_ident(span, variant);
        Ok(AstValue::Tokens(s))
    }
    fn serialize_newtype_struct<T: Serialize + ?Sized>(
        self,
        name: &'static str,
        value: &T,
    ) -> Result {
        if name == "Pos" {
            // Use the user-supplied `pos` instead of the original Pos (which
            // would be from our macro's AST parse).
            let pos = &self.state.pos;
            return Ok(AstValue::Tokens(quote!(#pos.clone())));
        }
        let value = value.serialize(AstWriter::new(self.state))?;
        let value = match (name, value) {
            // A replacement escaping as a whole statement block is not yet
            // supported.
            ("Block" | "FinallyBlock", AstValue::Replace(_)) => {
                todo!();
            }
            (_, value) => value.into_tokens()?,
        };
        // Emit `Name(<value>)`.
        let span = Span::call_site();
        let mut s = TokenStream::new();
        s.push_ident(span, name);
        s.push_paren_group(value);
        Ok(AstValue::Tokens(s))
    }
    fn serialize_newtype_variant<T: Serialize + ?Sized>(
        self,
        name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        value: &T,
    ) -> Result {
        let inner = value.serialize(AstWriter::new(self.state))?;
        // Promote pending replacements one level up the state machine when
        // the enclosing variant matches an expected wrapper.
        let inner = match (name, variant, inner) {
            ("AsExpr", "AsV", AstValue::Replace(ReplaceState::Expr(repl))) => {
                return Ok(AstValue::Replace(ReplaceState::AsV(repl)));
            }
            ("Expr_", "Lvar", AstValue::Replace(ReplaceState::Lid(repl))) => {
                return Ok(AstValue::Replace(ReplaceState::Lvar(repl)));
            }
            ("Stmt_", "Expr", AstValue::Replace(ReplaceState::Expr(repl))) => {
                return Ok(AstValue::Replace(ReplaceState::ExprStmt(repl)));
            }
            (_, _, inner) => inner,
        };
        let mut s = TokenStream::new();
        let span = Span::call_site();
        s.push_ident(span, name);
        s.push_colon2();
        s.push_ident(span, variant);
        let inner = inner.into_tokens()?;
        // Handle boxing: serde sees through Box, so re-add `Box::new(..)`
        // for the variants whose payload is boxed in the oxidized AST.
        let inner = match (name, variant) {
            ("Expr_", "Float")
            | ("Expr_", "Int")
            | ("Expr_", "List")
            | ("Expr_", "Shape")
            | ("Expr_", "String")
            | ("Expr_", "String2")
            | ("Expr_", "Tuple")
            | ("Stmt_", "Block") => inner,
            ("Expr_", _) | ("Stmt_", _) => box_stream(inner, span),
            _ => inner,
        };
        s.push_paren_group(inner);
        Ok(AstValue::Tokens(s))
    }
    fn serialize_seq(self, _: std::option::Option<usize>) -> Result<SerializeSeq> {
        Ok(SerializeSeq {
            seq: Vec::new(),
            state: self.state,
            has_repeat: false,
        })
    }
    fn serialize_tuple(self, _: usize) -> Result<SerializeTuple> {
        Ok(SerializeTuple {
            state: self.state,
            fields: Vec::new(),
        })
    }
    fn serialize_tuple_struct(
        self,
        name: &'static str,
        _length: usize,
    ) -> Result<SerializeTupleStruct> {
        Ok(SerializeTupleStruct {
            state: self.state,
            name,
            fields: Vec::new(),
        })
    }
    fn serialize_tuple_variant(
        self,
        name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        _len: usize,
    ) -> Result<SerializeTupleVariant> {
        Ok(SerializeTupleVariant {
            state: self.state,
            name,
            variant,
            fields: Vec::new(),
        })
    }
    fn serialize_map(self, _: std::option::Option<usize>) -> Result<SerializeMap> {
        todo!()
    }
    fn serialize_struct(self, name: &'static str, _len: usize) -> Result<SerializeStruct> {
        Ok(SerializeStruct {
            state: self.state,
            name,
            fields: Default::default(),
        })
    }
    fn serialize_struct_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _len: usize,
    ) -> Result<SerializeStructVariant> {
        Ok(SerializeStructVariant {})
    }
}
/// Map serialization is not needed for the AST nodes this writer handles;
/// all methods are left unimplemented.
struct SerializeMap {}
impl ser::SerializeMap for SerializeMap {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_key<T: Serialize + ?Sized>(&mut self, _: &T) -> Result<()> {
        todo!()
    }
    fn serialize_value<T: Serialize + ?Sized>(&mut self, _: &T) -> Result<()> {
        todo!()
    }
    fn end(self) -> Result {
        todo!()
    }
}
/// Builds `Name { field1: v1, field2: v2, ... }` token output for struct
/// nodes; fields are collected first and rendered in `end()`.
struct SerializeStruct {
    state: Rc<AstState>,
    name: &'static str,
    fields: Vec<(&'static str, AstValue)>,
}
impl ser::SerializeStruct for SerializeStruct {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_field<T: Serialize + ?Sized>(
        &mut self,
        name: &'static str,
        value: &T,
    ) -> Result<()> {
        let value = value.serialize(AstWriter::new(self.state.clone()))?;
        self.fields.push((name, value));
        Ok(())
    }
    fn end(self) -> Result {
        let span = Span::call_site();
        let mut inner = TokenStream::new();
        // Render `name: value` pairs, comma-separated.
        let mut it = self.fields.into_iter();
        if let Some((name, field)) = it.next() {
            inner.push_ident(span, name);
            inner.push_colon();
            inner.extend(field.into_tokens()?);
        }
        for (name, field) in it {
            inner.push_comma();
            inner.push_ident(span, name);
            inner.push_colon();
            inner.extend(field.into_tokens()?);
        }
        let mut s = TokenStream::new();
        s.push_ident(span, self.name);
        s.push_brace_group(inner);
        Ok(AstValue::Tokens(s))
    }
}
/// Helpers for TokenStream manipulation.
trait TokenStreamEx {
    /// Appends `Box::new(<inner>)`.
    fn push_boxed(&mut self, span: Span, inner: TokenStream);
    /// Appends `:`.
    fn push_colon(&mut self);
    /// Appends `::`.
    fn push_colon2(&mut self);
    /// Appends `,`.
    fn push_comma(&mut self);
    /// Appends the items separated by commas (no trailing comma).
    fn push_comma_sequence(&mut self, seq: impl IntoIterator<Item = TokenStream>);
    /// Appends `.`.
    fn push_dot(&mut self);
    /// Appends an identifier.
    fn push_ident(&mut self, span: Span, name: &str);
    /// Appends `{ <inner> }`.
    fn push_brace_group(&mut self, inner: TokenStream);
    /// Appends `( <inner> )`.
    fn push_paren_group(&mut self, inner: TokenStream);
}
impl TokenStreamEx for TokenStream {
    fn push_boxed(&mut self, span: Span, inner: TokenStream) {
        // `Box` `::` `new` `(inner)`
        self.push_ident(span, "Box");
        self.push_colon2();
        self.push_ident(span, "new");
        self.push_paren_group(inner);
    }
    fn push_colon(&mut self) {
        let colon = Punct::new(':', Spacing::Alone);
        self.append(TokenTree::Punct(colon));
    }
    fn push_colon2(&mut self) {
        // A path separator is a joint colon immediately followed by a
        // normal one.
        for spacing in [Spacing::Joint, Spacing::Alone] {
            self.append(TokenTree::Punct(Punct::new(':', spacing)));
        }
    }
    fn push_comma(&mut self) {
        let comma = Punct::new(',', Spacing::Alone);
        self.append(TokenTree::Punct(comma));
    }
    fn push_comma_sequence(&mut self, seq: impl IntoIterator<Item = TokenStream>) {
        for (idx, item) in seq.into_iter().enumerate() {
            if idx > 0 {
                self.push_comma();
            }
            self.extend(item);
        }
    }
    fn push_dot(&mut self) {
        let dot = Punct::new('.', Spacing::Alone);
        self.append(TokenTree::Punct(dot));
    }
    fn push_ident(&mut self, span: Span, name: &str) {
        self.append(Ident::new(name, span));
    }
    fn push_brace_group(&mut self, inner: TokenStream) {
        self.append(Group::new(Delimiter::Brace, inner));
    }
    fn push_paren_group(&mut self, inner: TokenStream) {
        self.append(Group::new(Delimiter::Parenthesis, inner));
    }
}
/// Convenience wrapper: returns a fresh stream containing `Box::new(<inner>)`.
fn box_stream(inner: TokenStream, span: Span) -> TokenStream {
    let mut boxed = TokenStream::new();
    boxed.push_boxed(span, inner);
    boxed
}
// A wrapper around syn::Error that provides Debug, Display, and Error which are
// needed for Serializer.
#[derive(Error, Debug)]
pub(crate) enum AstError {
    /// An error reported through `serde::ser::Error::custom`.
    #[error("Custom error '{0}'")]
    Custom(String),
    /// A parse/expansion error originating from `syn`.
    #[error("Syntax error")]
    Syn(#[from] syn::Error),
}
// Required by the Serializer trait bound on the error type.
impl ser::Error for AstError {
    fn custom<T: fmt::Display>(err: T) -> Self {
        Self::Custom(err.to_string())
    }
}
// Convert back to syn::Error so callers can emit compile errors.
impl From<AstError> for syn::Error {
    fn from(err: AstError) -> Self {
        match err {
            AstError::Custom(s) => Self::new(Span::call_site(), s),
            AstError::Syn(s) => s,
        }
    }
}
/// Serializer for sequences (Vec-valued AST fields). Tracks whether any
/// element is a `#{x*}` repeat so `end()` can splice the user's Vec into
/// the output instead of building a literal `vec![..]`.
struct SerializeSeq {
    seq: Vec<AstValue>,
    state: Rc<AstState>,
    has_repeat: bool,
}
impl ser::SerializeSeq for SerializeSeq {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_element<T: Serialize + ?Sized>(&mut self, value: &T) -> Result<()> {
        let inner = value.serialize(AstWriter::new(self.state.clone()))?;
        // Detect repeat replacements in Expr, Stmt, or (param-kind, Expr)
        // tuple position.
        let has_repeat = match &inner {
            AstValue::Replace(
                ReplaceState::Expr(repl)
                | ReplaceState::Stmt(repl)
                | ReplaceState::Tuple2(box (_, repl)),
            ) => {
                matches!(repl, Replacement::Repeat { .. })
            }
            _ => false,
        };
        self.has_repeat |= has_repeat;
        self.seq.push(inner);
        Ok(())
    }
    fn end(self) -> Result {
        // vec![a, b, c...]
        match self.has_repeat {
            false => {
                let seq = self
                    .seq
                    .into_iter()
                    .map(AstValue::into_tokens)
                    .collect::<Result<Vec<TokenStream>>>()?;
                Ok(AstValue::Tokens(quote!(vec![#(#seq),*])))
            }
            true => {
                // #{args*} replacement...
                // Generates something that looks like:
                //   std::iter::empty()
                //     .chain([a, b, c].into_iter())
                //     .chain(args.into_iter())
                //     .chain([d, e, f].into_iter())
                //     .collect::<Vec<_>>()
                let mut outer = TokenStream::new();
                // Flush the pending run of literal elements as one
                // `.chain([...])` link.
                fn flush(outer: &mut TokenStream, cur: &mut Vec<TokenStream>) {
                    if !cur.is_empty() {
                        outer.extend(quote!(.chain([#(#cur),*].into_iter())));
                        cur.clear();
                    }
                }
                let mut cur = Vec::new();
                for item in self.seq {
                    match item {
                        AstValue::Replace(
                            ReplaceState::Expr(Replacement::Repeat { pat, .. })
                            | ReplaceState::Stmt(Replacement::Repeat { pat, .. }),
                        )
                        | AstValue::Replace(ReplaceState::Tuple2(box (
                            _,
                            Replacement::Repeat { pat, .. },
                        ))) => {
                            flush(&mut outer, &mut cur);
                            outer.extend(quote!(.chain(#pat.into_iter())));
                        }
                        _ => cur.push(item.into_tokens()?),
                    }
                }
                flush(&mut outer, &mut cur);
                Ok(AstValue::Tokens(
                    quote!(std::iter::empty() #outer .collect::<Vec<_>>()),
                ))
            }
        }
    }
}
/// Serializer for plain tuples; watches for the `(_, replacement)` shapes
/// the state machine cares about (e.g. `(ParamKind, Expr)` call args and
/// `(0, "$__hack_repl_N")` local-id pairs).
struct SerializeTuple {
    state: Rc<AstState>,
    fields: Vec<AstValue>,
}
impl ser::SerializeTuple for SerializeTuple {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_element<T: Serialize + ?Sized>(&mut self, f: &T) -> Result<()> {
        let inner = f.serialize(AstWriter::new(self.state.clone()))?;
        self.fields.push(inner);
        Ok(())
    }
    fn end(self) -> Result {
        Ok(match &self.fields[..] {
            // (v, <repeat Expr>) -> propagate as Tuple2 for seq splicing.
            [
                v0,
                AstValue::Replace(ReplaceState::Expr(repl @ Replacement::Repeat { .. })),
            ] => AstValue::Replace(ReplaceState::Tuple2(Box::new((v0.clone(), repl.clone())))),
            // (_, <placeholder string>) -> propagate as Tuple (LocalId).
            [_, AstValue::Replace(ReplaceState::Str(repl))] => {
                AstValue::Replace(ReplaceState::Tuple(repl.clone()))
            }
            _ => {
                // Ordinary tuple: `(a, b, ...)`.
                let mut inner = TokenStream::new();
                inner.push_comma_sequence(
                    self.fields
                        .into_iter()
                        .map(AstValue::into_tokens)
                        .collect::<Result<Vec<TokenStream>>>()?,
                );
                let mut s = TokenStream::new();
                s.push_paren_group(inner);
                AstValue::Tokens(s)
            }
        })
    }
}
/// Serializer for tuple structs such as `Expr(..)`, `Lid(..)`, `Stmt(..)`,
/// `Hint(..)`; promotes pending replacements up the state machine and
/// re-adds boxing that serde stripped.
struct SerializeTupleStruct {
    state: Rc<AstState>,
    name: &'static str,
    fields: Vec<AstValue>,
}
impl ser::SerializeTupleStruct for SerializeTupleStruct {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_field<T: Serialize + ?Sized>(&mut self, f: &T) -> Result<()> {
        let inner = f.serialize(AstWriter::new(self.state.clone()))?;
        self.fields.push(inner);
        Ok(())
    }
    fn end(self) -> Result {
        // State-machine promotions: an inner replacement wrapped by the
        // expected node becomes the next replacement state.
        match (self.name, &self.fields[..]) {
            (
                "Expr",
                [
                    _,
                    _,
                    AstValue::Replace(ReplaceState::Lvar(repl @ Replacement::Repeat { .. })),
                ],
            )
            | ("Expr", [_, _, AstValue::Replace(ReplaceState::Lvar(repl))]) => {
                return Ok(AstValue::Replace(ReplaceState::Expr(repl.clone())));
            }
            ("Lid", [_, AstValue::Replace(ReplaceState::Tuple(repl))]) => {
                return Ok(AstValue::Replace(ReplaceState::Lid(repl.clone())));
            }
            ("Stmt", [_, AstValue::Replace(ReplaceState::ExprStmt(repl))]) => {
                return Ok(AstValue::Replace(ReplaceState::Stmt(repl.clone())));
            }
            _ => {}
        }
        let mut fields = Vec::with_capacity(self.fields.len());
        for field in self.fields {
            fields.push(field.into_tokens()?);
        }
        let span = Span::call_site();
        // `Hint`'s second field is boxed in the oxidized AST; serde sees
        // through the Box, so re-wrap it here.
        match self.name {
            "Hint" if fields.len() == 2 => {
                let unboxed = fields.pop().unwrap();
                fields.push(box_stream(unboxed, span));
            }
            _ => {}
        }
        // Emit `Name(f1, f2, ...)`.
        let mut inner = TokenStream::new();
        inner.push_comma_sequence(fields);
        let mut s = TokenStream::new();
        s.push_ident(span, self.name);
        s.push_paren_group(inner);
        Ok(AstValue::Tokens(s))
    }
}
/// Serializer for tuple enum variants; emits `Name::Variant(f1, f2, ...)`.
struct SerializeTupleVariant {
    state: Rc<AstState>,
    name: &'static str,
    variant: &'static str,
    fields: Vec<AstValue>,
}
impl ser::SerializeTupleVariant for SerializeTupleVariant {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_field<T: Serialize + ?Sized>(&mut self, f: &T) -> Result<()> {
        let inner = f.serialize(AstWriter::new(self.state.clone()))?;
        self.fields.push(inner);
        Ok(())
    }
    fn end(self) -> Result {
        let mut fields = Vec::with_capacity(self.fields.len());
        for field in self.fields {
            fields.push(field.into_tokens()?);
        }
        let span = Span::call_site();
        let mut inner = TokenStream::new();
        inner.push_comma_sequence(fields);
        let mut s = TokenStream::new();
        s.push_ident(span, self.name);
        s.push_colon2();
        s.push_ident(span, self.variant);
        s.push_paren_group(inner);
        Ok(AstValue::Tokens(s))
    }
}
/// Struct enum variants do not appear to occur in the serialized AST;
/// methods are left unimplemented.
struct SerializeStructVariant {}
impl ser::SerializeStructVariant for SerializeStructVariant {
    type Ok = AstValue;
    type Error = AstError;
    fn serialize_field<T: Serialize + ?Sized>(&mut self, _: &'static str, _: &T) -> Result<()> {
        todo!()
    }
    fn end(self) -> Result {
        todo!()
    }
}
Rust | hhvm/hphp/hack/src/utils/hack_macros/hack_macros.rs | // Copyright (c) Facebook, Inc. and its affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
/// A macro to build Hack Expr trees.
///
/// Usage:
/// hack_expr!(pos = p, "#foo + $bar")
///
/// Returns an ast::Expr representing the given code.
///
/// The `pos` parameter is optional. If not provided then a Pos::none() will be
/// used.
///
/// The code string must be a string literal. It cannot be a dynamically
/// generated string.
///
/// The code string can contain replacements to inject values into the generated
/// tree:
///
/// hack_expr!("$foo + #bar + $baz")
///
/// The replacements have various forms:
/// - `#name` Inject the Expr or Stmt in local variable `name`.
/// - `#{args*}` As a parameter to a call this inserts a Vec<Expr> as
/// ParamKind::Pnormal parameters.
/// - `#{clone(name)}` Clone the Expr `name` instead of consuming it.
/// - `#{cmd(name)}` Convert name using 'cmd' (see below).
/// - `#{cmd(clone(name))}` Clone `name` and then convert using 'cmd' (see below).
///
/// Conversion commands:
///
/// - `#{id(name)}` builds an Expr_::Ident from a string.
/// - `#{lvar(name)}` builds an Expr_::LVar from a LocalId.
/// - `#{str(name)}` builds an Expr_::String from a String.
///
/// All of the commands can also take an optional position override parameter:
/// - `#{str(name, pos)}`
/// - `#{str(clone(name), pos)}`
///
///
/// Technical note:
///
/// The transformation is done at hackc compile time - the macro takes the input
/// string and uses the aast parser to parse it and generate an Expr. It then
/// traverses the the Expr (again - at compile time) to generate code to
/// construct the Expr at runtime. The Hack code needs to be quoted because the
/// Rust parser doesn't like some Hack constructs (in particular backslash
/// separated identifiers).
///
/// hack_expr!(pos = p, "#foo + $bar")
///
/// transforms into something like this:
///
/// Expr(
/// (),
/// p.clone(),
/// Expr_::Binop(Box::new((
/// Bop::Plus,
/// foo,
/// Expr((), p.clone(), Expr_::Lvar(Lid(p.clone(), (0, "$bar")))),
/// )))
/// )
///
#[macro_export]
macro_rules! hack_expr {
    // Forward all tokens to the proc macro, prefixing the exports path.
    ($($input:tt)*) => {
        $crate::exports::hack_macros_impl::hack_expr_proc! {
            // See the comment in `hack_expr_impl` for what this is.
            $crate::exports
            $($input)*
        }
    }
}
/// Like `hack_expr!` but produces an ast::Stmt value (see `hack_expr!` for
/// full docs).
#[macro_export]
macro_rules! hack_stmt {
    // Forward all tokens to the proc macro, prefixing the exports path.
    ($($input:tt)*) => {
        $crate::exports::hack_macros_impl::hack_stmt_proc! {
            // See the comment in `hack_expr_impl` for what this is.
            $crate::exports
            $($input)*
        }
    }
}
/// Like `hack_stmt!` but produces a `Vec<ast::Stmt>` value (see `hack_expr!`
/// for full docs).
#[macro_export]
macro_rules! hack_stmts {
    // Forward all tokens to the proc macro, prefixing the exports path.
    ($($input:tt)*) => {
        $crate::exports::hack_macros_impl::hack_stmts_proc! {
            // See the comment in `hack_expr_impl` for what this is.
            $crate::exports
            $($input)*
        }
    }
}
/// Items re-exported for use by the macro expansions above; not intended
/// for direct use by clients.
pub mod exports {
    pub use hack_macros_impl;
    pub use oxidized::ast;
}
Rust | hhvm/hphp/hack/src/utils/hack_macros/hack_macros_impl.rs | // Copyright (c) Facebook, Inc. and its affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#![feature(box_patterns)]
mod ast_writer;
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use aast_parser::rust_aast_parser_types::Env;
use aast_parser::Error as AastError;
use once_cell::sync::OnceCell;
use oxidized::ast;
use oxidized::ast::Def;
use oxidized::ast::Pos;
use oxidized::ast::Program;
use oxidized::errors;
use oxidized::parser_options::ParserOptions;
use parser_core_types::indexed_source_text::IndexedSourceText;
use parser_core_types::source_text::SourceText;
use parser_core_types::syntax_error::SyntaxError;
use proc_macro2::Literal;
use proc_macro2::Span;
use proc_macro2::TokenStream;
use quote::quote;
use quote::ToTokens;
use regex::Match;
use regex::Regex;
use relative_path::Prefix;
use relative_path::RelativePath;
use rust_parser_errors::UnstableFeatures;
use syn::parse::ParseStream;
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
use syn::Error;
use syn::Expr;
use syn::ExprLit;
use syn::Ident;
use syn::Lit;
use syn::LitStr;
use syn::Path;
use syn::Result;
use syn::Token;
use crate::ast_writer::Replacement;
/// See hack_expr! for docs.
#[proc_macro]
pub fn hack_expr_proc(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: TokenStream = input.into();
match hack_expr_impl.parse2(input) {
Ok(res) => res.into(),
Err(err) => err.into_compile_error().into(),
}
}
#[proc_macro]
pub fn hack_stmt_proc(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: TokenStream = input.into();
match hack_stmt_impl.parse2(input) {
Ok(res) => res.into(),
Err(err) => err.into_compile_error().into(),
}
}
#[proc_macro]
pub fn hack_stmts_proc(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: TokenStream = input.into();
match hack_stmts_impl.parse2(input) {
Ok(res) => res.into(),
Err(err) => err.into_compile_error().into(),
}
}
/// Expansion body for `hack_stmts!`: parse the macro input, parse the Hack
/// source into statements, and emit tokens that rebuild a `Vec<Stmt>`.
fn hack_stmts_impl(input: ParseStream<'_>) -> Result<TokenStream> {
    // The first value is always `$crate::exports` (see hack_expr_impl).
    let exports: Path = input.parse()?;
    let input = Input::parse(input)?;
    let stmts = parse_stmts(&input.hack_src, 0, input.span)?;
    crate::ast_writer::write_ast(
        exports,
        input.span,
        input.replacements,
        input.pos_src,
        input.pos_id,
        stmts,
    )
    .map_err(Into::into)
}
/// Expansion body for `hack_stmt!`: like `hack_stmts_impl` but requires the
/// source to contain exactly one statement.
fn hack_stmt_impl(input: ParseStream<'_>) -> Result<TokenStream> {
    // The first value is always `$crate::exports` (see hack_expr_impl).
    let exports: Path = input.parse()?;
    let input = Input::parse(input)?;
    let stmt = parse_stmt(&input.hack_src, 0, input.span)?;
    crate::ast_writer::write_ast(
        exports,
        input.span,
        input.replacements,
        input.pos_src,
        input.pos_id,
        stmt,
    )
    .map_err(Into::into)
}
/// Parse `src` as a sequence of Hack statements by wrapping it in a dummy
/// class method and extracting the method body.  `internal_offset` shifts
/// reported error positions back to the caller's string.
///
/// The `panic!`s below guard parser invariants (the wrapper always yields
/// one class with one method); a panic aborts macro expansion with a
/// compiler error, which is acceptable for an internal invariant breach.
fn parse_stmts(src: &str, internal_offset: usize, span: Span) -> Result<Vec<ast::Stmt>> {
    // Parse the statements within a method (so we can use `$this` legally).
    let prefix = "class C { public function f(): void { ";
    let postfix = " } }";
    let src = format!("{}{}{}", prefix, src, postfix);
    let internal_offset = internal_offset + prefix.len();
    let mut program = parse_aast_from_string(&src, internal_offset, span)?;
    // Expect a single Def::Class.
    if program.0.len() != 1 {
        panic!("Expected a single Def");
    }
    let def = program.0.pop().unwrap();
    let cls = match def {
        Def::Class(cls) => cls,
        _ => panic!("Expected a Def::Class"),
    };
    let mut methods = cls.methods;
    if methods.len() != 1 {
        panic!("Expecting a single method");
    }
    let method = methods.pop().unwrap();
    Ok(method.body.fb_ast.0)
}
/// Parse `src` as exactly one Hack statement; panics if the snippet
/// contains zero or multiple statements.
fn parse_stmt(src: &str, internal_offset: usize, span: Span) -> Result<ast::Stmt> {
    let mut stmts = parse_stmts(src, internal_offset, span)?;
    let stmt = match stmts.as_slice() {
        [_] => stmts.pop().unwrap(),
        _ => panic!("Expecting a single Stmt"),
    };
    Ok(stmt)
}
fn hack_expr_impl(input: ParseStream<'_>) -> Result<TokenStream> {
// Unlike macro_rules macros, procedural macros can't refer to `$crate`. We
// wrap the proc_macros with a macro_rules that passes `$crate::exports` as
// the first value so the procedural macro can explicitly refer to types
// exported by the hack_macros crate.
let exports: Path = input.parse()?;
let input = Input::parse(input)?;
let expr = parse_expr(&input.hack_src, input.span)?;
crate::ast_writer::write_ast(
exports,
input.span,
input.replacements,
input.pos_src,
input.pos_id,
expr,
)
.map_err(Into::into)
}
fn parse_expr(src: &str, span: Span) -> Result<ast::Expr> {
// Parse the expression as a statement.
let stmt = parse_stmt(&format!("{};", src), 0, span)?;
match stmt.1 {
ast::Stmt_::Expr(expr) => Ok(*expr),
_ => Err(Error::new(span, "Expression expected")),
}
}
/// The fully-parsed input to one of the hack_* proc macros.
struct Input {
    /// Expression producing the runtime Pos (defaults to `Pos::NONE`).
    pos_src: TokenStream,
    /// Hygienic identifier the generated code binds that Pos to.
    pos_id: TokenStream,
    /// Span of the Hack source string literal, for error reporting.
    span: Span,
    /// The Hack snippet with `#...` placeholders rewritten to
    /// `$__hack_repl_N` variables (see `prepare_hack`).
    hack_src: String,
    /// Maps each `$__hack_repl_N` variable back to its replacement action.
    replacements: HashMap<String, Replacement>,
}
impl Input {
fn parse(input: ParseStream<'_>) -> Result<Input> {
let input: Punctuated<Expr, Token![,]> = Punctuated::parse_terminated(input)?;
let mut pos_src = None;
let mut hack_src = None;
for expr in input.into_iter() {
match expr {
Expr::Assign(assign) => {
let left = match *assign.left {
Expr::Path(path) => path.path,
left => return Err(Error::new(left.span(), "Identifier expected")),
};
let target = if left.is_ident("pos") {
&mut pos_src
} else {
return Err(Error::new(left.span(), "Unknown keyword"));
};
if target.is_some() {
return Err(Error::new(
left.span(),
format!("{} cannot be set twice", left.to_token_stream()),
));
}
*target = Some(assign.right);
}
Expr::Lit(ExprLit {
lit: Lit::Str(s), ..
}) => {
if hack_src.is_some() {
return Err(Error::new(s.span(), "Unepected string"));
}
hack_src = Some(s);
}
_ => return Err(Error::new(expr.span(), "String expected")),
}
}
let pos_src = pos_src.map_or_else(|| quote!(Pos::NONE), |p| p.to_token_stream());
let hack_src =
hack_src.ok_or_else(|| Error::new(Span::call_site(), "Missing hack source string"))?;
let span = hack_src.span();
let pos_id = crate::ast_writer::hygienic_pos(span.clone());
let (hack_src, replacements) = prepare_hack(hack_src, &pos_id)?;
Ok(Input {
pos_src,
pos_id,
span,
hack_src,
replacements,
})
}
}
/// Rewrite every `#...` placeholder in the Hack source literal into a
/// fresh `$__hack_repl_{n}` variable, returning the rewritten source plus
/// a map from each variable to the Replacement describing how generated
/// code must splice it back in.
///
/// Recognized placeholder forms:
///   - `#name`       -- splice the node held by the Rust variable `name`
///   - `#{cmd(...)}` -- command form; see `parse_repl_var`
///   - `#{name*}`    -- splice a sequence of nodes
fn prepare_hack(
    input: LitStr,
    default_pos: &TokenStream,
) -> Result<(String, HashMap<String, Replacement>)> {
    static RE_VAR: OnceCell<Regex> = OnceCell::new();
    let re_var = RE_VAR.get_or_init(|| {
        let short = r"#(\w+)";
        let long = r"#\{([^}*]+)\}";
        let args = r"#\{(\w+)\*\}";
        // NOTE(review): `(:?{})` is NOT the non-capturing group `(?:...)`
        // -- it is a *capturing* group whose body begins with an optional
        // literal ':'. The capture-index arithmetic below (caps.len() == 7
        // and branches 2/4/6) relies on these outer groups capturing, so
        // do not "fix" this to `(?:` without reworking those indices.
        let pat = [short, long, args]
            .into_iter()
            .map(|s| format!("(:?{})", s))
            .collect::<Vec<_>>()
            .join("|");
        Regex::new(&pat).unwrap()
    });
    let span = input.span();
    let mut replacements: HashMap<String, Replacement> = HashMap::default();
    let input_str = input.value();
    let mut output = String::with_capacity(input_str.len());
    // Byte offset just past the previous match; text between matches is
    // copied through verbatim.
    let mut last_match = 0;
    for (idx, caps) in re_var.captures_iter(&input_str).enumerate() {
        assert_eq!(caps.len(), 7);
        // caps[0] = whole string
        // caps[1] = short form - full capture
        // caps[2] = short form - substring
        // caps[3] = long form - full capture
        // caps[4] = long form - substring
        // caps[5] = args form - full capture
        // caps[6] = args form - substring
        let m = caps.get(0).unwrap();
        output.push_str(&input_str[last_match..m.start()]);
        last_match = m.end();
        // Compute the sub-span of the literal covered by this match so
        // diagnostics can point at the placeholder itself.
        fn compute_subspan<'a>(input_str: &'a str, span: Span, mat: Match<'a>) -> (&'a str, Span) {
            // TODO: Our version of syn is old (1.0.75) and doesn't have the
            // `token()` function which would let us figure out how much to properly
            // add - so we'll have to be "close enough".
            let offset = 1;
            let range = (mat.start() + offset)..(mat.end() + offset);
            let subspan = span_for_range(input_str, 0, span, range);
            (mat.as_str(), subspan)
        }
        let var = { format!("$__hack_repl_{}", idx) };
        output.push_str(&var);
        // The last match is always the most interesting to us (prior matches
        // tend to be things like 'full string' matches instead of the captured
        // content).
        let (idx, match_) = caps
            .iter()
            .enumerate()
            .filter_map(|(i, m)| m.map(|s| (i, s)))
            .last()
            .unwrap();
        let (name_str, subspan) = compute_subspan(&input_str, span, match_);
        let repl = match idx {
            0 => {
                panic!("No matches found");
            }
            2 => {
                // short form - #name
                let pat = Ident::new(name_str, subspan).to_token_stream();
                Replacement::Simple { pat, span: subspan }
            }
            4 => {
                // long form - #{cmd(...)}
                parse_repl_var(name_str, subspan, default_pos)?
            }
            6 => {
                // args form - #{name*}
                let pat = Ident::new(name_str, subspan).to_token_stream();
                Replacement::Repeat { pat, span: subspan }
            }
            _ => {
                panic!("Unexpected match index {} in {:?}", idx, caps);
            }
        };
        replacements.insert(var, repl);
    }
    // Copy the tail after the final placeholder.
    output.push_str(&input_str[last_match..]);
    Ok((output, replacements))
}
/// Split a string of the form `cmd(arg1, arg2, ...)` into the command name
/// and its top-level comma-separated arguments. Commas nested inside
/// parentheses do not split; unbalanced parentheses are reported as an
/// error.
fn unwrap_call(input: &str, span: Span) -> Result<(&str, Vec<&str>)> {
    let open = input
        .find('(')
        .ok_or_else(|| Error::new(span, "'(' not found"))?;
    if !input.ends_with(')') {
        return Err(Error::new(span, "')' not at end"));
    }
    // Stateful separator test: tracks paren nesting depth so that only
    // depth-0 commas count as argument separators.
    fn is_sep(c: char, in_paren: &mut isize) -> bool {
        if *in_paren > 0 {
            match c {
                '(' => *in_paren += 1,
                ')' => *in_paren -= 1,
                _ => {}
            }
            false
        } else {
            match c {
                '(' => {
                    *in_paren = 1;
                    false
                }
                ')' => {
                    // Force a detection at the end.
                    // (An unmatched ')' at depth 0 pushes the counter far
                    // positive so the balance check below reports it as
                    // "Unbalanced parentheses".)
                    *in_paren = 100;
                    false
                }
                ',' => true,
                _ => false,
            }
        }
    }
    let cmd = input[..open].trim();
    // Everything between the outermost parentheses.
    let args_text = &input[open + 1..input.len() - 1];
    let mut args = Vec::new();
    if !args_text.is_empty() {
        let mut in_paren = 0;
        let mut last_start = 0;
        for (i, c) in args_text.chars().enumerate() {
            if is_sep(c, &mut in_paren) {
                args.push(args_text[last_start..i].trim());
                last_start = i + 1;
            }
        }
        if in_paren != 0 {
            return Err(Error::new(span, "Unbalanced parentheses"));
        }
        // The final argument has no trailing comma.
        args.push(args_text[last_start..].trim());
    }
    Ok((cmd, args))
}
/// True for characters permitted in a placeholder identifier: letters,
/// digits, or underscore.
fn is_word_char(c: char) -> bool {
    c == '_' || c.is_alphanumeric()
}
/// True when `input` consists solely of word characters (letters, digits,
/// underscore). The empty string vacuously counts as a word.
fn is_word(input: &str) -> bool {
    !input.chars().any(|c| !(c == '_' || c.is_alphanumeric()))
}
/// Parse the long placeholder form `#{cmd(args...)}` into a Replacement.
///
/// Supported commands:
///   - `as_expr(e)`      -- splice `e` as an AsExpr
///   - `clone(v)`        -- splice `v.clone()` as a simple node
///   - `id(e [, pos])`   -- build an Expr_::Id from a string
///   - `lvar(e [, pos])` -- build an Expr_::Lvar from a LocalId
///   - `str(e [, pos])`  -- build an Expr_::String from a string
/// The optional trailing argument overrides the position expression
/// (otherwise `default_pos` is used).
///
/// NOTE(review): the `args[0]` indexing below will panic (aborting macro
/// expansion) when a command is written with no arguments, e.g. `#{id()}`
/// -- confirm whether a friendlier syn::Error is wanted there.
fn parse_repl_var(input: &str, span: Span, default_pos: &TokenStream) -> Result<Replacement> {
    let (cmd, args) = unwrap_call(input, span)?;
    // Validate argument count and pick the position expression: either the
    // default or, for commands that allow it, an explicit second argument.
    fn parse_pos(
        has_convert: bool,
        cmd: &str,
        args: &[&str],
        span: Span,
        default_pos: &TokenStream,
    ) -> Result<TokenStream> {
        match args.len() {
            0 => Err(Error::new(span, format!("Too few arguments to '{}'", cmd))),
            1 => Ok(default_pos.clone()),
            2 if has_convert => Ok(Ident::new(args[1], span).to_token_stream()),
            _ => Err(Error::new(span, format!("Too many arguments to '{}'", cmd))),
        }
    }
    // Parse an inner value: a bare identifier, or `clone(ident)` which is
    // expanded via the caller-supplied closure `f`.
    fn parse_expr<F: FnOnce(Ident) -> TokenStream>(
        inner: &str,
        span: Span,
        f: F,
    ) -> Result<TokenStream> {
        if is_word(inner) {
            Ok(Ident::new(inner, span).to_token_stream())
        } else {
            let (cmd, args) = unwrap_call(inner, span)?;
            if cmd != "clone" {
                return Err(Error::new(span, "Inner command can only be 'clone'"));
            }
            match args.len() {
                0 => return Err(Error::new(span, "Too few arguments to 'clone'")),
                1 => {}
                _ => return Err(Error::new(span, "Too many arguments to 'clone'")),
            }
            let var = Ident::new(args[0], span);
            Ok(f(var))
        }
    }
    match cmd {
        "as_expr" => {
            let pat = parse_expr(args[0], span, |var| quote!(#var.clone()))?;
            Ok(Replacement::AsExpr { pat, span })
        }
        "clone" => {
            // Note: `clone` re-parses the whole `input` (not args[0]) so the
            // inner `clone(...)` handling in parse_expr applies.
            let pat = parse_expr(input, span, |var| quote!(#var.clone()))?;
            Ok(Replacement::Simple { pat, span })
        }
        "id" => {
            let pat = parse_expr(args[0], span, |var| quote!(#var.to_string()))?;
            let pos = parse_pos(true, cmd, &args, span, default_pos)?;
            Ok(Replacement::Id { pat, pos, span })
        }
        "lvar" => {
            let pat = parse_expr(args[0], span, |var| quote!(#var.clone()))?;
            let pos = parse_pos(true, cmd, &args, span, default_pos)?;
            Ok(Replacement::Lvar { pat, pos, span })
        }
        "str" => {
            let pat = parse_expr(args[0], span, |var| quote!(#var.to_owned()))?;
            let pos = parse_pos(true, cmd, &args, span, default_pos)?;
            Ok(Replacement::Str { pat, pos, span })
        }
        _ => Err(Error::new(span, format!("Unknown command '{}'", cmd))),
    }
}
/// Run the Hack AAST parser over `input` (a complete Hack "file") and
/// convert every reported error into a syn::Error spanning the offending
/// part of the macro's source literal.
///
/// `internal_offset` is the number of bytes of wrapper text prepended to
/// the user's snippet; error positions are shifted back by it so spans
/// line up with the original literal.
fn parse_aast_from_string(input: &str, internal_offset: usize, span: Span) -> Result<Program> {
    let parser_options = ParserOptions {
        tco_union_intersection_type_hints: true,
        po_allow_unstable_features: true,
        ..ParserOptions::default()
    };
    // Parser environment tuned for macro expansion: codegen-style parsing,
    // systemlib mode, no comment collection.
    let env = Env {
        codegen: true,
        elaborate_namespaces: false,
        include_line_comments: false,
        parser_options,
        php5_compat_mode: false,
        quick_mode: false,
        show_all_errors: true,
        is_systemlib: true,
        for_debugger_eval: false,
        scour_comments: false,
    };
    let rel_path = RelativePath::make(Prefix::Dummy, "".into());
    let source_text = SourceText::make(Arc::new(rel_path), input.as_bytes());
    let indexed_source_text = IndexedSourceText::new(source_text);
    // Typed locals (`let $x: t = ...`) are still behind an unstable-feature
    // flag, so enable that feature for snippets used in macros.
    let mut default_unstable_features = HashSet::default();
    default_unstable_features.insert(UnstableFeatures::TypedLocalVariables);
    let aast =
        aast_parser::AastParser::from_text(&env, &indexed_source_text, default_unstable_features)
            .map_err(|err| convert_aast_error(err, input, internal_offset, span))?;
    // Surface every class of error the parser can report, in order.
    aast.errors
        .iter()
        .try_for_each(|e| convert_error(e, input, internal_offset, span))?;
    aast.syntax_errors
        .iter()
        .try_for_each(|e| convert_syntax_error(e, input, internal_offset, span))?;
    aast.lowerer_parsing_errors
        .iter()
        .try_for_each(|e| convert_lowerer_parsing_error(e, input, internal_offset, span))?;
    Ok(aast.aast)
}
/// Map a parser `Pos` inside the (wrapped) Hack source onto a sub-span of
/// the macro's string literal.
fn span_for_pos(src: &str, internal_offset: usize, span: Span, pos: &Pos) -> Span {
    let (start, end) = pos.info_raw();
    span_for_range(src, internal_offset, span, start..end)
}
/// Map a byte range inside the (wrapped) Hack source onto a sub-span of
/// the macro's string literal so diagnostics point at the right
/// characters. Falls back to the literal's whole span when `subspan`
/// cannot produce one.
fn span_for_range(
    src: &str,
    internal_offset: usize,
    span: Span,
    mut range: std::ops::Range<usize>,
) -> Span {
    // Positions that fall inside the synthetic wrapper prefix clamp to 0
    // instead of underflowing (saturating_sub replaces the original
    // hand-rolled if/else clamping with identical behavior).
    range.start = range.start.saturating_sub(internal_offset);
    range.end = range.end.saturating_sub(internal_offset);
    let mut tmp = Literal::string(&src[internal_offset..]);
    tmp.set_span(span);
    tmp.subspan(range).unwrap_or(span)
}
/// Convert a top-level AAST parser failure into a syn::Error anchored at
/// the macro's Hack source literal.
fn convert_aast_error(err: AastError, src: &str, internal_offset: usize, span: Span) -> Error {
    match err {
        AastError::NotAHackFile() => {
            Error::new(Span::call_site(), "Internal Error: Not a Hack File")
        }
        // convert_syntax_error always returns Err, so .err().unwrap()
        // cannot panic here.
        AastError::ParserFatal(syn, _) => convert_syntax_error(&syn, src, internal_offset, span)
            .err()
            .unwrap(),
        AastError::Other(msg) => Error::new(Span::call_site(), msg),
    }
}
fn convert_error(err: &errors::Error, src: &str, internal_offset: usize, span: Span) -> Result<()> {
let err_span = span_for_pos(src, internal_offset, span, &err.claim.0);
Err(Error::new(err_span, err.claim.1.to_string()))
}
fn convert_syntax_error(
err: &SyntaxError,
src: &str,
internal_offset: usize,
span: Span,
) -> Result<()> {
let err_span = span_for_range(src, internal_offset, span, err.start_offset..err.end_offset);
Err(Error::new(
err_span,
format!("[{}] {}", err.start_offset, err.message),
))
}
fn convert_lowerer_parsing_error(
err: &(Pos, String),
src: &str,
internal_offset: usize,
span: Span,
) -> Result<()> {
let err_span = span_for_pos(src, internal_offset, span, &err.0);
Err(Error::new(err_span, err.1.to_string()))
}
#[cfg(test)]
mod tests {
use macro_test_util::assert_pat_eq;
use super::*;
#[test]
fn test_basic() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX pos = p, "#foo + $bar")),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = p;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Plus,
lhs: {
let tmp: Expr = foo;
tmp
},
rhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$bar".to_owned()),
))),
),
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex1() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX
pos = pos(),
r#"#obj_lvar->#meth_lvar(...#{lvar(args_var)})"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = pos();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::ObjGet(Box::new((
{
let tmp: Expr = obj_lvar;
tmp
},
{
let tmp: Expr = meth_lvar;
tmp
},
OgNullFlavor::OGNullthrows,
PropOrMethod::IsMethod,
))),
),
targs: vec![],
args: vec![],
unpacked_arg: Some({
let tmp: LocalId = args_var;
Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(__hygienic_pos.clone(), tmp))),
)
}),
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex2() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX
pos = pos(),
r#"\__SystemLib\dynamic_meth_caller(
#{clone(cexpr)},
#{clone(fexpr)},
#efun,
#force_val_expr
)
"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = pos();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"\\__SystemLib\\dynamic_meth_caller".to_owned(),
))),
),
targs: vec![],
args: vec![
(ParamKind::Pnormal, {
let tmp: Expr = cexpr.clone();
tmp
}),
(ParamKind::Pnormal, {
let tmp: Expr = fexpr.clone();
tmp
}),
(ParamKind::Pnormal, {
let tmp: Expr = efun;
tmp
}),
(ParamKind::Pnormal, {
let tmp: Expr = force_val_expr;
tmp
}),
],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex3() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX
pos = pos(),
r#"\__SystemLib\meth_caller(#{str(clone(mangle_name))})"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = pos();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"\\__SystemLib\\meth_caller".to_owned(),
))),
),
targs: vec![],
args: vec![(ParamKind::Pnormal, {
let tmp: String = mangle_name.to_owned();
Expr((), __hygienic_pos.clone(), Expr_::String(tmp.into()))
})],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex4() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX
pos = pos(),
r#"\HH\invariant(
\is_a(#{clone(obj_lvar)}, #{str(clone(cls), pc)}),
#{str(msg)}
)
"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = pos();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"\\HH\\invariant".to_owned(),
))),
),
targs: vec![],
args: vec![
(
ParamKind::Pnormal,
Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"\\is_a".to_owned(),
))),
),
targs: vec![],
args: vec![
(ParamKind::Pnormal, {
let tmp: Expr = obj_lvar.clone();
tmp
}),
(ParamKind::Pnormal, {
let tmp: String = cls.to_owned();
Expr((), pc.clone(), Expr_::String(tmp.into()))
}),
],
unpacked_arg: None,
})),
),
),
(ParamKind::Pnormal, {
let tmp: String = msg;
Expr((), __hygienic_pos.clone(), Expr_::String(tmp.into()))
}),
],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex5() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX
pos = pos(),
r#"#obj_lvar->#{id(clone(fname), pf)}(...#{lvar(args_var)})"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = pos();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::ObjGet(Box::new((
{
let tmp: Expr = obj_lvar;
tmp
},
{
let tmp: String = fname.to_string();
Expr((), pf.clone(), Expr_::Id(Box::new(Id(pf.clone(), tmp))))
},
OgNullFlavor::OGNullthrows,
PropOrMethod::IsMethod,
))),
),
targs: vec![],
args: vec![],
unpacked_arg: Some({
let tmp: LocalId = args_var;
Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(__hygienic_pos.clone(), tmp))),
)
}),
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex6() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX "echo #{str(tail)}")),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "echo".to_owned()))),
),
targs: vec![],
args: vec![(ParamKind::Pnormal, {
let tmp: String = tail;
Expr((), __hygienic_pos.clone(), Expr_::String(tmp.into()))
})],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex7() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX "parent::__xhpAttributeDeclaration()")),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::ClassConst(Box::new((
ClassId(
(),
__hygienic_pos.clone(),
ClassId_::CIexpr(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"parent".to_owned(),
))),
)),
),
(
__hygienic_pos.clone(),
"__xhpAttributeDeclaration".to_owned(),
),
))),
),
targs: vec![],
args: vec![],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex8() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX "#{id(s)}::__xhpAttributeDeclaration()")),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::ClassConst(Box::new((
ClassId(
(),
__hygienic_pos.clone(),
ClassId_::CIexpr({
let tmp: String = s;
Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), tmp))),
)
}),
),
(
__hygienic_pos.clone(),
"__xhpAttributeDeclaration".to_owned(),
),
))),
),
targs: vec![],
args: vec![],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex9() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX
"if (#{lvar(clone(name))} is __uninitSentinel) { unset(#{lvar(name)}); }"
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::If(Box::new((
Expr(
(),
__hygienic_pos.clone(),
Expr_::Is(Box::new((
{
let tmp: LocalId = name.clone();
Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(__hygienic_pos.clone(), tmp))),
)
},
Hint(
__hygienic_pos.clone(),
Box::new(Hint_::Happly(
Id(__hygienic_pos.clone(), "__uninitSentinel".to_owned()),
vec![],
)),
),
))),
),
Block(vec![Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"unset".to_owned(),
))),
),
targs: vec![],
args: vec![(ParamKind::Pnormal, {
let tmp: LocalId = name;
Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(__hygienic_pos.clone(), tmp))),
)
})],
unpacked_arg: None,
})),
))),
)]),
Block(vec![Stmt(__hygienic_pos.clone(), Stmt_::Noop)]),
))),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex10() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX
pos = p(),
r#"if (\__SystemLib\__debugger_is_uninit(#{lvar(clone(name))})) {
#{lvar(name)} = new __uninitSentinel();
}
"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = p();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::If(Box::new((
Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"\\__SystemLib\\__debugger_is_uninit".to_owned(),
))),
),
targs: vec![],
args: vec![(ParamKind::Pnormal, {
let tmp: LocalId = name.clone();
Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(__hygienic_pos.clone(), tmp))),
)
})],
unpacked_arg: None,
})),
),
Block(vec![Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Eq(None),
lhs: {
let tmp: LocalId = name;
Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(__hygienic_pos.clone(), tmp))),
)
},
rhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::New(Box::new((
ClassId(
(),
__hygienic_pos.clone(),
ClassId_::CIexpr(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"__uninitSentinel".to_owned(),
))),
)),
),
vec![],
vec![],
None,
(),
))),
),
})),
))),
)]),
Block(vec![Stmt(__hygienic_pos.clone(), Stmt_::Noop)]),
))),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex11() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX
pos = p(),
r#"
try {
#{stmts*};
} catch (Throwable #{lvar(exnvar)}) {
/* no-op */
} finally {
#{sets*};
}
"#
)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = p();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::Try(Box::new((
Block(
std::iter::empty()
.chain(stmts.into_iter())
.collect::<Vec<_>>(),
),
vec![Catch(
Id(__hygienic_pos.clone(), "Throwable".to_owned()),
{
let tmp: LocalId = exnvar;
Lid(__hygienic_pos.clone(), tmp)
},
Block(vec![]),
)],
FinallyBlock(
std::iter::empty()
.chain(sets.into_iter())
.collect::<Vec<_>>(),
),
))),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_ex12() {
assert_pat_eq(
hack_stmts_impl.parse2(quote!(EX
r#"
$r = self::$__xhpAttributeDeclarationCache;
if ($r === null) {
self::$__xhpAttributeDeclarationCache =
__SystemLib\merge_xhp_attr_declarations(#{args*});
$r = self::$__xhpAttributeDeclarationCache;
}
return $r;
"#
)),
{
let stmt1 = quote!(Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Eq(None),
lhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$r".to_owned())
)))
),
rhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::ClassGet(Box::new((
ClassId(
(),
__hygienic_pos.clone(),
ClassId_::CIexpr(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"self".to_owned()
)))
))
),
ClassGetExpr::CGstring((
__hygienic_pos.clone(),
"$__xhpAttributeDeclarationCache".to_owned()
)),
PropOrMethod::IsProp
)))
)
}))
)))
));
let stmt2 = quote!(Stmt(
__hygienic_pos.clone(),
Stmt_::If(Box::new((
Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Eqeqeq,
lhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$r".to_owned())
)))
),
rhs: Expr((), __hygienic_pos.clone(), Expr_::Null)
}))
),
Block(vec![
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Eq(None),
lhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::ClassGet(Box::new((
ClassId(
(),
__hygienic_pos.clone(),
ClassId_::CIexpr(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"self".to_owned()
)))
))
),
ClassGetExpr::CGstring((
__hygienic_pos.clone(),
"$__xhpAttributeDeclarationCache".to_owned()
)),
PropOrMethod::IsProp
)))
),
rhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"__SystemLib\\merge_xhp_attr_declarations"
.to_owned()
)))
),
targs: vec![],
args: std::iter::empty()
.chain(args.into_iter())
.collect::<Vec<_>>(),
unpacked_arg: None
}))
)
}))
)))
),
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Eq(None),
lhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$r".to_owned())
)))
),
rhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::ClassGet(Box::new((
ClassId(
(),
__hygienic_pos.clone(),
ClassId_::CIexpr(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(
__hygienic_pos.clone(),
"self".to_owned()
)))
))
),
ClassGetExpr::CGstring((
__hygienic_pos.clone(),
"$__xhpAttributeDeclarationCache".to_owned()
)),
PropOrMethod::IsProp
)))
)
}))
)))
)
]),
Block(vec![Stmt(__hygienic_pos.clone(), Stmt_::Noop)])
)))
));
let stmt3 = quote!(Stmt(
__hygienic_pos.clone(),
Stmt_::Return(Box::new(Some(Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$r".to_owned())
)))
))))
));
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = vec![#stmt1, #stmt2, #stmt3];
__hygienic_tmp
})
},
);
}
#[test]
fn test_ex13() {
assert_pat_eq(hack_expr_impl.parse2(quote!(EX r#"darray[a => 42]"#)), {
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Darray(Box::new((
None,
vec![(
Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "a".to_owned()))),
),
Expr((), __hygienic_pos.clone(), Expr_::Int("42".to_owned())),
)],
))),
);
__hygienic_tmp
})
});
}
#[test]
fn test_stmt() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX pos = p, "{ a; #b; c; }")),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = p;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::Block(Block(vec![
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "a".to_owned()))),
))),
),
{
let tmp: Stmt = b;
tmp
},
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "c".to_owned()))),
))),
),
])),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_stmts() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX pos = p, "{ a; b; #{c*}; d; e; }")),
{
let before = quote!([
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "a".to_owned()))),
))),
),
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "b".to_owned()))),
))),
),
]);
let after = quote!([
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "d".to_owned(),))),
))),
),
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Id(Box::new(Id(__hygienic_pos.clone(), "e".to_owned(),))),
))),
),
]);
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = p;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::Block(Block(
std::iter::empty()
.chain(#before.into_iter())
.chain(c.into_iter())
.chain(#after.into_iter())
.collect::<Vec<_>>(),
)),
);
__hygienic_tmp
})
},
);
}
#[test]
fn test_ex14() {
assert_pat_eq(
hack_expr_impl.parse2(quote!(EX pos = pos.clone(),
r#"() ==> {
$result = #kind;
foreach (#{clone(collection)} as #{as_expr(clone(binding))}) {
#inner_body;
}
return $result;
}()
"#)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = pos.clone();
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Expr(
(),
__hygienic_pos.clone(),
Expr_::Call(Box::new(CallExpr {
func: Expr(
(),
__hygienic_pos.clone(),
Expr_::Lfun(Box::new((
Fun_ {
span: __hygienic_pos.clone(),
readonly_this: None,
annotation: (),
readonly_ret: None,
ret: TypeHint((), None),
params: vec![],
ctxs: None,
unsafe_ctxs: None,
body: FuncBody {
fb_ast: Block(vec![
Stmt(
__hygienic_pos.clone(),
Stmt_::Expr(Box::new(Expr(
(),
__hygienic_pos.clone(),
Expr_::Binop(Box::new(Binop {
bop: Bop::Eq(None),
lhs: Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$result".to_owned()),
))),
),
rhs: {
let tmp: Expr = kind;
tmp
},
})),
))),
),
Stmt(
__hygienic_pos.clone(),
Stmt_::Foreach(Box::new((
{
let tmp: Expr = collection.clone();
tmp
},
{
let tmp: AsExpr = binding.clone();
tmp
},
Block(vec![{
let tmp: Stmt = inner_body;
tmp
}]),
))),
),
Stmt(
__hygienic_pos.clone(),
Stmt_::Return(Box::new(Some(Expr(
(),
__hygienic_pos.clone(),
Expr_::Lvar(Box::new(Lid(
__hygienic_pos.clone(),
(0isize, "$result".to_owned()),
))),
)))),
),
]),
},
fun_kind: FunKind::FSync,
user_attributes: UserAttributes(vec![]),
external: false,
doc_comment: None,
},
vec![],
))),
),
targs: vec![],
args: vec![],
unpacked_arg: None,
})),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_exprs_expand() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX r#"return vec[#a, #{b*}, #c];"#)),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = Pos::NONE;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::Return(Box::new(Some(Expr(
(),
__hygienic_pos.clone(),
Expr_::ValCollection(Box::new((
(__hygienic_pos.clone(), VcKind::Vec),
None,
std::iter::empty()
.chain(
[{
let tmp: Expr = a;
tmp
}]
.into_iter(),
)
.chain(b.into_iter())
.chain(
[{
let tmp: Expr = c;
tmp
}]
.into_iter(),
)
.collect::<Vec<_>>(),
))),
)))),
);
__hygienic_tmp
}),
);
}
#[test]
fn test_typed_local_stmt() {
assert_pat_eq(
hack_stmt_impl.parse2(quote!(EX pos = p, "let $x: t = #e;")),
quote!({
use EX::ast::*;
let __hygienic_pos: Pos = p;
#[allow(clippy::redundant_clone)]
let __hygienic_tmp = Stmt(
__hygienic_pos.clone(),
Stmt_::DeclareLocal(Box::new((
Lid(__hygienic_pos.clone(), (0isize, "$x".to_owned())),
Hint(
__hygienic_pos.clone(),
Box::new(Hint_::Happly(
Id(__hygienic_pos.clone(), "t".to_owned()),
vec![],
)),
),
Some({
let tmp: Expr = e;
tmp
}),
))),
);
__hygienic_tmp
}),
);
}
} |
TOML | hhvm/hphp/hack/src/utils/hack_macros/cargo/hack_macros/Cargo.toml | # @generated by autocargo
[package]
name = "hack_macros"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hack_macros.rs"
[dependencies]
hack_macros_impl = { version = "0.0.0", path = "../hack_macros_impl" }
oxidized = { version = "0.0.0", path = "../../../../oxidized" } |
TOML | hhvm/hphp/hack/src/utils/hack_macros/cargo/hack_macros_impl/Cargo.toml | # @generated by autocargo
[package]
name = "hack_macros_impl"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hack_macros_impl.rs"
test = false
doctest = false
proc-macro = true
[dependencies]
aast_parser = { version = "0.0.0", path = "../../../../parser/cargo/aast_parser" }
once_cell = "1.12"
oxidized = { version = "0.0.0", path = "../../../../oxidized" }
parser_core_types = { version = "0.0.0", path = "../../../../parser/cargo/core_types" }
proc-macro2 = { version = "1.0.64", features = ["span-locations"] }
quote = "1.0.29"
regex = "1.9.2"
relative_path = { version = "0.0.0", path = "../../../rust/relative_path" }
rust_parser_errors = { version = "0.0.0", path = "../../../../parser/cargo/errors" }
serde = { version = "1.0.176", features = ["derive", "rc"] }
syn = { version = "1.0.109", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] }
thiserror = "1.0.43"
[dev-dependencies]
macro_test_util = { version = "0.0.0", path = "../../../test/macro_test_util" } |
TOML | hhvm/hphp/hack/src/utils/hash/Cargo.toml | # @generated by autocargo
[package]
name = "hash"
version = "0.0.0"
edition = "2021"
[lib]
path = "lib.rs"
[dependencies]
dashmap = { version = "5.4", features = ["rayon", "serde"] }
indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] }
rustc-hash = "1.1.0" |
Rust | hhvm/hphp/hack/src/utils/hash/lib.rs | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
/// Hasher used throughout: rustc's FxHash -- a fast, non-cryptographic
/// hash. It is NOT DoS-resistant, so avoid it for untrusted keys.
pub type Hasher = rustc_hash::FxHasher;
/// BuildHasher producing `Hasher` instances (default, unseeded).
pub type BuildHasher = std::hash::BuildHasherDefault<Hasher>;
/// std HashMap/HashSet specialized to the Fx hasher.
pub type HashMap<K, V> = rustc_hash::FxHashMap<K, V>;
pub type HashSet<K> = rustc_hash::FxHashSet<K>;
/// Insertion-order-preserving map/set, also using the Fx hasher.
pub type IndexMap<K, V> = indexmap::map::IndexMap<K, V, BuildHasher>;
pub type IndexSet<K> = indexmap::set::IndexSet<K, BuildHasher>;
/// Concurrent map/set (sharded), using the Fx hasher.
pub type DashMap<K, V> = dashmap::DashMap<K, V, BuildHasher>;
pub type DashSet<K> = dashmap::DashSet<K, BuildHasher>;
C/C++ | hhvm/hphp/hack/src/utils/hashlib/bitfn.h | /*
* Copyright (C) 2006-2009 Vincent Hanquez <[email protected]>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* SHA implementation low level operation
*/
#ifndef BITFN_H
#define BITFN_H
#include <stdint.h>
/* Rotate the 32-bit value `word` left by `shift` bits.
 * Both shift amounts are masked to 0..31: the original form computed
 * `word >> (32 - shift)`, which is undefined behavior in C when
 * shift == 0 (shift by the full width). For shifts 1..31 the result is
 * unchanged. */
static inline unsigned int rol32(unsigned int word, unsigned int shift)
{
        return (word << (shift & 31)) | (word >> ((32 - shift) & 31));
}
/* Rotate the 32-bit value `word` right by `shift` bits.
 * Shift amounts are masked to 0..31 to avoid the undefined shift-by-32
 * that the unmasked form performs when shift == 0. */
static inline unsigned int ror32(unsigned int word, unsigned int shift)
{
        return (word >> (shift & 31)) | (word << ((32 - shift) & 31));
}
/* Rotate the 64-bit value `word` left by `shift` bits.
 * Shift amounts are masked to 0..63 to avoid the undefined shift-by-64
 * that the unmasked form performs when shift == 0. */
static inline uint64_t rol64(uint64_t word, unsigned int shift)
{
        return (word << (shift & 63)) | (word >> ((64 - shift) & 63));
}
/* Rotate the 64-bit value `word` right by `shift` bits.
 * Shift amounts are masked to 0..63 to avoid the undefined shift-by-64
 * that the unmasked form performs when shift == 0. */
static inline uint64_t ror64(uint64_t word, unsigned int shift)
{
        return (word >> (shift & 63)) | (word << ((64 - shift) & 63));
}
/* swap32 - reverse the byte order of a 32-bit value (endianness swap).
 * On x86/x86-64 (unless NO_INLINE_ASM) a single BSWAP instruction is used;
 * elsewhere the swap is composed from shifts and masks. */
#if (defined(__i386__) || defined(__x86_64__)) && !defined(NO_INLINE_ASM)
static inline unsigned int swap32(unsigned int a)
{
        /* "0" constrains the input to the same register as the output. */
        asm ("bswap %0" : "=r" (a) : "0" (a));
        return a;
}
#else
static inline unsigned int swap32(unsigned int a)
{
        /* Portable fallback: move each byte to its mirrored position. */
        return (a << 24) | ((a & 0xff00) << 8) | ((a >> 8) & 0xff00) | (a >> 24);
}
#endif
/* swap64 - reverse the byte order of a 64-bit value. */
#if defined(__x86_64__) && !defined(NO_INLINE_ASM)
static inline uint64_t swap64(uint64_t a)
{
        asm ("bswap %0" : "=r" (a) : "0" (a));
        return a;
}
#else
static inline uint64_t swap64(uint64_t a)
{
        /* Byte-swap each 32-bit half with swap32, then exchange the halves. */
        return ((uint64_t) swap32((unsigned int) (a >> 32))) |
               (((uint64_t) swap32((unsigned int) a)) << 32);
}
#endif
/* big endian to cpu */
/* Pick up the platform's BYTE_ORDER / LITTLE_ENDIAN / BIG_ENDIAN macros;
 * the header that defines them differs per OS. */
#ifdef __APPLE__
#include <architecture/byte_order.h>
#elif defined(__FreeBSD__)
#include <sys/endian.h>
#elif WIN32
/* nothing */
#else
#include <endian.h>
#endif
/* be32_to_cpu / cpu_to_be32 (and the 64-bit variants) convert between
 * big-endian wire order and host order: a byte swap on little-endian
 * hosts, a no-op on big-endian hosts. */
#if LITTLE_ENDIAN == BYTE_ORDER
#define be32_to_cpu(a) swap32(a)
#define cpu_to_be32(a) swap32(a)
#define be64_to_cpu(a) swap64(a)
#define cpu_to_be64(a) swap64(a)
#elif BIG_ENDIAN == BYTE_ORDER
#define be32_to_cpu(a) (a)
#define cpu_to_be32(a) (a)
#define be64_to_cpu(a) (a)
#define cpu_to_be64(a) (a)
#else
#error "endian not supported"
#endif
#endif /* !BITFN_H */
hhvm/hphp/hack/src/utils/hashlib/dune | (library
(name utils_hash)
(wrapped false)
(foreign_stubs
(language c)
(names sha1_ocaml sha1c))
(libraries core_kernel)) |
|
OCaml | hhvm/hphp/hack/src/utils/hashlib/sha1.ml | (*
* Copyright (c) 2017, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Tools for making sha1 digests. *)

(** [digest s] computes the SHA-1 digest of [s] and returns it as a
    40-character lowercase hexadecimal string. Implemented in C: see
    sha1_ocaml.c (registered as "sha1sum") and sha1c.c. *)
external digest : string -> string = "sha1sum"
C | hhvm/hphp/hack/src/utils/hashlib/sha1c.c | /*
* Copyright (C) 2006-2009 Vincent Hanquez <[email protected]>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* SHA1 implementation as describe in wikipedia.
*/
#include <string.h>
#include <stdio.h>
#include "sha1c.h"
#include "bitfn.h"
/**
 * sha1_init - Init SHA1 context
 *
 * Zeroes the whole context (partial-block buffer and length counter) and
 * loads the five initial chaining values H0..H4 mandated by the SHA-1
 * specification.
 */
void sha1_init(struct sha1_ctx *ctx)
{
        static const unsigned int iv[5] = {
                0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0,
        };
        int i;

        memset(ctx, 0, sizeof(*ctx));
        for (i = 0; i < 5; i++)
                ctx->h[i] = iv[i];
}
/**
* sha1_copy - Copy SHA1 context
*/
void sha1_copy(struct sha1_ctx *dst, struct sha1_ctx *src)
{
memcpy(dst, src, sizeof(*dst));
}
/* Per-round nonlinear functions and additive constants from the SHA-1
 * specification (f1/K1 rounds 0-19, f2/K2 20-39, f3/K3 40-59, f4/K4 60-79). */
#define f1(x, y, z) (z ^ (x & (y ^ z))) /* x ? y : z */
#define f2(x, y, z) (x ^ y ^ z) /* XOR */
#define f3(x, y, z) ((x & y) + (z & (x ^ y))) /* majority */
#define f4(x, y, z) f2(x, y, z)
#define K1 0x5A827999L /* Rounds 0-19: sqrt(2) * 2^30 */
#define K2 0x6ED9EBA1L /* Rounds 20-39: sqrt(3) * 2^30 */
#define K3 0x8F1BBCDCL /* Rounds 40-59: sqrt(5) * 2^30 */
#define K4 0xCA62C1D6L /* Rounds 60-79: sqrt(10) * 2^30 */
/* R: one SHA-1 round. Updates e in place and rotates b; callers rotate the
 * roles of a..e between invocations instead of shuffling values around. */
#define R(a, b, c, d, e, f, k, w) e += rol32(a, 5) + f(b, c, d) + k + w; \
                                  b = rol32(b, 30)
/* M: message-schedule expansion, computed in place in a rolling
 * 16-word window of w[]. */
#define M(i) (w[i & 0x0f] = rol32(w[i & 0x0f] ^ w[(i - 14) & 0x0f] \
              ^ w[(i - 8) & 0x0f] ^ w[(i - 3) & 0x0f], 1))
/*
 * sha1_do_chunk - compress one 64-byte block W into the chaining state h[5].
 *
 * The 80-round compression loop is fully unrolled. The message schedule is
 * kept in a rolling 16-word window (see M), and the round macro R rotates
 * the roles of the five working variables a..e instead of moving values.
 */
static inline void sha1_do_chunk(unsigned char W[], unsigned int h[])
{
        unsigned int a, b, c, d, e;
        unsigned int w[80];

        /* Load the block as 16 big-endian 32-bit words.
         * NOTE(review): the cast assumes W is 4-byte aligned — callers pass
         * either ctx->buf or the raw input pointer; confirm alignment. */
#define CPY(i) w[i] = be32_to_cpu(((unsigned int *) W)[i])
        CPY(0); CPY(1); CPY(2); CPY(3); CPY(4); CPY(5); CPY(6); CPY(7);
        CPY(8); CPY(9); CPY(10); CPY(11); CPY(12); CPY(13); CPY(14); CPY(15);
#undef CPY

        a = h[0];
        b = h[1];
        c = h[2];
        d = h[3];
        e = h[4];

        /* following unrolled from:
         * for (i = 0; i < 20; i++) {
         * t = f1(b, c, d) + K1 + rol32(a, 5) + e + M(i);
         * e = d; d = c; c = rol32(b, 30); b = a; a = t;
         * }
         */
        R(a, b, c, d, e, f1, K1, w[0]);
        R(e, a, b, c, d, f1, K1, w[1]);
        R(d, e, a, b, c, f1, K1, w[2]);
        R(c, d, e, a, b, f1, K1, w[3]);
        R(b, c, d, e, a, f1, K1, w[4]);
        R(a, b, c, d, e, f1, K1, w[5]);
        R(e, a, b, c, d, f1, K1, w[6]);
        R(d, e, a, b, c, f1, K1, w[7]);
        R(c, d, e, a, b, f1, K1, w[8]);
        R(b, c, d, e, a, f1, K1, w[9]);
        R(a, b, c, d, e, f1, K1, w[10]);
        R(e, a, b, c, d, f1, K1, w[11]);
        R(d, e, a, b, c, f1, K1, w[12]);
        R(c, d, e, a, b, f1, K1, w[13]);
        R(b, c, d, e, a, f1, K1, w[14]);
        R(a, b, c, d, e, f1, K1, w[15]);
        R(e, a, b, c, d, f1, K1, M(16));
        R(d, e, a, b, c, f1, K1, M(17));
        R(c, d, e, a, b, f1, K1, M(18));
        R(b, c, d, e, a, f1, K1, M(19));

        /* following unrolled from:
         * for (i = 20; i < 40; i++) {
         * t = f2(b, c, d) + K2 + rol32(a, 5) + e + M(i);
         * e = d; d = c; c = rol32(b, 30); b = a; a = t;
         * }
         */
        R(a, b, c, d, e, f2, K2, M(20));
        R(e, a, b, c, d, f2, K2, M(21));
        R(d, e, a, b, c, f2, K2, M(22));
        R(c, d, e, a, b, f2, K2, M(23));
        R(b, c, d, e, a, f2, K2, M(24));
        R(a, b, c, d, e, f2, K2, M(25));
        R(e, a, b, c, d, f2, K2, M(26));
        R(d, e, a, b, c, f2, K2, M(27));
        R(c, d, e, a, b, f2, K2, M(28));
        R(b, c, d, e, a, f2, K2, M(29));
        R(a, b, c, d, e, f2, K2, M(30));
        R(e, a, b, c, d, f2, K2, M(31));
        R(d, e, a, b, c, f2, K2, M(32));
        R(c, d, e, a, b, f2, K2, M(33));
        R(b, c, d, e, a, f2, K2, M(34));
        R(a, b, c, d, e, f2, K2, M(35));
        R(e, a, b, c, d, f2, K2, M(36));
        R(d, e, a, b, c, f2, K2, M(37));
        R(c, d, e, a, b, f2, K2, M(38));
        R(b, c, d, e, a, f2, K2, M(39));

        /* following unrolled from:
         * for (i = 40; i < 60; i++) {
         * t = f3(b, c, d) + K3 + rol32(a, 5) + e + M(i);
         * e = d; d = c; c = rol32(b, 30); b = a; a = t;
         * }
         */
        R(a, b, c, d, e, f3, K3, M(40));
        R(e, a, b, c, d, f3, K3, M(41));
        R(d, e, a, b, c, f3, K3, M(42));
        R(c, d, e, a, b, f3, K3, M(43));
        R(b, c, d, e, a, f3, K3, M(44));
        R(a, b, c, d, e, f3, K3, M(45));
        R(e, a, b, c, d, f3, K3, M(46));
        R(d, e, a, b, c, f3, K3, M(47));
        R(c, d, e, a, b, f3, K3, M(48));
        R(b, c, d, e, a, f3, K3, M(49));
        R(a, b, c, d, e, f3, K3, M(50));
        R(e, a, b, c, d, f3, K3, M(51));
        R(d, e, a, b, c, f3, K3, M(52));
        R(c, d, e, a, b, f3, K3, M(53));
        R(b, c, d, e, a, f3, K3, M(54));
        R(a, b, c, d, e, f3, K3, M(55));
        R(e, a, b, c, d, f3, K3, M(56));
        R(d, e, a, b, c, f3, K3, M(57));
        R(c, d, e, a, b, f3, K3, M(58));
        R(b, c, d, e, a, f3, K3, M(59));

        /* following unrolled from:
         * for (i = 60; i < 80; i++) {
         * t = f2(b, c, d) + K4 + rol32(a, 5) + e + M(i);
         * e = d; d = c; c = rol32(b, 30); b = a; a = t;
         * }
         */
        R(a, b, c, d, e, f4, K4, M(60));
        R(e, a, b, c, d, f4, K4, M(61));
        R(d, e, a, b, c, f4, K4, M(62));
        R(c, d, e, a, b, f4, K4, M(63));
        R(b, c, d, e, a, f4, K4, M(64));
        R(a, b, c, d, e, f4, K4, M(65));
        R(e, a, b, c, d, f4, K4, M(66));
        R(d, e, a, b, c, f4, K4, M(67));
        R(c, d, e, a, b, f4, K4, M(68));
        R(b, c, d, e, a, f4, K4, M(69));
        R(a, b, c, d, e, f4, K4, M(70));
        R(e, a, b, c, d, f4, K4, M(71));
        R(d, e, a, b, c, f4, K4, M(72));
        R(c, d, e, a, b, f4, K4, M(73));
        R(b, c, d, e, a, f4, K4, M(74));
        R(a, b, c, d, e, f4, K4, M(75));
        R(e, a, b, c, d, f4, K4, M(76));
        R(d, e, a, b, c, f4, K4, M(77));
        R(c, d, e, a, b, f4, K4, M(78));
        R(b, c, d, e, a, f4, K4, M(79));

        /* fold the block into the chaining state */
        h[0] += a;
        h[1] += b;
        h[2] += c;
        h[3] += d;
        h[4] += e;
}
/**
 * sha1_update - Update the SHA1 context values with length bytes of data
 *
 * Data is accumulated into 64-byte blocks: any previously buffered partial
 * block is completed and compressed first, then whole blocks from `data`
 * are compressed directly, and any remainder is stashed in ctx->buf for
 * the next call (or for sha1_finalize).
 */
void sha1_update(struct sha1_ctx *ctx, unsigned char *data, int len)
{
        unsigned int index, to_fill;

        /* bytes already pending in the partial-block buffer */
        index = (unsigned int) (ctx->sz & 0x3f);
        to_fill = 64 - index;

        ctx->sz += len;

        /* process partial buffer if there's enough data to make a block */
        if (index && len >= to_fill) {
                memcpy(ctx->buf + index, data, to_fill);
                sha1_do_chunk(ctx->buf, ctx->h);
                len -= to_fill;
                data += to_fill;
                index = 0;
        }

        /* process as much 64-block as possible */
        for (; len >= 64; len -= 64, data += 64)
                sha1_do_chunk(data, ctx->h);

        /* append data into buf */
        if (len)
                memcpy(ctx->buf + index, data, len);
}
/**
 * sha1_finalize - Finalize the context and create the SHA1 digest
 *
 * Appends the standard SHA-1 padding (a 0x80 byte, zeros up to 56 mod 64,
 * then the 64-bit message length in bits, big-endian) and writes the five
 * chaining words to *out in big-endian order. The context is consumed by
 * the padding updates; re-run sha1_init before reusing it.
 */
void sha1_finalize(struct sha1_ctx *ctx, sha1_digest *out)
{
        static unsigned char padding[64] = { 0x80, };
        unsigned int bits[2];
        unsigned int index, padlen;

        /* add padding and update data with it */
        /* length in bits = sz * 8; high word is sz >> (32 - 3). */
        bits[0] = cpu_to_be32((unsigned int) (ctx->sz >> 29));
        bits[1] = cpu_to_be32((unsigned int) (ctx->sz << 3));

        /* pad out to 56 */
        index = (unsigned int) (ctx->sz & 0x3f);
        padlen = (index < 56) ? (56 - index) : ((64 + 56) - index);
        sha1_update(ctx, padding, padlen);

        /* append length */
        sha1_update(ctx, (unsigned char *) bits, sizeof(bits));

        /* output hash */
        out->digest[0] = cpu_to_be32(ctx->h[0]);
        out->digest[1] = cpu_to_be32(ctx->h[1]);
        out->digest[2] = cpu_to_be32(ctx->h[2]);
        out->digest[3] = cpu_to_be32(ctx->h[3]);
        out->digest[4] = cpu_to_be32(ctx->h[4]);
}
/**
 * sha1_to_bin - Transform the SHA1 digest into a binary data
 *
 * Writes the 20 digest bytes to `out`. Uses memcpy instead of casting
 * `out` to uint32_t*: the old word-at-a-time stores were undefined
 * behavior for unaligned output buffers and violated strict aliasing.
 * The bytes written are identical.
 */
void sha1_to_bin(sha1_digest *digest, char *out)
{
        memcpy(out, digest->digest, sizeof(digest->digest));
}
/**
 * sha1_to_hex - Transform the SHA1 digest into a readable data
 *
 * Formats the digest as 40 lowercase hex characters plus a terminating
 * NUL, so `out` must have room for at least 41 bytes.
 */
void sha1_to_hex(sha1_digest *digest, char *out)
{
        int i;

        /* Each big-endian word renders as 8 hex digits. */
        for (i = 0; i < 5; i++)
                snprintf(out + 8 * i, 9, "%08x", cpu_to_be32(digest->digest[i]));
}
C/C++ | hhvm/hphp/hack/src/utils/hashlib/sha1c.h | /*
* Copyright (C) 2006-2009 Vincent Hanquez <[email protected]>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* SHA1 implementation as describe in wikipedia.
*/
#ifndef SHA1_C_H
#define SHA1_C_H
struct sha1_ctx
{
unsigned int h[5];
unsigned char buf[64];
unsigned long long sz;
};
typedef struct { unsigned int digest[5]; } sha1_digest;
void sha1_init(struct sha1_ctx *ctx);
void sha1_copy(struct sha1_ctx *dst, struct sha1_ctx *src);
void sha1_update(struct sha1_ctx *ctx, unsigned char *data, int len);
void sha1_finalize(struct sha1_ctx *ctx, sha1_digest *out);
void sha1_to_bin(sha1_digest *digest, char *out);
void sha1_to_hex(sha1_digest *digest, char *out);
#endif // SHA1_C_H |
C | hhvm/hphp/hack/src/utils/hashlib/sha1_ocaml.c | /**
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*/
#define CAML_NAME_SPACE
#include <caml/alloc.h>
#include <caml/memory.h>
#include <caml/mlvalues.h>
#include "sha1c.h"
/* OCaml stub registered as external "sha1sum" (see sha1.ml): computes the
 * SHA-1 of an OCaml string and returns it as a 40-character hex string. */
value sha1sum(value data) {
  CAMLparam1(data);
  /* caml_string_length (not strlen) must be used: OCaml strings may
   * contain embedded NUL bytes. */
  const char *msg = String_val(data);
  size_t msglen = caml_string_length(data);

  struct sha1_ctx ctx;
  sha1_init(&ctx);
  sha1_update(&ctx, (unsigned char *)msg, msglen);

  sha1_digest digest;
  sha1_finalize(&ctx, &digest);

  char hex[41]; /* 40 hex digits + NUL (sha1_to_hex writes 41 bytes) */
  sha1_to_hex(&digest, hex);

  CAMLlocal1(result);
  result = caml_copy_string(hex);
  CAMLreturn(result);
}
Rust | hhvm/hphp/hack/src/utils/hdf/build.rs | use std::path::Path;
use std::path::PathBuf;
/// Build script: compiles the C `neo` HDF parser and the C++ cxx-bridge
/// glue for `hdf.rs`, and registers rerun-if-changed dependencies.
fn main() {
    // All paths are resolved relative to the repository root,
    // five directories up from this crate.
    const ROOT_PATH: &str = "../../../../..";

    let root_path = Path::new(ROOT_PATH);
    let hphp_path = root_path.join("hphp");

    // Compile every C file under hphp/neo into the `neo_hdf` static library.
    let neo_path = hphp_path.join("neo");
    let neo_files: Vec<PathBuf> = glob::glob(neo_path.join("*.c").to_str().unwrap())
        .unwrap()
        .collect::<Result<_, _>>()
        .unwrap();

    cc::Build::new()
        .files(&neo_files)
        .cpp(false)
        .include(&root_path)
        .warnings(false)
        // neo uses printf-style formats the compiler cannot check.
        .flag("-Wno-format")
        .flag_if_supported("-Wno-format-security")
        .compile("neo_hdf")
fn rerun_if_changed<P: AsRef<Path>>(f: P) {
println!("cargo:rerun-if-changed={}", f.as_ref().to_str().unwrap());
}
fn is_cpp<P: AsRef<Path>>(path: &P) -> bool {
path.as_ref().extension().map_or(false, |e| e == "cpp")
} |
TOML | hhvm/hphp/hack/src/utils/hdf/Cargo.toml | # @generated by autocargo
[package]
name = "hdf"
version = "0.0.0"
edition = "2021"
[lib]
path = "lib.rs"
crate-type = ["lib", "staticlib"]
[dependencies]
cxx = "1.0.100"
thiserror = "1.0.43"
[build-dependencies]
cc = ">=1.0.79"
cxx-build = "1.0.100"
glob = ">=0.3.0" |
C++ | hhvm/hphp/hack/src/utils/hdf/hdf-wrap.cpp | // Copyright (c) Meta Platforms, Inc. and affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#include "hphp/hack/src/utils/hdf/hdf-wrap.h"
namespace HPHP {
std::unique_ptr<Hdf> hdf_new() {
return std::make_unique<Hdf>();
}
std::unique_ptr<Hdf> hdf_new_child(const Hdf& hdf, const std::string& name) {
return std::make_unique<Hdf>(&hdf, name.c_str());
}
std::unique_ptr<Hdf> hdf_first_child(const Hdf& hdf) {
return std::make_unique<Hdf>(hdf.firstChild());
}
std::unique_ptr<Hdf> hdf_next(const Hdf& hdf) {
return std::make_unique<Hdf>(hdf.next());
}
rust::String hdf_name(const Hdf& hdf) {
return rust::String{hdf.getName()};
}
} |
C/C++ | hhvm/hphp/hack/src/utils/hdf/hdf-wrap.h | // Copyright (c) Meta Platforms, Inc. and affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#pragma once
#include "hphp/util/hdf.h"
#include "rust/cxx.h"
#include <memory>
namespace HPHP {
std::unique_ptr<Hdf> hdf_new();
std::unique_ptr<Hdf> hdf_new_child(const Hdf& hdf, const std::string& name);
std::unique_ptr<Hdf> hdf_first_child(const Hdf& hdf);
std::unique_ptr<Hdf> hdf_next(const Hdf& hdf);
rust::String hdf_name(const Hdf& hdf);
} |
Rust | hhvm/hphp/hack/src/utils/hdf/hdf.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#[cxx::bridge(namespace = "HPHP")]
pub(crate) mod ffi {
unsafe extern "C++" {
include!("hphp/hack/src/utils/hdf/hdf-wrap.h");
type Hdf;
fn hdf_new() -> UniquePtr<Hdf>;
fn hdf_new_child(parent: &Hdf, name: &CxxString) -> UniquePtr<Hdf>;
fn hdf_first_child(parent: &Hdf) -> Result<UniquePtr<Hdf>>;
fn hdf_next(hdf: &Hdf) -> Result<UniquePtr<Hdf>>;
fn hdf_name(hdf: &Hdf) -> Result<String>;
fn append(self: Pin<&mut Hdf>, filename: &CxxString) -> Result<()>;
fn fromString(self: Pin<&mut Hdf>, input: &CxxString) -> Result<()>;
fn remove(self: &Hdf, name: &CxxString) -> Result<()>;
fn configGetBool(self: &Hdf, or_default: bool) -> Result<bool>;
fn configGetUInt32(self: &Hdf, or_default: u32) -> Result<u32>;
unsafe fn configGet(self: &Hdf, or_default: *const c_char) -> Result<*const c_char>;
fn exists(self: &Hdf) -> Result<bool>;
fn toString(self: &Hdf) -> Result<*const c_char>;
// Only used in tests
#[allow(dead_code)]
fn isEmpty(self: &Hdf) -> bool;
}
}
#[cfg(test)]
mod test {
use std::ffi::CStr;
use cxx::let_cxx_string;
use cxx::UniquePtr;
use super::*;
fn abc() -> UniquePtr<ffi::Hdf> {
let mut hdf = ffi::hdf_new();
assert!(hdf.isEmpty());
let_cxx_string!(opt = "a.b.c=true");
hdf.pin_mut().fromString(&opt).unwrap();
hdf
}
#[test]
fn test1() {
let hdf = abc();
assert_eq!(ffi::hdf_name(&hdf).unwrap(), "");
let_cxx_string!(abc = "a.b.c");
let abc = ffi::hdf_new_child(&hdf, &abc);
assert_eq!(ffi::hdf_name(&abc).unwrap(), "c");
assert!(abc.configGetBool(false).unwrap());
assert!(abc.configGetBool(false).unwrap());
let_cxx_string!(a = "a");
let a = ffi::hdf_new_child(&hdf, &a);
assert!(!a.isEmpty());
let_cxx_string!(b = "b");
let b = ffi::hdf_new_child(&a, &b);
assert!(!b.isEmpty());
let_cxx_string!(c = "c");
let c = ffi::hdf_new_child(&b, &c);
assert_eq!(ffi::hdf_name(&c).unwrap(), "c");
assert!(!c.isEmpty());
assert!(!b.configGetBool(false).unwrap());
assert!(b.configGetBool(true).unwrap());
assert!(c.configGetBool(false).unwrap());
}
#[test]
fn test2() {
let hdf = abc();
let cstr = hdf.toString().unwrap();
assert_ne!(cstr, std::ptr::null());
let cstr = unsafe { CStr::from_ptr(cstr) };
assert_eq!(cstr.to_str().unwrap(), "a {\n b {\n c = true\n }\n}\n");
}
} |
Rust | hhvm/hphp/hack/src/utils/hdf/lib.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod hdf;
mod value;
pub use value::*; |
Rust | hhvm/hphp/hack/src/utils/hdf/value.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::borrow::Cow;
use std::ffi::CStr;
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use cxx::let_cxx_string;
use cxx::UniquePtr;
use thiserror::Error;
use crate::hdf::ffi;
pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(Debug, Error)]
pub enum Error {
#[error("Missing ']'")]
MissingBracket,
#[error(transparent)]
Exn(#[from] cxx::Exception),
#[error(transparent)]
Io(#[from] std::io::Error),
#[error(transparent)]
Utf8(#[from] std::str::Utf8Error),
}
/// Value represents a node in our Hdf tree. This is a wrapper around
/// the C++ HPHP::Hdf class defined in hphp/util/hdf.h. A Value represents
/// a node in an HDF configuration tree. The underlying state for nodes
/// is represented by a single-threaded refcounted HdfRaw node, that
/// suports inner mutability.
///
/// This Rust API uses `&mut self` for operations that directly mutate a node,
/// but because of the refcounted nature of node ownership, &mut self does not
/// provide a guarantee that child (or parent) nodes are exclusively owned.
pub struct Value {
inner: UniquePtr<ffi::Hdf>,
}
impl Default for Value {
fn default() -> Self {
Self {
inner: ffi::hdf_new(),
}
}
}
/// Impl Debug using the underlying C++ Hdf pretty printer.
impl std::fmt::Debug for Value {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.inner.toString().map_err(|_| std::fmt::Error)? {
cstr if cstr.is_null() => f.write_str("{null}"),
cstr => f.write_str(&unsafe { CStr::from_ptr(cstr) }.to_string_lossy()),
}
}
}
impl Value {
/// Construct an HDF value from the given hdf-format file.
pub fn from_file(filename: &Path) -> Result<Value> {
let mut v = Value::default();
let_cxx_string!(cxx_filename = filename.as_os_str().as_bytes());
v.inner.pin_mut().append(&cxx_filename)?;
Ok(v)
}
/// Construct an HDF value from the given INI file.
pub fn from_ini_file(filename: &Path) -> Result<Value> {
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
let input = BufReader::new(File::open(filename)?);
let mut kv = Value::default();
for line in input.lines() {
kv.set_ini(&line?)?;
}
Ok(kv)
}
/// Construct an HDF value from the given INI string.
pub fn from_ini_string(input: &str) -> Result<Value> {
let mut kv = Value::default();
for line in input.lines() {
kv.set_ini(line)?;
}
Ok(kv)
}
/// Set one HDF value from the given INI string.
pub fn set_ini(&mut self, line: &str) -> Result<()> {
if let Err(e) = self.try_set_ini(line) {
eprintln!("Warning: unable to convert INI to Hdf: {}: {}", line, e);
}
Ok(())
}
fn try_set_ini(&mut self, line: &str) -> Result<()> {
// This should match HPHP::Config::ParseIniString
match IniLine::parse(line)? {
IniLine::Empty => {}
IniLine::Key(key) => {
self.set_hdf(&format!("{} =", key))?;
}
IniLine::KeyValue(key, value) => {
self.set_hdf(&format!("{} = {}", key, value))?;
}
IniLine::Section(_) => {
// section markers are ignored
}
}
Ok(())
}
/// Set this node's value and/or subvalues from the HDF-format `input` str.
pub fn set_hdf(&mut self, input: &str) -> Result<()> {
let_cxx_string!(cxx_input = input);
Ok(self.inner.pin_mut().fromString(&cxx_input)?)
}
/// Return a Value representing the referenced node, if it exists,
/// or an Error in case of an internal Hdf format error.
pub fn get(&self, name: &str) -> Result<Option<Value>> {
let_cxx_string!(name = name);
let inner = ffi::hdf_new_child(&self.inner, &name);
match inner.exists()? {
true => Ok(Some(Self { inner })),
false => Ok(None),
}
}
/// Gets the value and converts the value to a boolean according to HDF rules.
/// If the value doesn't exist, return Ok<None>.
pub fn get_bool(&self, name: &str) -> Result<Option<bool>> {
match self.get(name)? {
Some(v) => Ok(Some(v.inner.configGetBool(false)?)),
None => Ok(None),
}
}
/// Gets the value and converts the value to a boolean according to HDF rules.
/// If the value doesn't exist, return the default value.
pub fn get_bool_or(&self, name: &str, default: bool) -> Result<bool> {
match self.get(name)? {
Some(v) => Ok(v.inner.configGetBool(default)?),
None => Ok(default),
}
}
/// Gets the value and converts the value to a uint32 according to HDF rules.
/// If the value doesn't exist, return the default value.
pub fn get_uint32(&self, name: &str) -> Result<Option<u32>> {
match self.get(name)? {
Some(v) => Ok(Some(v.inner.configGetUInt32(0)?)),
None => Ok(None),
}
}
/// Return the utf8 string value of this node, if it exists.
/// Returns an error for Hdf internal errors or failed utf8 validation.
pub fn as_str(&self) -> Result<Option<String>> {
match unsafe { self.inner.configGet(std::ptr::null()) }? {
cstr if cstr.is_null() => Ok(None),
cstr => Ok(Some(unsafe { CStr::from_ptr(cstr) }.to_str()?.into())),
}
}
/// Lookup the node with the given name.
/// If it exists, return it's string value, otherwise return None.
/// Fails on internal Hdf parsing errors or Utf8 validation checks.
pub fn get_str(&self, name: &str) -> Result<Option<String>> {
match self.get(name)? {
Some(v) => v.as_str(),
None => Ok(None),
}
}
/// Return this node's name.
/// Fails on internal Hdf parsing errors or Utf8 validation checks.
pub fn name(&self) -> Result<String> {
Ok(ffi::hdf_name(&self.inner)?)
}
/// Convert self to an iterator over children, if possible.
/// Fails on internal Hdf parsing errors.
pub fn into_children(self) -> Result<Children> {
Ok(Children {
next: ffi::hdf_first_child(&self.inner)?,
})
}
/// Return the string values of child nodes.
/// Fails on internal Hdf parsing errors or Utf8 validation checks.
pub fn values(self) -> Result<Vec<String>> {
self.into_children()?
.map(|v| Ok(v?.as_str()?.unwrap_or_default()))
.collect::<Result<_>>()
}
/// Return whether a node with the given name exists.
/// Fails on internal Hdf parsing errors.
pub fn contains_key(&self, name: &str) -> Result<bool> {
match self.get(name)? {
Some(v) => Ok(v.inner.exists()?),
None => Ok(false),
}
}
/// Delete the node with the given name, if it exists.
/// Does nothing if the node does not exist.
/// Fails on internal Hdf errors.
pub fn remove(&mut self, name: &str) -> Result<()> {
let_cxx_string!(name = name);
Ok(self.inner.remove(&name)?)
}
}
pub struct Children {
next: UniquePtr<ffi::Hdf>,
}
impl Iterator for Children {
type Item = Result<Value>;
fn next(&mut self) -> Option<Self::Item> {
match self.next.exists() {
Ok(false) => None,
Ok(true) => match ffi::hdf_next(&self.next) {
Ok(next) => Some(Ok(Value {
inner: std::mem::replace(&mut self.next, next),
})),
Err(e) => Some(Err(e.into())),
},
Err(e) => Some(Err(e.into())),
}
}
}
#[derive(Eq, PartialEq, Debug)]
enum IniLine<'a> {
Empty,
Key(&'a str),
KeyValue(&'a str, Cow<'a, str>),
Section(&'a str),
}
impl IniLine<'_> {
fn parse(input: &str) -> Result<IniLine<'_>> {
// This minimal parser is good enough to handle what we currently need.
let input = input.trim();
if input.starts_with('[') {
// [section]
let input = input.strip_prefix('[').unwrap();
// Check for comment.
let input = input.split_once(';').map_or(input, |(k, _)| k).trim();
if let Some(section) = input.strip_suffix(']') {
Ok(IniLine::Section(section))
} else {
Err(Error::MissingBracket)
}
} else if let Some((key, value)) = input.split_once('=') {
// key=value
let key = key.trim_end();
let value = Self::parse_value(value)?;
Ok(IniLine::KeyValue(key, value))
} else {
// No '=' so no value. Check for a comment too.
let key = input.split_once(';').map_or(input, |(k, _)| k).trim();
if key.is_empty() {
Ok(IniLine::Empty)
} else {
Ok(IniLine::Key(key))
}
}
}
fn parse_value(value: &str) -> Result<Cow<'_, str>> {
let value = value.trim_start();
// Check for double-quoted string.
if !value.contains('"') {
// Not double-quoted. Check for comment.
let value = value.split_once(';').map_or(value, |(k, _)| k).trim();
return Ok(value.into());
}
let mut out = String::new();
let mut in_quote = false;
let mut prev_escape = false;
// We use trailing_junk to figure out extra whitespace that appeared at
// the end of a line outside of a double-quoted string.
let mut trailing_junk = 0;
for c in value.chars() {
match c {
';' if !in_quote => {
// This starts a comment - ignore the rest of the
// line.
break;
}
'\\' if in_quote => {
if prev_escape {
out.push('\\');
}
}
'"' => {
if prev_escape {
// This is an escaped quote.
out.push('"');
} else {
in_quote = !in_quote;
trailing_junk = 0;
}
}
_ => {
out.push(c);
if !in_quote && c.is_whitespace() {
trailing_junk += 1;
} else {
trailing_junk = 0;
}
}
}
prev_escape = !prev_escape && (c == '\\');
}
out.truncate(out.len() - trailing_junk);
Ok(out.into())
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_parse_value() {
fn chk(a: &str, b: &str) {
assert_eq!(
IniLine::parse_value(a).map(|s| s.into_owned()).ok(),
Some(b.into())
);
}
chk("xyzzy", "xyzzy");
chk("xyzzy ; this is a comment", "xyzzy");
chk(r#" "xyzzy" ; this is a comment"#, "xyzzy");
chk(r#""xyz\"zy""#, "xyz\"zy");
chk(r#""xyzzy " ; this is a comment"#, "xyzzy ");
chk(r#""xyz;zy""#, "xyz;zy");
}
#[test]
fn test_value() {
let mut hdf = Value::default();
hdf.set_hdf("a.b.c=d").unwrap();
assert_eq!(format!("{:?}", hdf), "a {\n b {\n c = d\n }\n}\n");
assert_eq!(hdf.get_str("a").unwrap(), None);
assert_eq!(hdf.get_str("a.b").unwrap(), None);
assert_eq!(hdf.get_str("a.b.c").unwrap(), Some("d".into()));
assert_eq!(hdf.get_str("e").unwrap(), None);
assert!(hdf.get("x").unwrap().is_none());
assert!(hdf.get("x.x").unwrap().is_none());
hdf.set_hdf("q=h").unwrap();
hdf.set_hdf("a.q=g").unwrap();
hdf.set_hdf("a.c.q=f").unwrap();
assert_eq!(
format!("{:?}", hdf),
"a {\n b {\n c = d\n }\n q = g\n c {\n q = f\n }\n}\nq = h\n"
);
}
#[test]
fn test_ini() {
let a = Value::from_ini_string("a.b.c=d").expect("expected to parse");
assert_eq!(format!("{:?}", a), "a {\n b {\n c = d\n }\n}\n");
let a = Value::from_ini_string("hhvm.php7.all\nhhvm.php7.all=false")
.expect("expected to parse");
assert_eq!(
format!("{:?}", a),
"hhvm {\n php7 {\n all = false\n }\n}\n"
);
}
#[test]
fn test_ini_comment() {
let a = Value::from_ini_string(
"
; this is a comment
a.b.c=d ; this is also a comment",
)
.expect("expected to parse");
assert_eq!(format!("{:?}", a), "a {\n b {\n c = d\n }\n}\n");
}
#[test]
fn test_ini_section() {
let a = Value::from_ini_string(
"
[php] ; section markers are ignored
a.b.c=d",
)
.expect("expected to parse");
assert_eq!(format!("{:?}", a), "a {\n b {\n c = d\n }\n}\n");
}
#[test]
fn test_ini_quotes() {
let a = Value::from_ini_string(
r#"
a.b.c="d e"
"#,
)
.expect("expected to parse");
assert_eq!(format!("{:?}", a), "a {\n b {\n c = d e\n }\n}\n");
assert_eq!(a.get_str("a.b.c").unwrap().as_deref(), Some("d e"));
let a = Value::from_ini_string(
r#"
a.b.c="d;e"
"#,
)
.expect("expected to parse");
assert_eq!(format!("{:?}", a), "a {\n b {\n c = d;e\n }\n}\n");
}
#[test]
fn test_ini_line() {
assert_eq!(
IniLine::parse("a=b").ok(),
Some(IniLine::KeyValue("a", "b".into()))
);
}
} |
hhvm/hphp/hack/src/utils/hg/dune | (library
(name hg)
(wrapped false)
(modules hg hg_sig)
(libraries exec_command injector_config process process_types future)
(preprocess
(pps lwt_ppx ppx_deriving.std)))
(executable
(name hg_runner)
(modules hg_runner)
(link_flags
(:standard
(:include ../../dune_config/ld-opts.sexp)))
(modes exe byte_complete)
(libraries hg default_injector_config)
(preprocess
(pps lwt_ppx ppx_deriving.std))) |
|
OCaml | hhvm/hphp/hack/src/utils/hg/hg.ml | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Tools for shelling out to Mercurial. *)
module Hg_actual = struct
include Hg_sig.Types
let rev_string rev =
match rev with
| Hg_rev hash -> hash
| Global_rev rev -> Printf.sprintf "r%d" rev
let exec_hg args =
let env =
(* Disable user aliases or configs. *)
Process_types.Augment ["HGPLAIN=1"]
in
Process.exec Exec_command.Hg ~env args
(** Given a list of files and their revisions, saves the files to the output
* directory. For example,
* get_old_version_of_files ~rev:"X" ~files:["file1.php"]
* ~out:"/tmp/hh_server/%s" ~repo:"~/www"
* runs the command
*
* hg cat -r X file1.php -o "/tmp/hh_server/%s" --cwd ~/www
*
* which saves the version of file1.php at revision X in directory
* /tmp/hh_server/file1.php
*)
let get_old_version_of_files ~rev ~files ~out ~repo =
let process =
exec_hg
(["cat"; "-r"; rev_string rev] @ files @ ["-o"; out; "--cwd"; repo])
in
FutureProcess.make process ignore
(** Returns the closest global ancestor in master to the given rev.
*
* hg log -r 'ancestor(master,rev)' -T '{globalrev}\n'
*)
let get_closest_global_ancestor rev repo : global_rev Future.t =
let global_rev_query rev =
exec_hg ["log"; "-r"; rev; "-T"; "{globalrev}\n"; "--cwd"; repo]
in
let global_rev_process rev =
FutureProcess.make (global_rev_query rev) (fun s ->
int_of_string (String.trim s))
in
(* If we are on public commit, it should have global_rev field and we are done *)
let (q1 : global_rev Future.t) = global_rev_process rev in
(* Otherwise, we want the closest public commit. It returns empty set when
* we are on a public commit, hence the need to still do q1 too *)
let (q2 : global_rev Future.t) =
global_rev_process (Printf.sprintf "parents(roots(draft() & ::%s))" rev)
in
(* q2 can also fail in case of merge conflicts, in which case let's fall back to
* what we always used to do, closest mergebase with master bookmark *)
let (q3 : global_rev Future.t) =
global_rev_process (Printf.sprintf "ancestor(master,%s)" rev)
in
let take_first
(r1 : (global_rev, Future.error) result)
(r2 : (global_rev, Future.error) result) :
(global_rev, Future.error) result =
match r1 with
| Ok _ -> r1
| _ -> r2
in
let (r1 : global_rev Future.t) = Future.merge q2 q3 take_first in
let (r2 : global_rev Future.t) = Future.merge q1 r1 take_first in
r2
(** Hash of the closest common ancestor of master and the working copy.
 * Runs: hg log --rev 'ancestor(master,.)' -T '{node}' --cwd <repo> *)
let current_mergebase_hg_rev repo =
  let args =
    ["log"; "--rev"; "ancestor(master,.)"; "-T"; "{node}"; "--cwd"; repo]
  in
  FutureProcess.make (exec_hg args) @@ fun output ->
  let hash = String.trim output in
  (* An empty answer means hg gave us nothing usable. *)
  if String.length hash = 0 then
    raise Malformed_result
  else
    hash
(* Get the hg revision hash of the current working copy in the repo dir,
 * plus a flag saying whether the working copy has uncommitted changes
 * (hg marks that with a trailing '+' on the hash).
 *
 * hg id -i --cwd <repo> *)
let current_working_copy_hg_rev repo =
  let process = exec_hg ["id"; "-i"; "--cwd"; repo] in
  FutureProcess.make process @@ fun result ->
  let result = String.trim result in
  let len = String.length result in
  if len < 1 then
    raise Malformed_result
  else if Char.equal result.[len - 1] '+' then
    (* Strip the '+' marker and report the working copy as dirty. *)
    (String.sub result 0 (len - 1), true)
  else
    (result, false)
(** Return the timestamp of a specific hg revision in seconds since Unix epoch.
 * Manually removing timezone offset.
 * hg log -r rev -T "{date|hgdate} --cwd repo"
 *)
let get_hg_revision_time rev repo =
  let process =
    exec_hg
      ["log"; "-r"; rev_string rev; "-T"; "{date|hgdate}"; "--cwd"; repo]
  in
  FutureProcess.make process @@ fun date_string ->
  (* "{date|hgdate}" prints "<epoch-secs> <tz-offset>"; keep only the first
   * field. [String.split_on_char] never returns an empty list so [List.hd]
   * is safe, but [int_of_string] may still raise on malformed output. *)
  let date_list = String.split_on_char ' ' (String.trim date_string) in
  date_list |> List.hd |> int_of_string
(* hg log -r 'p2()' -T '{node}' *)
(* Returns the hash of the working copy's second parent, i.e. [Some hash]
 * only while a merge is in progress. NOTE: this blocks on [Future.get],
 * and any hg error is silently mapped to [None]. *)
let get_p2_node repo =
  let process =
    exec_hg ["log"; "-r"; "p2()"; "-T"; "{node}"; "--cwd"; repo]
  in
  let future = FutureProcess.make process String.trim in
  match Future.get future with
  | Ok "" -> None
  | Ok s -> Some s
  | Error _ -> None
(**
 * Returns the global base revision. If the current node is a normal
 * commit, this is simply the closest_global_ancestor.
 *
 * If the current node is a merge commit (for example during a merge-conflict
 * state), then it computes the two merge bases with master (one for each
 * parent) and uses the greater of the two.
 * *)
let current_working_copy_base_rev repo =
  let primary_mergebase = get_closest_global_ancestor "." repo in
  (* Ok, since (get_closest_global_ancestor p2) depends on getting p2, we
   * actually block on getting p2 first. *)
  match get_p2_node repo with
  | None -> primary_mergebase
  | Some p2 ->
    let p2_mergebase = get_closest_global_ancestor p2 repo in
    (* Propagate the first error encountered; otherwise pick the larger
     * (i.e. more recent) of the two global revisions. *)
    let max_global_rev primary p2 =
      match (primary, p2) with
      | (Error x, _) -> Error x
      | (_, Error y) -> Error y
      | (Ok x, Ok y) -> Ok (max x y)
    in
    Future.merge primary_mergebase p2_mergebase max_global_rev
(** Lists (names only) the files changed between [rev] and the working copy.
 * Runs: hg status -n --rev <rev> --cwd <repo> *)
let files_changed_since_rev rev repo =
  let args = ["status"; "-n"; "--rev"; rev_string rev; "--cwd"; repo] in
  FutureProcess.make (exec_hg args) Sys_utils.split_lines
(** Lists (names only) the files changed by [rev] itself.
 * Runs: hg status -n --change <rev> --cwd <repo> *)
let files_changed_in_rev rev repo =
  let args = ["status"; "-n"; "--change"; rev_string rev; "--cwd"; repo] in
  FutureProcess.make (exec_hg args) Sys_utils.split_lines
(** Similar to above, except instead of listing files to get us to
 * the repo's current state, it gets us to the given "finish" revision.
 *
 * i.e. If we start at "start" revision, what files need be changed to get us
 * to "finish" revision.
 *
 * hg status -n --rev start --rev end --cwd repo
 *)
let files_changed_since_rev_to_rev ~start ~finish repo =
  if String.equal (rev_string start) (rev_string finish) then
    (* Optimization: start and finish are syntactically the same.
     * They may still be the same revision but just written out
     * differently - this will be caught below.
     * *)
    Future.of_value []
  else
    let process =
      exec_hg
        [
          "status";
          "-n";
          "--rev";
          rev_string start;
          "--rev";
          rev_string finish;
          "--cwd";
          repo;
        ]
    in
    FutureProcess.make process Sys_utils.split_lines
(** Moves the working copy to [rev].
 * Runs: hg update --rev <rev> --cwd <repo> *)
let update_to_rev rev repo =
  let args = ["update"; "--rev"; rev_string rev; "--cwd"; repo] in
  FutureProcess.make (exec_hg args) ignore
(* Mock-value setters for the real implementation: mocking is disabled here,
 * so every setter raises. The mockable counterpart is [Hg_mock.Mocking];
 * which implementation is linked is decided by [Injector_config]. *)
module Mocking = struct
  exception Cannot_set_when_mocks_disabled

  let current_working_copy_hg_rev_returns _ =
    raise Cannot_set_when_mocks_disabled

  let current_working_copy_base_rev_returns _ =
    raise Cannot_set_when_mocks_disabled

  let reset_current_working_copy_base_rev_returns _ =
    raise Cannot_set_when_mocks_disabled

  let closest_global_ancestor_bind_value _ _ =
    raise Cannot_set_when_mocks_disabled

  let files_changed_since_rev_returns ~rev:_ _ =
    raise Cannot_set_when_mocks_disabled

  let files_changed_in_rev_returns ~rev:_ _ =
    raise Cannot_set_when_mocks_disabled

  let get_hg_revision_time _ _ = raise Cannot_set_when_mocks_disabled

  let files_changed_since_rev_to_rev_returns ~start:_ ~finish:_ _ =
    raise Cannot_set_when_mocks_disabled

  let reset_files_changed_since_rev_to_rev_returns _ =
    raise Cannot_set_when_mocks_disabled

  let reset_files_changed_since_rev_returns _ =
    raise Cannot_set_when_mocks_disabled

  let reset_files_changed_in_rev_returns _ =
    raise Cannot_set_when_mocks_disabled
end
end
(* Test double for [Hg_actual]: instead of shelling out to hg, every query
 * answers from mutable state installed via the [Mocking] setters below.
 * Per-revision answers live in hash tables keyed by the queried rev. *)
module Hg_mock = struct
  include Hg_sig.Types

  module Mocking = struct
    let current_working_copy_hg_rev = ref @@ Future.of_value ("", false)

    let current_working_copy_base_rev = ref @@ Future.of_value 0

    let current_mergebase_hg_rev = ref @@ Future.of_value ""

    (* Fixed timestamp; the mock ignores both arguments. *)
    let get_hg_revision_time _ _ = Future.of_value 123

    let closest_global_ancestor = Hashtbl.create 10

    let files_changed_since_rev = Hashtbl.create 10

    let files_changed_in_rev = Hashtbl.create 10

    let files_changed_since_rev_to_rev = Hashtbl.create 10

    let current_working_copy_hg_rev_returns v = current_working_copy_hg_rev := v

    let current_working_copy_base_rev_returns v =
      current_working_copy_base_rev := v

    let reset_current_working_copy_base_rev_returns () =
      current_working_copy_base_rev := Future.of_value 0

    let closest_global_ancestor_bind_value hg_rev global_rev =
      Hashtbl.replace closest_global_ancestor hg_rev global_rev

    let files_changed_since_rev_returns ~rev v =
      Hashtbl.replace files_changed_since_rev rev v

    let files_changed_in_rev_returns ~rev v =
      Hashtbl.replace files_changed_in_rev rev v

    let reset_files_changed_since_rev_returns () =
      Hashtbl.reset files_changed_since_rev

    let reset_files_changed_in_rev_returns () =
      Hashtbl.reset files_changed_in_rev

    let files_changed_since_rev_to_rev_returns ~start ~finish v =
      Hashtbl.replace files_changed_since_rev_to_rev (start, finish) v

    let reset_files_changed_since_rev_to_rev_returns () =
      Hashtbl.reset files_changed_since_rev_to_rev
  end

  (* Queries below raise Not_found when no mock value was installed for the
   * requested key (Hashtbl.find semantics). *)
  let current_mergebase_hg_rev _ = !Mocking.current_mergebase_hg_rev

  let current_working_copy_hg_rev _ = !Mocking.current_working_copy_hg_rev

  let current_working_copy_base_rev _ = !Mocking.current_working_copy_base_rev

  let get_hg_revision_time rev repo = Mocking.get_hg_revision_time rev repo

  let get_closest_global_ancestor hg_rev _ =
    Hashtbl.find Mocking.closest_global_ancestor hg_rev

  let files_changed_since_rev rev _ =
    Hashtbl.find Mocking.files_changed_since_rev rev

  let files_changed_in_rev rev _ = Hashtbl.find Mocking.files_changed_in_rev rev

  let files_changed_since_rev_to_rev ~start ~finish _ =
    Hashtbl.find Mocking.files_changed_since_rev_to_rev (start, finish)

  let update_to_rev _ _ = Future.of_value ()

  let get_old_version_of_files ~rev:_ ~files:_ ~out:_ ~repo:_ =
    Future.of_value ()
end
(* Select the mock or the real hg implementation at module-initialization
 * time, based on the test-stubbing injector flag. *)
include
  (val if Injector_config.use_test_stubbing then
         (module Hg_mock : Hg_sig.S)
       else
         (module Hg_actual : Hg_sig.S))
OCaml Interface | hhvm/hphp/hack/src/utils/hg/hg.mli | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** The public hg API; see [Hg_sig.S] for documentation of each operation.
    The implementation (real or mock) is selected in hg.ml. *)
include Hg_sig.S
OCaml | hhvm/hphp/hack/src/utils/hg/hg_runner.ml | (*
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
* Prints out the current HG revision, closest SVN ancestor, and the files
* changed between that HG revision and the SVN ancestor.
*
* This tool is particularly useful to manually test the Hg module.
*)
(** Command-line handling: exactly one anonymous argument, the repo root. *)
module Args = struct
  type t = { root: string }

  let usage = Printf.sprintf "Usage: %s [REPO DIRECTORY]\n" Sys.argv.(0)

  (** Parse argv; print the usage string and exit non-zero when no
      repository directory was supplied. *)
  let parse () =
    let acc = ref None in
    Arg.parse [] (fun anon -> acc := Some anon) usage;
    match !acc with
    | Some root -> { root }
    | None ->
      Printf.eprintf "%s" usage;
      exit 1

  let root { root } = root
end
let () =
  (* Exit quietly on Ctrl-C instead of dying with a stack trace. *)
  Sys_utils.set_signal Sys.sigint (Sys.Signal_handle (fun _ -> exit 0));
  let args = Args.parse () in
  let current_hg_rev = Hg.current_working_copy_hg_rev @@ Args.root args in
  (* Each [Future.get_exn] below blocks; any hg failure raises here. *)
  let (current_hg_rev, _) = Future.get_exn current_hg_rev in
  Printf.eprintf "Current HG rev: %s\n" current_hg_rev;
  let ancestor =
    Hg.get_closest_global_ancestor current_hg_rev @@ Args.root args
  in
  let ancestor = Future.get_exn ancestor in
  Printf.eprintf "SVN ancestor: %d\n" ancestor;
  let changes =
    Hg.files_changed_since_rev (Hg.Global_rev ancestor) @@ Args.root args
  in
  let changes = Future.get_exn changes in
  let changes = String.concat "\n" changes in
  Printf.eprintf "Changes: %s\n" changes;
  (* Same query expressed as a start/finish pair, with a 30s timeout. *)
  let changes_between_current_and_ancestor =
    Hg.files_changed_since_rev_to_rev
      ~start:(Hg.Global_rev ancestor)
      ~finish:(Hg.Hg_rev current_hg_rev)
      (Args.root args)
    |> Future.get_exn ~timeout:30
    |> String.concat ","
  in
  Printf.eprintf
    "Changes between global and hg rev: %s\n"
    changes_between_current_and_ancestor
OCaml | hhvm/hphp/hack/src/utils/hg/hg_sig.ml | module Types = struct
  (* Raised when hg produced output we could not parse. *)
  exception Malformed_result

  (** A mercurial commit hash (hex node id). *)
  type hg_rev = string [@@deriving eq, show]

  (** This is a monotonically increasing revision number. *)
  type global_rev = int [@@deriving eq, show]

  (** A revision specifier: either a commit hash or a global revision. *)
  type rev =
    | Hg_rev of hg_rev
    | Global_rev of global_rev
  [@@deriving eq, show]
(** Comparator over [rev] values, e.g. for use in test assertions. *)
module Rev_comparator = struct
  type t = rev

  let to_string v =
    match v with
    | Hg_rev s -> Printf.sprintf "Hg_rev %s" s
    | Global_rev i -> Printf.sprintf "Global_rev %d" i

  let is_equal exp actual =
    (* Avoid polymorphic equal: destructure first, then compare with the
     * type-specific equality functions. *)
    match (exp, actual) with
    | (Hg_rev exp, Hg_rev actual) -> String.equal exp actual
    | (Global_rev exp, Global_rev actual) -> Int.equal exp actual
    | _ -> false
end
end
module type S = sig
  include module type of Types

  (** Writes the contents of [files] at [rev] into the output filename
      pattern [out], relative to [repo] (see [hg cat -o]). *)
  val get_old_version_of_files :
    rev:rev -> files:string list -> out:string -> repo:string -> unit Future.t

  (** Timestamp of [rev] in seconds since the Unix epoch. *)
  val get_hg_revision_time : rev -> string -> int Future.t

  (** Hash of the mergebase of master and the current working copy. *)
  val current_mergebase_hg_rev : string -> hg_rev Future.t

  (** [current_working_copy_hg_rev repo] gets the hg revision hash of the
     current working copy in the repo dir.
     The boolean returned indicates if there are working copy changes.
     Similar to
       hg id -i --cwd <repo> *)
  val current_working_copy_hg_rev : string -> (hg_rev * bool) Future.t

  (** Get the global base revision of the current working copy in the given
   * repo dir. *)
  val current_working_copy_base_rev : string -> global_rev Future.t

  (** Closest ancestor of [hg_rev] that has a global revision number. *)
  val get_closest_global_ancestor : hg_rev -> string -> global_rev Future.t

  (** Files changed between [rev] and the working copy (names only). *)
  val files_changed_since_rev :
    rev -> (* repository path. *)
           string -> string list Future.t

  (** Files changed by [rev] itself (names only). *)
  val files_changed_in_rev :
    rev -> (* repository path. *)
           string -> string list Future.t

  (** Files that must change to go from revision [start] to [finish]. *)
  val files_changed_since_rev_to_rev :
    start:rev ->
    finish:rev ->
    (* repository path. *)
    string ->
    string list Future.t

  (** hg update to the base global revision. *)
  val update_to_rev : rev -> string -> unit Future.t

  (** Test hooks. In the real implementation every setter raises; in the
      mock implementation they install canned [Future.t] answers. *)
  module Mocking : sig
    val current_working_copy_hg_rev_returns : (hg_rev * bool) Future.t -> unit

    val current_working_copy_base_rev_returns : global_rev Future.t -> unit

    val get_hg_revision_time : hg_rev -> string -> int Future.t

    val reset_current_working_copy_base_rev_returns : unit -> unit

    val closest_global_ancestor_bind_value :
      hg_rev -> global_rev Future.t -> unit

    val files_changed_since_rev_returns :
      rev:rev -> string list Future.t -> unit

    val files_changed_in_rev_returns : rev:rev -> string list Future.t -> unit

    val files_changed_since_rev_to_rev_returns :
      start:rev -> finish:rev -> string list Future.t -> unit

    val reset_files_changed_since_rev_to_rev_returns : unit -> unit

    val reset_files_changed_since_rev_returns : unit -> unit

    val reset_files_changed_in_rev_returns : unit -> unit
  end
end
TOML | hhvm/hphp/hack/src/utils/hh24_types/Cargo.toml | # @generated by autocargo
[package]
name = "hh24_types"
version = "0.0.0"
edition = "2021"
[lib]
path = "hh24_types.rs"
[dependencies]
anyhow = "1.0.71"
derive_more = "0.99.17"
file_info = { version = "0.0.0", path = "../../deps/rust/file_info" }
hh_hash = { version = "0.0.0", path = "../hh_hash" }
relative_path = { version = "0.0.0", path = "../rust/relative_path" }
rusqlite = { version = "0.29.0", features = ["backup", "blob", "column_decltype", "limits"] }
serde = { version = "1.0.176", features = ["derive", "rc"] }
serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] }
thiserror = "1.0.43"
typing_deps_hash = { version = "0.0.0", path = "../../deps/cargo/typing_deps_hash" } |
Rust | hhvm/hphp/hack/src/utils/hh24_types/hh24_types.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
//! Common types used in the HH24 Hack typechecker rearchitecture.
// Common impls for types which wrap a hash value represented by u64.
macro_rules! u64_hash_wrapper_impls {
    ($name:ident) => {
        impl $name {
            /// Wraps a raw `u64` hash value.
            #[inline]
            pub fn from_u64(hash: u64) -> Self {
                Self(hash)
            }
            /// Returns the raw `u64` hash value.
            #[inline]
            pub fn as_u64(self) -> u64 {
                self.0
            }
        }
        // Debug renders as `TypeName(<lowercase hex>)`.
        impl std::fmt::Debug for $name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, concat!(stringify!($name), "({:x})"), self.0)
            }
        }
        // FromStr parses bare hex digits (no "0x" prefix), matching the
        // `{:x}` form emitted by Debug/LowerHex.
        impl std::str::FromStr for $name {
            type Err = std::num::ParseIntError;
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                Ok(Self(u64::from_str_radix(s, 16)?))
            }
        }
        // SQLite has no unsigned 64-bit column type, so the hash round-trips
        // through i64 via bit-preserving `as` casts.
        impl rusqlite::ToSql for $name {
            fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
                Ok(rusqlite::types::ToSqlOutput::from(self.0 as i64))
            }
        }
        impl rusqlite::types::FromSql for $name {
            fn column_result(
                value: rusqlite::types::ValueRef<'_>,
            ) -> rusqlite::types::FromSqlResult<Self> {
                Ok(Self(value.as_i64()? as u64))
            }
        }
    };
}
/// TODO(ljw): add backtraces to the three expected cases.
/// But let's hold off until we've adopted thiserror 1.0.34 and rustc post backtrace stabilization
#[derive(thiserror::Error, Debug)]
pub enum HhError {
    /// This error is used for all unexpected scenarios - anything where we want a callstack
    /// and telemetry. This includes disk io errors, json parsing errors.
    #[error("Unexpected: {0:#}")]
    Unexpected(anyhow::Error),

    /// This means that hh_decl was asked for a decl, and it had to read it off disk, but
    /// what it read off disk is different from what it expected in the naming table.
    /// This means that the disk has changed in a material way since the last time anyone
    /// invoked "hh_decl change", or its user-facing caller "hh update". Therefore,
    /// any facts or derived facts (shallow or folded decs, or depgraph edges) that we
    /// attempted to deduce based on reading a disk file run the risk of being invalid in ways
    /// that we won't subsequently be able to invalidate. Therefore, we must not commit such
    /// facts to memory or anywhere. In the face of such a situation, no useful work is
    /// possible by hh_decl nor by the caller of it (hh_fanout, hh_worker, hh). The only thing
    /// that hh_decl or its callers can do in this situation is propagate the DiskChanged error
    /// upwards for now, and terminate themselves. What's needed for work to proceed is
    /// to do "hh_decl change" with the affected files. More typically, this will be done
    /// by "hh update", which will query watchman for all modified files, then invoke "hh_decl change"
    /// for them, then invoke "hh_fanout change" for modified symbols. Note that the only
    /// component which can recover from DiskChanged is "hh --retry-on-disk-changed", which
    /// sees a DiskChanged error reported from one of its subcomponents (hh_decl, hh_fanout, hh_worker)
    /// and does that "hh update" remediation step itself.
    // NOTE(review): `{0}` interpolates the PathBuf with `Display`, which
    // `std::path::PathBuf` does not implement - confirm this compiles;
    // `{0:?}` or a pre-rendered `.display()` string may be needed.
    #[error("Disk changed: {0} - do hh_decl change then restart the operation. [{1}]")]
    DiskChanged(std::path::PathBuf, String),

    /// This means that hh_decl was asked for a decl by some component (hh_fanout, hh_worker, hh)
    /// but some concurrent process had already done "hh_decl change" to inform it of modified
    /// symbols. In other words, hh_decl is "in the future" and knows about changes on disk
    /// that its caller doesn't yet know about. In such a situation it's unsafe for the caller
    /// to continue -- any facts or derived facts that the caller attempts to store will be
    /// invalid in ways it can't recover from. The only action the caller can do is terminate
    /// itself, and trust that someone will restart it. For cases purely within "hh check" this
    /// situation won't arise. Where it will arise is if someone did "hh --type-at-pos ..." in the
    /// background, and then also did "hh update", and the hh update might have updated hh_decl
    /// in such a way that the type-at-pos worker is unable to proceed. (If someone had done
    /// "hh --type-at-pos --retry-on-disk-changed" then after the worker terminated with this error,
    /// then hh would know to restart it.)
    #[error("Hh_decl changed its checksum: {0:?} - restart the operation. [{1}]")]
    ChecksumChanged(Checksum, String),

    /// This means that hh_decl was told to stop. This error is our chief signalling mechanism for
    /// getting concurrent workers to stop too: they are (presumably) spending their time reading
    /// hh_decl, and once we tell hh_decl to stop then they'll soon get the message, and know
    /// to shut down.
    #[error("Hh_decl stopped - abandon the operation. [{0}]")]
    Stopped(String),
}
/// TODO(ljw): once we adopt thiserror 1.0.34 and anyhow 1.0.64, then anyhow
/// stacks will always be present, and we'll have no need for peppering our
/// codebase with .hh_context("desc") to make up for their lack. At that time,
/// let's delete the HhErrorContext trait and all calls to it.
pub trait HhErrorContext<T> {
    /// Attaches a static description to the error side of `self`, analogous
    /// to `anyhow::Context::context`.
    fn hh_context(self, context: &'static str) -> Result<T, HhError>;
}

impl<T> HhErrorContext<T> for Result<T, HhError> {
    fn hh_context(self, ctx: &'static str) -> Result<T, HhError> {
        match self {
            Ok(r) => Ok(r),
            Err(HhError::Unexpected(err)) => Err(HhError::Unexpected(err.context(ctx))),
            // For the expected variants, contexts accumulate in the String
            // payload, newline-separated, newest first.
            Err(HhError::DiskChanged(path, ctx0)) => {
                Err(HhError::DiskChanged(path, format!("{}\n{}", ctx, ctx0)))
            }
            Err(HhError::ChecksumChanged(checksum, ctx0)) => Err(HhError::ChecksumChanged(
                checksum,
                format!("{}\n{}", ctx, ctx0),
            )),
            Err(HhError::Stopped(ctx0)) => Err(HhError::Stopped(format!("{}\n{}", ctx, ctx0))),
        }
    }
}

// The three impls below wrap foreign error types into HhError::Unexpected.
impl<T> HhErrorContext<T> for Result<T, std::io::Error> {
    #[inline(never)]
    fn hh_context(self, context: &'static str) -> Result<T, HhError> {
        self.map_err(|err| HhError::Unexpected(anyhow::Error::new(err).context(context)))
    }
}

impl<T> HhErrorContext<T> for Result<T, serde_json::error::Error> {
    #[inline(never)]
    fn hh_context(self, context: &'static str) -> Result<T, HhError> {
        self.map_err(|err| HhError::Unexpected(anyhow::Error::new(err).context(context)))
    }
}

impl<T> HhErrorContext<T> for Result<T, anyhow::Error> {
    #[inline(never)]
    fn hh_context(self, context: &'static str) -> Result<T, HhError> {
        self.map_err(|err| HhError::Unexpected(err.context(context)))
    }
}
/// Checksum is used to characterize state of every decl in the repository:
/// if a decl is added, removed, moved from one file, changed, then the overall
/// checksum of the repository will change.
#[derive(Copy, Clone, Hash, PartialEq, Eq, Default)]
#[derive(serde::Deserialize, serde::Serialize)]
#[derive(derive_more::UpperHex, derive_more::LowerHex)]
pub struct Checksum(pub u64);
u64_hash_wrapper_impls! { Checksum }

impl Checksum {
    /// Folds one (symbol, decl, path) fact into the checksum. The combination
    /// is XOR, so applying the same fact a second time removes it again
    /// (the operation is its own inverse) and call order doesn't matter.
    pub fn addremove(
        &mut self,
        symbol_hash: ToplevelSymbolHash,
        decl_hash: DeclHash,
        path: &relative_path::RelativePath,
    ) {
        // CARE! This implementation must be identical to that in rust_decl_ffi.rs
        // I wrote it out as a separate copy because I didn't want hh_server to take a dependency
        // upon hh24_types
        self.0 ^= hh_hash::hash(&(symbol_hash, decl_hash, path));
    }
}
/// A [`Checksum`] plus provenance: when it was computed, and one example
/// symbol that contributed to it (useful in log messages).
#[derive(Clone, Debug)]
#[derive(serde::Deserialize, serde::Serialize)]
pub struct RichChecksum {
    pub checksum: Checksum,
    pub timestamp: Timestamp,
    pub example_symbol: String,
}

impl RichChecksum {
    /// Renders as `RichChecksum(<hex-checksum>@<epoch-secs>@<example-symbol>)`,
    /// the same shape that the `FromStr` impl below parses.
    pub fn to_brief_string(&self) -> String {
        format!(
            "RichChecksum({:x}@{}@{})",
            self.checksum,
            self.timestamp.unix_epoch_secs(),
            self.example_symbol
        )
    }
}

impl std::str::FromStr for RichChecksum {
    type Err = ParseRichChecksumError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Strip the optional "RichChecksum(...)" wrapper.
        // NOTE(review): trim_end_matches removes *all* trailing ')' chars,
        // so an example_symbol ending in ')' would be mangled - confirm
        // symbols can never end with ')'.
        let s = s.trim_start_matches("RichChecksum(");
        let s = s.trim_end_matches(')');
        let mut iter = s.split('@');
        match (iter.next(), iter.next(), iter.next(), iter.next()) {
            (Some(checksum), Some(timestamp), Some(example_symbol), None) => Ok(Self {
                checksum: checksum
                    .parse()
                    .map_err(ParseRichChecksumError::InvalidChecksum)?,
                timestamp: timestamp
                    .parse()
                    .map_err(ParseRichChecksumError::InvalidTimestamp)?,
                example_symbol: String::from(example_symbol),
            }),
            _ => Err(ParseRichChecksumError::Invalid),
        }
    }
}

#[derive(thiserror::Error, Debug)]
pub enum ParseRichChecksumError {
    #[error("expected \"RichChecksum(<checksum>@<timestamp>@<example_symbol>)\"")]
    Invalid,
    #[error("{0}")]
    InvalidChecksum(#[source] std::num::ParseIntError),
    #[error("{0}")]
    InvalidTimestamp(#[source] std::num::ParseIntError),
}
/// A measurement of the system clock, useful for talking to external entities
/// like the file system or other processes. Wraps `std::time::SystemTime`, but
/// implements `serde::Serialize` and `serde::Deserialize`.
///
/// Invariant: always represents a time later than the unix epoch.
#[derive(Copy, Clone)]
pub struct Timestamp(std::time::SystemTime);

impl Timestamp {
    /// Returns the system time corresponding to "now".
    pub fn now() -> Self {
        Self(std::time::SystemTime::now())
    }

    /// Returns the system time corresponding to the unix epoch plus the given
    /// number of seconds.
    pub fn from_unix_epoch_secs(secs: u64) -> Self {
        Self(
            std::time::SystemTime::UNIX_EPOCH
                .checked_add(std::time::Duration::from_secs(secs))
                .expect("Seconds since UNIX_EPOCH too large to fit in SystemTime"),
        )
    }

    /// Returns the number of seconds elapsed between the unix epoch and this
    /// `Timestamp`. Panics if the invariant is violated (a time before the
    /// epoch); sub-second precision is truncated.
    pub fn unix_epoch_secs(&self) -> u64 {
        self.0
            .duration_since(std::time::SystemTime::UNIX_EPOCH)
            .expect("Timestamp before UNIX_EPOCH")
            .as_secs()
    }

    /// Returns the `SystemTime` corresponding to this `Timestamp`.
    pub fn as_system_time(&self) -> std::time::SystemTime {
        self.0
    }
}

impl std::fmt::Debug for Timestamp {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Timestamp({})", self.unix_epoch_secs())
    }
}

// Accepts both the Debug form "Timestamp(<secs>)" and a bare "<secs>".
impl std::str::FromStr for Timestamp {
    type Err = std::num::ParseIntError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim_start_matches("Timestamp(");
        let s = s.trim_end_matches(')');
        Ok(Self::from_unix_epoch_secs(s.parse()?))
    }
}

// Serialized as a bare u64 of epoch seconds, so sub-second precision is
// lost across a serialize/deserialize round trip.
impl serde::Serialize for Timestamp {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_u64(self.unix_epoch_secs())
    }
}

impl<'de> serde::Deserialize<'de> for Timestamp {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = Timestamp;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "a u64 for Timestamp")
            }
            fn visit_u64<E: serde::de::Error>(self, value: u64) -> Result<Self::Value, E> {
                Ok(Self::Value::from_unix_epoch_secs(value))
            }
        }
        deserializer.deserialize_u64(Visitor)
    }
}
/// The hash of a toplevel symbol name, as it appears in the 64bit dependency graph.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(serde::Deserialize, serde::Serialize)]
#[derive(derive_more::UpperHex, derive_more::LowerHex)]
pub struct ToplevelSymbolHash(u64);
u64_hash_wrapper_impls! { ToplevelSymbolHash }

impl ToplevelSymbolHash {
    /// Hash a symbol name under the given kind. The name is hashed as-is
    /// (case-sensitive); contrast with `ToplevelCanonSymbolHash`.
    pub fn new(kind: file_info::NameType, symbol: &str) -> Self {
        Self::from_byte_string(kind, symbol.as_bytes())
    }

    pub fn from_byte_string(kind: file_info::NameType, symbol: &[u8]) -> Self {
        Self(typing_deps_hash::hash1(kind.into(), symbol))
    }

    pub fn from_type(symbol: &str) -> Self {
        // Could also be a NameType::Typedef, but both Class and Typedef are
        // represented with DepType::Type. See test_dep_type_from_name_type below.
        Self::new(file_info::NameType::Class, symbol)
    }

    pub fn from_fun(symbol: &str) -> Self {
        Self::new(file_info::NameType::Fun, symbol)
    }

    pub fn from_const(symbol: &str) -> Self {
        Self::new(file_info::NameType::Const, symbol)
    }

    pub fn from_module(symbol: &str) -> Self {
        Self::new(file_info::NameType::Module, symbol)
    }

    /// Big-endian encoding of the hash (eight bytes).
    #[inline(always)]
    pub fn to_be_bytes(self) -> [u8; 8] {
        self.0.to_be_bytes()
    }

    #[inline(always)]
    pub fn from_be_bytes(bs: [u8; 8]) -> Self {
        Self(u64::from_be_bytes(bs))
    }

    /// Every toplevel-symbol hash is also a valid dependency hash (same bits).
    #[inline(always)]
    pub fn to_dependency_hash(self) -> DependencyHash {
        DependencyHash(self.0)
    }
}
/// The "canon hash" of a toplevel symbol name (i.e., the hash of the symbol
/// name after ASCII characters in the name have been converted to lowercase),
/// as it appears in the naming table.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(serde::Deserialize, serde::Serialize)]
#[derive(derive_more::UpperHex, derive_more::LowerHex)]
pub struct ToplevelCanonSymbolHash(u64);
u64_hash_wrapper_impls! { ToplevelCanonSymbolHash }

impl ToplevelCanonSymbolHash {
    /// Lowercases the ASCII characters of `symbol` in place, then hashes it
    /// under the given kind (takes the String by value to allow the
    /// in-place mutation).
    pub fn new(kind: file_info::NameType, mut symbol: String) -> Self {
        symbol.make_ascii_lowercase();
        Self(typing_deps_hash::hash1(kind.into(), symbol.as_bytes()))
    }

    pub fn from_type(symbol: String) -> Self {
        // Could also be a NameType::Typedef, but both Class and Typedef are
        // represented with DepType::Type. See test_dep_type_from_name_type below.
        Self::new(file_info::NameType::Class, symbol)
    }

    pub fn from_fun(symbol: String) -> Self {
        Self::new(file_info::NameType::Fun, symbol)
    }

    pub fn from_const(symbol: String) -> Self {
        Self::new(file_info::NameType::Const, symbol)
    }

    pub fn from_module(symbol: String) -> Self {
        Self::new(file_info::NameType::Module, symbol)
    }
}
/// The hash of a toplevel symbol name, or the hash of a class member name, or
/// an Extends, Constructor, or AllMembers hash for a class name.
/// See `Typing_deps.Dep.(dependency variant)`. and Dep.make in typing_deps.ml
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(serde::Deserialize, serde::Serialize)]
#[derive(derive_more::UpperHex, derive_more::LowerHex)]
pub struct DependencyHash(pub u64);

impl DependencyHash {
    /// Hash for a member-level dependency: combines the enclosing type's
    /// hash with the member name under the given dep kind.
    pub fn of_member(
        dep_type: typing_deps_hash::DepType,
        type_hash: ToplevelSymbolHash,
        member_name: &str,
    ) -> Self {
        Self(typing_deps_hash::hash2(
            dep_type,
            type_hash.0,
            member_name.as_bytes(),
        ))
    }

    /// Hash for a symbol-level dependency under the given dep kind.
    pub fn of_symbol(dep_type: typing_deps_hash::DepType, type_name: &str) -> Self {
        Self(typing_deps_hash::hash1(dep_type, type_name.as_bytes()))
    }

    #[inline]
    pub fn as_u64(self) -> u64 {
        self.0
    }
}

// Debug renders as `DependencyHash(<lowercase hex>)`, matching FromStr below.
impl std::fmt::Debug for DependencyHash {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "DependencyHash({:x})", self.0)
    }
}

// Parses bare hex digits (no "0x" prefix).
impl std::str::FromStr for DependencyHash {
    type Err = std::num::ParseIntError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self(u64::from_str_radix(s, 16)?))
    }
}

// A `ToplevelSymbolHash` is a valid `DependencyHash`, but not all
// `DependencyHash` values represent a toplevel symbol.
impl From<ToplevelSymbolHash> for DependencyHash {
    fn from(hash: ToplevelSymbolHash) -> Self {
        Self(hash.0)
    }
}

impl From<DependencyHash> for ToplevelSymbolHash {
    fn from(hash: DependencyHash) -> Self {
        Self(hash.0)
    }
}

impl DependencyHash {
    /// Big-endian encoding of the hash (eight bytes).
    #[inline(always)]
    pub fn to_be_bytes(self) -> [u8; 8] {
        self.0.to_be_bytes()
    }

    #[inline(always)]
    pub fn from_be_bytes(bs: [u8; 8]) -> Self {
        Self(u64::from_be_bytes(bs))
    }
}
/// One edge of the dependency graph: `dependent` depends on `dependency`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(serde::Deserialize, serde::Serialize)]
pub struct DepGraphEdge {
    pub dependency: DependencyHash,
    pub dependent: ToplevelSymbolHash,
}

impl DepGraphEdge {
    pub fn from_u64(dependency: u64, dependent: u64) -> Self {
        Self {
            dependency: DependencyHash(dependency),
            dependent: ToplevelSymbolHash::from_u64(dependent),
        }
    }
}

// Parses "<dependency_hex>:<dependent_hex>" (both hashes in bare hex).
impl std::str::FromStr for DepGraphEdge {
    type Err = ParseDepGraphEdgeError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut iter = s.split(':');
        match (iter.next(), iter.next(), iter.next()) {
            (Some(dependency), Some(dependent), None) => Ok(Self {
                dependency: dependency.parse()?,
                dependent: dependent.parse()?,
            }),
            _ => Err(ParseDepGraphEdgeError::Invalid(s.to_owned())),
        }
    }
}

#[derive(thiserror::Error, Debug)]
pub enum ParseDepGraphEdgeError {
    #[error("expected dependency_hash:dependent_hash format. actual \"{0}\"")]
    Invalid(String),
    #[error("{0}")]
    FromInt(#[from] std::num::ParseIntError),
}
/// The position-insensitive hash of the `Decls` produced by running the direct
/// decl parser on a file. Used in the NAMING_FILE_INFO table.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(serde::Deserialize, serde::Serialize)]
#[derive(derive_more::UpperHex, derive_more::LowerHex)]
pub struct FileDeclsHash(u64);
u64_hash_wrapper_impls! { FileDeclsHash }

/// The position-insensitive hash of a decl (the type signature of a toplevel
/// declaration), as it appears in the naming table. Used in the NAMING_FUNS,
/// NAMING_CONSTS, and NAMING_TYPES tables (in the near future).
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(serde::Deserialize, serde::Serialize)]
#[derive(derive_more::UpperHex, derive_more::LowerHex)]
pub struct DeclHash(u64);
u64_hash_wrapper_impls! { DeclHash }
/// This type is for serializing an anyhow error. What you get out will print with
/// the same information as the original anyhow, but won't look quite as pretty
/// and doesn't support downcasting.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct StringifiedError {
    pub chain: Vec<String>, // invariant: this has at least 1 element
    pub backtrace: String,
}

impl StringifiedError {
    /// Captures the error chain - reversed, so the root cause comes first -
    /// plus the `{:?}` rendering (anyhow's full context/backtrace dump).
    pub fn from_anyhow(err: anyhow::Error) -> Self {
        let chain = err.chain().map(|c| format!("{}", c)).rev().collect();
        let backtrace = format!("{:?}", err);
        Self { chain, backtrace }
    }

    /// Rebuilds an anyhow error: contexts are re-applied in chain order, so
    /// the last element of `chain` becomes the outermost context.
    pub fn to_anyhow(self) -> anyhow::Error {
        let mut e = anyhow::anyhow!("StringifiedError");
        e = e.context(self.backtrace);
        for cause in self.chain {
            e = e.context(cause);
        }
        e
    }
}

// Displays the outermost context (the last chain element), matching how the
// original anyhow error would Display.
impl std::fmt::Display for StringifiedError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.chain[self.chain.len() - 1])
    }
}
#[cfg(test)]
mod tests {
    use anyhow::Context;

    use super::*; // to bring Result<T,E>.context into scope

    // Three nested layers so the round-tripped error carries the chain
    // ctx_outer -> ctx_middle -> oops.
    fn stringify_inner() -> anyhow::Result<()> {
        anyhow::bail!("oops");
    }

    fn stringify_middle() -> anyhow::Result<()> {
        stringify_inner().context("ctx_middle")
    }

    fn stringify_outer() -> anyhow::Result<()> {
        stringify_middle().context("ctx_outer")
    }

    // Round-trip anyhow -> StringifiedError -> anyhow and check that both
    // Display and Debug output survive.
    #[test]
    fn stringify_without_backtrace() {
        match stringify_outer() {
            Ok(()) => panic!("test wanted to see an error"),
            Err(err1) => {
                let err2 = StringifiedError::from_anyhow(err1);
                let err3 = err2.to_anyhow();
                let display = format!("{}", err3);
                assert_eq!(display, "ctx_outer");
                let debug = format!("{:?}", err3);
                assert!(debug.contains("ctx_outer"));
                assert!(debug.contains("0: ctx_middle"));
                assert!(debug.contains("1: oops"));
            }
        }
    }

    // Documents the assumption relied upon by from_type/from_type above:
    // Class and Typedef map to the same DepType.
    #[test]
    fn test_dep_type_from_name_type() {
        assert_eq!(
            typing_deps_hash::DepType::from(file_info::NameType::Class),
            typing_deps_hash::DepType::from(file_info::NameType::Typedef)
        );
    }
}
TOML | hhvm/hphp/hack/src/utils/hhvm_options/Cargo.toml | # @generated by autocargo
[package]
name = "hhvm_options"
version = "0.0.0"
edition = "2021"
[lib]
path = "hhvm_options.rs"
[dependencies]
anyhow = "1.0.71"
clap = { version = "4.3.5", features = ["derive", "env", "string", "unicode", "wrap_help"] }
hdf = { version = "0.0.0", path = "../hdf" }
hhvm_runtime_options = { version = "0.0.0", path = "hhvm_runtime_options" } |
Rust | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_config.rs | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
use anyhow::Result;
/// A pair of configuration trees consulted in priority order: the HDF
/// config is checked first, then the INI config (under a translated key).
#[derive(Debug, Default)]
pub struct HhvmConfig {
    pub hdf_config: hdf::Value,
    pub ini_config: hdf::Value,
}
impl HhvmConfig {
    /// Look up `key` as a string: first in the HDF config under `key`
    /// itself, then in the INI config under the `hhvm.`-prefixed
    /// snake_case rendering of `key` (see `ini_name`).
    pub fn get_str<'a>(&'a self, key: &str) -> Result<Option<String>> {
        self.get_helper(
            key,
            /*prepend_hhvm=*/ true,
            |config, key| Ok(config.get_str(key)?),
        )
    }

    /// Same lookup scheme as `get_str`, parsing the value as a bool.
    pub fn get_bool(&self, key: &str) -> Result<Option<bool>> {
        self.get_helper(
            key,
            /*prepend_hhvm=*/ true,
            |config, key| Ok(config.get_bool(key)?),
        )
    }

    /// Same lookup scheme as `get_str`, parsing the value as a u32.
    pub fn get_uint32(&self, key: &str) -> Result<Option<u32>> {
        self.get_helper(
            key,
            /*prepend_hhvm=*/ true,
            |config, key| Ok(config.get_uint32(key)?),
        )
    }

    /// Query the HDF config first; on a miss, retry against the INI config
    /// under the converted INI spelling of `key`.
    fn get_helper<'a, T: 'a>(
        &'a self,
        key: &str,
        prepend_hhvm: bool,
        mut f: impl FnMut(&'a hdf::Value, &str) -> Result<Option<T>>,
    ) -> Result<Option<T>> {
        match f(&self.hdf_config, key)? {
            Some(value) => Ok(Some(value)),
            None => {
                let ini_name = Self::ini_name(key, prepend_hhvm);
                f(&self.ini_config, &ini_name)
            }
        }
    }

    /// Convert a dotted CamelCase HDF name into its INI spelling,
    /// optionally prefixed with "hhvm.".
    fn ini_name(name: &str, prepend_hhvm: bool) -> String {
        // Based on IniName() in config.cpp this basically converts CamelCase to
        // snake_case.
        let mut out = String::new();
        if prepend_hhvm {
            out.push_str("hhvm.");
        }
        if name.is_empty() {
            return out;
        }
        // Walk the string with a one-character lookbehind (`prev`) and
        // lookahead (`next`) so underscores can be inserted at word breaks.
        let mut prev = ' ';
        let mut it = name.chars();
        let mut c = it.next().unwrap();
        for (idx, next) in it.enumerate() {
            if idx == 0 || !c.is_alphanumeric() {
                // The first character, and any `.`, `_`, or other
                // punctuation, is just output with no special behavior.
                out.extend(c.to_lowercase());
            } else if c.is_uppercase() && prev.is_uppercase() && next.is_lowercase() {
                // Handle something like "SSLPort", and c = "P", which will then
                // put the underscore between the "L" and "P".
                out.push('_');
                out.extend(c.to_lowercase());
            } else if c.is_lowercase() && next.is_uppercase() {
                // Handle something like "PathDebug", and c = "h", which will
                // then put the underscore between the "h" and "D".
                out.extend(c.to_lowercase());
                out.push('_');
            } else {
                // Otherwise we just output as lower.
                out.extend(c.to_lowercase());
            }
            prev = c;
            c = next;
        }
        // Last character.
        out.extend(c.to_lowercase());
        out
    }
}
// Spot-checks for `ini_name`: dotted CamelCase, an acronym run ("SSLPort"),
// and the effect of the `prepend_hhvm` flag.
#[test]
fn test_ini_name() {
    assert_eq!(
        HhvmConfig::ini_name("Hack.Lang.AllowUnstableFeatures", true),
        "hhvm.hack.lang.allow_unstable_features"
    );
    assert_eq!(
        HhvmConfig::ini_name("Server.SSLPort", false),
        "server.ssl_port"
    );
    assert_eq!(HhvmConfig::ini_name("PathDebug", false), "path_debug");
}
Rust | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_options.rs | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
mod hhvm_config;
use std::ffi::OsStr;
use std::path::PathBuf;
use anyhow::anyhow;
use anyhow::Result;
use clap::ArgAction;
use clap::Parser;
pub use hhvm_config::*;
// Define HHVM-compatible options, as best as we can with clap.
#[derive(Debug, Default, Parser)]
pub struct HhvmOptions {
    // NOTE: the `///` doc comments below double as clap --help text, so new
    // documentation is added with `//` to leave the CLI output unchanged.

    /// Load specified HDF or INI config file(s)
    // Dispatched on file extension in `to_config` (.hdf vs .ini).
    #[clap(
        short('c'),
        long("config-file"),
        action(ArgAction::Append),
        value_name("CONFIG")
    )]
    pub config_files: Vec<PathBuf>,

    /// Individual HDF configuration string
    // `-v NAME=VALUE`; applied after all config files are loaded.
    #[clap(
        short('v'),
        long("config-value"),
        action(ArgAction::Append),
        value_name("NAME=VALUE")
    )]
    pub hdf_values: Vec<String>,

    /// Define an INI setting
    // `-d NAME=VALUE`; applied after all config files are loaded.
    #[clap(
        short('d'),
        long("define"),
        action(ArgAction::Append),
        value_name("NAME=VALUE")
    )]
    pub ini_values: Vec<String>,
}
impl HhvmOptions {
    /// True when no config files and no -v/-d overrides were given.
    pub fn is_empty(&self) -> bool {
        self.config_files.is_empty() && self.hdf_values.is_empty() && self.ini_values.is_empty()
    }

    /// Load every `-c` file (dispatching on its extension), then apply `-v`
    /// HDF and `-d` INI overrides on top. Tier overrides are applied to
    /// each loaded .hdf file.
    pub fn to_config(&self) -> Result<HhvmConfig> {
        let mut hdf_config = hdf::Value::default();
        let mut ini_config = hdf::Value::default();
        for path in &self.config_files {
            let ext = path.extension();
            if ext == Some(OsStr::new("hdf")) {
                // NOTE(review): a later .hdf file replaces (does not merge
                // into) an earlier one, and likewise for .ini files —
                // confirm this matches HHVM proper.
                hdf_config = hdf::Value::from_file(path)?;
                hdf_config =
                    hhvm_runtime_options::runtime_options::apply_tier_overrides(hdf_config)?;
            } else if ext == Some(OsStr::new("ini")) {
                ini_config = hdf::Value::from_ini_file(path)?;
            } else {
                return Err(anyhow!("{}: Unknown config file format", path.display(),));
            }
        }
        for opt in &self.hdf_values {
            hdf_config.set_hdf(opt)?;
        }
        for opt in &self.ini_values {
            ini_config.set_ini(opt)?;
        }
        Ok(HhvmConfig {
            hdf_config,
            ini_config,
        })
    }
}
// Compiler options that are compatible with hphp (hhvm --hphp),
// intended to be CLI-compatible subset with HPHP::CompilerOptions
// and prepareOptions() in hphp/compiler/compiler.cpp.
#[derive(Debug, Parser)]
pub struct HphpOptions {
    // Flags documented as "(ignored)" are accepted only for CLI
    // compatibility with HPHP::CompilerOptions and have no effect here.
    // (`///` doc comments double as clap help text, so notes use `//`.)
    #[clap(flatten)]
    pub config: HhvmOptions,

    /// HHBC Output format (ignored).
    #[clap(
        long,
        short,
        default_value("binary"),
        value_parser = ["binary", "hhas", "text"]
    )]
    pub format: String,

    /// Log level (ignored - use HH_LOG). -1, 0: no logging; 1: errors, 2: warnings;
    /// 3: info, 4: verbose.
    #[clap(long, short, default_value("-1"))]
    pub log: i32,

    /// Input directory. If specified, input pathnames are interpreted
    /// relative to this directory. Absolute input pathnames must have this
    /// directory as a prefix, which will be stripped.
    #[clap(long)]
    pub input_dir: PathBuf,

    /// Output directory
    #[clap(long, short)]
    pub output_dir: Option<PathBuf>,

    /// If specified, generate a static file cache with this filename (ignored)
    #[clap(long)]
    pub file_cache: Option<PathBuf>,

    /// Directory containing input files.
    #[clap(long("dir"), action(ArgAction::Append), value_name("PATH"))]
    pub dirs: Vec<PathBuf>,

    /// Extra directories for static files without exclusion checking
    #[clap(long("cdir"), action(ArgAction::Append), value_name("PATH"))]
    pub cdirs: Vec<PathBuf>,

    /// Extra static files force-included without exclusion checking (ignored)
    #[clap(long("cfile"), action(ArgAction::Append), value_name("PATH"))]
    pub cfiles: Vec<PathBuf>,

    /// Exclude these files or directories from the static content cache (ignored)
    #[clap(
        long("exclude-static-pattern"),
        action(ArgAction::Append),
        value_name("REGEX")
    )]
    pub exclude_static_patterns: Vec<String>,

    /// Directories to exclude from the input
    #[clap(long("exclude-dir"), action(ArgAction::Append), value_name("PATH"))]
    pub exclude_dirs: Vec<PathBuf>,

    /// Directories to exclude from the static content cache (ignored)
    #[clap(
        long("exclude-static-dir"),
        action(ArgAction::Append),
        value_name("PATH")
    )]
    pub exclude_static_dirs: Vec<PathBuf>,

    /// Regex pattern for files or directories to exclude from the input,
    /// even if transitively referenced.
    #[clap(
        long("exclude-pattern"),
        action(ArgAction::Append),
        value_name("REGEX")
    )]
    pub exclude_patterns: Vec<String>,

    /// Files to exclude from the input, even if transitively referenced
    #[clap(long("exclude-file"), action(ArgAction::Append), value_name("PATH"))]
    pub exclude_files: Vec<PathBuf>,

    /// Input file names XXX (should this be --inputs?)
    // Positional arguments (no flag).
    pub inputs: Vec<PathBuf>,

    /// File containing list of relative file names, one per line.
    #[clap(long, value_name("PATH"))]
    pub input_list: Option<PathBuf>,

    /// Filename of final program to emit; will be placed in output-dir.
    #[clap(long)]
    pub program: Option<String>,
}
Rust | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/build.rs | use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
fn main() {
    // Assumes the hack workspace 'fbcode/hphp/hack/src/Cargo.toml'.
    // Ask cargo where that manifest is, then take the 3rd ancestor
    // (stripping Cargo.toml, src, hack) to reach the hphp directory;
    // fbcode is its parent and serves as the C++ include root.
    let mut cargo_cmd = Command::new("cargo");
    cargo_cmd.args(&["locate-project", "--workspace", "--message-format=plain"]);
    let output = cargo_cmd.output().unwrap().stdout;
    let hphp = Path::new(std::str::from_utf8(&output).unwrap().trim())
        .ancestors()
        .nth(3)
        .unwrap();
    let fbcode = hphp.parent().unwrap();
    // The cxx bridge sources plus the HPHP C++ files they depend on.
    let files = vec![
        PathBuf::from("ffi_bridge.rs"),
        PathBuf::from("ffi_bridge.cpp"),
        PathBuf::from("ffi_bridge.h"),
        hphp.join("util/process-cpu.cpp"),
        hphp.join("util/process-cpu.h"),
        hphp.join("util/process-host.cpp"),
        hphp.join("util/process-host.h"),
    ];
    // Only the .cpp entries are compiled; headers are tracked below so
    // edits to them trigger a rebuild.
    cxx_build::bridge("ffi_bridge.rs")
        .files(files.iter().filter(is_cpp))
        .include(fbcode)
        .define("NO_HHVM", "1")
        .warnings(false)
        .cpp(true)
        .flag("-std=c++17")
        .compile("ffi_bridge");
    files.iter().for_each(rerun_if_changed);
    rerun_if_changed("build.rs");
}
/// Emit a cargo directive so the build script re-runs whenever `f` changes.
/// Panics if the path is not valid UTF-8.
fn rerun_if_changed<P: AsRef<Path>>(f: P) {
    let path = f.as_ref().to_str().unwrap();
    println!("cargo:rerun-if-changed={}", path);
}
/// True when the path's extension is exactly "cpp".
fn is_cpp<P: AsRef<Path>>(path: &P) -> bool {
    matches!(path.as_ref().extension(), Some(ext) if ext == "cpp")
}
TOML | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/Cargo.toml | # @generated by autocargo
[package]
name = "hhvm_runtime_options"
version = "0.0.0"
edition = "2021"
[lib]
path = "lib.rs"
[dependencies]
anyhow = "1.0.71"
cxx = "1.0.100"
hdf = { version = "0.0.0", path = "../../hdf" }
log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] }
[build-dependencies]
cxx-build = "1.0.100" |
C++ | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/ffi_bridge.cpp | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#include "hphp/util/process-cpu.h"
#include "hphp/util/process-host.h"
#include "rust/cxx.h"
// Thin shims exposing HPHP process info to Rust via the cxx bridge; each
// converts the std::string returned by HPHP into a rust::String.
rust::String Process_GetCPUModel() {
  return HPHP::Process::GetCPUModel();
}

rust::String Process_GetHostName() {
  return HPHP::Process::GetHostName();
}
C/C++ | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/ffi_bridge.h | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#pragma once
#include "rust/cxx.h"
// CPU model of this machine (wraps HPHP::Process::GetCPUModel).
rust::String Process_GetCPUModel();
// Hostname of this machine (wraps HPHP::Process::GetHostName).
rust::String Process_GetHostName();
Rust | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/ffi_bridge.rs | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
// This is needed by cxx
#![allow(dead_code)]
// clippy doesn't like "Process_GetCPUModel"
#[allow(unknown_lints)]
#[cxx::bridge]
pub mod cxx_ffi {
    unsafe extern "C++" {
        include!("hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/ffi_bridge.h");
        include!("hphp/util/process-cpu.h");
        include!("hphp/util/process-host.h");

        // Declarations matched by cxx against the C++ definitions in
        // ffi_bridge.cpp; rust::String on the C++ side maps to String here.
        fn Process_GetCPUModel() -> String;
        fn Process_GetHostName() -> String;
    }
}
C/C++ | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/ffi_status.h | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#pragma once
// Status codes returned across the HackNative FFI boundary. Values in the
// -1000 range name an exception type; per the WARNING comments they must
// stay in sync with the listed systemlib PHP files, which presumably map
// each code to a throw. The small negative values mirror HHVM's
// fatal/error/null result variants.
enum {
  FFI_STATUS_ENUM_INITIALIZER = -1000,
  // WARNING: Match "hphp/facebook/hacknative/systemlib/hn/main.php".
  FFI_STATUS_THROW_Exception,
  FFI_STATUS_THROW_InvalidArgumentException,
  // WARNING: Match "hphp/facebook/hacknative/systemlib/ext/thrift/hni.php".
  FFI_STATUS_THROW_PDOException,
  // WARNING: Match "hphp/facebook/hacknative/systemlib/fb/initialize.php".
  FFI_STATUS_THROW_CompressionException,
  FFI_STATUS_THROW_CryptoException,
  FFI_STATUS_THROW_CryptoProjectNotFoundException,
  FFI_STATUS_THROW_CryptoUnexpectedException,
  FFI_STATUS_THROW_ManagedCompressionException,
  FFI_STATUS_FATAL = -5,
  FFI_STATUS_ERROR = -4,
  FFI_STATUS_NULL_WITH_WARNING = -3,
  FFI_STATUS_NULL_WITH_NOTICE = -2,
  FFI_STATUS_NULL = -1,
  FFI_STATUS_OK = 0,
};
Rust | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/lib.rs | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
mod ffi_bridge;
pub mod runtime_options;
pub(crate) use ffi_bridge::cxx_ffi; |
Rust | hhvm/hphp/hack/src/utils/hhvm_options/hhvm_runtime_options/runtime_options.rs | // Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
use std::borrow::Cow;
use std::fs;
use anyhow::Result;
use crate::cxx_ffi;
/// A machine can belong to a tier, which can overwrite
/// various settings, even if they are set in the same
/// hdf file. However, CLI overrides still win the day over
/// everything.
///
/// Based on getTierOverwrites() in runtime-option.cpp
pub fn apply_tier_overrides(mut config: hdf::Value) -> Result<hdf::Value> {
    // Machine metrics
    let hostname = config
        .get_str("Machine.name")?
        .unwrap_or_else(cxx_ffi::Process_GetHostName);
    let tier: String = config.get_str("Machine.tier")?.unwrap_or_default();
    let task: String = config.get_str("Machine.task")?.unwrap_or_default();
    let cpu: String = config
        .get_str("Machine.cpu")?
        .unwrap_or_else(cxx_ffi::Process_GetCPUModel);
    // Machine.tiers / Machine.tags name files whose *contents* are matched;
    // an unreadable file degrades to the empty string.
    let tiers: String = config
        .get_str("Machine.tiers")?
        .and_then(|tiers| fs::read_to_string(tiers).ok())
        .unwrap_or_else(|| "".to_owned());
    let tags: String = config
        .get_str("Machine.tags")?
        .and_then(|tiers| fs::read_to_string(tiers).ok())
        .unwrap_or_else(|| "".to_owned());
    log::debug!(
        "Matching tiers using: machine='{}', tier='{}', task='{}', cpu='{}', tiers='{}', tags='{}'",
        hostname,
        tier,
        task,
        cpu,
        tiers,
        tags
    );
    // A Tiers entry matches only if every one of its patterns matches this
    // machine ("m" marks the multiline-matched, file-backed inputs).
    let check_patterns = |hdf: &hdf::Value| -> Result<bool> {
        Ok(match_hdf_pattern(&hostname, hdf, "machine", "")?
            && match_hdf_pattern(&tier, hdf, "tier", "")?
            && match_hdf_pattern(&task, hdf, "task", "")?
            && match_hdf_pattern(&tiers, hdf, "tiers", "m")?
            && match_hdf_pattern(&tags, hdf, "tags", "m")?
            && match_hdf_pattern(&cpu, hdf, "cpu", "")?)
    };
    let mut enable_shards = true;
    if let Some(tiers) = config.get("Tiers")? {
        for tier in tiers.into_children()? {
            let tier = tier?;
            // NOTE(review): because `&&` binds tighter than `||`,
            // match_shard is only consulted when an `exclude` section
            // exists — confirm against getTierOverwrites() in
            // runtime-option.cpp, which checks the shard unconditionally.
            // (match_shard is currently todo!(), so such tiers panic.)
            if check_patterns(&tier)?
                && (!tier.contains_key("exclude")?
                    || !tier
                        .get("exclude")?
                        .map_or(Ok(false), |v| check_patterns(&v))?
                        && match_shard(enable_shards, &hostname, &tier))
            {
                log::info!("Matched tier: {}", tier.name()?);
                if enable_shards && tier.get_bool_or("DisableShards", false)? {
                    log::info!("Sharding is disabled.");
                    enable_shards = false;
                }
                if let Some(remove) = tier.get("clear")? {
                    for s in remove.values()? {
                        config.remove(&s)?;
                    }
                }
                //-- config.copy(tier["overwrite"]);
                // no break here, so we can continue to match more overwrites
            }
            // Avoid lint errors about unvisited nodes when the tier does not match.
            //-- tier["DisableShards"].setVisited();
            //-- tier["clear"].setVisited();
            //-- tier["overwrite"].setVisited();
        }
    }
    Ok(config)
}
// Port of matchShard() from HPHP's runtime-option.cpp; not yet implemented,
// so reaching it panics via todo!().
fn match_shard(_en: bool, _hostname: &str, _config: &hdf::Value) -> bool {
    todo!();
}
// Config::matchHdfPattern()
fn match_hdf_pattern(_value: &str, config: &hdf::Value, name: &str, suffix: &str) -> Result<bool> {
let pattern = config.get_str(name)?.unwrap_or_default();
if !pattern.is_empty() {
let _pattern: Cow<'_, str> = if suffix.is_empty() {
pattern.into()
} else {
format!("{}{}", pattern, suffix).into()
};
todo!();
//-- Variant ret = preg_match(String(pattern.c_str(), pattern.size(),
//-- CopyString),
//-- String(value.c_str(), value.size(),
//-- CopyString));
//-- if (ret.toInt64() <= 0) {
//-- return false;
//-- }
}
Ok(true)
} |
Rust | hhvm/hphp/hack/src/utils/hh_config/hh_config.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod local_config;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use anyhow::Context;
use anyhow::Result;
use config_file::ConfigFile;
pub use local_config::LocalConfig;
use oxidized::custom_error_config::CustomErrorConfig;
use oxidized::decl_parser_options::DeclParserOptions;
use oxidized::global_options::GlobalOptions;
use sha1::Digest;
use sha1::Sha1;
pub const FILE_PATH_RELATIVE_TO_ROOT: &str = ".hhconfig";
pub const PACKAGE_FILE_PATH_RELATIVE_TO_ROOT: &str = "PACKAGES.toml";
/// For now, this struct only contains the parts of .hhconfig which
/// have been needed in Rust tools.
///
/// Fields correspond to ServerConfig.t
#[derive(Debug, Clone, Default)]
pub struct HhConfig {
    pub version: Option<String>,
    /// List of regex patterns of root-relative paths to ignore.
    pub ignored_paths: Vec<String>,
    /// SHA1 Hash of the .hhconfig file contents.
    /// (Also covers the sibling PACKAGES.toml / CUSTOM_ERRORS.json contents;
    /// may be pinned via `override_hhconfig_hash`. See `HhConfig::hash`.)
    pub hash: String,
    pub opts: GlobalOptions,
    pub local_config: LocalConfig,
    /// Config settings that did not match any setting known to this parser.
    pub unknown: Vec<(String, String)>,
    // The remaining fields are parsed from the correspondingly-named
    // .hhconfig settings in `from_configs`; the `*_size` values accept `_`
    // as a digit separator.
    pub gc_minor_heap_size: usize,
    pub gc_space_overhead: usize,
    pub hackfmt_version: usize,
    pub sharedmem_dep_table_pow: usize,
    pub sharedmem_global_size: usize,
    pub sharedmem_hash_table_pow: usize,
    pub sharedmem_heap_size: usize,
    pub ide_fall_back_to_full_index: bool,
    pub hh_distc_should_disable_trace_store: bool,
}
impl HhConfig {
pub fn from_root(root: impl AsRef<Path>, overrides: &ConfigFile) -> Result<Self> {
let hhconfig_path = root.as_ref().join(FILE_PATH_RELATIVE_TO_ROOT);
let hh_conf_path = system_config_path();
Self::from_files(hhconfig_path, hh_conf_path, overrides)
}
pub fn create_packages_path(hhconfig_path: &Path) -> PathBuf {
// Unwrap is safe because hhconfig_path is always at least one nonempty string
let mut packages_path = hhconfig_path.parent().unwrap().to_path_buf();
packages_path.push("PACKAGES.toml");
packages_path
}
pub fn create_custom_errors_path(hhconfig_path: &Path) -> PathBuf {
// Unwrap is safe because hhconfig_path is always at least one nonempty string
let mut packages_path = hhconfig_path.parent().unwrap().to_path_buf();
packages_path.push("CUSTOM_ERRORS.json");
packages_path
}
pub fn from_files(
hhconfig_path: impl AsRef<Path>,
hh_conf_path: impl AsRef<Path>,
overrides: &ConfigFile,
) -> Result<Self> {
let hhconfig_path = hhconfig_path.as_ref();
let package_config_pathbuf = Self::create_packages_path(hhconfig_path);
let package_config_path = package_config_pathbuf.as_path();
let custom_error_config_path = Self::create_custom_errors_path(hhconfig_path);
let (contents, mut hhconfig) = ConfigFile::from_file_with_contents(hhconfig_path)
.with_context(|| hhconfig_path.display().to_string())?;
// Grab extra config and use it to process the hash
let package_contents: String = if package_config_path.exists() {
let ctxt = || package_config_path.display().to_string();
let bytes = std::fs::read(&package_config_path).with_context(ctxt)?;
String::from_utf8(bytes).unwrap()
} else {
String::new()
};
let custom_error_contents: String = if custom_error_config_path.exists() {
let ctxt = || custom_error_config_path.as_path().display().to_string();
let bytes = std::fs::read(&custom_error_config_path).with_context(ctxt)?;
String::from_utf8(bytes).unwrap()
} else {
"[]".to_string()
};
let hash = Self::hash(
&hhconfig,
&contents,
&package_contents,
&custom_error_contents,
);
hhconfig.apply_overrides(overrides);
let hh_conf_path = hh_conf_path.as_ref();
let mut hh_conf = ConfigFile::from_file(hh_conf_path)
.with_context(|| hh_conf_path.display().to_string())?;
hh_conf.apply_overrides(overrides);
let custom_error_config =
CustomErrorConfig::from_str(&custom_error_contents).unwrap_or_default();
Ok(Self {
hash,
..Self::from_configs(hhconfig, hh_conf, custom_error_config)?
})
}
    /// Load the raw config files for `root` without assembling an HhConfig
    /// and without applying any overrides.
    ///
    /// Returns `(hh.conf, .hhconfig, custom error config)` — note that the
    /// system hh.conf comes first in the tuple, .hhconfig second.
    pub fn into_config_files(
        root: impl AsRef<Path>,
    ) -> Result<(ConfigFile, ConfigFile, CustomErrorConfig)> {
        let hhconfig_path = root.as_ref().join(FILE_PATH_RELATIVE_TO_ROOT);
        let hh_conf_path = system_config_path();
        let custom_error_config_pathbuf = Self::create_custom_errors_path(hhconfig_path.as_path());
        let custom_error_config_path = custom_error_config_pathbuf.as_path();
        let hh_config_file = ConfigFile::from_file(&hhconfig_path)
            .with_context(|| hhconfig_path.display().to_string())?;
        let hh_conf_file = ConfigFile::from_file(&hh_conf_path)
            .with_context(|| hh_conf_path.display().to_string())?;
        let custom_error_config = CustomErrorConfig::from_path(custom_error_config_path)?;
        Ok((hh_conf_file, hh_config_file, custom_error_config))
    }
fn hash(
parsed: &ConfigFile,
config_contents: &str,
package_config: &str,
custom_error_config: &str,
) -> String {
if let Some(hash) = parsed.get_str("override_hhconfig_hash") {
return hash.to_owned();
}
let mut hasher = Sha1::new();
hasher.update(config_contents.as_bytes());
hasher.update(package_config.as_bytes());
hasher.update(custom_error_config.as_bytes());
format!("{:x}", hasher.finalize())
}
    /// Parse an in-memory .hhconfig; the hash is the SHA1 of `bytes` alone.
    /// Uses a default hh.conf and a default custom error config.
    pub fn from_slice(bytes: &[u8]) -> Result<Self> {
        let (hash, config) = ConfigFile::from_slice_with_sha1(bytes);
        Ok(Self {
            hash,
            ..Self::from_configs(config, Default::default(), Default::default())?
        })
    }
/// Construct from .hhconfig and hh.conf files with CLI overrides already applied.
    pub fn from_configs(
        hhconfig: ConfigFile,
        hh_conf: ConfigFile,
        custom_error_config: CustomErrorConfig,
    ) -> Result<Self> {
        // Rollout settings are read eagerly because LocalConfig::from_config
        // needs them before the key-by-key loop below runs.
        let current_rolled_out_flag_idx = hhconfig
            .get_int("current_saved_state_rollout_flag_index")
            .unwrap_or(Ok(isize::MIN))?;
        let deactivate_saved_state_rollout = hhconfig
            .get_bool("deactivate_saved_state_rollout")
            .unwrap_or(Ok(false))?;
        let version = hhconfig.get_str("version");
        let mut c = Self {
            local_config: LocalConfig::from_config(
                version,
                current_rolled_out_flag_idx,
                deactivate_saved_state_rollout,
                &hh_conf,
            )?,
            ..Self::default()
        };
        // Some GlobalOptions fields are copied from LocalConfig
        let go = &mut c.opts;
        go.tco_saved_state = c.local_config.saved_state.clone();
        go.po_allow_unstable_features = c.local_config.allow_unstable_features;
        go.tco_rust_elab = c.local_config.rust_elab;
        go.tco_custom_error_config = custom_error_config;
        go.dump_tast_hashes = match hh_conf.get_str("dump_tast_hashes") {
            Some("true") => true,
            _ => false,
        };
        // Every remaining .hhconfig setting is dispatched by name; keys this
        // parser does not know are preserved verbatim in `c.unknown`.
        for (key, mut value) in hhconfig {
            match key.as_str() {
                "current_saved_state_rollout_flag_index"
                | "deactivate_saved_state_rollout"
                | "override_hhconfig_hash" => {
                    // These were already queried for LocalConfig above.
                    // Ignore them so they aren't added to c.unknown.
                }
                "auto_namespace_map" => {
                    let map: BTreeMap<String, String> = parse_json(&value)?;
                    go.po_auto_namespace_map = map.into_iter().collect();
                }
                "disable_xhp_element_mangling" => {
                    go.po_disable_xhp_element_mangling = parse_json(&value)?;
                }
                "disable_xhp_children_declarations" => {
                    go.po_disable_xhp_children_declarations = parse_json(&value)?;
                }
                "interpret_soft_types_as_like_types" => {
                    go.po_interpret_soft_types_as_like_types = parse_json(&value)?;
                }
                "everything_sdt" => {
                    go.tco_everything_sdt = parse_json(&value)?;
                }
                "deregister_php_stdlib" => {
                    go.po_deregister_php_stdlib = parse_json(&value)?;
                }
                "version" => {
                    c.version = Some(value);
                }
                "ignored_paths" => {
                    c.ignored_paths = parse_json(&value)?;
                }
                "enable_experimental_tc_features" => {
                    go.tco_experimental_features = parse_sset(&value);
                }
                "enable_xhp_class_modifier" => {
                    go.po_enable_xhp_class_modifier = parse_json(&value)?;
                }
                "disallow_invalid_arraykey" => {
                    go.tco_disallow_invalid_arraykey = parse_json(&value)?;
                }
                "check_xhp_attribute" => {
                    go.tco_check_xhp_attribute = parse_json(&value)?;
                }
                "disallow_silence" => {
                    go.po_disallow_silence = parse_json(&value)?;
                }
                "check_redundant_generics" => {
                    go.tco_check_redundant_generics = parse_json(&value)?;
                }
                "disallow_func_ptrs_in_constants" => {
                    go.po_disallow_func_ptrs_in_constants = parse_json(&value)?;
                }
                "enable_strict_string_concat_interp" => {
                    go.tco_enable_strict_string_concat_interp = parse_json(&value)?;
                }
                "allowed_expression_tree_visitors" => {
                    let mut allowed_expression_tree_visitors = parse_svec(&value);
                    // Fix up type names so they will match with elaborated names.
                    // Keep this in sync with the Utils.add_ns loop in server/serverConfig.ml
                    for ty in &mut allowed_expression_tree_visitors {
                        if !ty.starts_with('\\') {
                            *ty = format!("\\{}", ty)
                        }
                    }
                    go.tco_allowed_expression_tree_visitors = allowed_expression_tree_visitors;
                }
                "locl_cache_capacity" => {
                    go.tco_locl_cache_capacity = parse_json(&value)?;
                }
                "locl_cache_node_threshold" => {
                    go.tco_locl_cache_node_threshold = parse_json(&value)?;
                }
                "math_new_code" => {
                    go.tco_math_new_code = parse_json(&value)?;
                }
                "explicit_consistent_constructors" => {
                    go.tco_explicit_consistent_constructors = parse_json(&value)?;
                }
                "enable_strict_const_semantics" => {
                    go.tco_enable_strict_const_semantics = parse_json(&value)?;
                }
                // NOTE(review): this key looks accidentally doubled
                // ("require_types_" + "tco_require_types_class_consts");
                // confirm the intended .hhconfig spelling before relying
                // on it.
                "require_types_tco_require_types_class_consts" => {
                    go.tco_require_types_class_consts = parse_json(&value)?;
                }
                "strict_wellformedness" => {
                    go.tco_strict_wellformedness = parse_json(&value)?;
                }
                "disable_hh_ignore_error" => {
                    go.po_disable_hh_ignore_error = parse_json(&value)?;
                }
                "allowed_fixme_codes_strict" => {
                    go.allowed_fixme_codes_strict = parse_iset(&value)?;
                }
                "allowed_decl_fixme_codes" => {
                    go.po_allowed_decl_fixme_codes = parse_iset(&value)?;
                }
                "code_agnostic_fixme" => {
                    go.code_agnostic_fixme = parse_json(&value)?;
                }
                "allowed_files_for_module_declarations" => {
                    go.tco_allowed_files_for_module_declarations = parse_svec(&value);
                }
                "expression_tree_virtualize_functions" => {
                    go.tco_expression_tree_virtualize_functions = parse_json(&value)?;
                }
                "tco_global_access_check_enabled" => {
                    go.tco_global_access_check_enabled = parse_json(&value)?;
                }
                "log_levels" => {
                    go.log_levels = parse_json(&value)?;
                }
                "const_default_func_args" => {
                    go.po_const_default_func_args = parse_json(&value)?;
                }
                "const_default_lambda_args" => {
                    go.po_const_default_lambda_args = parse_json(&value)?;
                }
                "like_casts" => {
                    go.tco_like_casts = parse_json(&value)?;
                }
                "timeout" => {
                    go.tco_timeout = parse_json(&value)?;
                }
                "enable_sound_dynamic_type" => {
                    go.tco_enable_sound_dynamic = parse_json(&value)?;
                }
                "pessimise_builtins" => {
                    go.tco_pessimise_builtins = parse_json(&value)?;
                }
                "enable_no_auto_dynamic" => {
                    go.tco_enable_no_auto_dynamic = parse_json(&value)?;
                }
                "like_type_hints" => {
                    go.tco_like_type_hints = parse_json(&value)?;
                }
                "union_intersection_type_hints" => {
                    go.tco_union_intersection_type_hints = parse_json(&value)?;
                }
                "typecheck_sample_rate" => {
                    go.tco_typecheck_sample_rate = parse_json(&value)?;
                }
                "type_printer_fuel" => {
                    go.tco_type_printer_fuel = parse_json(&value)?;
                }
                "profile_top_level_definitions" => {
                    go.tco_profile_top_level_definitions = parse_json(&value)?;
                }
                "skip_check_under_dynamic" => {
                    go.tco_skip_check_under_dynamic = parse_json(&value)?;
                }
                "gc_minor_heap_size" => {
                    // Strip `_` digit separators (e.g. "500_000") before
                    // parsing; same below for the other *_size settings.
                    value.retain(|c| c != '_');
                    c.gc_minor_heap_size = parse_json(&value)?;
                }
                "gc_space_overhead" => {
                    c.gc_space_overhead = parse_json(&value)?;
                }
                "hackfmt.version" => {
                    c.hackfmt_version = parse_json(&value)?;
                }
                "sharedmem_dep_table_pow" => {
                    c.sharedmem_dep_table_pow = parse_json(&value)?;
                }
                "sharedmem_global_size" => {
                    value.retain(|c| c != '_');
                    c.sharedmem_global_size = parse_json(&value)?;
                }
                "sharedmem_hash_table_pow" => {
                    c.sharedmem_hash_table_pow = parse_json(&value)?;
                }
                "sharedmem_heap_size" => {
                    value.retain(|c| c != '_');
                    c.sharedmem_heap_size = parse_json(&value)?;
                }
                "ide_fall_back_to_full_index" => {
                    c.ide_fall_back_to_full_index = parse_json(&value)?;
                }
                "hh_distc_should_disable_trace_store" => {
                    c.hh_distc_should_disable_trace_store = parse_json(&value)?;
                }
                "log_exhaustivity_check" => {
                    go.tco_log_exhaustivity_check = parse_json(&value)?;
                }
                _ => c.unknown.push((key, value)),
            }
        }
        Ok(c)
    }
    /// Derive the decl-parser options from this config's GlobalOptions.
    pub fn get_decl_parser_options(&self) -> DeclParserOptions {
        DeclParserOptions::from_parser_options(&self.opts)
    }
}
/// Deserialize a JSON-encoded config value, converting the serde error
/// into anyhow's.
fn parse_json<'de, T: serde::de::Deserialize<'de>>(value: &'de str) -> Result<T> {
    let parsed = serde_json::from_str(value)?;
    Ok(parsed)
}
/// Split a comma-separated setting into a set of trimmed entries.
/// The empty string yields an empty set; duplicates collapse.
fn parse_sset(value: &str) -> BTreeSet<String> {
    let mut out = BTreeSet::new();
    for token in value.split_terminator(',') {
        out.insert(token.trim().to_owned());
    }
    out
}
/// Split a comma-separated setting into a vector of trimmed entries,
/// preserving order and duplicates. The empty string yields an empty vec.
fn parse_svec(value: &str) -> Vec<String> {
    let mut out = Vec::new();
    for token in value.split_terminator(',') {
        out.push(token.trim().to_owned());
    }
    out
}
fn parse_iset(value: &str) -> Result<BTreeSet<isize>> {
value
.split_terminator(',')
.map(|s| Ok(s.trim().parse()?))
.collect::<Result<_>>()
}
/// Return the local config file path, allowing HH_LOCALCONF_PATH to override it.
/// Return the local config file path, allowing HH_LOCALCONF_PATH to
/// override the default `/etc` directory.
pub fn system_config_path() -> PathBuf {
    const HH_CONF: &str = "hh.conf";
    // TODO see options/buildOptions.ml for mac cfg
    let dir = std::env::var_os("HH_LOCALCONF_PATH")
        .map_or_else(|| PathBuf::from("/etc"), PathBuf::from);
    dir.join(HH_CONF)
}
#[cfg(test)]
mod test {
    use super::*;

    // `log_levels` is parsed as a JSON object into GlobalOptions::log_levels.
    #[test]
    fn test_log_levels() {
        let hhconf = HhConfig::from_slice(br#"log_levels={ "pessimise": 1 }"#).unwrap();
        assert_eq!(
            hhconf.opts.log_levels.get("pessimise").copied(),
            Some(1isize)
        );
    }
}
Rust | hhvm/hphp/hack/src/utils/hh_config/local_config.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use anyhow::Result;
use config_file::ConfigFile;
use oxidized::global_options::SavedState;
use oxidized::saved_state_rollouts::SavedStateRollouts;
/// A port of just enough of Server_local_config.t to support settings that are
/// copied into GlobalOptions.
#[derive(Debug, Default, Clone)]
pub struct LocalConfig {
    /// Allows unstable features to be enabled within a file via the
    /// '__EnableUnstableFeatures' attribute
    pub allow_unstable_features: bool,
    /// Use the Rust implementation of naming elaboration and NAST checks.
    pub rust_elab: bool,
    /// Used when fetching JustKnobs, but not in GlobalOptions
    pub rollout_group: Option<String>,
    /// Saved-state settings, including the rollout flags computed in
    /// `from_config`; copied into `GlobalOptions::tco_saved_state`.
    pub saved_state: SavedState,
}
impl LocalConfig {
    /// Construct from an hh.conf file with CLI overrides already applied.
    ///
    /// `current_version` gates the version-conditional booleans (via
    /// `bool_if_min_version`); the two rollout arguments originate from
    /// .hhconfig and feed `SavedStateRollouts::make`.
    pub fn from_config(
        current_version: Option<&str>,
        current_rolled_out_flag_idx: isize,
        deactivate_saved_state_rollout: bool,
        config: &ConfigFile,
    ) -> Result<Self> {
        let mut lc = Self::default();
        lc.saved_state.rollouts = SavedStateRollouts::make(
            current_rolled_out_flag_idx,
            deactivate_saved_state_rollout,
            config.get_str("ss_force"),
            // An absent flag defaults to false; a malformed value is an error.
            |flag_name| config.get_bool(flag_name).unwrap_or(Ok(false)),
        )?;
        if let Some(b) = config.get_bool("project_metadata_w_flags") {
            lc.saved_state.project_metadata_w_flags = b?;
        }
        if let Some(b) = config.bool_if_min_version("allow_unstable_features", current_version) {
            lc.allow_unstable_features = b?;
        }
        if let Some(b) = config.bool_if_min_version("rust_elab", current_version) {
            lc.rust_elab = b?;
        }
        if let Some(rollout_group) = config.get_str("rollout_group") {
            lc.rollout_group = Some(rollout_group.into());
        }
        Ok(lc)
    }
}
TOML | hhvm/hphp/hack/src/utils/hh_config/cargo/Cargo.toml | # @generated by autocargo
[package]
name = "hh_config"
version = "0.0.0"
edition = "2021"
[lib]
path = "../hh_config.rs"
[dependencies]
anyhow = "1.0.71"
config_file = { version = "0.0.0", path = "../../config_file/rust" }
oxidized = { version = "0.0.0", path = "../../../oxidized" }
serde = { version = "1.0.176", features = ["derive", "rc"] }
serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] }
sha1 = "0.10.5" |
TOML | hhvm/hphp/hack/src/utils/hh_hash/Cargo.toml | # @generated by autocargo
[package]
name = "hh_hash"
version = "0.0.0"
edition = "2021"
[lib]
path = "hh_hash.rs"
[dependencies]
fnv = "1.0"
no_pos_hash = { version = "0.0.0", path = "../no_pos_hash" } |
Rust | hhvm/hphp/hack/src/utils/hh_hash/hh_hash.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
pub use std::hash::Hash;
pub use std::hash::Hasher;
pub use no_pos_hash::position_insensitive_hash;
pub fn hash<T: Hash>(value: &T) -> u64 {
let mut hasher = fnv::FnvHasher::default();
value.hash(&mut hasher);
hasher.finish()
} |
OCaml | hhvm/hphp/hack/src/utils/hh_json/hh_json.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(**
* Hh_json parsing and pretty printing library.
*)
(*
<value> ::=
| <object>
| <array>
| <string>
| <number>
| 'true'
| 'false'
| 'null'
<object> ::= '{' <members>* '}'
<members> ::= <pair> { ',' <pair> }* [',']
<pair> ::= <string> ':' <value>
  <array> ::= '[' <elements>* ']'
<elements> ::= <value> { ',' <value> }* [',']
<number> ::=
Caveats:
(+) No handling of Unicode yet
(+) Numbers are just stored as strings
*)
module List = Base.List
(** JSON abstract syntax. Numbers are kept as their raw source strings (see
    the caveats above), and object members are an association list in source
    order. *)
type json =
  | JSON_Object of (string * json) list
  | JSON_Array of json list
  | JSON_String of string
  | JSON_Number of string
  | JSON_Bool of bool
  | JSON_Null
(* [is_digit c] is true iff [c] is an ASCII decimal digit. *)
let is_digit c = '0' <= c && c <= '9'
(* [is_whitespace c] is true iff [c] is a JSON whitespace character:
   space, newline, carriage return, or horizontal tab. *)
let is_whitespace c =
  match c with
  | ' ' | '\n' | '\r' | '\t' -> true
  | _ -> false
(* Parser state: the immutable input string plus a mutable cursor.
   [allow_trailing_comma] is set in non-strict mode (see [create_env]); per
   the grammar above it permits the optional [','] after the last member or
   element. *)
type env = {
  allow_trailing_comma: bool;
  data: string;
  mutable pos: int;
}

(* Raised (via [syntax_error]) on any malformed input. *)
exception Syntax_error of string

(* Ignore whitespace in peek/eat/next/has_more to make code that uses them
   cleaner
*)
(* [has_more env] is true iff unconsumed characters remain. *)
let has_more env = String.length env.data > env.pos
(* Raise [Syntax_error], decorating [msg] with the current position and the
   offending character when any input remains. *)
let syntax_error env msg =
  let err_msg =
    if has_more env then
      Printf.sprintf "%s at char[%d]=%c" msg env.pos env.data.[env.pos]
    else
      Printf.sprintf "%s after the last character" msg
  in
  raise (Syntax_error err_msg)
(* Return the character under the cursor without consuming it; raises
   [Syntax_error] at end of input. *)
let peek env =
  try env.data.[env.pos] with
  | Invalid_argument _ -> syntax_error env "peek"
(* Advance the cursor past all whitespace (space, newline, CR, tab). *)
let skip_blank_chars env =
  while has_more env && is_whitespace (peek env) do
    env.pos <- env.pos + 1
  done
(* Build a parser over [s]. [strict] disables trailing-comma tolerance.
   Leading whitespace is consumed immediately. *)
let create_env strict s =
  let env = { allow_trailing_comma = not strict; data = s; pos = 0 } in
  skip_blank_chars env;
  env
let eat_ws env c =
let c' = peek env in
if c' = c then
env.pos <- env.pos + 1
else
let err_msg = Printf.sprintf "eat_ws: expected %c, saw %c" c c' in
syntax_error env err_msg
let eat env c =
skip_blank_chars env;
let c' = peek env in
if c' = c then (
env.pos <- env.pos + 1;
skip_blank_chars env
) else
let err_msg = Printf.sprintf "eat: expected %c, saw %c" c c' in
syntax_error env err_msg
(* [match_substring_at s offset ss] reports whether [ss] occurs in [s]
   starting exactly at index [offset]. *)
let match_substring_at s offset ss =
  let ss_len = String.length ss in
  if String.length s - offset < ss_len then
    false
  else
    let rec chars_match i =
      i >= ss_len || (s.[i + offset] = ss.[i] && chars_match (i + 1))
    in
    chars_match 0
(* Consume the literal text [s] (after skipping whitespace) and return the
   pre-built value [js]; fails if the input does not start with [s]. *)
let js_literal env s js =
  skip_blank_chars env;
  if match_substring_at env.data env.pos s then (
    env.pos <- env.pos + String.length s;
    js
  ) else
    let err_msg = Printf.sprintf "expected '%s'" s in
    syntax_error env err_msg

let js_true env = js_literal env "true" (JSON_Bool true)

let js_false env = js_literal env "false" (JSON_Bool false)

let js_null env = js_literal env "null" JSON_Null

(* Consume [c] as a token (whitespace-skipping) and also record it in [buf]. *)
let buf_eat buf env c =
  eat env c;
  Buffer.add_char buf c

(* Consume [c] verbatim (whitespace-preserving) and record it in [buf]. *)
let buf_eat_all buf env c =
  eat_ws env c;
  Buffer.add_char buf c
(* Parse the four hex digits of a \uXXXX escape. On entry the cursor is on
   the 'u'; each step advances first, so on exit the cursor rests on the
   last hex digit (the caller advances past it). *)
let char_code env =
  let rec char_code_ (acc : int) env len =
    if len = 0 then
      acc
    else (
      env.pos <- env.pos + 1;
      let c = Char.lowercase_ascii (peek env) in
      let i =
        if '0' <= c && c <= '9' then
          Char.code c - Char.code '0'
        else if 'a' <= c && c <= 'f' then
          10 + Char.code c - Char.code 'a'
        else
          syntax_error env "expected hexadecimal digit"
      in
      char_code_ ((16 * acc) + i) env (len - 1)
    )
  in
  char_code_ 0 env 4

(* Parse a double-quoted string. Handles \n \r \t and \uXXXX escapes; any
   other backslashed character stands for itself. NOTE(review): \u escapes
   go through [Char.chr], so code points above 0xFF raise Invalid_argument —
   consistent with the header's "no Unicode handling yet" caveat. *)
let js_string env =
  let buf = Buffer.create 128 in
  let rec loop env =
    let c = peek env in
    match c with
    | '"' -> JSON_String (Buffer.contents buf)
    | '\\' ->
      env.pos <- env.pos + 1;
      let c' = peek env in
      let c' =
        match c' with
        | 'n' -> '\n'
        | 'r' -> '\r'
        | 't' -> '\t'
        | 'u' ->
          let code = char_code env in
          Char.chr code
        | x -> x
      in
      env.pos <- env.pos + 1;
      Buffer.add_char buf c';
      loop env
    | _ ->
      buf_eat_all buf env c;
      loop env
  in
  (* Opening quote, consumed without whitespace-skipping afterwards. *)
  (match peek env with
  | '"' -> env.pos <- env.pos + 1
  | _ -> syntax_error env "expected '\"' character");
  if peek env = '"' then (
    eat env '"';
    JSON_String ""
  ) else
    let res = loop env in
    eat env '"';
    res

(* Append a maximal run of decimal digits from the input to [buf]. *)
let rec buf_eat_digits buf env =
  if has_more env then
    let c = peek env in
    if is_digit c then (
      buf_eat buf env c;
      buf_eat_digits buf env
    ) else
      ()
  (* encountered a non-digit char, stop *)
  else
    (* end of string, stop *)
    ()

(* Append an optional exponent part: ('e'|'E') ['+'|'-'] digits. *)
let buf_eat_exp buf env =
  let c = peek env in
  if c = 'e' || c = 'E' then (
    buf_eat buf env c;
    let sign = peek env in
    if sign = '+' || sign = '-' then buf_eat buf env sign;
    buf_eat_digits buf env
  )

(* Parse a number as raw text: ['-'] digits ['.' digits] [exp digits].
   The text is stored verbatim in [JSON_Number]; nothing is converted. *)
let js_number env =
  let buf = Buffer.create 32 in
  let c = peek env in
  if c = '-' then buf_eat buf env '-';
  buf_eat_digits buf env;
  (* ['-'] digits *)
  let c = peek env in
  if c = '.' then (
    (* ['.' digits ] *)
    buf_eat buf env '.';
    buf_eat_digits buf env
  );
  buf_eat_exp buf env;
  (* [exp digits] *)
  JSON_Number (Buffer.contents buf)
(* The recursive rules *)

(* Dispatch on the first significant character to the matching parser.
   Numbers may begin with '-'; 't'/'f'/'n' introduce the keyword literals. *)
let rec js_value env =
  let js_value_syntax_error () =
    let err_msg = "expected '{[\"0123456789' or {t,f,n}" in
    syntax_error env err_msg
  in
  if not (has_more env) then
    js_value_syntax_error ()
  else
    match peek env with
    | '{' -> js_object env
    | '[' -> js_array env
    | '"' -> js_string env
    | c when is_digit c || c = '-' -> js_number env
    | 't' -> js_true env
    | 'f' -> js_false env
    | 'n' -> js_null env
    | _ -> js_value_syntax_error ()

(* Parse an object. A trailing comma before '}' is accepted only when
   [env.allow_trailing_comma] is set (non-strict mode). Members are
   accumulated in reverse and flipped once at the end. *)
and js_object env =
  let rec loop members =
    let p = js_pair env in
    if peek env <> ',' then
      JSON_Object (List.rev (p :: members))
    else (
      eat env ',';
      if peek env = '}' then
        if env.allow_trailing_comma then
          JSON_Object (List.rev (p :: members))
        else
          syntax_error env "Hh_json.object: trailing comma"
      else
        loop (p :: members)
    )
  in
  eat env '{';
  if peek env = '}' then (
    eat env '}';
    JSON_Object []
  ) else
    let res = loop [] in
    eat env '}';
    res

(* Parse an array; same trailing-comma policy as [js_object]. *)
and js_array env =
  let rec elements accum =
    let v = js_value env in
    if peek env <> ',' then
      JSON_Array (List.rev (v :: accum))
    else (
      eat env ',';
      if peek env = ']' then
        if env.allow_trailing_comma then
          JSON_Array (List.rev (v :: accum))
        else
          syntax_error env "Hh_json.array: trailing comma"
      else
        elements (v :: accum)
    )
  in
  eat env '[';
  if peek env = ']' then (
    eat env ']';
    JSON_Array []
  ) else
    let res = elements [] in
    eat env ']';
    res

(* Parse a single "key" : value member; the key must be a JSON string. *)
and js_pair env =
  skip_blank_chars env;
  let k = js_string env in
  skip_blank_chars env;
  eat env ':';
  let v = js_value env in
  match k with
  | JSON_String s -> (s, v)
  | _ -> syntax_error env "Hh_json.js_pair: expected a JSON String"
(* Read [filename] and return its contents, with every line (including the
   last) terminated by '\n'. The channel is always closed, even if reading
   raises — the previous version leaked the file descriptor and its
   [with _ -> None] also swallowed genuine I/O errors, which now propagate;
   only [End_of_file] terminates the loop. *)
let string_of_file filename =
  let ic = open_in filename in
  Fun.protect
    ~finally:(fun () -> close_in_noerr ic)
    (fun () ->
      let buf = Buffer.create 5096 in
      let rec loop () =
        match input_line ic with
        | l ->
          Buffer.add_string buf l;
          Buffer.add_char buf '\n';
          loop ()
        | exception End_of_file -> Buffer.contents buf
      in
      loop ())
(* Writing JSON *)
(* Order an object's members by key name so serialization is deterministic. *)
let sort_object obj_entries =
  let by_key (key_a, _) (key_b, _) = String.compare key_a key_b in
  List.sort obj_entries ~compare:by_key
(* Minimal append-only sink shared by the buffer- and channel-backed
   serializers below. *)
module type Output_stream_intf = sig
  type t

  (* Append one character. *)
  val add_char : t -> char -> unit

  (* Append a whole string. *)
  val add_string : t -> string -> unit

  (* Append [len] characters of the string starting at the given offset. *)
  val add_substring : t -> string -> int -> int -> unit
end
(* In-memory sink backed by [Buffer.t]; each operation is a direct alias of
   the corresponding [Buffer] primitive. *)
module Buffer_stream : Output_stream_intf with type t = Buffer.t = struct
  type t = Buffer.t

  let add_char = Buffer.add_char

  let add_string = Buffer.add_string

  let add_substring = Buffer.add_substring
end
(* Out-channel sink; each operation is a direct alias of the corresponding
   [Stdlib] output primitive. *)
module Channel_stream : Output_stream_intf with type t = Stdlib.out_channel =
struct
  type t = Stdlib.out_channel

  let add_char = Stdlib.output_char

  let add_string = Stdlib.output_string

  let add_substring = Stdlib.output_substring
end
module Make_streamer (Out : Output_stream_intf) = struct
  (* Designed as a substitute for String.concat that passes a buffer
   * into which intermediate strings are added, and also includes left
   * and right bracket (lb and rb) in addition to sep. They are strings,
   * despite common case of (), [],{}, or even <>, to handle missing brackets,
   * brackets with spacing and multichar brackets like OCaml's arrays
   * ([| and |]). The conc_elt function parameter performs the operation of
   * transforming the list element to a string and adding it to the buffer, the
   * simplest example would be fun x -> Buffer.add_string (to_string x)
   *)
  let concat ~lb ~rb ~sep ~concat_elt buf l =
    Out.add_string buf lb;
    (match l with
    | [] -> ()
    | elt :: elts ->
      concat_elt buf elt;
      List.iter elts ~f:(fun e ->
          Out.add_string buf sep;
          concat_elt buf e));
    Out.add_string buf rb

  (* Emit [s] as a quoted JSON string. Backslash-escapes quotes, backslashes
     and common whitespace, and renders remaining control characters as
     \uXXXX. [pos] marks the start of the longest not-yet-flushed run of
     ordinary characters, so unescaped spans are copied in bulk via
     [add_substring] instead of char-by-char. *)
  let escape b s =
    Out.add_char b '"';
    let pos = ref 0 in
    let add_escaped i chr =
      Out.add_substring b s !pos (i - !pos);
      Out.add_string b chr;
      pos := i + 1
    in
    for i = 0 to String.length s - 1 do
      match s.[i] with
      | '\\' -> add_escaped i "\\\\"
      | '"' -> add_escaped i "\\\""
      | '\n' -> add_escaped i "\\n"
      | '\r' -> add_escaped i "\\r"
      | '\t' -> add_escaped i "\\t"
      | '\x00' .. '\x1f' as c ->
        let code = Char.code c in
        add_escaped i (Printf.sprintf "\\u%04x" code)
      | _ -> ()
    done;
    Out.add_substring b s !pos (String.length s - !pos);
    Out.add_char b '"'

  (* Serialize [json] compactly onto [buf]; [sort_keys] orders object
     members by key for deterministic output. *)
  let rec add_json ~sort_keys (buf : Out.t) (json : json) : unit =
    match json with
    | JSON_Object l ->
      (* Make the pretty output deterministic by sorting the keys *)
      let l =
        if sort_keys then
          sort_object l
        else
          l
      in
      concat ~lb:"{" ~rb:"}" ~sep:"," ~concat_elt:(add_assoc ~sort_keys) buf l
    | JSON_Array l ->
      concat ~lb:"[" ~rb:"]" ~sep:"," ~concat_elt:(add_json ~sort_keys) buf l
    | JSON_String s -> escape buf s
    | JSON_Number n -> Out.add_string buf n
    | JSON_Bool b ->
      Out.add_string
        buf
        (if b then
          "true"
        else
          "false")
    | JSON_Null -> Out.add_string buf "null"

  (* Emit one object member: escaped key, ':', then the value. *)
  and add_assoc ~sort_keys (buf : Out.t) (k, v) =
    escape buf k;
    Out.add_char buf ':';
    add_json ~sort_keys buf v
end
module Out_buffer = Make_streamer (Buffer_stream)
module Out_channel = Make_streamer (Channel_stream)
(* Serialize [json] to a compact single-line string, or (with
   [~pretty:true]) delegate to [json_to_multiline]. [sort_keys] makes
   object output deterministic by ordering members by key. *)
let rec json_to_string ?(sort_keys = false) ?(pretty = false) (json : json) :
    string =
  if pretty then
    json_to_multiline ~sort_keys json
  else
    let buf = Buffer.create 1024 in
    (* need a better estimate! *)
    Out_buffer.add_json ~sort_keys buf json;
    Buffer.contents buf

(* Pretty printer: a container whose single-line rendering reaches 80
   characters is split across lines with per-level indentation; shorter
   values keep their compact form. *)
and json_to_multiline ?(sort_keys = false) json =
  let rec loop indent json =
    let single = json_to_string ~sort_keys json in
    if String.length single < 80 then
      single
    else
      match json with
      | JSON_Array l ->
        let nl = List.map l ~f:(loop (indent ^ " ")) in
        "[\n"
        ^ indent
        ^ " "
        ^ String.concat (",\n" ^ indent ^ " ") nl
        ^ "\n"
        ^ indent
        ^ "]"
      | JSON_Object l ->
        (* Make the pretty output deterministic by sorting the keys *)
        let l =
          if sort_keys then
            sort_object l
          else
            l
        in
        let nl =
          List.map l ~f:(fun (k, v) ->
              indent
              ^ " "
              ^ json_to_string ~sort_keys (JSON_String k)
              ^ ":"
              ^ loop (indent ^ " ") v)
        in
        "{\n" ^ String.concat ",\n" nl ^ "\n" ^ indent ^ "}"
      | _ -> single
  in
  loop "" json
(* Formatter-compatible printer in the convention expected by [@@deriving
   show]; renders via [json_to_multiline]. The previous version wrapped the
   result in a redundant [Printf.sprintf "%s"], allocating an extra copy of
   the string for no effect. *)
let pp_json fmt json = Format.fprintf fmt "%s" (json_to_multiline json)
(* Stream compact JSON straight to [oc] without building a full string. *)
let json_to_output oc (json : json) : unit =
  Out_channel.add_json ~sort_keys:false oc json

(* Stream JSON to [oc] with a newline between container elements. Unlike
   [json_to_multiline], this variant does not indent nested levels. *)
let rec json_to_multiline_output oc (json : json) : unit =
  let json_assoc_to_output oc (k, v) : unit =
    Out_channel.escape oc k;
    output_string oc ":";
    json_to_multiline_output oc v
  in
  match json with
  | JSON_Object l ->
    Out_channel.concat
      ~lb:"{"
      ~rb:"}"
      ~sep:",\n"
      ~concat_elt:json_assoc_to_output
      oc
      l
  | JSON_Array l ->
    Out_channel.concat
      ~lb:"["
      ~rb:"]"
      ~sep:",\n"
      ~concat_elt:json_to_multiline_output
      oc
      l
  | JSON_String s -> Out_channel.escape oc s
  | JSON_Number n -> output_string oc n
  | JSON_Bool b ->
    output_string
      oc
      (if b then
        "true"
      else
        "false")
  | JSON_Null -> output_string oc "null"

(* Write [json] (pretty or compact) followed by a newline, then flush.
   Sequencing note: only the serialization call is inside the [if]/[else];
   the ';' ends the conditional, so the newline and flush always run. *)
let output_json_endline ~pretty (oc : out_channel) (json : json) =
  if pretty then
    output_string oc (json_to_multiline json)
  else
    json_to_output oc json;
  output_char oc '\n';
  flush oc

(* Convenience wrappers writing to stdout / stderr respectively. *)
let print_json_endline ?(pretty = false) (json : json) =
  output_json_endline ~pretty stdout json

let prerr_json_endline ?(pretty = false) (json : json) =
  output_json_endline ~pretty stderr json
(* Parse a JSON value from [s]. [strict] (the default) rejects trailing
   commas in objects and arrays. Raises [Syntax_error] on malformed input. *)
let json_of_string ?(strict = true) s =
  let lb = create_env strict s in
  js_value lb

(* Read and parse an entire file; see [string_of_file] and [json_of_string]. *)
let json_of_file ?strict filename =
  json_of_string ?strict (string_of_file filename)

(* Constructors lifting OCaml values into [json]. *)
let int_ n = JSON_Number (string_of_int n)

(* NaN and infinities have no JSON representation, so they become null;
   a trailing '.' produced by [string_of_float] (e.g. "1.") is stripped
   because JSON forbids it. *)
let float_ n =
  if Float.is_infinite n || Float.is_nan n then
    (* nan/infinite isn't a valid value in json and will result in something unparseable;
       null is the best we can do. *)
    JSON_Null
  else
    let s = string_of_float n in
    (* ocaml strings can end in '.', which isn't allowed in json *)
    let len = String.length s in
    let s =
      if s.[len - 1] = '.' then
        String.sub s 0 (len - 1)
      else
        s
    in
    JSON_Number s

let string_ s = JSON_String s

let bool_ flag = JSON_Bool flag

(* [opt_ to_json x] maps [None] to JSON null. *)
let opt_ (to_json : 'a -> json) (x : 'a option) : json =
  match x with
  | None -> JSON_Null
  | Some x -> to_json x

let array_ (f : 'a -> json) (xs : 'a list) : json = JSON_Array (List.map ~f xs)

(* Encode a string-keyed map as a JSON object (one member per binding). *)
let string_map (to_json : 'a -> json) (map : 'a SMap.t) : json =
  JSON_Object (SMap.bindings map |> List.map ~f:(fun (k, v) -> (k, to_json v)))
(* Destructors: extract the payload, or fail with [Assert_failure] when the
   value has a different shape. [get_number_int_exn] additionally raises
   [Failure] when the number's text is not an OCaml int literal. *)
let get_object_exn = function
  | JSON_Object o -> o
  | _ -> assert false

let get_array_exn = function
  | JSON_Array a -> a
  | _ -> assert false

let get_string_exn = function
  | JSON_String s -> s
  | _ -> assert false

let get_number_exn = function
  | JSON_Number s -> s
  | _ -> assert false

let get_number_int_exn = function
  | JSON_Number s -> int_of_string s
  | _ -> assert false

let get_bool_exn = function
  | JSON_Bool b -> b
  | _ -> assert false

(* Option-friendly encoders: [None] becomes JSON null. *)
let opt_string_to_json = function
  | Some x -> JSON_String x
  | None -> JSON_Null

let opt_int_to_json = function
  | Some x -> JSON_Number (string_of_int x)
  | None -> JSON_Null

(* Tags naming the JSON shapes, used by [Access] for error reporting;
   [Integer_t] refines [Number_t] for int-parseable numbers. *)
type json_type =
  | Object_t
  | Array_t
  | String_t
  | Number_t
  | Integer_t
  | Bool_t

let json_type_to_string = function
  | Object_t -> "Object"
  | Array_t -> "Array"
  | String_t -> "String"
  | Number_t -> "Number"
  | Integer_t -> "Integer"
  | Bool_t -> "Bool"
(* Monadic accessors over [json]; see the .mli for extended usage notes.
   Each result carries a keytrace — the path of keys traversed, innermost
   first — so failures can report where the access went wrong. *)
module type Access = sig
  type keytrace = string list

  type access_failure =
    | Not_an_object of keytrace
    | Missing_key_error of string * keytrace
    | Wrong_type_error of keytrace * json_type

  type 'a m = ('a * keytrace, access_failure) result

  val keytrace_to_string : keytrace -> string

  val access_failure_to_string : access_failure -> string

  val return : 'a -> 'a m

  val ( >>= ) : 'a m -> ('a * keytrace -> 'b m) -> 'b m

  val counit_with : (access_failure -> 'a) -> 'a m -> 'a

  val to_option : 'a m -> 'a option

  val get_obj : string -> json * keytrace -> json m

  val get_bool : string -> json * keytrace -> bool m

  val get_string : string -> json * keytrace -> string m

  val get_number : string -> json * keytrace -> string m

  val get_number_int : string -> json * keytrace -> int m

  val get_array : string -> json * keytrace -> json list m

  val get_val : string -> json * keytrace -> json m
end
(* Implementation of the [Access] monad; see the .mli for usage notes.
   The keytrace accumulates keys innermost-first as lookups descend. *)
module Access = struct
  type keytrace = string list

  type access_failure =
    | Not_an_object of keytrace
    | Missing_key_error of string * keytrace
    | Wrong_type_error of keytrace * json_type

  type 'a m = ('a * keytrace, access_failure) result

  (* Render " (at field `a.b.c`)" with the outermost key first, or ""
     for an empty trace. *)
  let keytrace_to_string = function
    | [] -> ""
    | x ->
      let res = List.rev x |> String.concat "." in
      " (at field `" ^ res ^ "`)"

  let access_failure_to_string = function
    | Not_an_object x ->
      Printf.sprintf "Value is not an object %s" (keytrace_to_string x)
    | Missing_key_error (x, y) ->
      Printf.sprintf "Missing key: %s%s" x (keytrace_to_string y)
    | Wrong_type_error (x, y) ->
      Printf.sprintf
        "Value expected to be %s%s"
        (json_type_to_string y)
        (keytrace_to_string x)

  let return v = Ok (v, [])

  let ( >>= ) m f =
    match m with
    | Error _ as x -> x
    | Ok x -> f x

  let counit_with f m =
    match m with
    | Ok (v, _) -> v
    | Error e -> f e

  let to_option = function
    | Ok (v, _) -> Some v
    | Error _ -> None

  (* Run [f v], translating the exceptions our destructors raise into
     [Wrong_type_error] tagged with the expected type [exp]. *)
  let catch_type_error exp f (v, keytrace) =
    try Ok (f v, keytrace) with
    | Failure msg when String.equal "int_of_string" msg ->
      Error (Wrong_type_error (keytrace, exp))
    | Assert_failure _ -> Error (Wrong_type_error (keytrace, exp))

  (* Look up key [k] in object [v], pushing [k] onto the keytrace on
     success. Uses an early-exit scan with [String.equal]; the previous
     fold traversed the whole association list even after finding a match
     and compared keys with polymorphic equality. First binding wins, as
     before. *)
  let get_val k (v, keytrace) =
    try
      let obj = get_object_exn v in
      let rec find_key = function
        | [] -> None
        | (key, json) :: rest ->
          if String.equal key k then
            Some json
          else
            find_key rest
      in
      match find_key obj with
      | None -> Error (Missing_key_error (k, keytrace))
      | Some json -> Ok (json, k :: keytrace)
    with
    | Assert_failure _ -> Error (Not_an_object keytrace)

  let make_object_json v = JSON_Object (get_object_exn v)

  let get_obj k (v, keytrace) =
    get_val k (v, keytrace) >>= catch_type_error Object_t make_object_json

  let get_bool k (v, keytrace) =
    get_val k (v, keytrace) >>= catch_type_error Bool_t get_bool_exn

  let get_string k (v, keytrace) =
    get_val k (v, keytrace) >>= catch_type_error String_t get_string_exn

  let get_number k (v, keytrace) =
    get_val k (v, keytrace) >>= catch_type_error Number_t get_number_exn

  let get_number_int k (v, keytrace) =
    get_val k (v, keytrace) >>= catch_type_error Integer_t get_number_int_exn

  let get_array k (v, keytrace) =
    get_val k (v, keytrace) >>= catch_type_error Array_t get_array_exn
end
(* [lhs >=@ rhs] holds when [rhs] carries a bound and [lhs] meets it;
   an absent bound ([None]) never holds. *)
let ( >=@ ) : int -> int option -> bool =
 fun lhs rhs -> Option.fold ~none:false ~some:(fun bound -> lhs >= bound) rhs

(* [lhs <=@ rhs]: as [(>=@)] with the comparison direction reversed. *)
let ( <=@ ) : int -> int option -> bool =
 fun lhs rhs -> Option.fold ~none:false ~some:(fun bound -> lhs <= bound) rhs
(* Bound the size of a [json] tree; see the .mli for each limit's meaning.
   [None] for any limit means "unlimited" (the [>=@] operators return false
   for [None]). Sets [has_changed] whenever anything is dropped or
   shortened. *)
let json_truncate
    ?(max_string_length : int option)
    ?(max_object_child_count : int option)
    ?(max_array_elt_count : int option)
    ?(max_depth : int option)
    ?(max_total_count : int option)
    ?(has_changed : bool ref option)
    (json : json) : json =
  (* Children admitted so far, aggregated across every object/array. *)
  let total_count = ref 0 in
  let mark_changed () =
    match has_changed with
    | None -> ()
    | Some r -> r := true
  in
  (* Keep a prefix of [children], stopping at either the per-container or
     the global limit; [f] transforms each kept child. *)
  let truncate_children children max_child_count ~f =
    let rec truncate_children child_count children =
      match children with
      | [] -> []
      | _ when !total_count >=@ max_total_count ->
        mark_changed ();
        []
      | _ when child_count >=@ max_child_count ->
        mark_changed ();
        []
      | c :: rest ->
        incr total_count;
        let c' = f c in
        (* because of mutable variable, it's important to do this first *)
        c' :: truncate_children (child_count + 1) rest
    in
    truncate_children 0 children
  in
  let rec truncate ~(depth : int) (json : json) : json =
    match json with
    | JSON_Object []
    | JSON_Array []
    | JSON_Number _
    | JSON_Bool _
    | JSON_Null ->
      json
    | JSON_Object props ->
      let f (k, v) = (k, truncate ~depth:(depth + 1) v) in
      if depth >=@ max_depth then (
        mark_changed ();
        JSON_Object []
      ) else
        JSON_Object (truncate_children props max_object_child_count ~f)
    | JSON_Array values ->
      let f v = truncate ~depth:(depth + 1) v in
      if depth >=@ max_depth then (
        mark_changed ();
        JSON_Array []
      ) else
        JSON_Array (truncate_children values max_array_elt_count ~f)
    | JSON_String s -> begin
      (* Over-long strings keep a prefix plus a "..." marker. *)
      match max_string_length with
      | None -> json
      | Some max_string_length ->
        if String.length s <= max_string_length then
          JSON_String s
        else (
          mark_changed ();
          JSON_String (String.sub s 0 max_string_length ^ "...")
        )
    end
  in
  truncate ~depth:0 json
(* Parse [s], truncate with [json_truncate], and re-serialize. Returns [s]
   itself — no parse, no allocation — when it already fits in
   [allowed_total_length], and also when truncation changed nothing.
   [max_child_count] is applied to both objects and arrays. *)
let json_truncate_string
    ?(max_string_length : int option)
    ?(max_child_count : int option)
    ?(max_depth : int option)
    ?(max_total_count : int option)
    ?(allowed_total_length : int option)
    ?(if_reformat_multiline = true)
    (s : string) : string =
  if String.length s <=@ allowed_total_length then
    s
  (* fast zero-allocation path for the commonest case *)
  else
    let has_changed = ref false in
    let json = json_of_string s in
    let truncated_json =
      json_truncate
        ?max_string_length
        ?max_object_child_count:max_child_count
        ?max_array_elt_count:max_child_count
        ?max_depth
        ?max_total_count
        ~has_changed
        json
    in
    if not !has_changed then
      s
    (* moderately fast fewer-string-allocating for another common case *)
    else if if_reformat_multiline then
      json_to_multiline truncated_json
    else
      json_to_string truncated_json
(* [get_field accessor on_failure json] runs [accessor] against [json] and
   returns the extracted value; on failure, [on_failure] receives the
   rendered error message and supplies the result instead. *)
let get_field accessor on_failure json =
  Access.(
    let on_failure af = on_failure (access_failure_to_string af) in
    counit_with on_failure (return json >>= accessor))

(* As [get_field], but any failure collapses to [None]. *)
let get_field_opt accessor json = Access.(to_option (return json >>= accessor))
(* Ordered-key instance so [json] values can be used in sets and maps.
   Constructor order: Null < Bool < Number < String < Array < Object,
   with element-wise lexicographic comparison inside arrays/objects. *)
module JsonKey = struct
  type t = json

  (* Object comparison is SENSITIVE to the order of keys. *)
  (* Numbers are compared by string value, so "1" and "1.0" and "1.00" are *)
  (* all different; this way we don't worry about different floating point *)
  (* semantics between ocaml and json. *)
  let rec compare (x : t) (y : t) =
    match (x, y) with
    | (JSON_Null, JSON_Null) -> 0
    | (JSON_Null, _) -> -1
    | (_, JSON_Null) -> 1
    | (JSON_Bool false, JSON_Bool false) -> 0
    | (JSON_Bool false, JSON_Bool true) -> -1
    | (JSON_Bool true, JSON_Bool false) -> 1
    | (JSON_Bool true, JSON_Bool true) -> 0
    | (JSON_Bool _, _) -> -1
    | (_, JSON_Bool _) -> 1
    | (JSON_Number x, JSON_Number y) -> String.compare x y
    | (JSON_Number _, _) -> -1
    | (_, JSON_Number _) -> 1
    | (JSON_String x, JSON_String y) -> String.compare x y
    | (JSON_String _, _) -> -1
    | (_, JSON_String _) -> 1
    | (JSON_Array (x :: xs), JSON_Array (y :: ys)) ->
      let r = compare x y in
      if r <> 0 then
        r
      else
        compare (JSON_Array xs) (JSON_Array ys)
    | (JSON_Array [], JSON_Array []) -> 0
    | (JSON_Array [], JSON_Array _) -> -1
    | (JSON_Array _, JSON_Array []) -> 1
    | (JSON_Array _, _) -> -1
    | (_, JSON_Array _) -> 1
    | (JSON_Object ((kx, vx) :: xs), JSON_Object ((ky, vy) :: ys)) ->
      (* Keys are compared before values; remaining members recurse. *)
      let r = String.compare kx ky in
      if r <> 0 then
        r
      else
        let r = compare vx vy in
        if r <> 0 then
          r
        else
          compare (JSON_Object xs) (JSON_Object ys)
    | (JSON_Object [], JSON_Object []) -> 0
    | (JSON_Object [], JSON_Object _) -> -1
    | (JSON_Object _, JSON_Object []) -> 1
end
module JSet = Set.Make (JsonKey)
module JMap = WrappedMap.Make (JsonKey) |
OCaml Interface | hhvm/hphp/hack/src/utils/hh_json/hh_json.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(*
* Hh_json parsing and pretty printing library.
*)
type json =
| JSON_Object of (string * json) list
| JSON_Array of json list
| JSON_String of string
| JSON_Number of string
| JSON_Bool of bool
| JSON_Null
exception Syntax_error of string
(* A function conforming to `show`'s pretty printing signature;
calls `json_to_multiline` internally
*)
val pp_json : Format.formatter -> json -> unit
(** Calling this with [~pretty:true] is the same as calling [json_to_multiline] *)
val json_to_string : ?sort_keys:bool -> ?pretty:bool -> json -> string
val json_to_multiline : ?sort_keys:bool -> json -> string
val json_to_output : out_channel -> json -> unit
val json_to_multiline_output : out_channel -> json -> unit
val json_of_string : ?strict:bool -> string -> json
val json_of_file : ?strict:bool -> string -> json
(** Truncate fields of a json object.
String fields will be truncated according to [max_string_length].
[max_object_child_count] determines the maximum number of children of objects.
[max_array_elt_count] determines the maximum number of array elements.
Fields at depth greater than [max_depth] will be removed.
[max_total_count] is the maximum total number of children of arrays and objects
aggregated over all arrays and objects *)
val json_truncate :
?max_string_length:int ->
?max_object_child_count:int ->
?max_array_elt_count:int ->
?max_depth:int ->
?max_total_count:int ->
?has_changed:bool ref ->
json ->
json
val json_truncate_string :
?max_string_length:int ->
?max_child_count:int ->
?max_depth:int ->
?max_total_count:int ->
?allowed_total_length:int ->
?if_reformat_multiline:bool ->
string ->
string
val print_json_endline : ?pretty:bool -> json -> unit
val prerr_json_endline : ?pretty:bool -> json -> unit
val get_object_exn : json -> (string * json) list
val get_array_exn : json -> json list
val get_string_exn : json -> string
val get_number_exn : json -> string
val get_number_int_exn : json -> int
val get_bool_exn : json -> bool
val opt_string_to_json : string option -> json
val opt_int_to_json : int option -> json
val int_ : int -> json
val float_ : float -> json
val string_ : string -> json
val bool_ : bool -> json
val opt_ : ('a -> json) -> 'a option -> json
val array_ : ('a -> json) -> 'a list -> json
val string_map : ('a -> json) -> 'a SMap.t -> json
(* Types and functions for monadic API for traversing a JSON object. *)
type json_type =
| Object_t
| Array_t
| String_t
| Number_t
| Integer_t
| Bool_t
(*
* This module gives monadic recursive access to values within objects by key.
* It uses Stdlib.result to manage control flow in the monad when an error
* is encountered. It also tracks the backtrace of the keys accessed to give
* detailed error messages.
*
* Usage:
* To access the boolean value "qux" from the following json:
* { "foo": { "bar" : { "baz" : { "qux" : true } } } }
* Is as follows:
* (return json) >>=
* get_obj "foo" >>=
* get_obj "bar" >>=
* get_obj "baz" >>=
* get_bool "qux"
*
 * If an error is encountered along the call chain, an Error is returned
* with the appropriate error and the history of key accesses that arrived
* there (so you can trace how far it went successfully and exactly where the
* error was encountered).
*
* Same goes for accessing multiple fields within one object.
* Suppose we have a record type:
* type fbz_record = {
* foo : bool;
* bar : string;
* baz : int;
* }
*
* And we have JSON as a string:
* let data =
* "{\n"^
* " \"foo\" : true,\n"^
* " \"bar\" : \"hello\",\n"^
* " \"baz\" : 5\n"^
* "}"
* in
*
* We parse the JSON, monadically access the fields we want, and fill in the
* record by doing:
*
 * let json = Hh_json.json_of_string data in
* let open Hh_json.Access in
* let accessor = return json in
* let result =
* accessor >>= get_bool "foo" >>= fun (foo, _) ->
* accessor >>= get_string "bar" >>= fun (bar, _) ->
* accessor >>= get_number_int "baz" >>= fun (baz, _) ->
* return {
* foo;
* bar;
* baz;
* }
* in
*
* The result will be the record type inside the Result monad.
*
* match result with
* | Ok (v, _) ->
* Printf.eprintf "Got baz: %d" v.baz
* | Error access_failure ->
* Printf.eprintf "JSON failure: %s"
* (access_failure_to_string access_failure)
*
* See unit tests for more examples.
*)
module type Access = sig
type keytrace = string list
type access_failure =
(* You can't access keys on a non-object JSON thing. *)
| Not_an_object of keytrace
(* The key is missing. *)
| Missing_key_error of string * keytrace
(* The key has the wrong type. *)
| Wrong_type_error of keytrace * json_type
(* Our type for the result monad. It isn't just the json because it tracks
* a history of the keys traversed to arrive at the current point. This helps
* produce more informative error states. *)
type 'a m = ('a * keytrace, access_failure) result
val keytrace_to_string : keytrace -> string
val access_failure_to_string : access_failure -> string
val return : 'a -> 'a m
val ( >>= ) : 'a m -> ('a * keytrace -> 'b m) -> 'b m
(* This is a comonad, but we need a little help to deal with failure *)
val counit_with : (access_failure -> 'a) -> 'a m -> 'a
(* From the Error monad to the Option monad. Error states go to None. *)
val to_option : 'a m -> 'a option
(*
* The following getters operate on a JSON_Object by accessing keys on it,
* and asserting the returned value has the given expected type (types
* are asserted by which getter you choose to use).
*
* Returns Not_an_object if the given JSON object is not a JSON_Object type,
* since you can only access keys on those.
*
* Returns Wrong_type_error if the obtained value is not an object type.
*
* Returns Missing_key_error if the given key is not found in this object.
*
*)
val get_obj : string -> json * keytrace -> json m
val get_bool : string -> json * keytrace -> bool m
val get_string : string -> json * keytrace -> string m
val get_number : string -> json * keytrace -> string m
val get_number_int : string -> json * keytrace -> int m
val get_array : string -> json * keytrace -> json list m
val get_val : string -> json * keytrace -> json m (* any expected type *)
end
module Access : Access
val get_field :
(json * Access.keytrace -> 'a Access.m) -> (string -> 'a) -> json -> 'a
val get_field_opt : (json * Access.keytrace -> 'a Access.m) -> json -> 'a option
module JsonKey : Set.OrderedType with type t = json
module JSet : Set.S with type elt = json
module JMap : WrappedMap.S with type key = json |
OCaml | hhvm/hphp/hack/src/utils/hh_json/hh_json_helpers.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
* *)
module Option = Base.Option
module Int = Base.Int
module List = Base.List
open Hh_json
(************************************************************************)
(* Helpers for parsing & printing *)
(************************************************************************)
(* Tolerant accessors over "json option"; see the design comment below. *)
module Jget = struct
  exception Parse of string

  (* Helpers for the various "option" monads in use for Json, to succinctly
     capture the spirit of JSON (tolerance for missing values) and the spirit
     of LSP (loads of nested optional members with obvious defaults)
     and the usefulness of error-checking (in case a required field is absent)...
     - We use "json option" throughout. Things which you might expect to return
     a json are instead lifted to return a json option, so you can use all the
     accessors on them more easily. When you attempt to get string "o.m", either
     it's present both because "o" is Some, and because "m" is a string member
     Or it's absent because any of those three things is false...
     - The "_opt" accessors uniformly return Some (present) or None (absent),
     regardless of which of the two things caused absence.
     - The "_d" accessors uniformly return a value (present) or default.
     - The "_exn" accessors uniformly return a value (present) or throw.

     The effect of this is you lose precise information about what exactly
     caused an absence (which is usually only of marginal benefit), and in
     return you gain a consistent way to handle both optionals and requireds.

     Note one exception to the above: if you wish to get an int/float, and it's
     present as a JSON_Number but not parseable as an int/float, then all
     accessors will throw.
  *)

  (* Generic lookup: run an [Hh_json.Access] getter against an optional json
     value; any failure (absent object, missing key, wrong type) collapses
     to [None]. *)
  let get_opt hhjson_getter json key =
    match json with
    | None -> None
    | Some json ->
      (match hhjson_getter key (json, []) with
      | Ok (r, _keytrace) -> Some r
      | _ -> None)

  (* Turn an option-returning accessor into one that raises [Parse key]. *)
  let get_exn opt_getter json key =
    match opt_getter json key with
    | None -> raise (Parse key)
    | Some v -> v

  (* Parse a JSON number's text as an int; raises [Parse] if unparseable. *)
  let int_string_opt (s : string option) : int option =
    match s with
    | None -> None
    | Some s ->
      (try Some (Int.of_string s) with
      | Failure _ -> raise (Parse ("not an int: " ^ s)))

  (* Parse a JSON number's text as a float; raises [Parse] if unparseable. *)
  let float_string_opt (s : string option) : float option =
    match s with
    | None -> None
    | Some s ->
      (try Some (Float.of_string s) with
      | Failure _ -> raise (Parse ("not a float: " ^ s)))

  (* Lift every element of a list into the "json option" monad. *)
  let list_opt (l : 'a list option) : 'a option list option =
    match l with
    | None -> None
    | Some x -> Some (List.map ~f:(fun a -> Some a) x)

  (* Accessors which return None on absence *)
  let string_opt = get_opt Access.get_string

  let bool_opt = get_opt Access.get_bool

  let obj_opt = get_opt Access.get_obj

  let val_opt = get_opt Access.get_val

  let int_opt json key = get_opt Access.get_number json key |> int_string_opt

  let float_opt json key =
    get_opt Access.get_number json key |> float_string_opt

  let array_opt json key = get_opt Access.get_array json key |> list_opt

  (* array_opt lifts all the array's members into the "json option" monad *)

  (* Accessors which return a supplied default on absence *)
  let string_d json key ~default = Option.value (string_opt json key) ~default

  let bool_d json key ~default = Option.value (bool_opt json key) ~default

  let int_d json key ~default = Option.value (int_opt json key) ~default

  let float_d json key ~default = Option.value (float_opt json key) ~default

  let array_d json key ~default = Option.value (array_opt json key) ~default

  (* Accessors which throw "Error.Parse key" on absence *)
  let bool_exn = get_exn bool_opt

  let string_exn = get_exn string_opt

  let val_exn = get_exn val_opt

  let int_exn = get_exn int_opt

  let float_exn = get_exn float_opt

  let array_exn = get_exn array_opt

  (** obj_exn lifts the result into the "json option" monad *)
  let obj_exn json key = Some (get_exn obj_opt json key)

  (* Required array of required strings; raises [Parse] on any absence. *)
  let string_array_exn json key =
    array_exn json key
    |> List.map ~f:(fun opt -> Option.value_exn opt)
    |> List.map ~f:Hh_json.get_string_exn
end
module Jprint = struct
  (* object_opt is like Hh_json.JSON_Object constructor except it takes
     key * (value option): if a value is None, then it omits this member. *)
  let object_opt (keyvalues : (string * json option) list) : json =
    let rec keep = function
      | [] -> []
      | (key, Some value) :: rest -> (key, value) :: keep rest
      | (_, None) :: rest -> keep rest
    in
    JSON_Object (keep keyvalues)

  (* Convenience function to convert string list to JSON_Array *)
  let string_array (l : string list) : json = JSON_Array (List.map l ~f:string_)
end
(* Some ad-hoc JSON processing helpers. *)
(* Some ad-hoc JSON processing helpers. *)
module AdhocJsonHelpers = struct
  (* NOTE: this shadows [Stdlib.Not_found]; callers must match it as
     [AdhocJsonHelpers.Not_found]. Kept as-is for interface compatibility. *)
  exception Not_found

  (* [try_get_val key json] looks [key] up in [json], which must be a JSON
     object (asserts otherwise). First binding wins. *)
  let try_get_val key json =
    let obj = Hh_json.get_object_exn json in
    Base.List.Assoc.find ~equal:String.equal obj key

  (* Shared lookup used by the typed getters below: extract [key] with
     [extract]; fall back to [default] when the key is absent; raise
     [Not_found] when absent and no default was supplied. Factored out of
     four previously copy-pasted implementations. *)
  let get_val_with extract key ?default json =
    match (try_get_val key json, default) with
    | (Some v, _) -> extract v
    | (None, Some def) -> def
    | (None, None) -> raise Not_found

  let get_string_val key ?default json =
    get_val_with Hh_json.get_string_exn key ?default json

  let get_number_val key ?default json =
    get_val_with Hh_json.get_number_exn key ?default json

  let get_bool_val key ?default json =
    get_val_with Hh_json.get_bool_exn key ?default json

  let get_array_val key ?default json =
    get_val_with Hh_json.get_array_exn key ?default json

  let strlist args =
    Hh_json.JSON_Array (List.map ~f:(fun arg -> Hh_json.JSON_String arg) args)

  (* Useful for building an array like [ "suffix", [".txt", ".js", ".php" ]] *)
  let assoc_strlist name args =
    Hh_json.JSON_Array [Hh_json.JSON_String name; strlist args]

  (* Prepend a string to a JSON array of strings. pred stands for predicate,
   * because that's how they are typically represented in watchman. See e.g.
   * https://facebook.github.io/watchman/docs/expr/allof.html *)
  let pred name args = Hh_json.(JSON_Array (JSON_String name :: args))
end
Rust | hhvm/hphp/hack/src/utils/hh_slog/hh_slog.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::io;
use slog::o;
use slog::Drain;
const TIMESTAMP_FORMAT: &str = "[%Y-%m-%d %H:%M:%S%.3f]";

/// Writes the current local time as `[YYYY-mm-dd HH:MM:SS.mmm]` into `io`.
/// Used as the custom timestamp hook for the slog_term drains below.
fn timestamp_format(io: &mut dyn std::io::Write) -> std::io::Result<()> {
    let now = chrono::Local::now();
    write!(io, "{}", now.format(TIMESTAMP_FORMAT))
}
/// Builds an async logger that writes to `filename`, returning the logger
/// together with its `AsyncGuard` (keep the guard alive so queued records
/// are flushed on drop; read `init` for more details).
/// The file drain takes a unix flock around each flush; even with that
/// overhead, throughput is still around 7200 short messages per second.
pub fn init_file(filename: &std::path::Path) -> (slog::Logger, slog_async::AsyncGuard) {
    let file_drain = locked_file_drain::LockedFileDrain::new(filename);
    // NB: panicking in the async thread does not cause Fuse to panic.
    let (async_drain, guard) = slog_async::Async::new(file_drain)
        .thread_name("slog_logfile".to_owned())
        .build_with_guard();
    (slog::Logger::root(async_drain.fuse(), o!()), guard)
}
/// Synchronous variant of `init_file`: records are written (under flock) on
/// the calling thread, so no async guard is needed.
pub fn init_file_sync(filename: &std::path::Path) -> slog::Logger {
    let drain = locked_file_drain::LockedFileDrain::new(filename).fuse();
    slog::Logger::root(drain, o!())
}
/// Terminal logger filtered by `slog_envlogger`; set `RUST_LOG` to control
/// what gets logged. Returns the logger plus the async guard that flushes
/// pending records when dropped. A non-empty `binary_name` is attached to
/// every record under the `"bin"` key.
pub fn init_term_envlogger(binary_name: &'static str) -> (slog::Logger, slog_async::AsyncGuard) {
    let term_drain = slog_term::FullFormat::new(slog_term::TermDecorator::new().build())
        .use_custom_timestamp(timestamp_format)
        .build()
        .fuse();
    let filtered = slog_envlogger::new(term_drain);
    let (async_drain, guard) = slog_async::Async::new(filtered)
        .thread_name("slog_async".to_owned())
        .build_with_guard();
    let root_drain = async_drain.fuse();
    let logger = if binary_name.is_empty() {
        slog::Logger::root(root_drain, o!())
    } else {
        slog::Logger::root(root_drain, o!("bin" => binary_name))
    };
    (logger, guard)
}
/// Synchronous logger for tests: output goes through `eprint!` (via
/// `TestStderrWriter`), so the Cargo test harness captures it and Buck shows
/// it on test failure.
pub fn stderr_sync_testing() -> slog::Logger {
    let format = slog_term::FullFormat::new(slog_term::PlainSyncDecorator::new(TestStderrWriter))
        .use_custom_timestamp(timestamp_format)
        .build();
    slog::Logger::root(format.fuse(), o!())
}
/// A logger that discards every record — the /dev/null of loggers.
pub fn null() -> slog::Logger {
    let sink = slog::Discard;
    slog::Logger::root(sink, o!())
}
/// Helper class to print up to the first 'n' items of a slice or BTreeSet.
/// It delegates to the Debug or Display traits for T.
/// If it can't show all items, then it shows "... [LEN items]" at the end.
/// See docs for FmtN::slice and FmtN::bset for usage.
pub struct FmtN<'a, T> {
    /// Stores the first n items, borrowed from the source collection
    items: Vec<&'a T>,
    /// this is LEN, the length of the original slice or BTreeSet
    len: usize,
}
impl<'a, T> FmtN<'a, T> {
    /// Prints up to first 'n' items of the slice, e.g.
    /// println!("strings: {}", FmtN::slice(3, &names));
    /// println!("pathbufs: {:?}", FmtN::slice(5, &files));
    pub fn slice(n: usize, slice: &'a [T]) -> Self {
        let items: Vec<&'a T> = slice.iter().take(n).collect();
        let len = slice.len();
        Self { items, len }
    }

    /// Prints up to the first 'n' items of the BTreeSet in iter() order, e.g.
    /// println!("strings: {}", FmtN::bset(3, &names));
    /// println!("pathbufs: {:?}", FmtN::bset(5, &files));
    pub fn bset(n: usize, set: &'a std::collections::BTreeSet<T>) -> Self {
        let items: Vec<&'a T> = set.iter().take(n).collect();
        let len = set.len();
        Self { items, len }
    }
}
impl<'a, T: std::fmt::Display> std::fmt::Display for FmtN<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // No items at all: render the placeholder and stop.
        if self.items.is_empty() {
            return f.write_str("[none]");
        }
        for (i, item) in self.items.iter().enumerate() {
            if i > 0 {
                f.write_str(", ")?;
            }
            item.fmt(f)?; // Display::fmt
        }
        // Truncated: show how many items the full collection held.
        if self.len > self.items.len() {
            f.write_fmt(std::format_args!(", ... [{} total]", self.len))?;
        }
        Ok(())
    }
}
// Same rendering as the Display impl above, but delegating to T's Debug.
impl<'a, T: std::fmt::Debug> std::fmt::Debug for FmtN<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut sep = "";
        for item in self.items.iter() {
            f.write_str(sep)?;
            item.fmt(f)?; // Debug::fmt (T: Debug)
            sep = ", ";
        }
        if self.items.is_empty() {
            f.write_str("[none]")?;
        } else if self.len > self.items.len() {
            f.write_fmt(std::format_args!(", ... [{} total]", self.len))?;
        }
        Ok(())
    }
}
// Inspired by `slog_term::TestStdoutWriter`, but to stderr instead.
struct TestStderrWriter;

impl io::Write for TestStderrWriter {
    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
        // Cargo's test harness only captures output to macros like `print!`,
        // `eprint!`, etc. If we logged normally to stderr, then Cargo won't
        // capture this output, and instead Buck would. But, when a Rust test
        // fails, Buck only prints out what Cargo has captured.
        let text =
            std::str::from_utf8(data).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        eprint!("{}", text);
        Ok(data.len())
    }

    fn flush(&mut self) -> io::Result<()> {
        io::stderr().flush()
    }
}
#[test]
// Exhaustively checks FmtN's three rendering regimes for each constructor:
// n == 0 or an empty collection -> "[none]"; n >= LEN -> all items; n < LEN
// -> first n items followed by ", ... [LEN total]".
fn test_fmtn() {
    // Test FmtN::slice with the Display impl
    let v0: Vec<&str> = vec![];
    let v1: Vec<&str> = vec!["a"];
    let v2: Vec<&str> = vec!["a", "b"];
    let v3: Vec<&str> = vec!["a", "b", "c"];
    assert_eq!(format!("{}", FmtN::slice(0, &v0)), "[none]".to_owned());
    assert_eq!(format!("{}", FmtN::slice(1, &v0)), "[none]".to_owned());
    assert_eq!(format!("{}", FmtN::slice(0, &v1)), "[none]".to_owned());
    assert_eq!(format!("{}", FmtN::slice(1, &v1)), "a".to_owned());
    assert_eq!(format!("{}", FmtN::slice(2, &v1)), "a".to_owned());
    assert_eq!(format!("{}", FmtN::slice(0, &v2)), "[none]".to_owned());
    assert_eq!(
        format!("{}", FmtN::slice(1, &v2)),
        "a, ... [2 total]".to_owned()
    );
    assert_eq!(format!("{}", FmtN::slice(2, &v2)), "a, b".to_owned());
    assert_eq!(format!("{}", FmtN::slice(3, &v2)), "a, b".to_owned());
    assert_eq!(format!("{}", FmtN::slice(0, &v3)), "[none]".to_owned());
    assert_eq!(
        format!("{}", FmtN::slice(1, &v3)),
        "a, ... [3 total]".to_owned()
    );
    assert_eq!(
        format!("{}", FmtN::slice(2, &v3)),
        "a, b, ... [3 total]".to_owned()
    );
    assert_eq!(format!("{}", FmtN::slice(3, &v3)), "a, b, c".to_owned());
    assert_eq!(format!("{}", FmtN::slice(4, &v3)), "a, b, c".to_owned());
    // Test FmtN::bset — same expectations as above, driven by set iteration order
    let v0: std::collections::BTreeSet<&str> = v0.into_iter().collect();
    let v1: std::collections::BTreeSet<&str> = v1.into_iter().collect();
    let v2: std::collections::BTreeSet<&str> = v2.into_iter().collect();
    let v3: std::collections::BTreeSet<&str> = v3.into_iter().collect();
    assert_eq!(format!("{}", FmtN::bset(0, &v0)), "[none]".to_owned());
    assert_eq!(format!("{}", FmtN::bset(1, &v0)), "[none]".to_owned());
    assert_eq!(format!("{}", FmtN::bset(0, &v1)), "[none]".to_owned());
    assert_eq!(format!("{}", FmtN::bset(1, &v1)), "a".to_owned());
    assert_eq!(format!("{}", FmtN::bset(2, &v1)), "a".to_owned());
    assert_eq!(format!("{}", FmtN::bset(0, &v2)), "[none]".to_owned());
    assert_eq!(
        format!("{}", FmtN::bset(1, &v2)),
        "a, ... [2 total]".to_owned()
    );
    assert_eq!(format!("{}", FmtN::bset(2, &v2)), "a, b".to_owned());
    assert_eq!(format!("{}", FmtN::bset(3, &v2)), "a, b".to_owned());
    assert_eq!(format!("{}", FmtN::bset(0, &v3)), "[none]".to_owned());
    assert_eq!(
        format!("{}", FmtN::bset(1, &v3)),
        "a, ... [3 total]".to_owned()
    );
    assert_eq!(
        format!("{}", FmtN::bset(2, &v3)),
        "a, b, ... [3 total]".to_owned()
    );
    assert_eq!(format!("{}", FmtN::bset(3, &v3)), "a, b, c".to_owned());
    assert_eq!(format!("{}", FmtN::bset(4, &v3)), "a, b, c".to_owned());
    // Test FmtN::slice with the Debug impl (items render quoted via Path's Debug)
    use std::path::Path;
    use std::path::PathBuf;
    let a = PathBuf::from("a");
    let b = PathBuf::from("b");
    let c = PathBuf::from("c");
    let v0: Vec<&Path> = vec![];
    let v1: Vec<&Path> = vec![&a];
    let v2: Vec<&Path> = vec![&a, &b];
    let v3: Vec<&Path> = vec![&a, &b, &c];
    assert_eq!(format!("{:?}", FmtN::slice(0, &v0)), "[none]".to_owned());
    assert_eq!(format!("{:?}", FmtN::slice(1, &v0)), "[none]".to_owned());
    assert_eq!(format!("{:?}", FmtN::slice(0, &v1)), "[none]".to_owned());
    assert_eq!(format!("{:?}", FmtN::slice(1, &v1)), "\"a\"".to_owned());
    assert_eq!(format!("{:?}", FmtN::slice(2, &v1)), "\"a\"".to_owned());
    assert_eq!(format!("{:?}", FmtN::slice(0, &v2)), "[none]".to_owned());
    assert_eq!(
        format!("{:?}", FmtN::slice(1, &v2)),
        "\"a\", ... [2 total]".to_owned()
    );
    assert_eq!(
        format!("{:?}", FmtN::slice(2, &v2)),
        "\"a\", \"b\"".to_owned()
    );
    assert_eq!(
        format!("{:?}", FmtN::slice(3, &v2)),
        "\"a\", \"b\"".to_owned()
    );
    assert_eq!(format!("{:?}", FmtN::slice(0, &v3)), "[none]".to_owned());
    assert_eq!(
        format!("{:?}", FmtN::slice(1, &v3)),
        "\"a\", ... [3 total]".to_owned()
    );
    assert_eq!(
        format!("{:?}", FmtN::slice(2, &v3)),
        "\"a\", \"b\", ... [3 total]".to_owned()
    );
    assert_eq!(
        format!("{:?}", FmtN::slice(3, &v3)),
        "\"a\", \"b\", \"c\"".to_owned()
    );
    assert_eq!(
        format!("{:?}", FmtN::slice(4, &v3)),
        "\"a\", \"b\", \"c\"".to_owned()
    );
}
Rust | hhvm/hphp/hack/src/utils/hh_slog/locked_file_drain.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use fs2::FileExt;
use slog::Drain;
/// Locks the file when logging. Prevents multi-process writes from mangling if
/// every process follows the advisory lock (flock on Unix).
pub struct LockedFileDrain {
    filepath: std::path::PathBuf,
}

impl LockedFileDrain {
    /// Creates a drain that appends (under flock) to `filepath`.
    pub fn new(filepath: &std::path::Path) -> Self {
        LockedFileDrain {
            filepath: filepath.to_owned(),
        }
    }
}
// Side-effect order is the contract here: open, flock, write, funlock.
// All failures are reported to stderr and swallowed so that logging never
// fails the caller.
impl Drain for LockedFileDrain {
    type Ok = ();
    // A requirement to be used with Async. Since Async would process the logs
    // in a separate thread, we wouldn't be able to handle the error anyway.
    type Err = slog::Never;

    fn log(
        &self,
        record: &slog::Record<'_>,
        logger_values: &slog::OwnedKVList,
    ) -> Result<Self::Ok, Self::Err> {
        // We acquire a new file handle each time we log to ensure the file
        // exists when logging. This is because when keeping a file handle
        // across logs and the log file gets deleted, we will quietly fail to
        // log to the file.
        let file = match std::fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open(&self.filepath)
        {
            Ok(file) => file,
            Err(err) => {
                // Swallow the error: this record is dropped, but logging
                // must not fail the caller.
                eprintln!("Unable to get log file handle. Failed to log. Err: {}", err);
                return Ok(());
            }
        };
        // Acquiring exclusive lock (advisory flock via fs2::FileExt)
        while let Err(err) = file.lock_exclusive() {
            match err.kind() {
                // This shouldn't happen often, but if it's just an
                // interruption, retry.
                std::io::ErrorKind::Interrupted => continue,
                _ => {
                    // Proceed unlocked rather than drop the record; output
                    // may interleave with other processes in this case.
                    eprintln!(
                        "failed to acquire exclusive lock: {}. Logging anyway...",
                        err
                    );
                    break;
                }
            }
        }
        // Serialize the record into the file using slog_json's default format.
        let drain = slog_json::Json::default(&file);
        if let Err(err) = drain.log(record, logger_values) {
            eprintln!("LockedFileDrain failed to log: {}", err);
        }
        // Releasing lock (retry on EINTR, as above)
        while let Err(err) = file.unlock() {
            match err.kind() {
                std::io::ErrorKind::Interrupted => continue,
                _ => {
                    eprintln!("failed to release exclusive lock: {}", err);
                    break;
                }
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use std::io::BufRead;
    use serde_json::Value;
    use slog::o;
    use tempfile::NamedTempFile;
    use super::*;
    // End-to-end check: two records logged through the drain come back out of
    // the file as two JSON lines, in order, with both the message and the
    // structured key/value pair intact.
    #[test]
    fn log_to_file() {
        let temp_logfile = NamedTempFile::new().unwrap();
        let drain = LockedFileDrain::new(temp_logfile.path());
        let logger = slog::Logger::root(drain.fuse(), o!());
        slog::info!(logger, "test msg!"; "test_key" => 1);
        slog::info!(logger, "test msg 2!"; "test_key" => 2);
        // Read the file back one line at a time; each line is one JSON record.
        let mut lines = std::io::BufReader::new(temp_logfile)
            .lines()
            .map(|l| l.unwrap());
        let test_msg_line = lines.next().unwrap();
        if let Ok(Value::Object(json)) = serde_json::from_str(&test_msg_line) {
            assert_eq!(json.get("msg").unwrap(), &Value::String("test msg!".into()));
            assert_eq!(json.get("test_key").unwrap(), &Value::Number(1.into()));
        } else {
            panic!("unable to parse logfile line: {}", &test_msg_line);
        }
        let test_msg_line2 = lines.next().unwrap();
        if let Ok(Value::Object(json)) = serde_json::from_str(&test_msg_line2) {
            assert_eq!(
                json.get("msg").unwrap(),
                &Value::String("test msg 2!".into())
            );
            assert_eq!(json.get("test_key").unwrap(), &Value::Number(2.into()));
        } else {
            panic!("unable to parse logfile line: {}", &test_msg_line2);
        }
    }
}
TOML | hhvm/hphp/hack/src/utils/hh_slog/cargo/hh_slog/Cargo.toml | # @generated by autocargo
[package]
name = "hh_slog"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hh_slog.rs"
[dependencies]
chrono = { version = "0.4", features = ["clock", "serde", "std"], default-features = false }
locked_file_drain = { version = "0.0.0", path = "../locked_file_drain" }
slog = { version = "2.7", features = ["max_level_trace", "nested-values"] }
slog-async = { version = "2.3", features = ["nested-values"] }
slog-envlogger = "2.2"
slog-term = "2.8" |
TOML | hhvm/hphp/hack/src/utils/hh_slog/cargo/locked_file_drain/Cargo.toml | # @generated by autocargo
[package]
name = "locked_file_drain"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../locked_file_drain.rs"
[dependencies]
fs2 = "0.4"
slog = { version = "2.7", features = ["max_level_trace", "nested-values"] }
slog-json = "2.3"
[dev-dependencies]
serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] }
tempfile = "3.5" |
TOML | hhvm/hphp/hack/src/utils/html_entities/Cargo.toml | # @generated by autocargo
[package]
name = "html_entities"
version = "0.0.0"
edition = "2021"
[lib]
path = "html_entities.rs"
[dependencies]
lazy_static = "1.4"
ocaml_helper = { version = "0.0.0", path = "../ocaml_helper" }
regex = "1.9.2"
[dev-dependencies]
pretty_assertions = { version = "1.2", features = ["alloc"], default-features = false } |
Rust | hhvm/hphp/hack/src/utils/html_entities/decoder.rs | /**
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * grant of patent rights can be found in the PATENTS file in the same
 * directory.
*/
/**
* HTML5 special entity decoding
*
* HHVM decodes certain HTML entities present in input strings before
* generating bytecode. In order to generate bytecode identical to HHVM's,
* this module performs the same HTML entity decoding as HHVM.
* Mimics: zend-html.cpp
* The list of entities tested was taken from
* https://dev.w3.org/html5/html-author/charref on 09/27/2017.
*/
pub fn decode(s: &[u8]) -> Option<&'static [u8]> {
match s {
//"bsim" => "∽"
[0x62, 0x73, 0x69, 0x6d] => Some(&[0xe2, 0x88, 0xbd]),
//"sscue" => "≽"
[0x73, 0x73, 0x63, 0x75, 0x65] => Some(&[0xe2, 0x89, 0xbd]),
//"becaus" => "∵"
[0x62, 0x65, 0x63, 0x61, 0x75, 0x73] => Some(&[0xe2, 0x88, 0xb5]),
//"nexist" => "∄"
[0x6e, 0x65, 0x78, 0x69, 0x73, 0x74] => Some(&[0xe2, 0x88, 0x84]),
//"Atilde" => "Ã"
[0x41, 0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xc3, 0x83]),
//"emsp" => " "
[0x65, 0x6d, 0x73, 0x70] => Some(&[0xe2, 0x80, 0x83]),
//"nabla" => "∇"
[0x6e, 0x61, 0x62, 0x6c, 0x61] => Some(&[0xe2, 0x88, 0x87]),
//"lang" => "〈"
[0x6c, 0x61, 0x6e, 0x67] => Some(&[0xe2, 0x8c, 0xa9]),
//"Ugrave" => "Ù"
[0x55, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0x99]),
//"hearts" => "♥"
[0x68, 0x65, 0x61, 0x72, 0x74, 0x73] => Some(&[0xe2, 0x99, 0xa5]),
//"oplus" => "⊕"
[0x6f, 0x70, 0x6c, 0x75, 0x73] => Some(&[0xe2, 0x8a, 0x95]),
//"le" => "≤"
[0x6c, 0x65] => Some(&[0xe2, 0x89, 0xa4]),
//"wreath" => "≀"
[0x77, 0x72, 0x65, 0x61, 0x74, 0x68] => Some(&[0xe2, 0x89, 0x80]),
//"kappa" => "κ"
[0x6b, 0x61, 0x70, 0x70, 0x61] => Some(&[0xce, 0xba]),
//"lrm" => ""
[0x6c, 0x72, 0x6d] => Some(&[0xe2, 0x80, 0x8e]),
//"OElig" => "Œ"
[0x4f, 0x45, 0x6c, 0x69, 0x67] => Some(&[0xc5, 0x92]),
//"prod" => "∏"
[0x70, 0x72, 0x6f, 0x64] => Some(&[0xe2, 0x88, 0x8f]),
//"npr" => "⊀"
[0x6e, 0x70, 0x72] => Some(&[0xe2, 0x8a, 0x80]),
//"notin" => "∉"
[0x6e, 0x6f, 0x74, 0x69, 0x6e] => Some(&[0xe2, 0x88, 0x89]),
//"rsaquo" => "›"
[0x72, 0x73, 0x61, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0xba]),
//"upsilon" => "υ"
[0x75, 0x70, 0x73, 0x69, 0x6c, 0x6f, 0x6e] => Some(&[0xcf, 0x85]),
//"lg" => "≶"
[0x6c, 0x67] => Some(&[0xe2, 0x89, 0xb6]),
//"trade" => "™"
[0x74, 0x72, 0x61, 0x64, 0x65] => Some(&[0xe2, 0x84, 0xa2]),
//"ape" => "≊"
[0x61, 0x70, 0x65] => Some(&[0xe2, 0x89, 0x8a]),
//"bdquo" => "„"
[0x62, 0x64, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0x9e]),
//"theta" => "θ"
[0x74, 0x68, 0x65, 0x74, 0x61] => Some(&[0xce, 0xb8]),
//"ldquo" => "“"
[0x6c, 0x64, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0x9c]),
//"Yuml" => "Ÿ"
[0x59, 0x75, 0x6d, 0x6c] => Some(&[0xc5, 0xb8]),
//"scaron" => "š"
[0x73, 0x63, 0x61, 0x72, 0x6f, 0x6e] => Some(&[0xc5, 0xa1]),
//"permil" => "‰"
[0x70, 0x65, 0x72, 0x6d, 0x69, 0x6c] => Some(&[0xe2, 0x80, 0xb0]),
//"xi" => "ξ"
[0x78, 0x69] => Some(&[0xce, 0xbe]),
//"rsquo" => "’"
[0x72, 0x73, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0x99]),
//"clubs" => "♣"
[0x63, 0x6c, 0x75, 0x62, 0x73] => Some(&[0xe2, 0x99, 0xa3]),
//"Tau" => "Τ"
[0x54, 0x61, 0x75] => Some(&[0xce, 0xa4]),
//"Ecirc" => "Ê"
[0x45, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0x8a]),
//"loz" => "◊"
[0x6c, 0x6f, 0x7a] => Some(&[0xe2, 0x97, 0x8a]),
//"nlt" => "≮"
[0x6e, 0x6c, 0x74] => Some(&[0xe2, 0x89, 0xae]),
//"angmsd" => "∡"
[0x61, 0x6e, 0x67, 0x6d, 0x73, 0x64] => Some(&[0xe2, 0x88, 0xa1]),
//"rlm" => ""
[0x72, 0x6c, 0x6d] => Some(&[0xe2, 0x80, 0x8f]),
//"Nu" => "Ν"
[0x4e, 0x75] => Some(&[0xce, 0x9d]),
//"conint" => "∮"
[0x63, 0x6f, 0x6e, 0x69, 0x6e, 0x74] => Some(&[0xe2, 0x88, 0xae]),
//"Egrave" => "È"
[0x45, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0x88]),
//"szlig" => "ß"
[0x73, 0x7a, 0x6c, 0x69, 0x67] => Some(&[0xc3, 0x9f]),
//"cup" => "∪"
[0x63, 0x75, 0x70] => Some(&[0xe2, 0x88, 0xaa]),
//"piv" => "ϖ"
[0x70, 0x69, 0x76] => Some(&[0xcf, 0x96]),
//"Zeta" => "Ζ"
[0x5a, 0x65, 0x74, 0x61] => Some(&[0xce, 0x96]),
//"gt" => ">"
[0x67, 0x74] => Some(&[0x3e]),
//"darr" => "↓"
[0x64, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x86, 0x93]),
//"frac14" => "¼"
[0x66, 0x72, 0x61, 0x63, 0x31, 0x34] => Some(&[0xc2, 0xbc]),
//"nges" => "≱"
[0x6e, 0x67, 0x65, 0x73] => Some(&[0xe2, 0x89, 0xb1]),
//"frasl" => "⁄"
[0x66, 0x72, 0x61, 0x73, 0x6c] => Some(&[0xe2, 0x81, 0x84]),
//"minus" => "−"
[0x6d, 0x69, 0x6e, 0x75, 0x73] => Some(&[0xe2, 0x88, 0x92]),
//"uarr" => "↑"
[0x75, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x86, 0x91]),
//"zeta" => "ζ"
[0x7a, 0x65, 0x74, 0x61] => Some(&[0xce, 0xb6]),
//"Iota" => "Ι"
[0x49, 0x6f, 0x74, 0x61] => Some(&[0xce, 0x99]),
//"atilde" => "ã"
[0x61, 0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xc3, 0xa3]),
//"agrave" => "à"
[0x61, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0xa0]),
//"Aacute" => "Á"
[0x41, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0x81]),
//"ensp" => " "
[0x65, 0x6e, 0x73, 0x70] => Some(&[0xe2, 0x80, 0x82]),
//"mu" => "μ"
[0x6d, 0x75] => Some(&[0xce, 0xbc]),
//"ocirc" => "ô"
[0x6f, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0xb4]),
//"deg" => "°"
[0x64, 0x65, 0x67] => Some(&[0xc2, 0xb0]),
//"alefsym" => "ℵ"
[0x61, 0x6c, 0x65, 0x66, 0x73, 0x79, 0x6d] => Some(&[0xe2, 0x84, 0xb5]),
//"prime" => "′"
[0x70, 0x72, 0x69, 0x6d, 0x65] => Some(&[0xe2, 0x80, 0xb2]),
//"Gamma" => "Γ"
[0x47, 0x61, 0x6d, 0x6d, 0x61] => Some(&[0xce, 0x93]),
//"Sigma" => "Σ"
[0x53, 0x69, 0x67, 0x6d, 0x61] => Some(&[0xce, 0xa3]),
//"sdot" => "⋅"
[0x73, 0x64, 0x6f, 0x74] => Some(&[0xe2, 0x8b, 0x85]),
//"par" => "∥"
[0x70, 0x61, 0x72] => Some(&[0xe2, 0x88, 0xa5]),
//"comet" => "☄"
[0x63, 0x6f, 0x6d, 0x65, 0x74] => Some(&[0xe2, 0x98, 0x84]),
//"and" => "∧"
[0x61, 0x6e, 0x64] => Some(&[0xe2, 0x88, 0xa7]),
//"ndash" => "–"
[0x6e, 0x64, 0x61, 0x73, 0x68] => Some(&[0xe2, 0x80, 0x93]),
//"oelig" => "œ"
[0x6f, 0x65, 0x6c, 0x69, 0x67] => Some(&[0xc5, 0x93]),
//"compfn" => "∘"
[0x63, 0x6f, 0x6d, 0x70, 0x66, 0x6e] => Some(&[0xe2, 0x88, 0x98]),
//"lAarr" => "⇚"
[0x6c, 0x41, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x9a]),
//"Euml" => "Ë"
[0x45, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0x8b]),
//"lsaquo" => "‹"
[0x6c, 0x73, 0x61, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0xb9]),
//"thinsp" => " "
[0x74, 0x68, 0x69, 0x6e, 0x73, 0x70] => Some(&[0xe2, 0x80, 0x89]),
//"omicron" => "ο"
[0x6f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6e] => Some(&[0xce, 0xbf]),
//"thunderstorm" => "☈"
[
0x74,
0x68,
0x75,
0x6e,
0x64,
0x65,
0x72,
0x73,
0x74,
0x6f,
0x72,
0x6d,
] => Some(&[0xe2, 0x98, 0x88]),
//"cloud" => "☁"
[0x63, 0x6c, 0x6f, 0x75, 0x64] => Some(&[0xe2, 0x98, 0x81]),
//"mnplus" => "∓"
[0x6d, 0x6e, 0x70, 0x6c, 0x75, 0x73] => Some(&[0xe2, 0x88, 0x93]),
//"nsup" => "⊅"
[0x6e, 0x73, 0x75, 0x70] => Some(&[0xe2, 0x8a, 0x85]),
//"mdash" => "—"
[0x6d, 0x64, 0x61, 0x73, 0x68] => Some(&[0xe2, 0x80, 0x94]),
//"twixt" => "≬"
[0x74, 0x77, 0x69, 0x78, 0x74] => Some(&[0xe2, 0x89, 0xac]),
//"angsph" => "∢"
[0x61, 0x6e, 0x67, 0x73, 0x70, 0x68] => Some(&[0xe2, 0x88, 0xa2]),
//"Delta" => "Δ"
[0x44, 0x65, 0x6c, 0x74, 0x61] => Some(&[0xce, 0x94]),
//"lambda" => "λ"
[0x6c, 0x61, 0x6d, 0x62, 0x64, 0x61] => Some(&[0xce, 0xbb]),
//"Eta" => "Η"
[0x45, 0x74, 0x61] => Some(&[0xce, 0x97]),
//"Theta" => "Θ"
[0x54, 0x68, 0x65, 0x74, 0x61] => Some(&[0xce, 0x98]),
//"crarr" => "↵"
[0x63, 0x72, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x86, 0xb5]),
//"Chi" => "Χ"
[0x43, 0x68, 0x69] => Some(&[0xce, 0xa7]),
//"sup3" => "³"
[0x73, 0x75, 0x70, 0x33] => Some(&[0xc2, 0xb3]),
//"snowflake" => "❅"
[0x73, 0x6e, 0x6f, 0x77, 0x66, 0x6c, 0x61, 0x6b, 0x65] => Some(&[0xe2, 0x9d, 0x85]),
//"plusdo" => "∔"
[0x70, 0x6c, 0x75, 0x73, 0x64, 0x6f] => Some(&[0xe2, 0x88, 0x94]),
//"supe" => "⊇"
[0x73, 0x75, 0x70, 0x65] => Some(&[0xe2, 0x8a, 0x87]),
//"Lt" => "≪"
[0x4c, 0x74] => Some(&[0xe2, 0x89, 0xaa]),
//"prop" => "∝"
[0x70, 0x72, 0x6f, 0x70] => Some(&[0xe2, 0x88, 0x9d]),
//"frac34" => "¾"
[0x66, 0x72, 0x61, 0x63, 0x33, 0x34] => Some(&[0xc2, 0xbe]),
//"sup2" => "²"
[0x73, 0x75, 0x70, 0x32] => Some(&[0xc2, 0xb2]),
//"reg" => "®"
[0x72, 0x65, 0x67] => Some(&[0xc2, 0xae]),
//"isin" => "∈"
[0x69, 0x73, 0x69, 0x6e] => Some(&[0xe2, 0x88, 0x88]),
//"sube" => "⊆"
[0x73, 0x75, 0x62, 0x65] => Some(&[0xe2, 0x8a, 0x86]),
//"rAarr" => "⇛"
[0x72, 0x41, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x9b]),
//"gl" => "≷"
[0x67, 0x6c] => Some(&[0xe2, 0x89, 0xb7]),
//"sime" => "≃"
[0x73, 0x69, 0x6d, 0x65] => Some(&[0xe2, 0x89, 0x83]),
//"nsub" => "⊄"
[0x6e, 0x73, 0x75, 0x62] => Some(&[0xe2, 0x8a, 0x84]),
//"hArr" => "⇔"
[0x68, 0x41, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x94]),
//"icirc" => "î"
[0x69, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0xae]),
//"ne" => "≠"
[0x6e, 0x65] => Some(&[0xe2, 0x89, 0xa0]),
//"ucirc" => "û"
[0x75, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0xbb]),
//"coprod" => "∐"
[0x63, 0x6f, 0x70, 0x72, 0x6f, 0x64] => Some(&[0xe2, 0x88, 0x90]),
//"oacute" => "ó"
[0x6f, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0xb3]),
//"cent" => "¢"
[0x63, 0x65, 0x6e, 0x74] => Some(&[0xc2, 0xa2]),
//"nsc" => "⊁"
[0x6e, 0x73, 0x63] => Some(&[0xe2, 0x8a, 0x81]),
//"cupre" => "≼"
[0x63, 0x75, 0x70, 0x72, 0x65] => Some(&[0xe2, 0x89, 0xbc]),
//"lArr" => "⇐"
[0x6c, 0x41, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x90]),
//"pi" => "π"
[0x70, 0x69] => Some(&[0xcf, 0x80]),
//"plusmn" => "±"
[0x70, 0x6c, 0x75, 0x73, 0x6d, 0x6e] => Some(&[0xc2, 0xb1]),
//"Phi" => "Φ"
[0x50, 0x68, 0x69] => Some(&[0xce, 0xa6]),
//"infin" => "∞"
[0x69, 0x6e, 0x66, 0x69, 0x6e] => Some(&[0xe2, 0x88, 0x9e]),
//"divide" => "÷"
[0x64, 0x69, 0x76, 0x69, 0x64, 0x65] => Some(&[0xc3, 0xb7]),
//"tau" => "τ"
[0x74, 0x61, 0x75] => Some(&[0xcf, 0x84]),
//"frac12" => "½"
[0x66, 0x72, 0x61, 0x63, 0x31, 0x32] => Some(&[0xc2, 0xbd]),
//"equiv" => "≡"
[0x65, 0x71, 0x75, 0x69, 0x76] => Some(&[0xe2, 0x89, 0xa1]),
//"bump" => "≎"
[0x62, 0x75, 0x6d, 0x70] => Some(&[0xe2, 0x89, 0x8e]),
//"THORN" => "Þ"
[0x54, 0x48, 0x4f, 0x52, 0x4e] => Some(&[0xc3, 0x9e]),
//"oline" => "‾"
[0x6f, 0x6c, 0x69, 0x6e, 0x65] => Some(&[0xe2, 0x80, 0xbe]),
//"Mu" => "Μ"
[0x4d, 0x75] => Some(&[0xce, 0x9c]),
//"sub" => "⊂"
[0x73, 0x75, 0x62] => Some(&[0xe2, 0x8a, 0x82]),
//"shy" => ""
[0x73, 0x68, 0x79] => Some(&[0xc2, 0xad]),
//"nsim" => "≁"
[0x6e, 0x73, 0x69, 0x6d] => Some(&[0xe2, 0x89, 0x81]),
//"thetasym" => "ϑ"
[0x74, 0x68, 0x65, 0x74, 0x61, 0x73, 0x79, 0x6d] => Some(&[0xcf, 0x91]),
//"Omega" => "Ω"
[0x4f, 0x6d, 0x65, 0x67, 0x61] => Some(&[0xce, 0xa9]),
//"Oslash" => "Ø"
[0x4f, 0x73, 0x6c, 0x61, 0x73, 0x68] => Some(&[0xc3, 0x98]),
//"ang90" => "∟"
[0x61, 0x6e, 0x67, 0x39, 0x30] => Some(&[0xe2, 0x88, 0x9f]),
//"iexcl" => "¡"
[0x69, 0x65, 0x78, 0x63, 0x6c] => Some(&[0xc2, 0xa1]),
//"rArr" => "⇒"
[0x72, 0x41, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x92]),
//"cedil" => "¸"
[0x63, 0x65, 0x64, 0x69, 0x6c] => Some(&[0xc2, 0xb8]),
//"uacute" => "ú"
[0x75, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0xba]),
//"sup" => "⊃"
[0x73, 0x75, 0x70] => Some(&[0xe2, 0x8a, 0x83]),
//"lE" => "≦"
[0x6c, 0x45] => Some(&[0xe2, 0x89, 0xa6]),
//"sum" => "∑"
[0x73, 0x75, 0x6d] => Some(&[0xe2, 0x88, 0x91]),
//"ntilde" => "ñ"
[0x6e, 0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xc3, 0xb1]),
//"lceil" => "⌈"
[0x6c, 0x63, 0x65, 0x69, 0x6c] => Some(&[0xe2, 0x8c, 0x88]),
//"bcong" => "≌"
[0x62, 0x63, 0x6f, 0x6e, 0x67] => Some(&[0xe2, 0x89, 0x8c]),
//"mid" => "∣"
[0x6d, 0x69, 0x64] => Some(&[0xe2, 0x88, 0xa3]),
//"dArr" => "⇓"
[0x64, 0x41, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x93]),
//"sigma" => "σ"
[0x73, 0x69, 0x67, 0x6d, 0x61] => Some(&[0xcf, 0x83]),
//"nsime" => "≄"
[0x6e, 0x73, 0x69, 0x6d, 0x65] => Some(&[0xe2, 0x89, 0x84]),
//"Xi" => "Ξ"
[0x58, 0x69] => Some(&[0xce, 0x9e]),
//"sc" => "≻"
[0x73, 0x63] => Some(&[0xe2, 0x89, 0xbb]),
//"Lambda" => "Λ"
[0x4c, 0x61, 0x6d, 0x62, 0x64, 0x61] => Some(&[0xce, 0x9b]),
//"oslash" => "ø"
[0x6f, 0x73, 0x6c, 0x61, 0x73, 0x68] => Some(&[0xc3, 0xb8]),
//"forall" => "∀"
[0x66, 0x6f, 0x72, 0x61, 0x6c, 0x6c] => Some(&[0xe2, 0x88, 0x80]),
//"umbrella" => "☂"
[0x75, 0x6d, 0x62, 0x72, 0x65, 0x6c, 0x6c, 0x61] => Some(&[0xe2, 0x98, 0x82]),
//"uArr" => "⇑"
[0x75, 0x41, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x91]),
//"diams" => "♦"
[0x64, 0x69, 0x61, 0x6d, 0x73] => Some(&[0xe2, 0x99, 0xa6]),
//"iquest" => "¿"
[0x69, 0x71, 0x75, 0x65, 0x73, 0x74] => Some(&[0xc2, 0xbf]),
//"eta" => "η"
[0x65, 0x74, 0x61] => Some(&[0xce, 0xb7]),
//"gamma" => "γ"
[0x67, 0x61, 0x6d, 0x6d, 0x61] => Some(&[0xce, 0xb3]),
//"iuml" => "ï"
[0x69, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0xaf]),
//"middot" => "·"
[0x6d, 0x69, 0x64, 0x64, 0x6f, 0x74] => Some(&[0xc2, 0xb7]),
//"gE" => "≧"
[0x67, 0x45] => Some(&[0xe2, 0x89, 0xa7]),
//"dagger" => "†"
[0x64, 0x61, 0x67, 0x67, 0x65, 0x72] => Some(&[0xe2, 0x80, 0xa0]),
//"weierp" => "℘"
[0x77, 0x65, 0x69, 0x65, 0x72, 0x70] => Some(&[0xe2, 0x84, 0x98]),
//"ouml" => "ö"
[0x6f, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0xb6]),
//"perp" => "⊥"
[0x70, 0x65, 0x72, 0x70] => Some(&[0xe2, 0x8a, 0xa5]),
//"curren" => "¤"
[0x63, 0x75, 0x72, 0x72, 0x65, 0x6e] => Some(&[0xc2, 0xa4]),
//"amp" => "&"
[0x61, 0x6d, 0x70] => Some(&[0x26]),
//"iota" => "ι"
[0x69, 0x6f, 0x74, 0x61] => Some(&[0xce, 0xb9]),
//"quot" => """
[0x71, 0x75, 0x6f, 0x74] => Some(&[0x22]),
//"ang" => "∠"
[0x61, 0x6e, 0x67] => Some(&[0xe2, 0x88, 0xa0]),
//"Iuml" => "Ï"
[0x49, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0x8f]),
//"spades" => "♠"
[0x73, 0x70, 0x61, 0x64, 0x65, 0x73] => Some(&[0xe2, 0x99, 0xa0]),
//"ge" => "≥"
[0x67, 0x65] => Some(&[0xe2, 0x89, 0xa5]),
//"image" => "ℑ"
[0x69, 0x6d, 0x61, 0x67, 0x65] => Some(&[0xe2, 0x84, 0x91]),
//"psi" => "ψ"
[0x70, 0x73, 0x69] => Some(&[0xcf, 0x88]),
//"Eacute" => "É"
[0x45, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0x89]),
//"uuml" => "ü"
[0x75, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0xbc]),
//"radic" => "√"
[0x72, 0x61, 0x64, 0x69, 0x63] => Some(&[0xe2, 0x88, 0x9a]),
//"ni" => "∋"
[0x6e, 0x69] => Some(&[0xe2, 0x88, 0x8b]),
//"bull" => "•"
[0x62, 0x75, 0x6c, 0x6c] => Some(&[0xe2, 0x80, 0xa2]),
//"times" => "×"
[0x74, 0x69, 0x6d, 0x65, 0x73] => Some(&[0xc3, 0x97]),
//"AElig" => "Æ"
[0x41, 0x45, 0x6c, 0x69, 0x67] => Some(&[0xc3, 0x86]),
//"ordm" => "º"
[0x6f, 0x72, 0x64, 0x6d] => Some(&[0xc2, 0xba]),
//"prsim" => "≾"
[0x70, 0x72, 0x73, 0x69, 0x6d] => Some(&[0xe2, 0x89, 0xbe]),
//"bepsi" => "∍"
[0x62, 0x65, 0x70, 0x73, 0x69] => Some(&[0xe2, 0x88, 0x8d]),
//"epsis" => "∊"
[0x65, 0x70, 0x73, 0x69, 0x73] => Some(&[0xe2, 0x88, 0x8a]),
//"vArr" => "⇕"
[0x76, 0x41, 0x72, 0x72] => Some(&[0xe2, 0x87, 0x95]),
//"ngt" => "≯"
[0x6e, 0x67, 0x74] => Some(&[0xe2, 0x89, 0xaf]),
//"part" => "∂"
[0x70, 0x61, 0x72, 0x74] => Some(&[0xe2, 0x88, 0x82]),
//"otimes" => "⊗"
[0x6f, 0x74, 0x69, 0x6d, 0x65, 0x73] => Some(&[0xe2, 0x8a, 0x97]),
//"micro" => "µ"
[0x6d, 0x69, 0x63, 0x72, 0x6f] => Some(&[0xc2, 0xb5]),
//"raquo" => "»"
[0x72, 0x61, 0x71, 0x75, 0x6f] => Some(&[0xc2, 0xbb]),
//"Ocirc" => "Ô"
[0x4f, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0x94]),
//"macr" => "¯"
[0x6d, 0x61, 0x63, 0x72] => Some(&[0xc2, 0xaf]),
//"Upsilon" => "Υ"
[0x55, 0x70, 0x73, 0x69, 0x6c, 0x6f, 0x6e] => Some(&[0xce, 0xa5]),
//"Auml" => "Ä"
[0x41, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0x84]),
//"sup1" => "¹"
[0x73, 0x75, 0x70, 0x31] => Some(&[0xc2, 0xb9]),
//"iacute" => "í"
[0x69, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0xad]),
//"ETH" => "Ð"
[0x45, 0x54, 0x48] => Some(&[0xc3, 0x90]),
//"Icirc" => "Î"
[0x49, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0x8e]),
//"or" => "∨"
[0x6f, 0x72] => Some(&[0xe2, 0x88, 0xa8]),
//"sigmaf" => "ς"
[0x73, 0x69, 0x67, 0x6d, 0x61, 0x66] => Some(&[0xcf, 0x82]),
//"bumpe" => "≏"
[0x62, 0x75, 0x6d, 0x70, 0x65] => Some(&[0xe2, 0x89, 0x8f]),
//"phi" => "φ"
[0x70, 0x68, 0x69] => Some(&[0xcf, 0x86]),
//"pr" => "≺"
[0x70, 0x72] => Some(&[0xe2, 0x89, 0xba]),
//"Ucirc" => "Û"
[0x55, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0x9b]),
//"beta" => "β"
[0x62, 0x65, 0x74, 0x61] => Some(&[0xce, 0xb2]),
//"aring" => "å"
[0x61, 0x72, 0x69, 0x6e, 0x67] => Some(&[0xc3, 0xa5]),
//"lfloor" => "⌊"
[0x6c, 0x66, 0x6c, 0x6f, 0x6f, 0x72] => Some(&[0xe2, 0x8c, 0x8a]),
//"Epsilon" => "Ε"
[0x45, 0x70, 0x73, 0x69, 0x6c, 0x6f, 0x6e] => Some(&[0xce, 0x95]),
//"upsih" => "ϒ"
[0x75, 0x70, 0x73, 0x69, 0x68] => Some(&[0xcf, 0x92]),
//"thorn" => "þ"
[0x74, 0x68, 0x6f, 0x72, 0x6e] => Some(&[0xc3, 0xbe]),
//"Oacute" => "Ó"
[0x4f, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0x93]),
//"rarrw" => "⇝"
[0x72, 0x61, 0x72, 0x72, 0x77] => Some(&[0xe2, 0x87, 0x9d]),
//"larr" => "←"
[0x6c, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x86, 0x90]),
//"aelig" => "æ"
[0x61, 0x65, 0x6c, 0x69, 0x67] => Some(&[0xc3, 0xa6]),
//"gnE" => "≩"
[0x67, 0x6e, 0x45] => Some(&[0xe2, 0x89, 0xa9]),
//"brvbar" => "¦"
[0x62, 0x72, 0x76, 0x62, 0x61, 0x72] => Some(&[0xc2, 0xa6]),
//"asympeq" => "≍"
[0x61, 0x73, 0x79, 0x6d, 0x70, 0x65, 0x71] => Some(&[0xe2, 0x89, 0x8d]),
//"int" => "∫"
[0x69, 0x6e, 0x74] => Some(&[0xe2, 0x88, 0xab]),
//"ccedil" => "ç"
[0x63, 0x63, 0x65, 0x64, 0x69, 0x6c] => Some(&[0xc3, 0xa7]),
//"npar" => "∦"
[0x6e, 0x70, 0x61, 0x72] => Some(&[0xe2, 0x88, 0xa6]),
//"lsquo" => "‘"
[0x6c, 0x73, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0x98]),
//"harr" => "↔"
[0x68, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x86, 0x94]),
//"Rho" => "Ρ"
[0x52, 0x68, 0x6f] => Some(&[0xce, 0xa1]),
//"pound" => "£"
[0x70, 0x6f, 0x75, 0x6e, 0x64] => Some(&[0xc2, 0xa3]),
//"apos" => "'"
[0x61, 0x70, 0x6f, 0x73] => Some(&[0x27]),
//"real" => "ℜ"
[0x72, 0x65, 0x61, 0x6c] => Some(&[0xe2, 0x84, 0x9c]),
//"hellip" => "…"
[0x68, 0x65, 0x6c, 0x6c, 0x69, 0x70] => Some(&[0xe2, 0x80, 0xa6]),
//"Ouml" => "Ö"
[0x4f, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0x96]),
//"euml" => "ë"
[0x65, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0xab]),
//"uml" => "¨"
[0x75, 0x6d, 0x6c] => Some(&[0xc2, 0xa8]),
//"Kappa" => "Κ"
[0x4b, 0x61, 0x70, 0x70, 0x61] => Some(&[0xce, 0x9a]),
//"rceil" => "⌉"
[0x72, 0x63, 0x65, 0x69, 0x6c] => Some(&[0xe2, 0x8c, 0x89]),
//"notni" => "∌"
[0x6e, 0x6f, 0x74, 0x6e, 0x69] => Some(&[0xe2, 0x88, 0x8c]),
//"ugrave" => "ù"
[0x75, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0xb9]),
//"acirc" => "â"
[0x61, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0xa2]),
//"laquo" => "«"
[0x6c, 0x61, 0x71, 0x75, 0x6f] => Some(&[0xc2, 0xab]),
//"ncong" => "≇"
[0x6e, 0x63, 0x6f, 0x6e, 0x67] => Some(&[0xe2, 0x89, 0x87]),
//"para" => "¶"
[0x70, 0x61, 0x72, 0x61] => Some(&[0xc2, 0xb6]),
//"asymp" => "≈"
[0x61, 0x73, 0x79, 0x6d, 0x70] => Some(&[0xe2, 0x89, 0x88]),
//"Agrave" => "À"
[0x41, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0x80]),
//"Pi" => "Π"
[0x50, 0x69] => Some(&[0xce, 0xa0]),
//"aacute" => "á"
[0x61, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0xa1]),
//"gsim" => "≳"
[0x67, 0x73, 0x69, 0x6d] => Some(&[0xe2, 0x89, 0xb3]),
//"rfloor" => "⌋"
[0x72, 0x66, 0x6c, 0x6f, 0x6f, 0x72] => Some(&[0xe2, 0x8c, 0x8b]),
//"rarr" => "→"
[0x72, 0x61, 0x72, 0x72] => Some(&[0xe2, 0x86, 0x92]),
//"ecirc" => "ê"
[0x65, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0xaa]),
//"delta" => "δ"
[0x64, 0x65, 0x6c, 0x74, 0x61] => Some(&[0xce, 0xb4]),
//"Ograve" => "Ò"
[0x4f, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0x92]),
//"there4" => "∴"
[0x74, 0x68, 0x65, 0x72, 0x65, 0x34] => Some(&[0xe2, 0x88, 0xb4]),
//"Prime" => "″"
[0x50, 0x72, 0x69, 0x6d, 0x65] => Some(&[0xe2, 0x80, 0xb3]),
//"sect" => "§"
[0x73, 0x65, 0x63, 0x74] => Some(&[0xc2, 0xa7]),
//"empty" => "∅"
[0x65, 0x6d, 0x70, 0x74, 0x79] => Some(&[0xe2, 0x88, 0x85]),
//"Omicron" => "Ο"
[0x4f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6e] => Some(&[0xce, 0x9f]),
//"tilde" => "˜"
[0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xcb, 0x9c]),
//"fnof" => "ƒ"
[0x66, 0x6e, 0x6f, 0x66] => Some(&[0xc6, 0x92]),
//"eth" => "ð"
[0x65, 0x74, 0x68] => Some(&[0xc3, 0xb0]),
//"ordf" => "ª"
[0x6f, 0x72, 0x64, 0x66] => Some(&[0xc2, 0xaa]),
//"zwj" => ""
[0x7a, 0x77, 0x6a] => Some(&[0xe2, 0x80, 0x8d]),
//"nmid" => "∤"
[0x6e, 0x6d, 0x69, 0x64] => Some(&[0xe2, 0x88, 0xa4]),
//"rho" => "ρ"
[0x72, 0x68, 0x6f] => Some(&[0xcf, 0x81]),
//"auml" => "ä"
[0x61, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0xa4]),
//"lnE" => "≨"
[0x6c, 0x6e, 0x45] => Some(&[0xe2, 0x89, 0xa8]),
//"zwnj" => ""
[0x7a, 0x77, 0x6e, 0x6a] => Some(&[0xe2, 0x80, 0x8c]),
//"Uacute" => "Ú"
[0x55, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0x9a]),
//"yuml" => "ÿ"
[0x79, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0xbf]),
//"Aring" => "Å"
[0x41, 0x72, 0x69, 0x6e, 0x67] => Some(&[0xc3, 0x85]),
//"lsim" => "≲"
[0x6c, 0x73, 0x69, 0x6d] => Some(&[0xe2, 0x89, 0xb2]),
//"nap" => "≉"
[0x6e, 0x61, 0x70] => Some(&[0xe2, 0x89, 0x89]),
//"Scaron" => "Š"
[0x53, 0x63, 0x61, 0x72, 0x6f, 0x6e] => Some(&[0xc5, 0xa0]),
//"acute" => "´"
[0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc2, 0xb4]),
//"yacute" => "ý"
[0x79, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0xbd]),
//"lowast" => "∗"
[0x6c, 0x6f, 0x77, 0x61, 0x73, 0x74] => Some(&[0xe2, 0x88, 0x97]),
//"egrave" => "è"
[0x65, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0xa8]),
//"Acirc" => "Â"
[0x41, 0x63, 0x69, 0x72, 0x63] => Some(&[0xc3, 0x82]),
//"euro" => "€"
[0x65, 0x75, 0x72, 0x6f] => Some(&[0xe2, 0x82, 0xac]),
//"Gt" => "≫"
[0x47, 0x74] => Some(&[0xe2, 0x89, 0xab]),
//"igrave" => "ì"
[0x69, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0xac]),
//"cap" => "∩"
[0x63, 0x61, 0x70] => Some(&[0xe2, 0x88, 0xa9]),
//"Otilde" => "Õ"
[0x4f, 0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xc3, 0x95]),
//"scsim" => "≿"
[0x73, 0x63, 0x73, 0x69, 0x6d] => Some(&[0xe2, 0x89, 0xbf]),
//"Igrave" => "Ì"
[0x49, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0x8c]),
//"exist" => "∃"
[0x65, 0x78, 0x69, 0x73, 0x74] => Some(&[0xe2, 0x88, 0x83]),
//"nu" => "ν"
[0x6e, 0x75] => Some(&[0xce, 0xbd]),
//"omega" => "ω"
[0x6f, 0x6d, 0x65, 0x67, 0x61] => Some(&[0xcf, 0x89]),
//"snowman" => "☃"
[0x73, 0x6e, 0x6f, 0x77, 0x6d, 0x61, 0x6e] => Some(&[0xe2, 0x98, 0x83]),
//"cong" => "≅"
[0x63, 0x6f, 0x6e, 0x67] => Some(&[0xe2, 0x89, 0x85]),
//"Ccedil" => "Ç"
[0x43, 0x63, 0x65, 0x64, 0x69, 0x6c] => Some(&[0xc3, 0x87]),
//"rang" => "〉"
[0x72, 0x61, 0x6e, 0x67] => Some(&[0xe2, 0x8c, 0xaa]),
//"setmn" => "∖"
[0x73, 0x65, 0x74, 0x6d, 0x6e] => Some(&[0xe2, 0x88, 0x96]),
//"rdquo" => "”"
[0x72, 0x64, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0x9d]),
//"yen" => "¥"
[0x79, 0x65, 0x6e] => Some(&[0xc2, 0xa5]),
//"ograve" => "ò"
[0x6f, 0x67, 0x72, 0x61, 0x76, 0x65] => Some(&[0xc3, 0xb2]),
//"Psi" => "Ψ"
[0x50, 0x73, 0x69] => Some(&[0xce, 0xa8]),
//"lt" => "<"
[0x6c, 0x74] => Some(&[0x3c]),
//"epsilon" => "ε"
[0x65, 0x70, 0x73, 0x69, 0x6c, 0x6f, 0x6e] => Some(&[0xce, 0xb5]),
//"alpha" => "α"
[0x61, 0x6c, 0x70, 0x68, 0x61] => Some(&[0xce, 0xb1]),
//"sim" => "∼"
[0x73, 0x69, 0x6d] => Some(&[0xe2, 0x88, 0xbc]),
//"Uuml" => "Ü"
[0x55, 0x75, 0x6d, 0x6c] => Some(&[0xc3, 0x9c]),
//"not" => "¬"
[0x6e, 0x6f, 0x74] => Some(&[0xc2, 0xac]),
//"nbsp" => " "
[0x6e, 0x62, 0x73, 0x70] => Some(&[0xc2, 0xa0]),
//"circ" => "ˆ"
[0x63, 0x69, 0x72, 0x63] => Some(&[0xcb, 0x86]),
//"copy" => "©"
[0x63, 0x6f, 0x70, 0x79] => Some(&[0xc2, 0xa9]),
//"chi" => "χ"
[0x63, 0x68, 0x69] => Some(&[0xcf, 0x87]),
//"Beta" => "Β"
[0x42, 0x65, 0x74, 0x61] => Some(&[0xce, 0x92]),
//"Alpha" => "Α"
[0x41, 0x6c, 0x70, 0x68, 0x61] => Some(&[0xce, 0x91]),
//"Ntilde" => "Ñ"
[0x4e, 0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xc3, 0x91]),
//"Dagger" => "‡"
[0x44, 0x61, 0x67, 0x67, 0x65, 0x72] => Some(&[0xe2, 0x80, 0xa1]),
//"sbquo" => "‚"
[0x73, 0x62, 0x71, 0x75, 0x6f] => Some(&[0xe2, 0x80, 0x9a]),
//"eacute" => "é"
[0x65, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0xa9]),
//"nles" => "≰"
[0x6e, 0x6c, 0x65, 0x73] => Some(&[0xe2, 0x89, 0xb0]),
//"comp" => "∁"
[0x63, 0x6f, 0x6d, 0x70] => Some(&[0xe2, 0x88, 0x81]),
//"Yacute" => "Ý"
[0x59, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0x9d]),
//"Iacute" => "Í"
[0x49, 0x61, 0x63, 0x75, 0x74, 0x65] => Some(&[0xc3, 0x8d]),
//"otilde" => "õ"
[0x6f, 0x74, 0x69, 0x6c, 0x64, 0x65] => Some(&[0xc3, 0xb5]),
_ => None,
}
}
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;

    /// Exhaustive spot-check of the entity table: each entry is
    /// `(entity name, expected UTF-8 expansion)`.
    ///
    /// Invisible or whitespace expansions (`nbsp`, `shy`, `zwj`, `lrm`, ...)
    /// and glyphs with visually identical look-alikes (`micro` U+00B5 vs.
    /// Greek mu, `rang`/`lang` U+232A/U+2329 vs. the CJK brackets) are
    /// written as `\u{..}` escapes so the expected code point is unambiguous.
    #[test]
    fn test() {
        let cases: &[(&str, &str)] = &[
            ("bsim", "∽"), ("sscue", "≽"), ("becaus", "∵"), ("nexist", "∄"),
            ("Atilde", "Ã"), ("emsp", "\u{2003}"), ("nabla", "∇"), ("lang", "\u{2329}"),
            ("Ugrave", "Ù"), ("hearts", "♥"), ("oplus", "⊕"), ("le", "≤"),
            ("wreath", "≀"), ("kappa", "κ"), ("lrm", "\u{200E}"), ("OElig", "Œ"),
            ("prod", "∏"), ("npr", "⊀"), ("notin", "∉"), ("rsaquo", "›"),
            ("upsilon", "υ"), ("lg", "≶"), ("trade", "™"), ("ape", "≊"),
            ("bdquo", "„"), ("theta", "θ"), ("ldquo", "“"), ("Yuml", "Ÿ"),
            ("scaron", "š"), ("permil", "‰"), ("xi", "ξ"), ("rsquo", "’"),
            ("clubs", "♣"), ("Tau", "Τ"), ("Ecirc", "Ê"), ("loz", "◊"),
            ("nlt", "≮"), ("angmsd", "∡"), ("rlm", "\u{200F}"), ("Nu", "Ν"),
            ("conint", "∮"), ("Egrave", "È"), ("szlig", "ß"), ("cup", "∪"),
            ("piv", "ϖ"), ("Zeta", "Ζ"), ("gt", ">"), ("darr", "↓"),
            ("frac14", "¼"), ("nges", "≱"), ("frasl", "⁄"), ("minus", "−"),
            ("uarr", "↑"), ("zeta", "ζ"), ("Iota", "Ι"), ("atilde", "ã"),
            ("agrave", "à"), ("Aacute", "Á"), ("ensp", "\u{2002}"), ("mu", "μ"),
            ("ocirc", "ô"), ("deg", "°"), ("alefsym", "ℵ"), ("prime", "′"),
            ("Gamma", "Γ"), ("Sigma", "Σ"), ("sdot", "⋅"), ("par", "∥"),
            ("comet", "☄"), ("and", "∧"), ("ndash", "–"), ("oelig", "œ"),
            ("compfn", "∘"), ("lAarr", "⇚"), ("Euml", "Ë"), ("lsaquo", "‹"),
            ("thinsp", "\u{2009}"), ("omicron", "ο"), ("thunderstorm", "☈"), ("cloud", "☁"),
            ("mnplus", "∓"), ("nsup", "⊅"), ("mdash", "—"), ("twixt", "≬"),
            ("angsph", "∢"), ("Delta", "Δ"), ("lambda", "λ"), ("Eta", "Η"),
            ("Theta", "Θ"), ("crarr", "↵"), ("Chi", "Χ"), ("sup3", "³"),
            ("snowflake", "❅"), ("plusdo", "∔"), ("supe", "⊇"), ("Lt", "≪"),
            ("prop", "∝"), ("frac34", "¾"), ("sup2", "²"), ("reg", "®"),
            ("isin", "∈"), ("sube", "⊆"), ("rAarr", "⇛"), ("gl", "≷"),
            ("sime", "≃"), ("nsub", "⊄"), ("hArr", "⇔"), ("icirc", "î"),
            ("ne", "≠"), ("ucirc", "û"), ("coprod", "∐"), ("oacute", "ó"),
            ("cent", "¢"), ("nsc", "⊁"), ("cupre", "≼"), ("lArr", "⇐"),
            ("pi", "π"), ("plusmn", "±"), ("Phi", "Φ"), ("infin", "∞"),
            ("divide", "÷"), ("tau", "τ"), ("frac12", "½"), ("equiv", "≡"),
            ("bump", "≎"), ("THORN", "Þ"), ("oline", "‾"), ("Mu", "Μ"),
            ("sub", "⊂"), ("shy", "\u{AD}"), ("nsim", "≁"), ("thetasym", "ϑ"),
            ("Omega", "Ω"), ("Oslash", "Ø"), ("ang90", "∟"), ("iexcl", "¡"),
            ("rArr", "⇒"), ("cedil", "¸"), ("uacute", "ú"), ("sup", "⊃"),
            ("lE", "≦"), ("sum", "∑"), ("ntilde", "ñ"), ("lceil", "⌈"),
            ("bcong", "≌"), ("mid", "∣"), ("dArr", "⇓"), ("sigma", "σ"),
            ("nsime", "≄"), ("Xi", "Ξ"), ("sc", "≻"), ("Lambda", "Λ"),
            ("oslash", "ø"), ("forall", "∀"), ("umbrella", "☂"), ("uArr", "⇑"),
            ("diams", "♦"), ("iquest", "¿"), ("eta", "η"), ("gamma", "γ"),
            ("iuml", "ï"), ("middot", "·"), ("gE", "≧"), ("dagger", "†"),
            ("weierp", "℘"), ("ouml", "ö"), ("perp", "⊥"), ("curren", "¤"),
            ("amp", "&"), ("iota", "ι"), ("quot", "\""), ("ang", "∠"),
            ("Iuml", "Ï"), ("spades", "♠"), ("ge", "≥"), ("image", "ℑ"),
            ("psi", "ψ"), ("Eacute", "É"), ("uuml", "ü"), ("radic", "√"),
            ("ni", "∋"), ("bull", "•"), ("times", "×"), ("AElig", "Æ"),
            ("ordm", "º"), ("prsim", "≾"), ("bepsi", "∍"), ("epsis", "∊"),
            ("vArr", "⇕"), ("ngt", "≯"), ("part", "∂"), ("otimes", "⊗"),
            ("micro", "\u{B5}"), ("raquo", "»"), ("Ocirc", "Ô"), ("macr", "¯"),
            ("Upsilon", "Υ"), ("Auml", "Ä"), ("sup1", "¹"), ("iacute", "í"),
            ("ETH", "Ð"), ("Icirc", "Î"), ("or", "∨"), ("sigmaf", "ς"),
            ("bumpe", "≏"), ("phi", "φ"), ("pr", "≺"), ("Ucirc", "Û"),
            ("beta", "β"), ("aring", "å"), ("lfloor", "⌊"), ("Epsilon", "Ε"),
            ("upsih", "ϒ"), ("thorn", "þ"), ("Oacute", "Ó"), ("rarrw", "⇝"),
            ("larr", "←"), ("aelig", "æ"), ("gnE", "≩"), ("brvbar", "¦"),
            ("asympeq", "≍"), ("int", "∫"), ("ccedil", "ç"), ("npar", "∦"),
            ("lsquo", "‘"), ("harr", "↔"), ("Rho", "Ρ"), ("pound", "£"),
            ("apos", "'"), ("real", "ℜ"), ("hellip", "…"), ("Ouml", "Ö"),
            ("euml", "ë"), ("uml", "¨"), ("Kappa", "Κ"), ("rceil", "⌉"),
            ("notni", "∌"), ("ugrave", "ù"), ("acirc", "â"), ("laquo", "«"),
            ("ncong", "≇"), ("para", "¶"), ("asymp", "≈"), ("Agrave", "À"),
            ("Pi", "Π"), ("aacute", "á"), ("gsim", "≳"), ("rfloor", "⌋"),
            ("rarr", "→"), ("ecirc", "ê"), ("delta", "δ"), ("Ograve", "Ò"),
            ("there4", "∴"), ("Prime", "″"), ("sect", "§"), ("empty", "∅"),
            ("Omicron", "Ο"), ("tilde", "˜"), ("fnof", "ƒ"), ("eth", "ð"),
            ("ordf", "ª"), ("zwj", "\u{200D}"), ("nmid", "∤"), ("rho", "ρ"),
            ("auml", "ä"), ("lnE", "≨"), ("zwnj", "\u{200C}"), ("Uacute", "Ú"),
            ("yuml", "ÿ"), ("Aring", "Å"), ("lsim", "≲"), ("nap", "≉"),
            ("Scaron", "Š"), ("acute", "´"), ("yacute", "ý"), ("lowast", "∗"),
            ("egrave", "è"), ("Acirc", "Â"), ("euro", "€"), ("Gt", "≫"),
            ("igrave", "ì"), ("cap", "∩"), ("Otilde", "Õ"), ("scsim", "≿"),
            ("Igrave", "Ì"), ("exist", "∃"), ("nu", "ν"), ("omega", "ω"),
            ("snowman", "☃"), ("cong", "≅"), ("Ccedil", "Ç"), ("rang", "\u{232A}"),
            ("setmn", "∖"), ("rdquo", "”"), ("yen", "¥"), ("ograve", "ò"),
            ("Psi", "Ψ"), ("lt", "<"), ("epsilon", "ε"), ("alpha", "α"),
            ("sim", "∼"), ("Uuml", "Ü"), ("not", "¬"), ("nbsp", "\u{A0}"),
            ("circ", "ˆ"), ("copy", "©"), ("chi", "χ"), ("Beta", "Β"),
            ("Alpha", "Α"), ("Ntilde", "Ñ"), ("Dagger", "‡"), ("sbquo", "‚"),
            ("eacute", "é"), ("nles", "≰"), ("comp", "∁"), ("Yacute", "Ý"),
            ("Iacute", "Í"), ("otilde", "õ"),
        ];
        for (name, expected) in cases {
            assert_eq!(
                decode(name.as_bytes()).unwrap(),
                expected.as_bytes(),
                "entity: {}",
                name
            );
        }
    }
}
Rust | hhvm/hphp/hack/src/utils/html_entities/html_entities.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#[macro_use]
extern crate lazy_static;
pub mod decoder;
use ocaml_helper::int_of_string_opt;
use regex::bytes::Captures;
use regex::bytes::Regex;
/// Encodes the code point `k` into `r`, returning the slice of `r` that
/// was written. Follows the original (pre-RFC 3629) UTF-8 scheme, which
/// allows 5- and 6-byte sequences for values up to 2^31 - 1.
fn utf32_to_utf8(k: i64, r: &mut [u8; 6]) -> &[u8] {
    // Continuation byte carrying bits [shift, shift + 6) of `k`.
    fn tail(k: i64, shift: u32) -> u8 {
        0x80 | (((k >> shift) & 0x3f) as u8)
    }
    let len = if k < 0x80 {
        r[0] = k as u8;
        1
    } else if k < 0x800 {
        r[0] = 0xc0 | ((k >> 6) as u8);
        r[1] = tail(k, 0);
        2
    } else if k < 0x10000 {
        r[0] = 0xe0 | ((k >> 12) as u8);
        r[1] = tail(k, 6);
        r[2] = tail(k, 0);
        3
    } else if k < 0x200000 {
        r[0] = 0xf0 | ((k >> 18) as u8);
        r[1] = tail(k, 12);
        r[2] = tail(k, 6);
        r[3] = tail(k, 0);
        4
    } else if k < 0x4000000 {
        r[0] = 0xf8 | ((k >> 24) as u8);
        r[1] = tail(k, 18);
        r[2] = tail(k, 12);
        r[3] = tail(k, 6);
        r[4] = tail(k, 0);
        5
    } else {
        r[0] = 0xfc | ((k >> 30) as u8);
        r[1] = tail(k, 24);
        r[2] = tail(k, 18);
        r[3] = tail(k, 12);
        r[4] = tail(k, 6);
        r[5] = tail(k, 0);
        6
    };
    &r[..len]
}
/// Allocating convenience wrapper around `utf32_to_utf8`: encodes `k`
/// into a freshly allocated `Vec<u8>`.
pub fn utf32_to_utf8_alloc(k: i64) -> Vec<u8> {
    let mut buf = [0u8; 6];
    utf32_to_utf8(k, &mut buf).to_vec()
}
/// Decodes a numeric character reference (the full match, e.g. b"&#60;"
/// or b"&#x3c;") into its UTF-8 bytes, returning an empty vector when
/// the number does not parse.
fn decode_u32(s: &[u8]) -> Vec<u8> {
    // Strip the leading "&#" and trailing ";" and prepend a '0'. The
    // prefix is what makes hex references work: "x3c" becomes "0x3c",
    // which int_of_string_opt (presumably OCaml `int_of_string`
    // semantics -- confirm in ocaml_helper) parses as hexadecimal,
    // while a decimal value is unaffected by an extra leading zero.
    let mut s1: Vec<u8> = Vec::with_capacity(s.len() - 1);
    s1.push(b'0');
    s1.extend_from_slice(&s[2..s.len() - 1]);
    let n = int_of_string_opt(&s1);
    match n {
        Some(n) => utf32_to_utf8_alloc(n),
        None => vec![],
    }
}
/// Looks up a named entity reference (the full match, e.g. b"&amp;") in
/// the generated table; unknown names are returned unchanged.
fn decode_charref<'a>(s: &'a [u8]) -> &'a [u8] {
    // Drop the leading '&' and trailing ';' before the table lookup.
    let name = &s[1..s.len() - 1];
    match decoder::decode(name) {
        Some(expansion) => expansion,
        None => s,
    }
}
/// Replaces every `&...;` entity reference in `s` -- named (`&amp;`) or
/// numeric (`&#38;`, `&#x26;`) -- with its UTF-8 expansion, returning a
/// new byte vector. Unknown named references are left untouched;
/// unparseable numeric references are dropped.
pub fn decode<'a>(s: &'a [u8]) -> Vec<u8> {
    lazy_static! {
        static ref ENTITY: Regex = Regex::new("&[^;&]+;").unwrap();
    }
    let replaced = ENTITY.replace_all(s, |caps: &Captures<'_>| {
        let m = match caps.get(0) {
            Some(m) => m.as_bytes(),
            None => return vec![],
        };
        if m[1] == b'#' {
            // Numeric reference.
            decode_u32(m)
        } else {
            // Named reference. The closure must return an owned value
            // (AsRef<[u8]>), so copy the looked-up slice.
            decode_charref(m).to_vec()
        }
    });
    replaced.to_vec()
}
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;

    #[test]
    fn test() {
        // NOTE(review): the previous inputs embedded non-ASCII bytes in
        // byte-string literals (e.g. b"Š"), which Rust rejects -- the
        // entity-escaped inputs appear to have been mangled by an
        // entity-decoding pass. Restored to their escaped forms.
        // Named reference.
        assert_eq!(decode(b"Š"), Vec::from("Š"));
        // Missing ';' -- the reference regex does not match, input is
        // returned unchanged.
        assert_eq!(decode(b"&Scaron"), Vec::from("&Scaron"));
        assert_eq!(decode(b"&#Scaron"), Vec::from("&#Scaron"));
        // Decimal and hexadecimal numeric references.
        assert_eq!(decode(b"Š"), Vec::from("Š"));
        assert_eq!(decode(b"Š"), Vec::from("Š"));
        // Entity embedded in surrounding text.
        assert_eq!(decode(b"abcŠefg"), Vec::from("abcŠefg"));
        assert_eq!(decode(b"!"), Vec::from("!"));
    }
}
Rust | hhvm/hphp/hack/src/utils/html_entities/test/bench.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#![feature(test)]
#[cfg(test)]
extern crate test;
/// The bench suite only runs under `--bench`; when executed directly,
/// print the command that runs it properly.
fn main() {
    const COMMAND: &str =
        "buck run @//mode/opt //hphp/hack/src/utils/html_entities/tests:bench-unittest -- --bench";
    println!("Run this bench suite with: {}", COMMAND);
}
#[cfg(test)]
mod tests {
    use html_entities::*;
    use test::Bencher;

    // Each bench calls html_entities::decode 100 times per iteration to
    // amortize the Bencher's per-iteration overhead.
    //
    // NOTE(review): the inputs ("comp", "bsim") lack the `&...;` wrapper,
    // so the entity regex never matches and the decoder table is never
    // consulted -- despite the first/last names, these measure the regex
    // scan plus the output copy. Confirm whether "∁"-style inputs
    // were intended.
    #[bench]
    fn bench_last(b: &mut Bencher) {
        b.iter(|| {
            for _i in 0..100 {
                decode("comp".as_bytes());
            }
        });
    }

    #[bench]
    fn bench_first(b: &mut Bencher) {
        b.iter(|| {
            for _i in 0..100 {
                decode("bsim".as_bytes());
            }
        });
    }
}
TOML | hhvm/hphp/hack/src/utils/html_entities/test/Cargo.toml | # @generated by autocargo
[package]
name = "tests"
version = "0.0.0"
edition = "2021"
[lib]
path = "utf32_to_utf8.rs"
[[bin]]
name = "bench"
path = "bench.rs"
[dev-dependencies]
html_entities = { version = "0.0.0", path = ".." }
pretty_assertions = { version = "1.2", features = ["alloc"], default-features = false } |
Rust | hhvm/hphp/hack/src/utils/html_entities/test/utf32_to_utf8.rs | /**
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree. An additional
* directory.
*
**
*
* THIS FILE IS @generated; DO NOT EDIT IT
* To regenerate this file, run
*
* buck run //hphp/hack/src:generate_full_fidelity
*
**
*
*/
#[cfg(test)]
// NOTE(review): this file is @generated (see header) — regenerate with
// `buck run //hphp/hack/src:generate_full_fidelity` rather than hand-editing.
mod tests {
    use html_entities::decoder::*;
    use pretty_assertions::assert_eq;
    // Encodes a single UTF-32 code point as its UTF-8 byte sequence.
    fn helper(x: i64) -> Vec<u8> {
        html_entities::utf32_to_utf8_alloc(x)
    }
    // For every named HTML entity in the table, decoding the entity name must
    // produce exactly the UTF-8 encoding of its numeric code point.
    #[test]
    fn test() {
        assert_eq!(&helper(160)[..], decode(b"nbsp").unwrap());
        assert_eq!(&helper(161)[..], decode(b"iexcl").unwrap());
        assert_eq!(&helper(162)[..], decode(b"cent").unwrap());
        assert_eq!(&helper(163)[..], decode(b"pound").unwrap());
        assert_eq!(&helper(164)[..], decode(b"curren").unwrap());
        assert_eq!(&helper(165)[..], decode(b"yen").unwrap());
        assert_eq!(&helper(166)[..], decode(b"brvbar").unwrap());
        assert_eq!(&helper(167)[..], decode(b"sect").unwrap());
        assert_eq!(&helper(168)[..], decode(b"uml").unwrap());
        assert_eq!(&helper(169)[..], decode(b"copy").unwrap());
        assert_eq!(&helper(170)[..], decode(b"ordf").unwrap());
        assert_eq!(&helper(171)[..], decode(b"laquo").unwrap());
        assert_eq!(&helper(172)[..], decode(b"not").unwrap());
        assert_eq!(&helper(173)[..], decode(b"shy").unwrap());
        assert_eq!(&helper(174)[..], decode(b"reg").unwrap());
        assert_eq!(&helper(175)[..], decode(b"macr").unwrap());
        assert_eq!(&helper(176)[..], decode(b"deg").unwrap());
        assert_eq!(&helper(177)[..], decode(b"plusmn").unwrap());
        assert_eq!(&helper(178)[..], decode(b"sup2").unwrap());
        assert_eq!(&helper(179)[..], decode(b"sup3").unwrap());
        assert_eq!(&helper(180)[..], decode(b"acute").unwrap());
        assert_eq!(&helper(181)[..], decode(b"micro").unwrap());
        assert_eq!(&helper(182)[..], decode(b"para").unwrap());
        assert_eq!(&helper(183)[..], decode(b"middot").unwrap());
        assert_eq!(&helper(184)[..], decode(b"cedil").unwrap());
        assert_eq!(&helper(185)[..], decode(b"sup1").unwrap());
        assert_eq!(&helper(186)[..], decode(b"ordm").unwrap());
        assert_eq!(&helper(187)[..], decode(b"raquo").unwrap());
        assert_eq!(&helper(188)[..], decode(b"frac14").unwrap());
        assert_eq!(&helper(189)[..], decode(b"frac12").unwrap());
        assert_eq!(&helper(190)[..], decode(b"frac34").unwrap());
        assert_eq!(&helper(191)[..], decode(b"iquest").unwrap());
        assert_eq!(&helper(192)[..], decode(b"Agrave").unwrap());
        assert_eq!(&helper(193)[..], decode(b"Aacute").unwrap());
        assert_eq!(&helper(194)[..], decode(b"Acirc").unwrap());
        assert_eq!(&helper(195)[..], decode(b"Atilde").unwrap());
        assert_eq!(&helper(196)[..], decode(b"Auml").unwrap());
        assert_eq!(&helper(197)[..], decode(b"Aring").unwrap());
        assert_eq!(&helper(198)[..], decode(b"AElig").unwrap());
        assert_eq!(&helper(199)[..], decode(b"Ccedil").unwrap());
        assert_eq!(&helper(200)[..], decode(b"Egrave").unwrap());
        assert_eq!(&helper(201)[..], decode(b"Eacute").unwrap());
        assert_eq!(&helper(202)[..], decode(b"Ecirc").unwrap());
        assert_eq!(&helper(203)[..], decode(b"Euml").unwrap());
        assert_eq!(&helper(204)[..], decode(b"Igrave").unwrap());
        assert_eq!(&helper(205)[..], decode(b"Iacute").unwrap());
        assert_eq!(&helper(206)[..], decode(b"Icirc").unwrap());
        assert_eq!(&helper(207)[..], decode(b"Iuml").unwrap());
        assert_eq!(&helper(208)[..], decode(b"ETH").unwrap());
        assert_eq!(&helper(209)[..], decode(b"Ntilde").unwrap());
        assert_eq!(&helper(210)[..], decode(b"Ograve").unwrap());
        assert_eq!(&helper(211)[..], decode(b"Oacute").unwrap());
        assert_eq!(&helper(212)[..], decode(b"Ocirc").unwrap());
        assert_eq!(&helper(213)[..], decode(b"Otilde").unwrap());
        assert_eq!(&helper(214)[..], decode(b"Ouml").unwrap());
        assert_eq!(&helper(215)[..], decode(b"times").unwrap());
        assert_eq!(&helper(216)[..], decode(b"Oslash").unwrap());
        assert_eq!(&helper(217)[..], decode(b"Ugrave").unwrap());
        assert_eq!(&helper(218)[..], decode(b"Uacute").unwrap());
        assert_eq!(&helper(219)[..], decode(b"Ucirc").unwrap());
        assert_eq!(&helper(220)[..], decode(b"Uuml").unwrap());
        assert_eq!(&helper(221)[..], decode(b"Yacute").unwrap());
        assert_eq!(&helper(222)[..], decode(b"THORN").unwrap());
        assert_eq!(&helper(223)[..], decode(b"szlig").unwrap());
        assert_eq!(&helper(224)[..], decode(b"agrave").unwrap());
        assert_eq!(&helper(225)[..], decode(b"aacute").unwrap());
        assert_eq!(&helper(226)[..], decode(b"acirc").unwrap());
        assert_eq!(&helper(227)[..], decode(b"atilde").unwrap());
        assert_eq!(&helper(228)[..], decode(b"auml").unwrap());
        assert_eq!(&helper(229)[..], decode(b"aring").unwrap());
        assert_eq!(&helper(230)[..], decode(b"aelig").unwrap());
        assert_eq!(&helper(231)[..], decode(b"ccedil").unwrap());
        assert_eq!(&helper(232)[..], decode(b"egrave").unwrap());
        assert_eq!(&helper(233)[..], decode(b"eacute").unwrap());
        assert_eq!(&helper(234)[..], decode(b"ecirc").unwrap());
        assert_eq!(&helper(235)[..], decode(b"euml").unwrap());
        assert_eq!(&helper(236)[..], decode(b"igrave").unwrap());
        assert_eq!(&helper(237)[..], decode(b"iacute").unwrap());
        assert_eq!(&helper(238)[..], decode(b"icirc").unwrap());
        assert_eq!(&helper(239)[..], decode(b"iuml").unwrap());
        assert_eq!(&helper(240)[..], decode(b"eth").unwrap());
        assert_eq!(&helper(241)[..], decode(b"ntilde").unwrap());
        assert_eq!(&helper(242)[..], decode(b"ograve").unwrap());
        assert_eq!(&helper(243)[..], decode(b"oacute").unwrap());
        assert_eq!(&helper(244)[..], decode(b"ocirc").unwrap());
        assert_eq!(&helper(245)[..], decode(b"otilde").unwrap());
        assert_eq!(&helper(246)[..], decode(b"ouml").unwrap());
        assert_eq!(&helper(247)[..], decode(b"divide").unwrap());
        assert_eq!(&helper(248)[..], decode(b"oslash").unwrap());
        assert_eq!(&helper(249)[..], decode(b"ugrave").unwrap());
        assert_eq!(&helper(250)[..], decode(b"uacute").unwrap());
        assert_eq!(&helper(251)[..], decode(b"ucirc").unwrap());
        assert_eq!(&helper(252)[..], decode(b"uuml").unwrap());
        assert_eq!(&helper(253)[..], decode(b"yacute").unwrap());
        assert_eq!(&helper(254)[..], decode(b"thorn").unwrap());
        assert_eq!(&helper(255)[..], decode(b"yuml").unwrap());
        assert_eq!(&helper(338)[..], decode(b"OElig").unwrap());
        assert_eq!(&helper(339)[..], decode(b"oelig").unwrap());
        assert_eq!(&helper(352)[..], decode(b"Scaron").unwrap());
        assert_eq!(&helper(353)[..], decode(b"scaron").unwrap());
        assert_eq!(&helper(376)[..], decode(b"Yuml").unwrap());
        assert_eq!(&helper(402)[..], decode(b"fnof").unwrap());
        assert_eq!(&helper(710)[..], decode(b"circ").unwrap());
        assert_eq!(&helper(732)[..], decode(b"tilde").unwrap());
        assert_eq!(&helper(913)[..], decode(b"Alpha").unwrap());
        assert_eq!(&helper(914)[..], decode(b"Beta").unwrap());
        assert_eq!(&helper(915)[..], decode(b"Gamma").unwrap());
        assert_eq!(&helper(916)[..], decode(b"Delta").unwrap());
        assert_eq!(&helper(917)[..], decode(b"Epsilon").unwrap());
        assert_eq!(&helper(918)[..], decode(b"Zeta").unwrap());
        assert_eq!(&helper(919)[..], decode(b"Eta").unwrap());
        assert_eq!(&helper(920)[..], decode(b"Theta").unwrap());
        assert_eq!(&helper(921)[..], decode(b"Iota").unwrap());
        assert_eq!(&helper(922)[..], decode(b"Kappa").unwrap());
        assert_eq!(&helper(923)[..], decode(b"Lambda").unwrap());
        assert_eq!(&helper(924)[..], decode(b"Mu").unwrap());
        assert_eq!(&helper(925)[..], decode(b"Nu").unwrap());
        assert_eq!(&helper(926)[..], decode(b"Xi").unwrap());
        assert_eq!(&helper(927)[..], decode(b"Omicron").unwrap());
        assert_eq!(&helper(928)[..], decode(b"Pi").unwrap());
        assert_eq!(&helper(929)[..], decode(b"Rho").unwrap());
        assert_eq!(&helper(931)[..], decode(b"Sigma").unwrap());
        assert_eq!(&helper(932)[..], decode(b"Tau").unwrap());
        assert_eq!(&helper(933)[..], decode(b"Upsilon").unwrap());
        assert_eq!(&helper(934)[..], decode(b"Phi").unwrap());
        assert_eq!(&helper(935)[..], decode(b"Chi").unwrap());
        assert_eq!(&helper(936)[..], decode(b"Psi").unwrap());
        assert_eq!(&helper(937)[..], decode(b"Omega").unwrap());
        assert_eq!(&helper(945)[..], decode(b"alpha").unwrap());
        assert_eq!(&helper(946)[..], decode(b"beta").unwrap());
        assert_eq!(&helper(947)[..], decode(b"gamma").unwrap());
        assert_eq!(&helper(948)[..], decode(b"delta").unwrap());
        assert_eq!(&helper(949)[..], decode(b"epsilon").unwrap());
        assert_eq!(&helper(950)[..], decode(b"zeta").unwrap());
        assert_eq!(&helper(951)[..], decode(b"eta").unwrap());
        assert_eq!(&helper(952)[..], decode(b"theta").unwrap());
        assert_eq!(&helper(953)[..], decode(b"iota").unwrap());
        assert_eq!(&helper(954)[..], decode(b"kappa").unwrap());
        assert_eq!(&helper(955)[..], decode(b"lambda").unwrap());
        assert_eq!(&helper(956)[..], decode(b"mu").unwrap());
        assert_eq!(&helper(957)[..], decode(b"nu").unwrap());
        assert_eq!(&helper(958)[..], decode(b"xi").unwrap());
        assert_eq!(&helper(959)[..], decode(b"omicron").unwrap());
        assert_eq!(&helper(960)[..], decode(b"pi").unwrap());
        assert_eq!(&helper(961)[..], decode(b"rho").unwrap());
        assert_eq!(&helper(962)[..], decode(b"sigmaf").unwrap());
        assert_eq!(&helper(963)[..], decode(b"sigma").unwrap());
        assert_eq!(&helper(964)[..], decode(b"tau").unwrap());
        assert_eq!(&helper(965)[..], decode(b"upsilon").unwrap());
        assert_eq!(&helper(966)[..], decode(b"phi").unwrap());
        assert_eq!(&helper(967)[..], decode(b"chi").unwrap());
        assert_eq!(&helper(968)[..], decode(b"psi").unwrap());
        assert_eq!(&helper(969)[..], decode(b"omega").unwrap());
        assert_eq!(&helper(977)[..], decode(b"thetasym").unwrap());
        assert_eq!(&helper(978)[..], decode(b"upsih").unwrap());
        assert_eq!(&helper(982)[..], decode(b"piv").unwrap());
        assert_eq!(&helper(8194)[..], decode(b"ensp").unwrap());
        assert_eq!(&helper(8195)[..], decode(b"emsp").unwrap());
        assert_eq!(&helper(8201)[..], decode(b"thinsp").unwrap());
        assert_eq!(&helper(8204)[..], decode(b"zwnj").unwrap());
        assert_eq!(&helper(8205)[..], decode(b"zwj").unwrap());
        assert_eq!(&helper(8206)[..], decode(b"lrm").unwrap());
        assert_eq!(&helper(8207)[..], decode(b"rlm").unwrap());
        assert_eq!(&helper(8211)[..], decode(b"ndash").unwrap());
        assert_eq!(&helper(8212)[..], decode(b"mdash").unwrap());
        assert_eq!(&helper(8216)[..], decode(b"lsquo").unwrap());
        assert_eq!(&helper(8217)[..], decode(b"rsquo").unwrap());
        assert_eq!(&helper(8218)[..], decode(b"sbquo").unwrap());
        assert_eq!(&helper(8220)[..], decode(b"ldquo").unwrap());
        assert_eq!(&helper(8221)[..], decode(b"rdquo").unwrap());
        assert_eq!(&helper(8222)[..], decode(b"bdquo").unwrap());
        assert_eq!(&helper(8224)[..], decode(b"dagger").unwrap());
        assert_eq!(&helper(8225)[..], decode(b"Dagger").unwrap());
        assert_eq!(&helper(8226)[..], decode(b"bull").unwrap());
        assert_eq!(&helper(8230)[..], decode(b"hellip").unwrap());
        assert_eq!(&helper(8240)[..], decode(b"permil").unwrap());
        assert_eq!(&helper(8242)[..], decode(b"prime").unwrap());
        assert_eq!(&helper(8243)[..], decode(b"Prime").unwrap());
        assert_eq!(&helper(8249)[..], decode(b"lsaquo").unwrap());
        assert_eq!(&helper(8250)[..], decode(b"rsaquo").unwrap());
        assert_eq!(&helper(8254)[..], decode(b"oline").unwrap());
        assert_eq!(&helper(8260)[..], decode(b"frasl").unwrap());
        assert_eq!(&helper(8364)[..], decode(b"euro").unwrap());
        assert_eq!(&helper(8465)[..], decode(b"image").unwrap());
        assert_eq!(&helper(8472)[..], decode(b"weierp").unwrap());
        assert_eq!(&helper(8476)[..], decode(b"real").unwrap());
        assert_eq!(&helper(8482)[..], decode(b"trade").unwrap());
        assert_eq!(&helper(8501)[..], decode(b"alefsym").unwrap());
        assert_eq!(&helper(8592)[..], decode(b"larr").unwrap());
        assert_eq!(&helper(8593)[..], decode(b"uarr").unwrap());
        assert_eq!(&helper(8594)[..], decode(b"rarr").unwrap());
        assert_eq!(&helper(8595)[..], decode(b"darr").unwrap());
        assert_eq!(&helper(8596)[..], decode(b"harr").unwrap());
        assert_eq!(&helper(8629)[..], decode(b"crarr").unwrap());
        assert_eq!(&helper(8656)[..], decode(b"lArr").unwrap());
        assert_eq!(&helper(8657)[..], decode(b"uArr").unwrap());
        assert_eq!(&helper(8658)[..], decode(b"rArr").unwrap());
        assert_eq!(&helper(8659)[..], decode(b"dArr").unwrap());
        assert_eq!(&helper(8660)[..], decode(b"hArr").unwrap());
        assert_eq!(&helper(8661)[..], decode(b"vArr").unwrap());
        assert_eq!(&helper(8666)[..], decode(b"lAarr").unwrap());
        assert_eq!(&helper(8667)[..], decode(b"rAarr").unwrap());
        assert_eq!(&helper(8669)[..], decode(b"rarrw").unwrap());
        assert_eq!(&helper(8704)[..], decode(b"forall").unwrap());
        assert_eq!(&helper(8705)[..], decode(b"comp").unwrap());
        assert_eq!(&helper(8706)[..], decode(b"part").unwrap());
        assert_eq!(&helper(8707)[..], decode(b"exist").unwrap());
        assert_eq!(&helper(8708)[..], decode(b"nexist").unwrap());
        assert_eq!(&helper(8709)[..], decode(b"empty").unwrap());
        assert_eq!(&helper(8711)[..], decode(b"nabla").unwrap());
        assert_eq!(&helper(8712)[..], decode(b"isin").unwrap());
        assert_eq!(&helper(8713)[..], decode(b"notin").unwrap());
        assert_eq!(&helper(8714)[..], decode(b"epsis").unwrap());
        assert_eq!(&helper(8715)[..], decode(b"ni").unwrap());
        assert_eq!(&helper(8716)[..], decode(b"notni").unwrap());
        assert_eq!(&helper(8717)[..], decode(b"bepsi").unwrap());
        assert_eq!(&helper(8719)[..], decode(b"prod").unwrap());
        assert_eq!(&helper(8720)[..], decode(b"coprod").unwrap());
        assert_eq!(&helper(8721)[..], decode(b"sum").unwrap());
        assert_eq!(&helper(8722)[..], decode(b"minus").unwrap());
        assert_eq!(&helper(8723)[..], decode(b"mnplus").unwrap());
        assert_eq!(&helper(8724)[..], decode(b"plusdo").unwrap());
        assert_eq!(&helper(8726)[..], decode(b"setmn").unwrap());
        assert_eq!(&helper(8727)[..], decode(b"lowast").unwrap());
        assert_eq!(&helper(8728)[..], decode(b"compfn").unwrap());
        assert_eq!(&helper(8730)[..], decode(b"radic").unwrap());
        assert_eq!(&helper(8733)[..], decode(b"prop").unwrap());
        assert_eq!(&helper(8734)[..], decode(b"infin").unwrap());
        assert_eq!(&helper(8735)[..], decode(b"ang90").unwrap());
        assert_eq!(&helper(8736)[..], decode(b"ang").unwrap());
        assert_eq!(&helper(8737)[..], decode(b"angmsd").unwrap());
        assert_eq!(&helper(8738)[..], decode(b"angsph").unwrap());
        assert_eq!(&helper(8739)[..], decode(b"mid").unwrap());
        assert_eq!(&helper(8740)[..], decode(b"nmid").unwrap());
        assert_eq!(&helper(8741)[..], decode(b"par").unwrap());
        assert_eq!(&helper(8742)[..], decode(b"npar").unwrap());
        assert_eq!(&helper(8743)[..], decode(b"and").unwrap());
        assert_eq!(&helper(8744)[..], decode(b"or").unwrap());
        assert_eq!(&helper(8745)[..], decode(b"cap").unwrap());
        assert_eq!(&helper(8746)[..], decode(b"cup").unwrap());
        assert_eq!(&helper(8747)[..], decode(b"int").unwrap());
        assert_eq!(&helper(8750)[..], decode(b"conint").unwrap());
        assert_eq!(&helper(8756)[..], decode(b"there4").unwrap());
        assert_eq!(&helper(8757)[..], decode(b"becaus").unwrap());
        assert_eq!(&helper(8764)[..], decode(b"sim").unwrap());
        assert_eq!(&helper(8765)[..], decode(b"bsim").unwrap());
        assert_eq!(&helper(8768)[..], decode(b"wreath").unwrap());
        assert_eq!(&helper(8769)[..], decode(b"nsim").unwrap());
        assert_eq!(&helper(8771)[..], decode(b"sime").unwrap());
        assert_eq!(&helper(8772)[..], decode(b"nsime").unwrap());
        assert_eq!(&helper(8773)[..], decode(b"cong").unwrap());
        assert_eq!(&helper(8775)[..], decode(b"ncong").unwrap());
        assert_eq!(&helper(8776)[..], decode(b"asymp").unwrap());
        assert_eq!(&helper(8777)[..], decode(b"nap").unwrap());
        assert_eq!(&helper(8778)[..], decode(b"ape").unwrap());
        assert_eq!(&helper(8780)[..], decode(b"bcong").unwrap());
        assert_eq!(&helper(8781)[..], decode(b"asympeq").unwrap());
        assert_eq!(&helper(8782)[..], decode(b"bump").unwrap());
        assert_eq!(&helper(8783)[..], decode(b"bumpe").unwrap());
        assert_eq!(&helper(8800)[..], decode(b"ne").unwrap());
        assert_eq!(&helper(8801)[..], decode(b"equiv").unwrap());
        assert_eq!(&helper(8804)[..], decode(b"le").unwrap());
        assert_eq!(&helper(8805)[..], decode(b"ge").unwrap());
        assert_eq!(&helper(8806)[..], decode(b"lE").unwrap());
        assert_eq!(&helper(8807)[..], decode(b"gE").unwrap());
        assert_eq!(&helper(8808)[..], decode(b"lnE").unwrap());
        assert_eq!(&helper(8809)[..], decode(b"gnE").unwrap());
        assert_eq!(&helper(8810)[..], decode(b"Lt").unwrap());
        assert_eq!(&helper(8811)[..], decode(b"Gt").unwrap());
        assert_eq!(&helper(8812)[..], decode(b"twixt").unwrap());
        assert_eq!(&helper(8814)[..], decode(b"nlt").unwrap());
        assert_eq!(&helper(8815)[..], decode(b"ngt").unwrap());
        assert_eq!(&helper(8816)[..], decode(b"nles").unwrap());
        assert_eq!(&helper(8817)[..], decode(b"nges").unwrap());
        assert_eq!(&helper(8818)[..], decode(b"lsim").unwrap());
        assert_eq!(&helper(8819)[..], decode(b"gsim").unwrap());
        assert_eq!(&helper(8822)[..], decode(b"lg").unwrap());
        assert_eq!(&helper(8823)[..], decode(b"gl").unwrap());
        assert_eq!(&helper(8826)[..], decode(b"pr").unwrap());
        assert_eq!(&helper(8827)[..], decode(b"sc").unwrap());
        assert_eq!(&helper(8828)[..], decode(b"cupre").unwrap());
        assert_eq!(&helper(8829)[..], decode(b"sscue").unwrap());
        assert_eq!(&helper(8830)[..], decode(b"prsim").unwrap());
        assert_eq!(&helper(8831)[..], decode(b"scsim").unwrap());
        assert_eq!(&helper(8832)[..], decode(b"npr").unwrap());
        assert_eq!(&helper(8833)[..], decode(b"nsc").unwrap());
        assert_eq!(&helper(8834)[..], decode(b"sub").unwrap());
        assert_eq!(&helper(8835)[..], decode(b"sup").unwrap());
        assert_eq!(&helper(8836)[..], decode(b"nsub").unwrap());
        assert_eq!(&helper(8837)[..], decode(b"nsup").unwrap());
        assert_eq!(&helper(8838)[..], decode(b"sube").unwrap());
        assert_eq!(&helper(8839)[..], decode(b"supe").unwrap());
        assert_eq!(&helper(8853)[..], decode(b"oplus").unwrap());
        assert_eq!(&helper(8855)[..], decode(b"otimes").unwrap());
        assert_eq!(&helper(8869)[..], decode(b"perp").unwrap());
        assert_eq!(&helper(8901)[..], decode(b"sdot").unwrap());
        assert_eq!(&helper(8968)[..], decode(b"lceil").unwrap());
        assert_eq!(&helper(8969)[..], decode(b"rceil").unwrap());
        assert_eq!(&helper(8970)[..], decode(b"lfloor").unwrap());
        assert_eq!(&helper(8971)[..], decode(b"rfloor").unwrap());
        assert_eq!(&helper(9001)[..], decode(b"lang").unwrap());
        assert_eq!(&helper(9002)[..], decode(b"rang").unwrap());
        assert_eq!(&helper(9674)[..], decode(b"loz").unwrap());
        assert_eq!(&helper(9824)[..], decode(b"spades").unwrap());
        assert_eq!(&helper(9827)[..], decode(b"clubs").unwrap());
        assert_eq!(&helper(9829)[..], decode(b"hearts").unwrap());
        assert_eq!(&helper(9830)[..], decode(b"diams").unwrap());
    }
}
OCaml | hhvm/hphp/hack/src/utils/http_lite/http_lite.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
(* This is a lightweight library for reading and writing messages in the HTTP
format, with headers and body. So far it only supports the small set of
features needed by the Language Server Protocol. It follows the internet
robustness principle of being as permissive as possible in what it expects,
i.e. no validation beyond what it essentially needs. *)
(** Raised when an incoming message violates the expected HTTP framing:
    unreadable headers, a missing (or unparseable) Content-Length, or a
    Content-Type charset other than utf-8. *)
exception Malformed of string
(** read_headers: reads LF/CRLF-terminated lines until it gets an empty line.
    Raises [Malformed] if the underlying read fails. *)
let read_headers (reader : Buffered_line_reader.t) : string list =
  (* Fetch one line, translating low-level read errors into [Malformed]. *)
  let next_line () =
    try Buffered_line_reader.get_next_line reader with
    | Unix.Unix_error _ -> raise (Malformed "Can't read next header")
  in
  (* Accumulate lines in reverse until the blank separator line. *)
  let rec collect acc =
    match next_line () with
    | "" -> List.rev acc
    | line -> collect (line :: acc)
  in
  collect []
(** parse_headers_to_lowercase_map: turns list of "Key: Value" string headers
 * into a map, with keys normalized to lower-case. HTTP actually allows
 * multiple headers of the same key, but we prefer the simplicity of
 * returning just a string map so we only take the last header for
 * a given key. Note: if any header isn't in Key:Value format, we ignore it. *)
let parse_headers_to_lowercase_map (headers : string list) : string SMap.t =
  (* Folding in input order means a later duplicate key overwrites an
     earlier one, i.e. "last header wins". *)
  let add_header acc line =
    match Str.bounded_split (Str.regexp ":") line 2 with
    | [key; value] ->
      SMap.add (String.lowercase key) (String.strip value) acc
    | _ -> acc
  in
  List.fold headers ~init:SMap.empty ~f:add_header
(** parse_charset: given a Content-Type value like "mime/type; charset=foo"
 * it returns the "foo" bit of it, if present.
 * https://www.w3.org/Protocols/rfc1341/4_Content-Type.html
 * Note: RFC822 allows comments in this string, but we don't handle them.
 *)
let parse_charset (header_value : string) : string option =
  (* value_of_param: given a param string "charset=b", returns [Some "b"];
     any other param yields [None]. *)
  let value_of_param param =
    match Str.bounded_split (Str.regexp "=") param 2 with
    | [name; value] when String.equal (String.strip name) "charset" ->
      Some (String.strip value)
    | _ -> None
  in
  (* The first ';'-separated component is the mime type; the charset can
     appear among the remaining parameters. *)
  match Str.split (Str.regexp ";") header_value with
  | [] -> None
  | _mime_type :: params -> List.find_map params ~f:value_of_param
(** read_message_utf8: reads an http-style message "Headers...Body"
 * The headers must include at least Content-Length (to know how long is
 * the body). If they also include Content-Type, then the charset must be utf-8
 * or absent. Errors in these respects produce a Malformed exception.
 * The content of all other headers are ignored.
 * This function returns an OCaml string, which is a sequence of 8bit bytes,
 * so it's up to the caller to handle any unicode characters and their
 * encoding. *)
let read_message_utf8 (reader : Buffered_line_reader.t) : string =
  let headers = read_headers reader |> parse_headers_to_lowercase_map in
  (* Note: the catch-all also covers [int_of_string] failure, so a present
     but non-numeric Content-Length raises "Missing Content-Length" too. *)
  let len =
    try SMap.find "content-length" headers |> int_of_string with
    | _ -> raise (Malformed "Missing Content-Length")
  in
  (* Best-effort: a missing Content-Type header simply means no charset. *)
  let charset =
    try SMap.find "content-type" headers |> parse_charset with
    | _ -> None
  in
  (* The body is consumed from the reader BEFORE the charset is validated,
     so the stream stays positioned at the next message even when we raise. *)
  let body = Buffered_line_reader.get_next_bytes reader len in
  (match charset with
  | Some s when String.equal s "utf-8" -> ()
  | None -> ()
  | Some _ -> raise (Malformed "Charset not utf-8"));
  body
(** write_message: writes "Content-Length: <n>\r\n\r\n<body>" to [outchan]
    and flushes it. The body is emitted verbatim as raw bytes. *)
let write_message (outchan : Out_channel.t) (body : string) : unit =
  (* Without this, Windows will change the \r\n to \r\r\n *)
  Stdlib.set_binary_mode_out outchan true;
  (* Use %d explicitly; the previous %n is an obscure deprecated alias of %u
     and only happened to print the same digits for non-negative lengths. *)
  Printf.fprintf outchan "Content-Length: %d\r\n" (String.length body);
  Printf.fprintf outchan "\r\n";
  Printf.fprintf outchan "%s" body;
  Out_channel.flush outchan
OCaml Interface | hhvm/hphp/hack/src/utils/http_lite/http_lite.mli | (*
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Raised when a message violates the expected HTTP framing (unreadable
    headers, missing Content-Length, non-utf-8 charset). *)
exception Malformed of string

(** Writes "Content-Length: <n>\r\n\r\n<body>" to the channel and flushes. *)
val write_message : out_channel -> string -> unit

(** Reads LF/CRLF-terminated header lines until the first empty line. *)
val read_headers : Buffered_line_reader.t -> string list

(* The rest of these methods are exposed solely for unit tests. *)

(** Parses "Key: Value" lines into a map keyed by lower-cased header names;
    for duplicated keys the last occurrence wins. *)
val parse_headers_to_lowercase_map : string list -> string SMap.t

(** Extracts the charset parameter from a Content-Type value, if present. *)
val parse_charset : string -> string option

(** Reads a full "Headers...Body" message; raises [Malformed] on bad framing
    or a charset other than utf-8. *)
val read_message_utf8 : Buffered_line_reader.t -> string
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.