language | filename | code |
---|---|---|
C | hhvm/hphp/hack/src/heap/dictionary/stubs/data.c | /**
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*/
#include <stddef.h>
const unsigned char* dictionary_data = NULL;
const unsigned int dictionary_data_len = 0; |
hhvm/hphp/hack/src/heap/dictionary/stubs/dune | (library
(name dictionary_data_stubs)
(wrapped false)
(modules)
(foreign_stubs
(language c)
(names data)
(flags
(:standard)))
(c_library_flags
(:standard))) |
hhvm/hphp/hack/src/hhi/dune | (library
(name hhi_get)
(wrapped false)
(modules hhi_get)
(preprocess (pps lwt_ppx ppx_deriving.std ppx_deriving.enum))
)
(library
(name hhi)
(wrapped false)
(libraries
global_config
relative_path
sys_utils)
(modules hhi)
(preprocessor_deps
(source_tree %{project_root}/hack/hhi)
;; This one is critical to work with the `dir` in hphp/hsl/dune
(file %{project_root}/hsl/generated_hhis)
(glob_files_rec %{project_root}/hsl/generated_hhis/**hhi)
;; TODO: that last one could probably be removed
(file %{project_root}/hsl/hsl_generated_hhis.stamp)
)
;; the hhi-dir and hsl-dir must be local paths from the project_root
;; since this is where the ppx.exe will be run from
(preprocess (pps ppx_gen_hhi --
-hhi-dir hack/hhi
-hsl-dir hsl/generated_hhis)
)
) |
OCaml | hhvm/hphp/hack/src/hhi/hhi.ml | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(* OCaml handles the value restriction much better than SML. <3 *)
let root = ref None
(* Compiler embeds the hhi contents directly into the source *)
let hhi_contents = [%hhi_contents]
let get_raw_hhi_contents () = hhi_contents
let write_hhi dir (filename, contents) =
let file = Path.(concat dir filename |> to_string) in
(* Make sure the subdirectory exists; this structure is nested *)
Sys_utils.mkdir_p (Filename.dirname file);
Sys_utils.write_file ~file contents
let extract_hhis dir = Array.iter (write_hhi dir) hhi_contents
(* Touch functionality for all hhis below root *)
let touch_root r =
let filter file = Filename.check_suffix file ".hhi" in
Find.iter_files
~filter
[r]
Sys_utils.(try_touch (Touch_existing { follow_symlinks = true }))
let touch () =
match !root with
| Some r -> touch_root r
| _ -> ()
(* Entry points to actually extract the files and set up the hhi path.
*
* We want this to be idempotent so that later code can check if a given file
* came from the hhi unarchive directory or not, to provide better error
* messages. *)
let get_hhi_root ?(force_write = false) () =
match (!root, force_write) with
| (Some r, false) -> r
| (_, true)
| (None, _) ->
let tmpdir = Path.make (Tmp.temp_dir GlobalConfig.tmp_dir "hhi") in
extract_hhis tmpdir;
root := Some tmpdir;
Relative_path.set_path_prefix Relative_path.Hhi tmpdir;
tmpdir
let set_hhi_root_for_unit_test dir =
(* no need to call realpath() on this; we never extract the hhi files for our
* unit tests, so this is just a dummy value and does not need to be a real
* path*)
root := Some dir;
Relative_path.set_path_prefix Relative_path.Hhi dir;
extract_hhis dir
let set_custom_hhi_root dir =
root := Some dir;
Relative_path.set_path_prefix Relative_path.Hhi dir |
OCaml Interface | hhvm/hphp/hack/src/hhi/hhi.mli | (*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val get_hhi_root : ?force_write:bool -> unit -> Path.t
val set_hhi_root_for_unit_test : Path.t -> unit
val set_custom_hhi_root : Path.t -> unit
val get_raw_hhi_contents : unit -> (string * string) array
val touch : unit -> unit |
OCaml | hhvm/hphp/hack/src/hhi/hhi_get.ml | (*
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "flow" directory of this source tree.
*
*)
let with_in_channel filename f =
let ic = open_in_bin filename in
try
let res = f ic in
close_in ic;
res
with
| exn ->
close_in ic;
raise exn
(* Helper to extract the file contents *)
let string_of_file filename =
let string_size = 32759 and buffer_size = 1000 in
with_in_channel filename @@ fun ic ->
let s = Bytes.create string_size in
let b = Buffer.create buffer_size in
let rec iter ic b s =
let nread = input ic s 0 string_size in
if nread > 0 then (
Buffer.add_substring b (Bytes.to_string s) 0 nread;
iter ic b s
)
in
iter ic b s;
Buffer.contents b
(* Normalize the directory by removing a trailing directory separator *)
let normalize_dir dir =
let sep = Filename.dir_sep in
let sep_len = String.length sep in
(* Check the last sep_len characters *)
let trailing = String.sub dir (String.length dir - sep_len) sep_len in
(* Strip the trailing separators if necessary *)
if String.equal trailing sep then
String.sub dir 0 (String.length dir - sep_len)
else
dir
(* Read in all the files below the hhi directory *)
let get_recursive_files root =
let rec loop dirs files =
match dirs with
| [] -> files
| d :: ds ->
let curr_files = Sys.readdir d in
(* Process the files in the next directory *)
let (dirs', files') =
Array.fold_left
begin
fun (d_acc, f_acc) f ->
let f = Filename.concat d f in
if Sys.is_directory f then
(f :: d_acc, f_acc)
else
(d_acc, f :: f_acc)
end
(ds, files)
curr_files
in
(* And then process the rest, eventually we'll exhaust the dirs list *)
loop dirs' files'
in
loop [root] []
let get_hhis_in_dir dir =
(* Chop off the trailing slash, since the full filename from the recursive
* walk will always join paths *)
let dir = normalize_dir dir in
let dir_offset = String.length dir + String.length Filename.dir_sep in
get_recursive_files dir
|> List.fold_left
(fun acc file ->
(* Skip non-hhi in the directory *)
if not (Filename.check_suffix file "hhi") then
acc
else
let contents = string_of_file file in
let file =
String.sub file dir_offset (String.length file - dir_offset)
in
(file, contents) :: acc)
[]
let get_hhis hhi_dir hsl_dir =
let handwritten_hhis = get_hhis_in_dir hhi_dir in
let generated_hsl_hhis =
get_hhis_in_dir hsl_dir
|> List.map (fun (name, contents) -> ("hsl_generated/" ^ name, contents))
in
handwritten_hhis @ generated_hsl_hhis |
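`get_recursive_files` above walks the tree without recursion by carrying an explicit worklist of directories through `loop dirs files`. A minimal Rust sketch of the same worklist traversal, using only the standard library (`collect_files` is our name, not from this repo):

```rust
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// Walk `root` iteratively: directories go back on the worklist,
/// plain files are collected, mirroring hhi_get.ml's `loop dirs files`.
fn collect_files(root: &Path) -> io::Result<Vec<PathBuf>> {
    let mut dirs = vec![root.to_path_buf()];
    let mut files = Vec::new();
    while let Some(dir) = dirs.pop() {
        for entry in fs::read_dir(&dir)? {
            let path = entry?.path();
            if path.is_dir() {
                dirs.push(path);
            } else {
                files.push(path);
            }
        }
    }
    Ok(files)
}

fn main() -> io::Result<()> {
    for f in collect_files(Path::new("."))? {
        println!("{}", f.display());
    }
    Ok(())
}
```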
OCaml Interface | hhvm/hphp/hack/src/hhi/hhi_get.mli | (*
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "flow" directory of this source tree.
*
*)
val get_hhis_in_dir : string -> (string * string) list
val get_hhis : string -> string -> (string * string) list |
Rust | hhvm/hphp/hack/src/hhi/rust/build.rs | use std::path::Path;
use std::path::PathBuf;
fn main() {
const ROOT_PATH: &str = "../../../../..";
let root_path = Path::new(ROOT_PATH);
let hhi_lib = root_path.join("hphp/hack/hhi");
let hsl = root_path.join("hphp/hsl");
let out_dir = std::env::var("OUT_DIR").unwrap();
let out_dir = Path::new(&out_dir);
let mut contents: Vec<(PathBuf, String)> = Vec::new();
for src in [hhi_lib, hsl] {
for entry in walkdir::WalkDir::new(src) {
let entry = entry.unwrap();
if !entry.file_type().is_file() {
continue;
}
let path = entry.path();
let ext = path.extension().unwrap_or(std::ffi::OsStr::new(""));
if ext != "php" && ext != "hack" {
continue;
}
let mut out: Vec<u8> = Vec::new();
if generate_hhi_lib::run(&mut out, path).is_ok() {
rerun_if_changed(path);
contents.push((path.to_path_buf(), String::from_utf8(out).unwrap()));
}
}
}
gen_hhi_contents_lib::write_hhi_contents_file(&out_dir.join("hhi_contents.rs"), &contents)
.unwrap();
rerun_if_changed("build.rs");
rerun_if_changed("gen_hhi_contents.rs");
}
fn rerun_if_changed<P: AsRef<Path>>(f: P) {
println!("cargo:rerun-if-changed={}", f.as_ref().to_str().unwrap());
} |
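This build script follows the standard cargo pattern: generate Rust source into `OUT_DIR` at build time and let the crate `include!` it (as `hhi.rs` below does for non-fbcode builds). A stripped-down sketch of the pattern, with a hypothetical `generated.rs`:

```rust
// build.rs — minimal OUT_DIR codegen sketch (ours, not this repo's script):
// write a Rust source file at build time, then `include!` it from the crate.
use std::env;
use std::fs;
use std::path::Path;

fn main() {
    let out_dir = env::var("OUT_DIR").unwrap(); // set by cargo for build scripts
    let dest = Path::new(&out_dir).join("generated.rs");
    fs::write(&dest, "pub const ANSWER: u32 = 42;\n").unwrap();
    // Re-run only when the build script itself changes.
    println!("cargo:rerun-if-changed=build.rs");
}

// src/lib.rs would then pull the generated item in with:
// include!(concat!(env!("OUT_DIR"), "/generated.rs"));
```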
TOML | hhvm/hphp/hack/src/hhi/rust/Cargo.toml | # @generated by autocargo
[package]
name = "hhi"
version = "0.0.0"
edition = "2021"
[lib]
path = "hhi.rs"
test = false
doctest = false
[build-dependencies]
clap = { version = "3.2.25", features = ["derive", "env", "regex", "unicode", "wrap_help"] }
gen_hhi_contents_lib = { version = "0.0.0", path = "cargo/gen_hhi_contents_lib" }
generate_hhi_lib = { version = "0.0.0", path = "../../generate_hhi/cargo/generate_hhi_lib" }
walkdir = "2.3" |
Rust | hhvm/hphp/hack/src/hhi/rust/gen_hhi_contents.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::Path;
use std::path::PathBuf;
use clap::Parser;
#[derive(Debug, Parser)]
struct Options {
/// The directory containing HHI files.
#[clap(long)]
hhi_dir: PathBuf,
/// The directory containing this stamp is the directory to search for HHIs
/// generated from the HSL. These will be placed in the final hhi directory
/// under a subdirectory named "hsl_generated".
#[clap(long)]
hsl_stamp: PathBuf,
}
fn main() {
// This is the entrypoint when used from buck.
let opts = Options::parse();
let out_dir = std::env::var("OUT").unwrap(); // $OUT implicitly provided by buck
run(opts, &out_dir)
}
fn run(opts: Options, out_dir: &str) {
let hsl_dir = opts.hsl_stamp.parent().unwrap();
let mut hhi_contents = vec![];
hhi_contents.extend(get_hhis_in_dir(&opts.hhi_dir));
hhi_contents.extend(
get_hhis_in_dir(hsl_dir)
.map(|(path, contents)| (PathBuf::from("hsl_generated").join(path), contents)),
);
let out_filename = PathBuf::from(out_dir).join("lib.rs");
gen_hhi_contents_lib::write_hhi_contents_file(&out_filename, &hhi_contents).unwrap();
}
fn get_hhis_in_dir(root: &Path) -> impl Iterator<Item = (PathBuf, String)> + '_ {
walkdir::WalkDir::new(root)
.sort_by_file_name()
.into_iter()
.map(|e| e.unwrap())
.filter(|e| e.file_type().is_file())
.filter(|e| e.path().extension().and_then(|s| s.to_str()) == Some("hhi"))
.map(move |e| {
let contents = std::fs::read_to_string(e.path()).unwrap();
let relative_path = e.path().strip_prefix(root).unwrap().to_owned();
(relative_path, contents)
})
} |
Rust | hhvm/hphp/hack/src/hhi/rust/gen_hhi_contents_lib.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
pub fn write_hhi_contents_file(
out_filename: &Path,
hhi_contents: &[(PathBuf, String)],
) -> std::io::Result<()> {
let mut out_file = std::fs::File::create(out_filename)?;
writeln!(out_file, "pub const HHI_CONTENTS: &[(&str, &str)] = &[")?;
for (path, contents) in hhi_contents {
writeln!(
out_file,
" (\"{}\", r###\"{}\"###),",
path.display(),
contents
)?;
}
writeln!(out_file, "];")?;
Ok(())
} |
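Note that wrapping each file body in `r###"…"###` quietly assumes no `.hhi` content ever contains the sequence `"###`. A defensive sketch (ours, not used by this repo) that computes how many `#`s a raw-string delimiter would need for arbitrary text:

```rust
/// Return how many `#`s a raw string delimiter needs: one more than the
/// longest run of `#`s immediately following a `"` in the embedded text,
/// otherwise the literal would terminate early.
fn raw_string_hashes(text: &str) -> usize {
    let bytes = text.as_bytes();
    let mut max_run = 0;
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] == b'"' {
            let mut run = 0;
            while i + 1 + run < bytes.len() && bytes[i + 1 + run] == b'#' {
                run += 1;
            }
            max_run = max_run.max(run);
            i += 1 + run;
        } else {
            i += 1;
        }
    }
    max_run + 1
}

fn main() {
    assert_eq!(raw_string_hashes("no quotes here"), 1);
    assert_eq!(raw_string_hashes("ends with \"###"), 4);
}
```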
Rust | hhvm/hphp/hack/src/hhi/rust/hhi.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::io::Write;
use std::path::Path;
// generated by gen_hhi_contents.rs
#[cfg(fbcode_build)]
mod hhi_contents;
#[cfg(not(fbcode_build))]
mod hhi_contents {
include!(concat!(env!("OUT_DIR"), "/hhi_contents.rs"));
}
pub use hhi_contents::HHI_CONTENTS;
pub fn write_hhi_files(dir: &Path) -> std::io::Result<()> {
for (filename, contents) in HHI_CONTENTS {
let filename = dir.join(filename);
std::fs::create_dir_all(filename.parent().unwrap())?;
let mut file = std::fs::File::create(filename)?;
file.write_all(contents.as_bytes())?;
}
Ok(())
} |
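A small consumer sketch (ours): materialize the embedded HHIs into a scratch directory. `std::env::temp_dir` stands in for a proper tempdir crate, and a dependency on this `hhi` crate is assumed:

```rust
fn main() -> std::io::Result<()> {
    let dir = std::env::temp_dir().join("hhi_example");
    hhi::write_hhi_files(&dir)?;
    println!(
        "materialized {} hhi files under {}",
        hhi::HHI_CONTENTS.len(),
        dir.display()
    );
    Ok(())
}
```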
TOML | hhvm/hphp/hack/src/hhi/rust/cargo/gen_hhi_contents_lib/Cargo.toml | # @generated by autocargo
[package]
name = "gen_hhi_contents_lib"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../gen_hhi_contents_lib.rs"
test = false
doctest = false |
Python | hhvm/hphp/hack/src/hh_asdiff/diff.py | #!/usr/bin/env python3
# pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
import difflib
from heapq import heappush, heappushpop
from itertools import zip_longest
from typing import AnyStr, Iterator, List, Sequence, Tuple
def equal_lines(expected: Iterator[AnyStr], actual: Iterator[AnyStr]) -> bool:
"""Compares each line of expected input against the actual input,
where the two inputs may differ in element type (e.g., one `bytes`, the other `str`);
returns true if and only if the sequences have the same number of elements
and each corresponding element is equal as defined by operator `==`.
Prefer this function over the error-prone `==`, which doesn't work if:
- one line iterator is infinite (e.g., piped input & infinite loop)
- sequences are of different types (e.g., list vs tuple vs ...)
"""
# Use zip_longest to properly handle the case when one iterable
# produces more elements - the other should then produce None, so that
# inputs aren't equal (the longer cannot produce None due to typing)
return all(exp == act for exp, act in zip_longest(expected, actual))
Entry = Tuple[Tuple[int, str], Sequence[str], Sequence[str]]
class UnifiedDiffRanker:
"""Ranks the diffs according to how many lines need to be added or deleted,
counting both with equal weight. Calling this object registers a diff,
and iterating over it gives up to `limit` smallest diffs, typed as:
((num_lines_changed, key), lines1, lines2): Entry
where `key` should be unique, and `lines1`/`lines2` correspond
to the entire chunk of input being diffed, and `num_lines_changed` to the
number of extra/missing lines (denoted by `+`/`-` in diff output).
Example:
ranker = UnifiedDiffRanker(2)
ranker("file1", ["line1\n", "line2\n"], ["1\n", "2\n"])
ranker("file2", [], ["1\n", "2\n", "3\n", "4\n", "5\n"])
ranker("file3", ["a\n", "b\n"], ["a\n", "c\n"])
for rank, ((_, filename), _, _) in enumerate(ranker, start=1):
print(f"Rank #{rank}: {filename}")
Output:
Rank #1: file3 # because only 1 missing & 1 extra line differ (b/c)
Rank #2: file1 # because 4 lines differ
Note: calling this function-like object with the same key multiple times
may incur significantly slower performance (as lines will be compared).
"""
# Max-heap storing the K smallest entries (or fewer, if self was called <K times)
# Invariant: after N <= limit calls on self, contains keys for N smallest
_heap: List[Entry]
def __init__(self, limit: int) -> None:
self._heap = []
self._limit = limit
def __call__(self, key: str, lines1: Sequence[str], lines2: Sequence[str]) -> None:
if self._limit == 0:
return # skip expensive diffing if limit is 0
diff_size = 0
for d in difflib.unified_diff(lines1, lines2, n=0):
if len(d) >= 1 and (
(d[0] == "+" and not d.startswith("+++"))
or (d[0] == "-" and not d.startswith("---"))
):
diff_size += 1
# Ensure: old max is replaced with a new diff in top `limit` smallest
op = heappushpop if len(self._heap) >= self._limit else heappush
op(self._heap, ((-diff_size, key), lines1, lines2))
def __iter__(self) -> Iterator[Entry]:
# By induction, the heap holds (up to) `limit` smallest diffs
for e in sorted(self._heap, key=lambda e: (-(e[0][0]), e[0][1])):
(neg_diff_size, path), lines1, lines2 = e
yield (-neg_diff_size, path), lines1, lines2 |
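`UnifiedDiffRanker` keeps the K smallest diffs with a bounded heap: Python's `heapq` is a min-heap, so sizes are stored negated to make it behave as a max-heap whose root is the largest retained entry. Rust's `BinaryHeap` is already a max-heap, so the same bounded top-K technique needs no negation; a self-contained sketch with our own names:

```rust
use std::collections::BinaryHeap;

/// Keep the `limit` smallest (size, key) pairs seen so far.
/// BinaryHeap is a max-heap: the largest retained entry sits on top,
/// ready to be evicted when something smaller arrives.
struct SmallestK {
    limit: usize,
    heap: BinaryHeap<(usize, String)>,
}

impl SmallestK {
    fn new(limit: usize) -> Self {
        Self { limit, heap: BinaryHeap::new() }
    }

    fn push(&mut self, size: usize, key: &str) {
        self.heap.push((size, key.to_string()));
        if self.heap.len() > self.limit {
            self.heap.pop(); // evict the current largest
        }
    }

    fn into_sorted(self) -> Vec<(usize, String)> {
        self.heap.into_sorted_vec() // ascending, like the ranker's __iter__
    }
}

fn main() {
    // Mirrors the docstring example: file3 (2 changed lines) ranks first.
    let mut ranker = SmallestK::new(2);
    ranker.push(4, "file1");
    ranker.push(5, "file2");
    ranker.push(2, "file3");
    assert_eq!(
        ranker.into_sorted(),
        vec![(2, "file3".to_string()), (4, "file1".to_string())]
    );
}
```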
Python | hhvm/hphp/hack/src/hh_asdiff/hh_asdiff.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
import argparse
import difflib
import itertools
import logging
import os
import sys
from typing import AnyStr, Dict, Iterator, List, Optional, Sequence, Tuple
from hphp.hack.src.hh_asdiff import diff, parsing
log = logging.getLogger("hack.asdiff")
def parse_args(argv: List[str]) -> argparse.Namespace:
program = os.path.basename(argv[0])
ap = argparse.ArgumentParser(
program,
description="""\
A tool to compare and diff/pretty-print bytecode differences based on at least
one precomputed (expected) output. It is powerful enough to compare multiple
input files (e.g., recursively the entire codebase written in Hack) without
storing the actual output on disk (for convenience & efficiency), as well as
simple ranking/filtering (e.g., listing the files whose HHAS differs by the fewest/most lines).
""",
epilog=f"""\
Examples:
# Compare two compilation results stored in an expected and an actual file
{program} my/hack/test_dir/compiled1.hhas /tmp/changed-bc/compiled1.hhas
# Compare compilation output of multiple files on-the-fly
find all-hack -name '*.php' | hh_single_compile --input-file-list /dev/stdin\
| {program} all-hack.expected.hhas # stops on first mismatch unless --all
""",
formatter_class=argparse.RawDescriptionHelpFormatter,
)
ap.add_argument(
"--all",
action="store_true",
help="do not stop after a single HHAS mismatches (consider with --no-diff)",
)
ap.add_argument(
"--no-diff",
dest="diff",
action="store_false",
help="do not show any diffs, just fail if HHAS mismatches",
)
ap.add_argument(
"--diff-smallest",
metavar="K",
type=int,
default=0,
help="if positive, show smallest K diffs (implies --all)"
" (note: K inputs are kept in memory, and O(log K) time factor)",
)
ap.add_argument(
"-s",
"--report-identical-files",
action="store_true",
help="report when two HHASes are identical",
)
ap.add_argument("--report-summary", action="store_true", help="report")
ap.add_argument(
"expected_file", type=argparse.FileType("r+b"), help="Expected HHAS"
)
ap.add_argument(
"actual_file",
nargs="?",
type=argparse.FileType("r+b"),
default=sys.stdin,
help="Actual HHAS; a .hhas file or stdin (piped from hh_single_compile)",
)
opts = ap.parse_args(argv[1:])
return opts
HhasResult = Tuple[parsing.MarkedFilename, Sequence[str]]
class DiffHandler:
def __init__(self, opts: argparse.Namespace):
self._opts = opts
self.call_count = 0
self.ranker = diff.UnifiedDiffRanker(opts.diff_smallest)
def __call__(self, exp: HhasResult, act: HhasResult):
exp_filename, exp_lines = exp
act_filename, act_lines = act
def unified_diff():
return difflib.unified_diff(
exp_lines, act_lines, fromfile=exp_filename, tofile=act_filename
)
if self._opts.diff_smallest:
self.ranker(exp_filename or "", exp_lines, act_lines)
elif self._opts.diff:
sys.stdout.writelines(unified_diff())
self.call_count += 1
def __str__(self) -> str:
"""Prints the info for the current smallest diffs"""
ret = f"diffs handled: {self.call_count}\n"
if self._opts.diff_smallest:
smallest = "\n".join(
f"size={size} @ {path}" for (size, path), _, _ in iter(self.ranker)
)
ret += f"K-th smallest:\n{smallest}\n\n"
return ret
def report_summary(exp_file: Iterator[AnyStr], act_file: Iterator[AnyStr]):
act: Dict[Optional[str], Sequence[str]] = {}
exp: Dict[Optional[str], Sequence[str]] = {}
for fn, content in parsing.split_lines(exp_file):
if fn is None:
log.error("Found file without name")
exp[fn] = content
act = {}
for fn, content in parsing.split_lines(act_file):
if fn is None:
log.error("Found file without name")
act[fn] = content
exp_keys = set(exp.keys())
act_keys = set(act.keys())
# pyre-fixme[6]: Expected `Iterable[Variable[_LT (bound to _SupportsLessThan)]]`
# for 1st param but got `Set[Optional[str]]`.
all_keys = sorted(exp_keys.union(act_keys))
for k in all_keys:
from_exp = exp.get(k)
from_act = act.get(k)
if from_exp is None:
print("|NotInExp|", k)
elif from_act is None:
print("|NotInAct|", k)
elif (
next(filter(lambda l: "#### NotImpl:" in l, iter(from_act)), None)
is not None
):
print("|PrintNotImplA|", k)
elif (
next(filter(lambda l: "#### NotImpl:" in l, iter(from_exp)), None)
is not None
):
print("|PrintNotImplE|", k)
elif diff.equal_lines(iter(from_exp), iter(from_act)):
print("|Identical|", k)
else:
print("|Mismatch|", k)
def main(args: List[str]) -> int:
exit_code = 0
opts = parse_args(sys.argv)
compare_all = opts.all or opts.diff_smallest
try:
if opts.report_summary:
report_summary(opts.expected_file, opts.actual_file)
return exit_code
compare_count = 0
diff_handler = DiffHandler(opts)
for exp, act in itertools.zip_longest(
parsing.split_lines(opts.expected_file),
parsing.split_lines(opts.actual_file),
fillvalue=None,
):
compare_count += 1
if None in (exp, act):
exit_code |= 1 << 1
source, filename = (
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
("expected", act[0])
if exp is None
else ("actual", exp[0])
)
log.error(f"missing filename in {source} HHAS: {filename}")
if not compare_all:
break
continue
# pyre-fixme[23]: Unable to unpack `Optional[Tuple[Optional[str],
# Sequence[str]]]` into 2 values.
exp_filename, exp_lines = exp
# pyre-fixme[23]: Unable to unpack `Optional[Tuple[Optional[str],
# Sequence[str]]]` into 2 values.
act_filename, act_lines = act
if exp_filename != act_filename:
exit_code |= 1 << 2
log.error(
"filename mismatch:\n"
f"expected: {exp_filename}\n"
f" actual: {act_filename}\n"
"Did you ensure stable order in `hh_(single_)compile "
"--input-file-list`?"
)
if not compare_all:
break
continue
if diff.equal_lines(exp_lines, act_lines):
if opts.report_identical_files:
print("identical:", exp_filename)
continue
exit_code |= 1 << 3
# pyre-fixme[6]: For 1st argument expected `Tuple[Optional[str],
# Sequence[str]]` but got `Optional[Tuple[Optional[str], Sequence[str]]]`.
# pyre-fixme[6]: For 2nd argument expected `Tuple[Optional[str],
# Sequence[str]]` but got `Optional[Tuple[Optional[str], Sequence[str]]]`.
diff_handler(exp, act)
if not compare_all:
break
print("files checked:", compare_count)
print(str(diff_handler), end="")
if opts.diff:
for rank, ((_, exp_file), exp_lines, act_lines) in enumerate(
diff_handler.ranker, start=1
):
print(f"\n== Rank #{rank} diff:", exp_file)
gen = difflib.unified_diff(exp_lines, act_lines)
next(gen) # skip: --- file1
next(gen) # skip: +++ file2
sys.stdout.writelines(gen)
finally:
opts.expected_file.close()
opts.actual_file.close()
return exit_code
if __name__ == "__main__":
logging.basicConfig(format="%(levelname)s: %(message)s")
log.setLevel(logging.WARN)
sys.exit(main(sys.argv)) |
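The main loop pairs the two HHAS streams with `zip_longest`, so a file missing on either side surfaces as an explicit `None` instead of being silently dropped. The same lockstep pattern in Rust via `itertools::Itertools::zip_longest` (assuming the `itertools` crate; file names are toy data):

```rust
use itertools::EitherOrBoth;
use itertools::Itertools;

fn main() {
    let expected = ["a.hhas", "b.hhas"];
    let actual = ["a.hhas"];
    for pair in expected.iter().zip_longest(actual.iter()) {
        match pair {
            EitherOrBoth::Both(e, a) if e == a => println!("identical: {e}"),
            EitherOrBoth::Both(e, a) => println!("mismatch: {e} vs {a}"),
            EitherOrBoth::Left(e) => println!("missing in actual: {e}"),
            EitherOrBoth::Right(a) => println!("missing in expected: {a}"),
        }
    }
}
```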
Python | hhvm/hphp/hack/src/hh_asdiff/parsing.py | #!/usr/bin/python3
# pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
import re
import typing as ty
from typing import AnyStr, Callable, Iterator, List, Optional, Tuple
def canon(string_like: AnyStr) -> str:
"""Canonicalizes a line of HHAS output by escaping non-UTF8 chars"""
if isinstance(string_like, bytes):
return string_like.decode("utf-8", "backslashreplace")
assert isinstance(string_like, str)
return string_like
# Note: must be consistent with hhbc_hhas.{ml,rs} files
_MARKER_BEGIN_RE: ty.Pattern[str] = re.compile(r"#(.*)starts here")
_MARKER_END_RE: ty.Pattern[str] = re.compile(r"#(.*)ends here")
MarkedFilename = Optional[str]
def split_lines(
linegen: Iterator[AnyStr], on_ignore: Callable[[str], None] = lambda s: None
) -> Iterator[Tuple[MarkedFilename, ty.Sequence[str]]]:
"""Splits HHAS output (bytecode) that may contain starts/ends-here markers
into pairs (marked_filename, hhas_lines) by only keeping one file in memory,
useful for splitting the potentially huge output of "hh_(single_)compile DIR".
For extra sanity checking, an optional `on_ignore` callback can be passed;
it receives each ignored line (i.e., lines outside the marked section of a file).
Example:
ignored_lines = []
single_hhas_gen = split_lines(hh_compile_output_lines, ignored_lines.append)
for filename, hhas in single_hhas_gen:
print(f"=== FILENAME: {filename} ===")
sys.stdout.writelines(hhas)
assert not ignored_lines # make sure no line is silently ignored
"""
filename: MarkedFilename
hhas_lines: List[str] = []
hhas_active = False
for line0 in linegen:
line = canon(line0)
beg_match = _MARKER_BEGIN_RE.match(line)
if beg_match:
filename = beg_match.group(1).strip() or None # None if empty
hhas_lines = [] # avoid .clear() so caller doesn't have to copy
hhas_active = True
elif _MARKER_END_RE.match(line):
hhas_active = False
yield filename, hhas_lines
hhas_lines = []
elif hhas_active:
hhas_lines.append(line)
elif line:
on_ignore(line)
# Report lines from an HHAS segment without an end marker as ignored
for line in hhas_lines:
if line:
on_ignore(line) |
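A sketch of the same marker-driven splitting in Rust with the `regex` crate; `split_marked` is a hypothetical name, the begin/end patterns mirror `_MARKER_BEGIN_RE`/`_MARKER_END_RE`, and sections are collected into a `Vec` for brevity rather than streamed:

```rust
use regex::Regex;

/// Split marker-delimited sections out of a line stream, keeping at most
/// one section's lines in memory at a time (collected here for brevity).
fn split_marked(lines: &[&str]) -> Vec<(Option<String>, Vec<String>)> {
    let begin = Regex::new(r"#(.*)starts here").unwrap();
    let end = Regex::new(r"#(.*)ends here").unwrap();
    let mut out = Vec::new();
    let mut name: Option<String> = None;
    let mut body: Vec<String> = Vec::new();
    let mut active = false;
    for &line in lines {
        if let Some(c) = begin.captures(line) {
            let n = c[1].trim().to_string();
            name = if n.is_empty() { None } else { Some(n) };
            body = Vec::new();
            active = true;
        } else if end.is_match(line) {
            active = false;
            out.push((name.take(), std::mem::take(&mut body)));
        } else if active {
            body.push(line.to_string());
        }
    }
    out
}

fn main() {
    let lines = ["# a.php starts here", ".main {", "}", "# a.php ends here"];
    let sections = split_marked(&lines);
    assert_eq!(sections[0].0.as_deref(), Some("a.php"));
    assert_eq!(sections[0].1.len(), 2);
}
```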
TOML | hhvm/hphp/hack/src/hh_codegen/Cargo.toml | # @generated by autocargo
[package]
name = "hh_codegen"
version = "0.0.0"
edition = "2021"
[[bin]]
name = "hh_codegen"
path = "hh_codegen.rs"
[dependencies]
anyhow = "1.0.71"
clap = { version = "4.3.5", features = ["derive", "env", "string", "unicode", "wrap_help"] }
hash = { version = "0.0.0", path = "../utils/hash" }
proc-macro2 = { version = "1.0.64", features = ["span-locations"] }
quote = "1.0.29"
signed_source = { version = "0.0.0", path = "../utils/rust/signed_source" }
syn = { version = "1.0.109", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] }
synstructure = "0.12" |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_asts.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
pub use crate::common::args::CommonArgs as Args;
pub fn run(args: &Args) -> anyhow::Result<Vec<(std::path::PathBuf, String)>> {
let files = crate::common::parse_all(&args.input)?;
let defs: Vec<(syn::Ident, syn::Item)> = files
.into_iter()
.flat_map(|(filename, items)| {
// assuming file name is the module name
let mod_name = filename.file_stem().and_then(|stem| stem.to_str()).unwrap();
let mod_name = quote::format_ident!("{}", mod_name);
items.into_iter().map(move |item| (mod_name.clone(), item))
})
.collect();
let results = vec![("ast.rs", gen_ast(&defs)), ("nast.rs", gen_ast(&defs))];
Ok(results
.iter()
.map(|(filename, source)| (args.output.join(filename), source.to_string()))
.collect())
}
// Could be written as a function if we wrote a trait to access `ident` and
// `generics` on `syn::Item{Struct,Enum,Type}`. Duck typing is easier.
macro_rules! alias {
($module:ident, $ast:ident) => {
if $ast.generics.params.is_empty() {
None
} else {
let ty = &$ast.ident;
let (_, ty_generics, _) = $ast.generics.split_for_impl();
Some(quote!(pub type #ty = #$module :: #ty #ty_generics;))
}
}
}
fn gen_ast(defs: &[(syn::Ident, syn::Item)]) -> TokenStream {
let types = defs.iter().filter_map(|(module, item)| match item {
syn::Item::Struct(item) => alias!(module, item),
syn::Item::Enum(item) => alias!(module, item),
syn::Item::Type(item) => alias!(module, item),
_ => None,
});
quote! {
use crate::{ast_defs, aast_defs};
pub use ast_defs::*;
pub use aast_defs::*;
/// Expressions have no type annotation.
type Ex = ();
/// Toplevel definitions and methods have no "environment" annotation.
type En = ();
#(#types)*
}
} |
Rust | hhvm/hphp/hack/src/hh_codegen/hh_codegen.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod common;
mod gen_asts;
mod gen_by_ref_decl_visitor;
mod gen_elab_transform;
mod gen_enum_helper;
mod gen_visitor;
mod quote_helper;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use anyhow::Context;
use anyhow::Result;
use clap::Parser;
use common::*;
#[derive(Debug, Parser)]
struct Opts {
/// Command to regenerate the output. This text will be included in generated file headers.
#[clap(long)]
regen_cmd: Option<String>,
/// Path to a Rust formatter binary, which will be used on the generated output.
#[clap(long)]
rustfmt: Option<String>,
/// The codegen task to run.
#[clap(subcommand)]
subcommand: Subcommand,
}
#[derive(Debug, Parser)]
enum Subcommand {
/// Generate convenient factory functions and predicates for enum types.
EnumHelpers(gen_enum_helper::Args),
/// Generate Visitor and VisitorMut traits.
Visitor(gen_visitor::Args),
/// Generate AST and NAST modules containing instantiated (non-generic) AAST types.
Asts(gen_asts::Args),
/// Generate a Visitor trait for by-reference types.
ByRefDeclVisitor(gen_by_ref_decl_visitor::Args),
/// Generate a transformer and Pass trait for AST elaboration.
ElabTransform(gen_elab_transform::Args),
}
fn main() -> Result<()> {
let opts = Opts::parse();
let formatter = opts.rustfmt.as_deref();
eprintln!("Rust formatter set to {:?}", formatter);
let regencmd = opts.regen_cmd.as_deref();
eprintln!("Re-generate cmd set to {:?}", regencmd);
let files = match opts.subcommand {
Subcommand::EnumHelpers(args) => gen_enum_helper::run(&args)?,
Subcommand::Visitor(args) => gen_visitor::run(&args)?,
Subcommand::Asts(args) => gen_asts::run(&args)?,
Subcommand::ByRefDeclVisitor(args) => gen_by_ref_decl_visitor::run(&args)?,
Subcommand::ElabTransform(args) => gen_elab_transform::run(&args)?,
};
let output_files = files
.into_iter()
.map(|f| write_file(f, regencmd))
.collect::<Result<Vec<_>>>()?;
if let Err(e) = output_files
.iter()
.map(|o| format(formatter, o))
.collect::<Result<Vec<_>>>()
{
eprintln!("formatter failed:\n {:#?}", e);
}
if let Err(e) = output_files
.iter()
.map(|o| sign(o))
.collect::<Result<Vec<_>>>()
{
eprintln!("signer failed:\n {:#?}", e);
}
Ok(())
}
fn write_file(output: (PathBuf, String), regencmd: Option<&str>) -> Result<PathBuf> {
let mut file = File::create(&output.0)?;
let content = insert_header(&output.1[..], regencmd.unwrap_or(""))?;
file.write_all(content.as_bytes())?;
Ok(output.0)
}
fn format(formatter: Option<&str>, file: &Path) -> Result<()> {
match formatter {
Some(formatter) => {
let output = Command::new(formatter).arg(file).output()?;
if !output.status.success() {
eprintln!("formatter failed:\n {:#?}", output);
}
}
_ => eprintln!("Skip: formatter not found"),
}
Ok(())
}
fn sign(file: &Path) -> Result<()> {
let contents = std::fs::read(file).context("Failed to read file for signing")?;
let new_contents = signed_source::sign_file(&contents)?;
std::fs::write(file, new_contents).context("Failed to write signed file")?;
Ok(())
} |
Rust | hhvm/hphp/hack/src/hh_codegen/quote_helper.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
use quote::ToTokens;
pub fn join(mut tokens: impl Iterator<Item = impl ToTokens>, sep: impl ToTokens) -> TokenStream {
let mut acc: Vec<TokenStream> = vec![];
if let Some(t) = tokens.next() {
acc.push(quote! {#t});
}
for t in tokens {
acc.push(quote! {#sep #t});
}
quote! { #(#acc)* }
}
pub fn with_paren(tokens: impl ToTokens) -> TokenStream {
quote! {( #tokens )}
} |
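A usage sketch for `join` (ours, not from the repo; `join` is copied from above so the example stands alone): interleave a comma between identifiers.

```rust
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
use quote::ToTokens;

// `join` copied verbatim from quote_helper.rs above.
fn join(mut tokens: impl Iterator<Item = impl ToTokens>, sep: impl ToTokens) -> TokenStream {
    let mut acc: Vec<TokenStream> = vec![];
    if let Some(t) = tokens.next() {
        acc.push(quote! {#t});
    }
    for t in tokens {
        acc.push(quote! {#sep #t});
    }
    quote! { #(#acc)* }
}

fn main() {
    let params = vec![format_ident!("a"), format_ident!("b"), format_ident!("c")];
    let list = join(params.into_iter(), quote! {,});
    // proc-macro2 renders tokens with single spaces between them.
    assert_eq!(list.to_string(), "a , b , c");
}
```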
Rust | hhvm/hphp/hack/src/hh_codegen/common/args.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::PathBuf;
#[derive(Debug, clap::Parser)]
pub struct CommonArgs {
/// Rust files containing the types for which codegen will be performed.
/// All types reachable from the given root type must be defined in one of
/// the files provided as `--input`.
#[clap(short, long)]
pub input: Vec<PathBuf>,
/// The root type of the AST. All types reachable from this type will be
/// visited by the generated visitor.
#[clap(short, long)]
pub root: String,
/// The directory to which generated files will be written.
#[clap(short, long)]
pub output: PathBuf,
} |
Rust | hhvm/hphp/hack/src/hh_codegen/common/by_ref_node.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
pub fn node() -> TokenStream {
quote! {
use super::Visitor;
pub trait Node<'a> {
fn accept(&'a self, v: &mut dyn Visitor<'a>) {
self.recurse(v)
}
fn recurse(&'a self, _v: &mut dyn Visitor<'a>) {}
}
}
}
pub fn node_impl() -> TokenStream {
quote! {
use super::{node::Node, visitor::Visitor};
impl<'a> Node<'a> for () {}
impl<'a> Node<'a> for bool {}
impl<'a> Node<'a> for isize {}
impl<'a> Node<'a> for str {}
impl<'a> Node<'a> for bstr::BStr {}
impl<'a> Node<'a> for crate::file_info::Mode {}
impl<'a> Node<'a> for crate::local_id::LocalId<'a> {}
impl<'a> Node<'a> for crate::method_flags::MethodFlags {}
impl<'a> Node<'a> for crate::pos::Pos<'a> {}
impl<'a> Node<'a> for crate::prop_flags::PropFlags {}
impl<'a> Node<'a> for crate::tany_sentinel::TanySentinel {}
impl<'a> Node<'a> for crate::typing_defs_flags::FunParamFlags {}
impl<'a> Node<'a> for crate::typing_defs_flags::FunTypeFlags {}
impl<'a, T: Node<'a> + ?Sized> Node<'a> for &'a T {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
(*self).accept(v)
}
}
impl<'a, T: Node<'a>> Node<'a> for [T] {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
for x in self {
x.accept(v)
}
}
}
impl<'a, T: Node<'a>> Node<'a> for Option<T> {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
match self {
Some(t) => t.accept(v),
_ => {}
}
}
}
impl<'a, T: Node<'a>> Node<'a> for crate::lazy::Lazy<T> {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
self.0.accept(v)
}
}
impl<'a, T: Node<'a>> Node<'a> for arena_collections::List<'a, T> {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
for elt in self.iter() {
elt.accept(v);
}
}
}
impl<'a, K: Node<'a>, V: Node<'a>> Node<'a> for arena_collections::map::Map<'a, K, V> {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
for (key, value) in self.iter() {
key.accept(v);
value.accept(v);
}
}
}
impl<'a, K: Node<'a>, V: Node<'a>> Node<'a> for arena_collections::SortedAssocList<'a, K, V> {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
for (key, value) in self.iter() {
key.accept(v);
value.accept(v);
}
}
}
impl<'a, T: Node<'a>> Node<'a> for arena_collections::SortedSet<'a, T> {
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
for elt in self.iter() {
elt.accept(v);
}
}
}
impl<'a, T1, T2> Node<'a> for (T1, T2)
where
T1: Node<'a>,
T2: Node<'a>,
{
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
self.0.accept(v);
self.1.accept(v);
}
}
impl<'a, T1, T2, T3> Node<'a> for (T1, T2, T3)
where
T1: Node<'a>,
T2: Node<'a>,
T3: Node<'a>,
{
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
self.0.accept(v);
self.1.accept(v);
self.2.accept(v);
}
}
impl<'a, T1, T2, T3, T4> Node<'a> for (T1, T2, T3, T4)
where
T1: Node<'a>,
T2: Node<'a>,
T3: Node<'a>,
T4: Node<'a>,
{
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
self.0.accept(v);
self.1.accept(v);
self.2.accept(v);
self.3.accept(v);
}
}
impl<'a, T1, T2, T3, T4, T5> Node<'a> for (T1, T2, T3, T4, T5)
where
T1: Node<'a>,
T2: Node<'a>,
T3: Node<'a>,
T4: Node<'a>,
T5: Node<'a>,
{
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
self.0.accept(v);
self.1.accept(v);
self.2.accept(v);
self.3.accept(v);
self.4.accept(v);
}
}
}
} |
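The quoted `Node`/`Visitor` pair is classic double dispatch: `accept` routes to the visitor's type-specific hook, and the hook's default implementation calls back into `recurse` to keep walking. A freestanding sketch of the pattern with a made-up two-variant `Expr` (no arenas, single `'a` lifetime as in the generated code):

```rust
trait Visitor<'a> {
    fn object(&mut self) -> &mut dyn Visitor<'a>;
    fn visit_expr(&mut self, e: &'a Expr) {
        e.recurse(self.object()) // default: just keep walking
    }
}

trait Node<'a> {
    fn accept(&'a self, v: &mut dyn Visitor<'a>) {
        self.recurse(v)
    }
    fn recurse(&'a self, _v: &mut dyn Visitor<'a>) {}
}

enum Expr {
    Int(isize),
    Add(Box<Expr>, Box<Expr>),
}

impl<'a> Node<'a> for Expr {
    fn accept(&'a self, v: &mut dyn Visitor<'a>) {
        v.visit_expr(self) // dispatch to the hook, like the generated impls
    }
    fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
        match self {
            Expr::Int(_) => {}
            Expr::Add(l, r) => {
                l.accept(v);
                r.accept(v);
            }
        }
    }
}

struct IntCounter(usize);

impl<'a> Visitor<'a> for IntCounter {
    fn object(&mut self) -> &mut dyn Visitor<'a> {
        self
    }
    fn visit_expr(&mut self, e: &'a Expr) {
        if let Expr::Int(_) = e {
            self.0 += 1;
        }
        e.recurse(self.object())
    }
}

fn main() {
    let e = Expr::Add(Box::new(Expr::Int(1)), Box::new(Expr::Int(2)));
    let mut c = IntCounter(0);
    e.accept(&mut c);
    assert_eq!(c.0, 2);
}
```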
Rust | hhvm/hphp/hack/src/hh_codegen/common/context.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::VecDeque;
use std::path::Path;
use anyhow::anyhow;
use anyhow::Result;
use hash::IndexMap;
use hash::IndexSet;
use synstructure::Structure;
use crate::common::syn_helpers;
/// A simplified version of `crate::gen_visitor::context::Context`. Contains all
/// of the ASTs for `syn::Item` definitions provided in the constructor, except
/// those whose types are not reachable from the given root type.
pub struct Context {
defs: IndexMap<String, syn::DeriveInput>,
mods: IndexSet<String>,
}
impl Context {
/// Construct a `Context` containing the ASTs of all type definitions
/// reachable from the `root` type. Each type must have a unique name (even
/// if the types are declared in different modules).
pub fn new(files: &[(&Path, Vec<syn::Item>)], root: &str) -> Result<Self> {
Self::with_extern_files(files, &[], root)
}
/// Construct a `Context` containing the ASTs of all type definitions
/// reachable from the `root` type. Each type must have a unique name (even
/// if the types are declared in different modules).
///
/// `extern_files` is used to provide the definitions of types which are
/// declared in `extern_files` and re-exported in `files` (e.g., when using
/// `oxidized_by_ref` for `files`, use `oxidized` for `extern_files`, since
/// `oxidized_by_ref` re-exports types defined in `oxidized`).
pub fn with_extern_files(
files: &[(&Path, Vec<syn::Item>)],
extern_files: &[(&Path, Vec<syn::Item>)],
root: &str,
) -> Result<Self> {
let mut defs = IndexMap::default();
let mut mods = IndexSet::default();
for (filename, items) in files {
eprintln!("Processing {:?}", filename);
for item in items.iter() {
if let Ok(name) = syn_helpers::get_ty_def_name(item) {
if defs.contains_key(&name) {
return Err(anyhow!("Type {} already exists, file {:?}", name, filename));
}
defs.insert(name, item);
}
}
// assuming file name is the module name
mods.insert(
filename
.file_stem()
.and_then(|stem| stem.to_str())
.unwrap()
.into(),
);
}
// The "extern" files provide the definitions of types which were
// imported from the oxidized crate to the oxidized_by_ref crate via
// an extern_types.txt file.
for (filename, items) in extern_files {
eprintln!("Processing extern file {:?}", filename);
for item in items.iter() {
if let Ok(name) = syn_helpers::get_ty_def_name(item) {
// Don't overwrite a definition if one is already there--we
// only need to fill in the ones which are missing (because
// they were re-exported from oxidized).
defs.entry(name).or_insert(item);
}
}
}
let reachable = Self::get_all_tys(&defs, root)?;
let defs = defs
.into_iter()
.filter(|(ty_name, _)| reachable.contains(ty_name.as_str()))
.filter_map(|(ty_name, item)| {
use syn::Item::*;
match item {
Struct(item_struct) => Some((ty_name, item_struct.clone().into())),
Enum(item_enum) => Some((ty_name, item_enum.clone().into())),
_ => None,
}
})
.collect();
Ok(Self { defs, mods })
}
/// Return all the names of modules provided in the `files` argument to
/// `Self::new`. Assumes that each file has the same name as the module it
/// declares (i.e., no mod.rs files).
pub fn modules(&self) -> impl Iterator<Item = &str> {
self.mods.iter().map(|s| s.as_ref())
}
pub fn types(&self) -> impl Iterator<Item = &syn::DeriveInput> {
self.defs.values()
}
pub fn type_structures(&self) -> impl Iterator<Item = Structure<'_>> {
self.types().map(Structure::new)
}
fn get_all_tys(defs: &IndexMap<String, &syn::Item>, root: &str) -> Result<IndexSet<String>> {
let defined_types = defs.keys().map(|s| s.as_str()).collect();
let mut visited = IndexSet::<String>::default();
let mut q = VecDeque::new();
q.push_back(root.into());
while let Some(ty) = q.pop_front() {
let item = defs
.get(&ty)
.ok_or_else(|| anyhow!("Type {} not found", ty))?;
visited.insert(ty);
let deps = syn_helpers::get_dep_tys(&defined_types, item)?;
for d in deps.into_iter() {
if !visited.contains(&d) {
q.push_back(d);
}
}
}
Ok(visited)
}
} |
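`get_all_tys` is a plain breadth-first reachability computation over the type-dependency graph. The same algorithm over a toy dependency map (names and graph are ours):

```rust
use std::collections::{HashMap, HashSet, VecDeque};

/// Return every type name reachable from `root` in `deps`.
fn reachable(deps: &HashMap<&str, Vec<&str>>, root: &str) -> HashSet<String> {
    let mut visited = HashSet::new();
    let mut queue = VecDeque::from([root.to_string()]);
    while let Some(ty) = queue.pop_front() {
        if let Some(ds) = deps.get(ty.as_str()) {
            for d in ds {
                if !visited.contains(*d) {
                    queue.push_back(d.to_string());
                }
            }
        }
        visited.insert(ty);
    }
    visited
}

fn main() {
    let deps = HashMap::from([
        ("Program", vec!["Def"]),
        ("Def", vec!["Stmt", "Expr"]),
        ("Stmt", vec!["Expr"]),
        ("Expr", vec![]),
        ("Unused", vec!["Expr"]),
    ]);
    let r = reachable(&deps, "Program");
    assert!(r.contains("Expr") && !r.contains("Unused"));
}
```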
Rust | hhvm/hphp/hack/src/hh_codegen/common/mod.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
pub mod args;
pub mod by_ref_node;
pub mod context;
use std::fmt::Write;
use std::path::Path;
use std::path::PathBuf;
pub use anyhow::Result;
pub fn parse_all(files: &[PathBuf]) -> Result<Vec<(&Path, Vec<syn::Item>)>> {
files
.iter()
.map(|filename| -> Result<(&Path, Vec<syn::Item>)> {
let src = std::fs::read_to_string(filename)?;
let file = syn::parse_file(&src)?;
Ok((filename, file.items.into_iter().collect()))
})
.collect()
}
pub fn to_snake(s: &str) -> String {
let mut r = String::new();
let chars: Vec<char> = s.chars().collect();
for i in 0..chars.len() {
if chars[i].is_ascii_uppercase() {
if i != 0
&& chars[i - 1].is_ascii_lowercase()
&& (i + 1 == chars.len() || chars[i + 1].is_ascii_lowercase())
{
r.push('_');
}
r.push(chars[i].to_ascii_lowercase());
} else {
r.push(chars[i])
}
}
r
}
pub fn insert_header(s: &str, command: &str) -> Result<String> {
let mut content = String::new();
#[allow(clippy::write_literal)]
write!(
&mut content,
"
// Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the \"hack\" directory of this source tree.
//
// {}
//
// To regenerate this file, run:
// {}
{}
",
signed_source::SIGNING_TOKEN,
command,
s
)?;
Ok(content)
}
pub mod gen_helpers {
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
pub fn gen_module_uses(ms: impl Iterator<Item = impl AsRef<str>>) -> TokenStream {
let mods = ms.map(|m| format_ident!("{}", m.as_ref()));
quote! {
use crate::{*, #(#mods::{self, *},)*};
}
}
}
pub mod syn_helpers {
use std::collections::HashSet;
use anyhow::anyhow;
use anyhow::Result;
use syn::*;
pub fn get_ty_def_name(i: &Item) -> Result<String> {
use Item::*;
match i {
Enum(ItemEnum { ident, .. })
| Struct(ItemStruct { ident, .. })
| Type(ItemType { ident, .. }) => Ok(ident.to_string()),
_ => Err(anyhow!("Not supported {:?}", i)),
}
}
pub fn get_dep_tys(defined_types: &HashSet<&str>, i: &Item) -> Result<Vec<String>> {
use Item::*;
match i {
Enum(ItemEnum { variants, .. }) => Ok(variants
.iter()
.fold(HashSet::<String>::new(), |mut a, v| {
for ty in LeafTyCollector::on_fields(Some(defined_types), &v.fields) {
a.insert(ty);
}
a
})
.into_iter()
.collect()),
Type(ItemType { ty, .. }) => {
Ok(LeafTyCollector::on_type(Some(defined_types), ty.as_ref()).collect())
}
Struct(ItemStruct { fields, .. }) => {
Ok(LeafTyCollector::on_fields(Some(defined_types), fields).collect())
}
_ => Err(anyhow!("Not supported {:?}", i)),
}
}
struct LeafTyCollector {
discovered_types: HashSet<String>,
}
impl LeafTyCollector {
pub fn new() -> Self {
Self {
discovered_types: HashSet::new(),
}
}
pub fn on_type<'a>(
filter: Option<&'a HashSet<&'a str>>,
ty: &Type,
) -> impl Iterator<Item = String> + 'a {
let mut collector = Self::new();
visit::visit_type(&mut collector, ty);
collector
.discovered_types
.into_iter()
.filter(move |s| filter.map_or(true, |f| f.contains(s.as_str())))
}
pub fn on_fields<'a>(
filter: Option<&'a HashSet<&'a str>>,
fields: &Fields,
) -> impl Iterator<Item = String> + 'a {
let mut collector = Self::new();
visit::visit_fields(&mut collector, fields);
collector
.discovered_types
.into_iter()
.filter(move |s| filter.map_or(true, |f| f.contains(s.as_str())))
}
}
impl<'ast> visit::Visit<'ast> for LeafTyCollector {
fn visit_path_segment(&mut self, node: &'ast PathSegment) {
let ty = node.ident.to_string();
self.discovered_types.insert(ty);
visit::visit_path_segment(self, node);
}
}
} |
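Concrete cases of `to_snake`'s underscore rule: a `_` is inserted only when an uppercase letter follows a lowercase one and starts a new lowercase run, so runs of capitals collapse without separators. A hypothetical test (not in the repo):

```rust
#[cfg(test)]
mod tests {
    use super::to_snake;

    #[test]
    fn to_snake_examples() {
        // Underscore between a lowercase run and a new capitalized word:
        assert_eq!(to_snake("FunParamFlags"), "fun_param_flags");
        assert_eq!(to_snake("TanySentinel"), "tany_sentinel");
        // Consecutive capitals collapse without separators:
        assert_eq!(to_snake("HHI"), "hhi");
    }
}
```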
Rust | hhvm/hphp/hack/src/hh_codegen/gen_by_ref_decl_visitor/args.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::PathBuf;
#[derive(Debug, clap::Parser)]
pub struct Args {
#[clap(flatten)]
pub common: crate::common::args::CommonArgs,
/// Additional Rust files containing types for which codegen will be performed.
/// All types reachable from the given root type must be defined in one of
/// the files provided as `--input` or `--extern-input`.
#[clap(short, long)]
pub extern_input: Vec<PathBuf>,
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_by_ref_decl_visitor/mod.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod args;
mod node_impl_generator;
mod visitor_trait_generator;
pub use args::Args;
use crate::common::context::Context;
pub fn run(args: &Args) -> anyhow::Result<Vec<(std::path::PathBuf, String)>> {
let files = crate::common::parse_all(&args.common.input)?;
let extern_files = crate::common::parse_all(&args.extern_input)?;
let ctx =
Context::with_extern_files(files.as_slice(), extern_files.as_slice(), &args.common.root)?;
let results = vec![
("node.rs", crate::common::by_ref_node::node()),
("node_impl.rs", crate::common::by_ref_node::node_impl()),
("node_impl_gen.rs", node_impl_generator::gen(&ctx)),
("visitor.rs", visitor_trait_generator::gen(&ctx)),
];
Ok(results
.iter()
.map(|(filename, source)| (args.common.output.join(filename), source.to_string()))
.collect())
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_by_ref_decl_visitor/node_impl_generator.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
use super::visitor_trait_generator;
use super::Context;
use crate::common::gen_helpers;
pub fn gen(ctx: &Context) -> TokenStream {
let uses = gen_helpers::gen_module_uses(ctx.modules());
let impls = ctx.type_structures().map(gen_node_impl).collect::<Vec<_>>();
quote! {
#![allow(unused_braces)]
#![allow(unused_imports)]
#![allow(unused_variables)]
use super::node::Node;
use super::visitor::Visitor;
#uses
#(#impls)*
}
}
fn gen_node_impl(s: synstructure::Structure<'_>) -> TokenStream {
let ty_name = &s.ast().ident;
let (_, ty_generics, _) = s.ast().generics.split_for_impl();
let visit_fn = visitor_trait_generator::gen_visit_fn_name(ty_name.to_string());
let recurse_body = s.each(|bi| quote! { #bi.accept(v) });
// Sanity check: ensure that all types have at most one lifetime with name `'a`.
let lifetimes = s.ast().generics.lifetimes().collect::<Vec<_>>();
assert!(lifetimes.len() <= 1);
if let Some(lifetime) = lifetimes.first() {
assert_eq!("a", lifetime.lifetime.ident.to_string());
}
quote! {
impl<'a> Node<'a> for #ty_name #ty_generics {
fn accept(&'a self, v: &mut dyn Visitor<'a>) {
v.#visit_fn(self)
}
fn recurse(&'a self, v: &mut dyn Visitor<'a>) {
match self { #recurse_body }
}
}
}
} |
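To see what `s.each` contributes to the generated `recurse` body, here is a tiny standalone probe using the same `syn`/`quote`/`synstructure` crates; the `FunParam` struct is a toy input, not a real Hack AST type:

```rust
use quote::quote;
use synstructure::Structure;

fn main() {
    let ast: syn::DeriveInput = syn::parse_quote! {
        struct FunParam<'a> {
            name: &'a str,
            pos: usize,
        }
    };
    let s = Structure::new(&ast);
    // Same shape as `recurse_body` above: one match arm binding every field.
    let body = s.each(|bi| quote! { #bi.accept(v) });
    println!("{}", body);
    // Prints roughly:
    // FunParam { name: ref __binding_0, pos: ref __binding_1, } => {
    //     { __binding_0.accept(v) } { __binding_1.accept(v) }
    // }
}
```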
Rust | hhvm/hphp/hack/src/hh_codegen/gen_by_ref_decl_visitor/visitor_trait_generator.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
use super::Context;
use crate::common::gen_helpers;
use crate::common::to_snake;
pub fn gen(ctx: &Context) -> TokenStream {
let uses = gen_helpers::gen_module_uses(ctx.modules());
let visit_functions = ctx.types().map(gen_visit_function).collect::<Vec<_>>();
quote! {
#![allow(unused_imports)]
#![allow(unused_variables)]
#uses
use super::node::Node;
pub trait Visitor<'a> {
fn object(&mut self) -> &mut dyn Visitor<'a>;
#(#visit_functions)*
}
}
}
fn gen_visit_function(ast: &syn::DeriveInput) -> TokenStream {
let ty = &ast.ident;
let name = gen_visit_fn_name(ty.to_string());
let (_, ty_generics, _) = ast.generics.split_for_impl();
quote! {
fn #name(&mut self, p: &'a #ty #ty_generics) {
p.recurse(self.object())
}
}
}
pub fn gen_visit_fn_name(ty: impl AsRef<str>) -> syn::Ident {
format_ident!("visit_{}", to_snake(ty.as_ref()))
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_elab_transform/mod.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod pass_generator;
mod transform_generator;
pub use crate::common::args::CommonArgs as Args;
use crate::common::context::Context;
use crate::common::to_snake;
pub enum Direction {
TopDown,
BottomUp,
}
impl Direction {
pub fn to_string(&self) -> &str {
match self {
Direction::TopDown => "top_down",
Direction::BottomUp => "bottom_up",
}
}
}
pub fn run(args: &Args) -> anyhow::Result<Vec<(std::path::PathBuf, String)>> {
let files = crate::common::parse_all(&args.input)?;
let ctx = Context::new(files.as_slice(), &args.root)?;
let results = vec![
("pass.rs", pass_generator::gen(&ctx)),
("transform.rs", transform_generator::gen(&ctx)),
];
Ok(results
.iter()
.map(|(filename, source)| (args.output.join(filename), source.to_string()))
.collect())
}
fn gen_pass_method_name(ty: impl AsRef<str>, dir: Direction) -> syn::Ident {
quote::format_ident!("on_ty_{}_{}", to_snake(ty.as_ref()), dir.to_string())
}
fn gen_pass_fld_method_name(
ty: impl AsRef<str>,
field: impl AsRef<str>,
dir: Direction,
) -> syn::Ident {
quote::format_ident!(
"on_fld_{}_{}_{}",
to_snake(ty.as_ref()),
to_snake(field.as_ref()),
dir.to_string(),
)
}
fn gen_pass_ctor_method_name(
ty: impl AsRef<str>,
field: impl AsRef<str>,
dir: Direction,
) -> syn::Ident {
quote::format_ident!(
"on_ctor_{}_{}_{}",
to_snake(ty.as_ref()),
to_snake(field.as_ref()),
dir.to_string()
)
}
fn contains_ocaml_attr(attrs: &[syn::Attribute], attr: &'static str) -> bool {
fn get_rust_to_ocaml_meta_items(attr: &syn::Attribute) -> Option<Vec<syn::NestedMeta>> {
if !attr.path.is_ident("rust_to_ocaml") {
return None;
}
match attr.parse_meta() {
Ok(syn::Meta::List(meta)) => Some(meta.nested.into_iter().collect()),
_ => None,
}
}
fn get_lit_str<'a>(_attr_name: &'static str, lit: &'a syn::Lit) -> Option<&'a syn::LitStr> {
if let syn::Lit::Str(lit) = lit {
Some(lit)
} else {
None
}
}
attrs
.iter()
.flat_map(get_rust_to_ocaml_meta_items)
.flatten()
.any(|item| {
use syn::Meta::NameValue;
use syn::NestedMeta::Meta;
match item {
Meta(NameValue(m)) if m.path.is_ident("attr") => {
if let Some(s) = get_lit_str("attr", &m.lit) {
return s.value() == attr;
}
false
}
_ => false,
}
})
} |
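A quick hypothetical spot-check of the identifier scheme these helpers produce (recall `to_snake("FunDef")` is `"fun_def"`):

```rust
use quote::format_ident;

fn main() {
    // Mirrors gen_pass_method_name("FunDef", Direction::TopDown):
    let name = format_ident!("on_ty_{}_{}", "fun_def", "top_down");
    assert_eq!(name.to_string(), "on_ty_fun_def_top_down");
    // Mirrors gen_pass_fld_method_name("Class_", "Consts", Direction::BottomUp):
    let name = format_ident!("on_fld_{}_{}_{}", "class_", "consts", "bottom_up");
    assert_eq!(name.to_string(), "on_fld_class__consts_bottom_up");
}
```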
Rust | hhvm/hphp/hack/src/hh_codegen/gen_elab_transform/pass_generator.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
use super::contains_ocaml_attr;
use super::Context;
use super::Direction;
pub fn gen(ctx: &Context) -> TokenStream {
let pass_methods: Vec<_> = ctx
.type_structures()
.flat_map(|s| gen_pass_methods(s, Body::Default))
.collect();
let passes_methods: Vec<_> = ctx
.type_structures()
.flat_map(|s| gen_pass_methods(s, Body::Passes))
.collect();
quote! {
#![allow(unused_variables, non_snake_case)]
use std::ops::ControlFlow;
use std::ops::ControlFlow::Continue;
use oxidized::ast_defs::*;
use oxidized::aast_defs::*;
use crate::env::Env;
type Ex = ();
type En = ();
pub trait Pass {
#(#pass_methods)*
}
pub struct Passes<P, Q>
where
P: Pass,
Q: Pass,
{
pub fst: P,
pub snd: Q,
}
impl<P, Q> Clone for Passes<P, Q>
where
P: Pass + Clone,
Q: Pass + Clone,
{
fn clone(&self) -> Self {
Passes { fst: self.fst.clone(), snd: self.snd.clone() }
}
}
impl<P, Q> Pass for Passes<P, Q>
where
P: Pass,
Q: Pass,
{
#(#passes_methods)*
}
}
}
fn gen_pass_methods(s: synstructure::Structure<'_>, body_type: Body) -> TokenStream {
// If the type is marked opaque, generate no methods; they won't be called.
if contains_ocaml_attr(&s.ast().attrs, "transform.opaque") {
return quote!();
}
let ty = &s.ast().ident;
let name_td = super::gen_pass_method_name(ty.to_string(), Direction::TopDown);
let name_bu = super::gen_pass_method_name(ty.to_string(), Direction::BottomUp);
let (_, ty_generics, _) = s.ast().generics.split_for_impl();
let fld_methods = gen_fld_methods(&s, body_type);
let body_td = body_type.gen(&name_td);
let body_bu = body_type.gen(&name_bu);
quote! {
#[inline(always)]
fn #name_td(
&mut self,
env: &Env,
elem: &mut #ty #ty_generics,
) -> ControlFlow<()> {
#body_td
}
#[inline(always)]
fn #name_bu(
&mut self,
env: &Env,
elem: &mut #ty #ty_generics,
) -> ControlFlow<()> {
#body_bu
}
#(#fld_methods)*
}
}
#[derive(Copy, Clone, Debug)]
enum Body {
Default,
Passes,
}
impl Body {
fn gen(self, name: &syn::Ident) -> TokenStream {
match self {
Body::Default => quote!(Continue(())),
Body::Passes => quote! {
self.fst.#name(env, elem)?;
self.snd.#name(env, elem)
},
}
}
}
fn gen_fld_methods(s: &synstructure::Structure<'_>, body_type: Body) -> Vec<TokenStream> {
let ty_name = s.ast().ident.to_string();
match &s.ast().data {
syn::Data::Struct(data) => match &data.fields {
syn::Fields::Named(..) => (s.variants().iter().flat_map(|v| v.bindings()))
.filter(|field| contains_ocaml_attr(&field.ast().attrs, "transform.explicit"))
.map(|bi| gen_fld_method(&ty_name, bi, body_type))
.collect(),
_ => vec![],
},
syn::Data::Enum(..) => (s.variants().iter())
.filter(|variant| contains_ocaml_attr(variant.ast().attrs, "transform.explicit"))
.map(|variant| gen_ctor_method(&ty_name, variant, body_type))
.collect(),
_ => vec![],
}
}
fn gen_fld_method(
ty_name: &str,
binding_info: &synstructure::BindingInfo<'_>,
body_type: Body,
) -> TokenStream {
let ast = binding_info.ast();
let name_td = super::gen_pass_fld_method_name(
ty_name,
ast.ident.as_ref().unwrap().to_string(),
Direction::TopDown,
);
let name_bu = super::gen_pass_fld_method_name(
ty_name,
ast.ident.as_ref().unwrap().to_string(),
Direction::BottomUp,
);
let field_ty = &ast.ty;
let body_td = body_type.gen(&name_td);
let body_bu = body_type.gen(&name_bu);
quote! {
#[inline(always)]
fn #name_td(
&mut self,
env: &Env,
elem: &mut #field_ty,
) -> ControlFlow<()> {
#body_td
}
#[inline(always)]
fn #name_bu(
&mut self,
env: &Env,
elem: &mut #field_ty,
) -> ControlFlow<()> {
#body_bu
}
}
}
fn gen_ctor_method(
ty_name: &str,
variant_info: &synstructure::VariantInfo<'_>,
body_type: Body,
) -> TokenStream {
let ast = variant_info.ast();
let variant_ty = match &ast.fields {
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty,
_ => panic!("transform.explicit only supports tuple-like variants with 1 field"),
};
let name_td =
super::gen_pass_ctor_method_name(ty_name, ast.ident.to_string(), Direction::TopDown);
let name_bu =
super::gen_pass_ctor_method_name(ty_name, ast.ident.to_string(), Direction::BottomUp);
let body_td = body_type.gen(&name_td);
let body_bu = body_type.gen(&name_bu);
quote! {
#[inline(always)]
fn #name_td(
&mut self,
env: &Env,
elem: &mut #variant_ty,
) -> ControlFlow<()> {
#body_td
}
#[inline(always)]
fn #name_bu(
&mut self,
env: &Env,
elem: &mut #variant_ty,
) -> ControlFlow<()> {
#body_bu
}
}
} |
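`Passes` composes two passes per hook: run `fst`, short-circuit on `ControlFlow::Break` via `?`, then run `snd`. A reduced, self-contained sketch of that composition with a single hook instead of the generated many (names ours):

```rust
use std::ops::ControlFlow;
use std::ops::ControlFlow::{Break, Continue};

trait Pass {
    fn on_ty_expr(&mut self, _elem: &mut i64) -> ControlFlow<()> {
        Continue(())
    }
}

struct AddOne;
impl Pass for AddOne {
    fn on_ty_expr(&mut self, elem: &mut i64) -> ControlFlow<()> {
        *elem += 1;
        Continue(())
    }
}

struct StopAtTen;
impl Pass for StopAtTen {
    fn on_ty_expr(&mut self, elem: &mut i64) -> ControlFlow<()> {
        if *elem >= 10 { Break(()) } else { Continue(()) }
    }
}

struct Passes<P, Q> {
    fst: P,
    snd: Q,
}

impl<P: Pass, Q: Pass> Pass for Passes<P, Q> {
    fn on_ty_expr(&mut self, elem: &mut i64) -> ControlFlow<()> {
        self.fst.on_ty_expr(elem)?; // Break in fst skips snd
        self.snd.on_ty_expr(elem)
    }
}

fn main() {
    let mut passes = Passes { fst: AddOne, snd: StopAtTen };
    let mut x = 9;
    assert_eq!(passes.on_ty_expr(&mut x), Break(()));
    assert_eq!(x, 10);
}
```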
Rust | hhvm/hphp/hack/src/hh_codegen/gen_elab_transform/transform_generator.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
#![allow(unused)]
use proc_macro2::TokenStream;
use quote::quote;
use super::contains_ocaml_attr;
use super::Context;
use super::Direction;
use crate::common::to_snake;
pub fn gen(ctx: &Context) -> TokenStream {
let manual_impls = gen_manual_impls();
let impls: Vec<_> = ctx
.type_structures()
.map(|s| gen_transform_and_traverse(ctx, s))
.collect();
quote! {
#![allow(unused_variables)]
#![allow(unused_braces)]
#![allow(clippy::match_single_binding)]
use std::ops::ControlFlow::Break;
use oxidized::nast::*;
use crate::env::Env;
use crate::Pass;
pub trait Transform {
#[inline(always)]
fn transform(
&mut self,
env: &Env,
pass: &mut (impl Pass + Clone),
) {
self.traverse(env, pass);
}
#[inline(always)]
fn traverse(
&mut self,
env: &Env,
pass: &mut (impl Pass + Clone),
) {}
}
impl Transform for () {}
impl Transform for bool {}
impl Transform for isize {}
impl Transform for String {}
impl Transform for bstr::BString {}
impl Transform for oxidized::pos::Pos {}
impl Transform for oxidized::file_info::Mode {}
impl Transform for oxidized::namespace_env::Env {}
impl Transform for oxidized::LocalIdMap<(Pos, ())> {}
#(#manual_impls)*
#(#impls)*
}
}
fn gen_transform_and_traverse(ctx: &Context, mut s: synstructure::Structure<'_>) -> TokenStream {
// By default, if you are deriving an impl of trait Foo for generic type
// X<T>, synstructure will add Foo as a bound not only for the type
// parameter T, but also for every type which appears as a field in X. This
// is not necessary for our use case--we can just require that the type
// parameters implement our trait.
s.add_bounds(synstructure::AddBounds::Generics);
// We are mutating the AST and need &mut references to fields.
s.bind_with(|_bi| synstructure::BindStyle::RefMut);
// Cleaner generated syntax for `gen impl` hygiene. Not supported by Rust
// versions before 1.37. We don't really need the hygiene at all (the quote
// we pass to `gen_impl` doesn't define any items other than the trait
// impl), but I don't think there's a way to turn it off.
s.underscore_const(true);
// If the type is marked opaque, generate a no-op Transform impl.
if contains_ocaml_attr(&s.ast().attrs, "transform.opaque") {
return s.gen_impl(quote! {
gen impl Transform for @Self {}
});
}
// Don't visit fields or variants marked opaque.
s.filter(|bi| !contains_ocaml_attr(&bi.ast().attrs, "transform.opaque"));
s.filter_variants(|v| !contains_ocaml_attr(v.ast().attrs, "transform.opaque"));
let ty_name = s.ast().ident.to_string();
let transform_body = gen_transform_body(
&super::gen_pass_method_name(&ty_name, Direction::TopDown),
&super::gen_pass_method_name(&ty_name, Direction::BottomUp),
quote!(self),
quote!(self.traverse(env, pass)),
);
let traverse_body = gen_traverse_body(&ty_name, &s);
let ty_name = quote::format_ident!("{}", ty_name);
quote! {
impl Transform for #ty_name {
fn transform(
&mut self,
env: &Env,
pass: &mut (impl Pass + Clone),
) {
#transform_body
}
fn traverse(
&mut self,
env: &Env,
pass: &mut (impl Pass + Clone),
) {
match self { #traverse_body }
}
}
}
}
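// A minimal sketch (hypothetical type, with method names assuming the naming
// produced by gen_pass_method_name) of what the function above emits for a
// type `Expr`:
//
// impl Transform for Expr {
//     fn transform(&mut self, env: &Env, pass: &mut (impl Pass + Clone)) {
//         let mut in_pass = pass.clone();
//         if let Break(..) = pass.on_ty_expr_top_down(env, self) { return; }
//         self.traverse(env, pass);
//         in_pass.on_ty_expr_bottom_up(env, self);
//     }
//     fn traverse(&mut self, env: &Env, pass: &mut (impl Pass + Clone)) {
//         match self { /* one arm per variant, transforming each field */ }
//     }
// }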
fn gen_traverse_body(ty_name: &str, s: &synstructure::Structure<'_>) -> TokenStream {
s.variants()
.iter()
.map(|v| gen_variant_traverse(ty_name, v))
.chain(std::iter::once(if s.omitted_variants() {
quote!(_ => {})
} else {
quote!()
}))
.collect()
}
fn gen_variant_traverse(ty_name: &str, v: &synstructure::VariantInfo<'_>) -> TokenStream {
if !contains_ocaml_attr(v.ast().attrs, "transform.explicit") {
return v.each(|bi| gen_fld_traverse(ty_name, bi));
}
let pass_method_td =
super::gen_pass_ctor_method_name(ty_name, v.ast().ident.to_string(), Direction::TopDown);
let pass_method_bu =
super::gen_pass_ctor_method_name(ty_name, v.ast().ident.to_string(), Direction::BottomUp);
if v.ast().fields.len() != 1 {
panic!("transform.explicit only supports variants with 1 field")
}
v.each(|bi| {
gen_transform_body_explicit(
&pass_method_td,
&pass_method_bu,
quote!(#bi),
quote! { #bi.transform(env, &mut td_pass) },
)
})
}
fn gen_fld_traverse(ty_name: &str, bi: &synstructure::BindingInfo<'_>) -> TokenStream {
let transform_bi = quote! { #bi.transform(env, pass) };
if !contains_ocaml_attr(&bi.ast().attrs, "transform.explicit") {
return transform_bi;
}
// Since Transform is a trait, we can't have special implementations
// of Transform::transform for fields. Instead, we inline the
// transform_fld function here (in the Transform::traverse body).
let pass_method_td = super::gen_pass_fld_method_name(
ty_name,
(bi.ast().ident.as_ref())
.map(|i| i.to_string())
.unwrap_or_default(),
Direction::TopDown,
);
let pass_method_bu = super::gen_pass_fld_method_name(
ty_name,
(bi.ast().ident.as_ref())
.map(|i| i.to_string())
.unwrap_or_default(),
Direction::BottomUp,
);
gen_transform_body_explicit(&pass_method_td, &pass_method_bu, quote!(#bi), transform_bi)
}
fn gen_transform_body(
pass_method_td: &syn::Ident,
pass_method_bu: &syn::Ident,
elem: TokenStream,
recurse: TokenStream,
) -> TokenStream {
quote! {
let mut in_pass = pass.clone();
if let Break(..) = pass.#pass_method_td(env, #elem) {
return;
}
#recurse;
in_pass.#pass_method_bu(env, #elem);
}
}
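// Note on the emitted protocol: a clone of the pass is taken first, the
// top-down method may return Break(..) to skip the subtree entirely, children
// are traversed next, and the bottom-up method finally runs on the clone,
// insulating it from any state the top-down call left on the original pass.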
fn gen_transform_body_explicit(
pass_method_td: &syn::Ident,
pass_method_bu: &syn::Ident,
elem: TokenStream,
recurse: TokenStream,
) -> TokenStream {
let body = gen_transform_body(pass_method_td, pass_method_bu, elem, recurse);
quote! {
{
let pass = &mut pass.clone();
#body
}
}
}
fn gen_manual_impls() -> Vec<TokenStream> {
let transform = quote!(transform(env, &mut pass.clone()));
#[rustfmt::skip]
let manual_impls = vec![
(vec!["T"], quote!(&mut T), quote!((**self).#transform)),
(vec!["T"], quote!(Box<T>), quote!((**self).#transform)),
(vec!["L", "R"], quote!(itertools::Either<L, R>), quote! {
match self {
Self::Left(x) => x.#transform,
Self::Right(x) => x.#transform,
}
}),
(vec!["T"], quote!(Vec<T>), quote! {
for x in self.iter_mut() {
x.#transform;
}
}),
(vec!["T"], quote!(Option<T>), quote! {
match self {
Some(x) => x.#transform,
None => {}
}
}),
(vec!["T"], quote!(oxidized::lazy::Lazy<T>), quote!(self.0.#transform)),
(vec!["K", "V"], quote!(std::collections::BTreeMap<K, V>), quote! {
for x in self.values_mut() {
x.#transform;
}
}),
(vec!["T"], quote!(std::sync::Arc<T>), quote! {
if let Some(x) = std::sync::Arc::get_mut(self) {
x.#transform;
}
}),
(vec!["T"], quote!(std::rc::Rc<T>), quote! {
if let Some(x) = std::rc::Rc::get_mut(self) {
x.#transform;
}
}),
(vec!["T1", "T2"], quote!((T1, T2)), quote! {
self.0.#transform;
self.1.#transform;
}),
(vec!["T1", "T2", "T3"], quote!((T1, T2, T3)), quote! {
self.0.#transform;
self.1.#transform;
self.2.#transform;
}),
(vec!["T1", "T2", "T3", "T4"], quote!((T1, T2, T3, T4)), quote! {
self.0.#transform;
self.1.#transform;
self.2.#transform;
self.3.#transform;
}),
(vec!["T1", "T2", "T3", "T4", "T5"], quote!((T1, T2, T3, T4, T5)), quote! {
self.0.#transform;
self.1.#transform;
self.2.#transform;
self.3.#transform;
self.4.#transform;
}),
];
manual_impls
.into_iter()
.map(|(tp, ty, body)| gen_manual_impl(tp, ty, body))
.collect()
}
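// For reference, the (Vec<T>, ...) row above expands through gen_manual_impl
// into roughly:
//
// impl<T> Transform for Vec<T>
// where
//     T: Transform,
// {
//     fn traverse(&mut self, env: &Env, pass: &mut (impl Pass + Clone)) {
//         for x in self.iter_mut() {
//             x.transform(env, &mut pass.clone());
//         }
//     }
// }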
fn gen_manual_impl(
typarams: Vec<&'static str>,
ty: TokenStream,
traverse_body: TokenStream,
) -> TokenStream {
let typarams: Vec<_> = typarams
.into_iter()
.map(|tp| quote::format_ident!("{}", tp))
.collect();
quote! {
impl<#(#typarams,)*> Transform for #ty
where
#(#typarams: Transform,)*
{
fn traverse(
&mut self,
env: &Env,
pass: &mut (impl Pass + Clone),
) {
#traverse_body
}
}
}
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_enum_helper/mod.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod ref_kind;
use std::fmt::Write;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use anyhow::anyhow;
use anyhow::Result;
use clap::Parser;
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
use ref_kind::RefKind;
use syn::*;
use crate::common::*;
use crate::quote_helper::*;
#[derive(Debug, Parser)]
pub struct Args {
/// Rust files containing the enum types for which codegen will be performed.
#[clap(short, long)]
input: Vec<String>,
/// The directory to which generated files will be written.
#[clap(short, long)]
output: PathBuf,
}
pub fn run(args: &Args) -> Result<Vec<(PathBuf, String)>> {
let inputs = &args.input;
let output_dir = &args.output;
let mut result = vec![];
let mut mods = vec![];
for input in inputs {
let (file, uses) = parse_input_arg(input);
eprintln!("Process: {}", file);
let mut output_filename = Path::new(file)
.file_stem()
.ok_or_else(|| anyhow!("Unable to get file stem"))?
.to_os_string();
output_filename.push("_impl_gen");
mods.push(output_filename.clone().into_string().unwrap());
output_filename.push(".rs");
let output_file = output_dir.join(Path::new(&output_filename));
let mut file = File::open(file)?;
let mut src = String::new();
file.read_to_string(&mut src)?;
let file = syn::parse_file(&src)?;
let output = mk_file(&file, uses);
let mut output_content = String::new();
write!(&mut output_content, "{}", output)?;
eprintln!("Output: {:?}", output_file);
result.push((output_file, output_content));
}
let mut mod_filename = output_dir.to_path_buf();
mod_filename.push("mod.rs");
let mod_content = mk_mod_file(mods)?;
result.push((mod_filename, mod_content));
Ok(result)
}
fn mk_mod_file(mods: Vec<String>) -> Result<String> {
let mods = mods.into_iter().map(|m| format_ident!("{}", m));
let content = quote! {
#(pub mod #mods;)*
};
let mut result = String::new();
write!(&mut result, "{}", content)?;
Ok(result)
}
fn parse_input_arg<'a>(file_with_uses: &'a str) -> (&'a str, Vec<&'a str>) {
let mut data = file_with_uses.split('|');
(data.next().unwrap(), data.collect::<Vec<_>>())
}
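// A minimal test sketch of the expected `--input` format: a file path
// optionally followed by `|`-separated `use` items to splice into the
// generated file (the path and use items here are illustrative only).
#[cfg(test)]
mod parse_input_arg_tests {
use super::parse_input_arg;
#[test]
fn splits_path_and_uses() {
let (file, uses) = parse_input_arg("foo/bar.rs|crate::a::*|crate::b::B");
assert_eq!(file, "foo/bar.rs");
assert_eq!(uses, vec!["crate::a::*", "crate::b::B"]);
}
}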
fn mk_file(file: &syn::File, uses: Vec<&str>) -> TokenStream {
let uses = uses
.into_iter()
.map(|u| syn::parse_str::<UseTree>(u).unwrap());
let enums = get_enums(file);
let content = enums.into_iter().map(mk_impl);
quote! {
#(use #uses;)*
#(#content)*
}
}
fn mk_impl(e: &ItemEnum) -> TokenStream {
let name = &e.ident;
let is_singleton = e.variants.len() < 2;
let generics = &e.generics;
let constrs = e.variants.iter().map(|v| mk_constr(name, v));
let is_functions = e
.variants
.iter()
.map(|v| mk_is_function(name, v, is_singleton));
let as_ref_functions = e
.variants
.iter()
.map(|v| mk_as_function("", name, v, RefKind::Ref, is_singleton));
let as_mut_functions = e
.variants
.iter()
.map(|v| mk_as_function("_mut", name, v, RefKind::RefMut, is_singleton));
let as_into_functions = e
.variants
.iter()
.map(|v| mk_as_function("_into", name, v, RefKind::Owned, is_singleton));
quote! {
impl #generics #name #generics {
#(#constrs)*
#(#is_functions)*
#(#as_ref_functions)*
#(#as_mut_functions)*
#(#as_into_functions)*
}
}
}
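// A minimal sketch (hypothetical enum, not taken from the inputs) of what
// mk_impl emits. For `enum E { A(isize), B }` the generated block contains:
//
// impl E {
//     pub fn mk_a(p0: isize) -> Self { E::A(p0) }
//     pub fn mk_b() -> Self { E::B }
//     pub fn is_a(&self) -> bool { match self { E::A(..) => true, _ => false } }
//     pub fn is_b(&self) -> bool { match self { E::B => true, _ => false } }
//     pub fn as_a(&self) -> Option<&isize> { /* Some(&field) or None */ }
//     pub fn as_a_mut(&mut self) -> Option<&mut isize> { /* ... */ }
//     pub fn as_a_into(self) -> Option<isize> { /* ... */ }
// }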
fn mk_as_function(
fn_name_suffix: &str,
enum_name: &Ident,
v: &Variant,
ref_kind: RefKind,
is_singleton: bool,
) -> TokenStream {
let name = &v.ident;
let fn_name = format_ident!("as_{}{}", &to_snake(&name.to_string()), fn_name_suffix);
match &v.fields {
Fields::Unit => quote! {},
Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {
let mut field_match: Vec<TokenStream> = vec![];
let mut results: Vec<TokenStream> = vec![];
let mut return_tys: Vec<TokenStream> = vec![];
for (i, field) in unnamed.into_iter().enumerate() {
let matched = format_ident!("p{}", i.to_string());
field_match.push(quote! { #matched, });
let tys = unbox(&field.ty);
if tys.is_empty() {
results.push(ref_kind.mk_value(&matched, false, None));
return_tys.push(ref_kind.mk_ty(&field.ty));
} else if tys.len() == 1 {
results.push(ref_kind.mk_value(&matched, true, None));
return_tys.push(ref_kind.mk_ty(&tys[0]));
} else {
for (j, ty) in tys.into_iter().enumerate() {
results.push(ref_kind.mk_value(&matched, true, Some(j)));
return_tys.push(ref_kind.mk_ty(ty));
}
}
}
let sep = <Token![,]>::default();
let return_tys = if return_tys.len() > 1 {
with_paren(join(return_tys.iter(), sep))
} else {
join(return_tys.iter(), sep)
};
let results = if results.len() > 1 {
with_paren(join(results.iter(), sep))
} else {
join(results.iter(), sep)
};
let self_ = ref_kind.mk_ty(&format_ident!("self"));
let else_ = if is_singleton {
quote! {}
} else {
quote! { _ => None, }
};
quote! {
pub fn #fn_name(#self_) -> Option<#return_tys> {
match self {
#enum_name::#name(#(#field_match)*) => Some(#results),
#else_
}
}
}
}
Fields::Named(_) => {
eprintln!("Warning: not support named field: {:?}", &v.ident);
quote! {}
}
}
}
fn mk_is_function(enum_name: &Ident, v: &Variant, is_singleton: bool) -> TokenStream {
let name = &v.ident;
let field_match = if let Fields::Unit = &v.fields {
quote! {}
} else {
quote! { (..) }
};
let body = if is_singleton {
quote! { true }
} else {
quote! {
match self {
#enum_name::#name #field_match => true,
_ => false,
}
}
};
let fn_name = format_ident!("is_{}", &to_snake(&name.to_string()));
quote! {
pub fn #fn_name(&self) -> bool { #body }
}
}
fn mk_constr(enum_name: &Ident, v: &Variant) -> TokenStream {
let name = &v.ident;
let fn_name = format_ident!("mk_{}", &to_snake(&name.to_string()));
match &v.fields {
Fields::Unit => quote! {
pub fn #fn_name () -> Self {
#enum_name::#name
}
},
Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {
let mut i = 0;
let mut params: Vec<TokenStream> = vec![];
let mut args: Vec<TokenStream> = vec![];
for f in unnamed {
let ty = &f.ty;
let boxed_tys = unbox(ty);
if boxed_tys.is_empty() {
let param = format_ident!("p{}", i.to_string());
params.push(quote! {#param : #ty, });
args.push(quote! {#param ,});
i += 1;
} else if boxed_tys.len() == 1 {
let param = format_ident!("p{}", i.to_string());
let ty = boxed_tys[0];
params.push(quote! {#param : #ty, });
args.push(quote! {Box::new(#param) ,});
i += 1;
} else {
let mut tuple_items: Vec<TokenStream> = vec![];
for ty in boxed_tys.iter() {
let param = format_ident!("p{}", i.to_string());
params.push(quote! {#param : #ty, });
tuple_items.push(quote! {#param, });
i += 1;
}
args.push(quote! {Box::new(( #(#tuple_items)* )) ,});
}
}
quote! {
pub fn #fn_name (#(#params)*) -> Self {
#enum_name::#name(#(#args)*)
}
}
}
Fields::Named(_) => {
eprintln!("Warning: not support named field: {:?}", &v.ident);
quote! {}
}
}
}
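// Note on boxed payloads: for a variant like `V(Box<(A, B)>)`, the generated
// constructor takes the tuple elements as separate parameters and reboxes
// them, roughly `pub fn mk_v(p0: A, p1: B) -> Self { Self::V(Box::new((p0, p1))) }`,
// so callers never build the Box themselves.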
fn get_enums(file: &syn::File) -> Vec<&ItemEnum> {
let mut r = vec![];
for i in file.items.iter() {
if let Item::Enum(e) = i {
r.push(e);
}
}
r
}
fn unbox(ty: &Type) -> Vec<&Type> {
if let Type::Path(TypePath { path, .. }) = ty {
if let Some(path_seg) = path.segments.first() {
if path_seg.ident == "Box" {
if let syn::PathArguments::AngleBracketed(args) = &path_seg.arguments {
match args.args.first() {
Some(GenericArgument::Type(Type::Tuple(syn::TypeTuple {
elems, ..
}))) => {
return elems.iter().collect::<Vec<_>>();
}
Some(GenericArgument::Type(ty)) => {
return vec![ty];
}
_ => {
eprintln!("Warnning: box missing type argument");
return vec![ty];
}
}
}
}
}
}
vec![]
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_enum_helper/ref_kind.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
use quote::ToTokens;
use syn::*;
pub enum RefKind {
Ref,
RefMut,
Owned,
}
impl RefKind {
pub fn mk_value(
&self,
var: &impl ToTokens,
is_box: bool,
tuple_accessor: Option<usize>,
) -> TokenStream {
let accessor = tuple_accessor.map(Index::from);
match (self, is_box, accessor) {
(_, false, _) => quote! { #var },
(RefKind::Ref, true, None) => quote! { &#var },
(RefKind::Ref, true, Some(i)) => quote! { &#var.#i },
(RefKind::RefMut, true, None) => quote! { #var.as_mut() },
(RefKind::RefMut, true, Some(i)) => quote! { &mut #var.#i },
(RefKind::Owned, true, None) => quote! { *#var },
(RefKind::Owned, true, Some(i)) => quote! { (*#var).#i },
}
}
pub fn mk_ty(&self, ty: &impl ToTokens) -> proc_macro2::TokenStream {
match self {
RefKind::Ref => quote! { &#ty },
RefKind::RefMut => quote! { &mut #ty },
RefKind::Owned => quote! { #ty },
}
}
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/context.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path;
use anyhow::anyhow;
use anyhow::Result;
use proc_macro2::TokenStream;
use quote::format_ident;
use syn::*;
use super::gen_helper;
use super::syn_helper::*;
pub struct Context<'a> {
/// type declarations; no visit function will be generated for
/// any type *not* in this map.
pub defs: HashMap<String, &'a Item>,
/// modules contain the `defs`.
pub mods: HashSet<String>,
/// root is a type from `defs`; a visit function will be generated
/// for a type if it is in `defs` and transitively depended on by `root`.
pub root: &'a str,
/// a set of types transitively depended on by `root`.
types: Vec<String>,
/// a list of type parameters in the root type
pub root_ty_params: Vec<String>,
/// the name of the `Context` type in `Visitor`
pub context: String,
/// the type param name for `Error`, which is used in `Result<(), Error>`
pub error_ty_param: String,
pub node_lifetime: String,
}
impl<'a> Context<'a> {
pub fn new(files: &'a [(syn::File, &'a Path)], root: &'a str) -> Result<Self> {
let mut defs = HashMap::new();
let mut mods = HashSet::new();
for (f, fp) in files {
eprintln!("Processing {:?}", fp);
for i in f.items.iter() {
if let Ok(name) = get_ty_def_name(i) {
if let Some(old) = defs.insert(name, i) {
return Err(anyhow!("Type {:?} already exists, file {:?}", old, f));
}
}
}
// assuming file name is the module name
mods.insert(fp.file_stem().and_then(|fs| fs.to_str()).unwrap().into());
}
let root_item = defs
.get(root)
.ok_or_else(|| anyhow!("Root {} not found", root))?;
let root_ty_params = get_ty_params(root_item)?;
let types = Self::get_all_tys(&defs, root)?;
Ok(Self {
mods,
defs,
root,
root_ty_params,
types,
context: "Context".into(),
error_ty_param: "Error".into(),
node_lifetime: "node".into(),
})
}
pub fn context_ident(&self) -> Ident {
format_ident!("{}", self.context)
}
pub fn error_ident(&self) -> Ident {
format_ident!("{}", self.error_ty_param)
}
pub fn node_lifetime_ident(&self) -> Ident {
format_ident!("{}", self.node_lifetime)
}
pub fn node_lifetime_ident_with_quote(&self) -> TokenStream {
let l = self.node_lifetime_ident();
gen_helper::make_lifetime(&l)
}
pub fn is_root_ty_param(&self, ty_param: &str) -> bool {
self.root_ty_params.iter().any(|t| t == ty_param)
}
pub fn root_ty_params_raw(&'a self) -> impl Iterator<Item = &'a String> {
self.root_ty_params.iter()
}
pub fn root_ty_params_(&'a self) -> impl Iterator<Item = Ident> + 'a {
self.root_ty_params_raw().map(|t| format_ident!("{}", t))
}
pub fn root_ty_params_with_context_raw(&'a self) -> impl Iterator<Item = &'a String> {
vec![&self.context, &self.error_ty_param]
.into_iter()
.chain(self.root_ty_params.iter())
}
pub fn root_ty_params_with_context(&'a self) -> impl Iterator<Item = Ident> + 'a {
self.root_ty_params_with_context_raw()
.map(|t| format_ident!("{}", t))
}
pub fn modules(&'a self) -> impl Iterator<Item = impl AsRef<str> + 'a> {
self.mods.iter()
}
pub fn non_alias_types(&'a self) -> impl Iterator<Item = impl AsRef<str> + 'a> {
self.types
.iter()
.filter(move |ty| self.defs.get(*ty).map_or(false, |def| !is_alias(def)))
}
fn get_ty_names_<'b>(defs: &'b HashMap<String, &'b Item>) -> HashSet<&'b str> {
defs.keys().map(|s| s.as_str()).collect()
}
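// Worklist traversal: starting from `root`, repeatedly pop a type, record it,
// and enqueue any not-yet-visited types it depends on (restricted to types
// defined in `defs`). The sorted result keeps codegen output deterministic.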
fn get_all_tys(defs: &HashMap<String, &Item>, root: &'a str) -> Result<Vec<String>> {
let defined_types = Self::get_ty_names_(defs);
let mut visited = HashSet::<String>::new();
let mut q = VecDeque::new();
q.push_back(root.into());
while let Some(ty) = q.pop_front() {
let item = defs
.get(&ty)
.ok_or_else(|| anyhow!("Type {} not found", ty))?;
visited.insert(get_ty_def_name(item)?);
let deps = get_dep_tys(&defined_types, item)?;
for d in deps.into_iter() {
if !visited.contains(&d) {
q.push_back(d);
}
}
}
let mut types: Vec<String> = visited.into_iter().collect();
types.sort();
Ok(types)
}
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/generator.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use super::context::Context;
use crate::common::*;
#[macro_export]
macro_rules! impl_generator {
($ty:ty, $base:ident) => {
impl Generator for $ty {
fn filename(&self) -> String {
<Self as $base>::filename()
}
fn gen(&self, ctx: &Context<'_>) -> Result<TokenStream> {
<Self as $base>::gen(ctx)
}
}
};
}
pub trait Generator {
fn filename(&self) -> String;
fn gen(&self, ctx: &Context<'_>) -> Result<TokenStream>;
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/gen_helper.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::Ident;
use proc_macro2::Punct;
use proc_macro2::Spacing;
use proc_macro2::TokenStream;
use quote::quote;
pub use crate::common::gen_helpers::gen_module_uses;
pub fn gen_ty_params(tys: impl Iterator<Item = syn::Ident>) -> TokenStream {
let ty_idents = tys.map(|ty| quote! { P::#ty, }).collect::<Vec<_>>();
if ty_idents.is_empty() {
quote! {}
} else {
quote! {<#(#ty_idents)*>}
}
}
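// For example, for type params [Ex, En] this yields `<P::Ex, P::En,>`, while
// an empty list yields nothing, so non-generic types get no angle brackets.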
pub fn gen_ty_params_with_self(tys: impl Iterator<Item = syn::Ident>) -> TokenStream {
let ty_idents = tys
.map(|ty| quote! { <Self::Params as Params>::#ty, })
.collect::<Vec<_>>();
if ty_idents.is_empty() {
quote! {}
} else {
quote! {<#(#ty_idents)*>}
}
}
pub fn single_quote() -> Punct {
Punct::new('\'', Spacing::Joint)
}
pub fn make_lifetime(lifetime: &Ident) -> TokenStream {
let q = single_quote();
quote! {#q #lifetime}
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/mod.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
mod context;
mod gen_helper;
mod node_impl_generator;
mod node_trait_generator;
mod run;
mod syn_helper;
mod type_params_generator;
mod visitor_trait_generator;
#[macro_use]
mod generator;
pub use run::*; |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/node_impl_generator.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use anyhow::anyhow;
use anyhow::Result;
use proc_macro2::Ident;
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
use super::context::Context;
use super::gen_helper::*;
use super::generator::Generator;
use super::syn_helper::*;
use super::visitor_trait_generator;
use crate::impl_generator;
pub trait NodeImpl {
fn filename() -> String;
fn node_trait_name() -> syn::Ident;
fn self_ref_kind(lifetime: Option<&Ident>) -> TokenStream;
fn visitor_trait_name() -> syn::Ident;
fn use_node() -> TokenStream;
fn use_visitor() -> TokenStream;
fn gen(ctx: &Context<'_>) -> Result<TokenStream> {
let impls = ctx
.non_alias_types()
.map(|ty| {
let ty = ty.as_ref();
let item = ctx
.defs
.get(ty)
.ok_or_else(|| anyhow!("Type {} not found", ty))?;
Self::gen_node_impl(ctx, ty, item)
})
.collect::<Result<Vec<_>>>()?;
let use_node = Self::use_node();
let use_visitor = Self::use_visitor();
let uses = gen_module_uses(ctx.modules());
Ok(quote! {
#![allow(unused_imports)]
#![allow(unused_variables)]
#use_node
#use_visitor
#uses
use super::type_params::Params;
#(#impls)*
})
}
fn gen_node_impl(ctx: &Context<'_>, ty_name: &str, ty_def: &syn::Item) -> Result<TokenStream> {
let recurse_body = Self::gen_recurse_body(ctx, ty_name, ty_def)?;
let visit_fn = visitor_trait_generator::gen_visit_fn_name(ty_name);
let ty_name = format_ident!("{}", ty_name);
let ty_params = gen_ty_params(get_ty_param_idents(ty_def)?);
let node_trait_name = Self::node_trait_name();
let node_lifetime = ctx.node_lifetime_ident();
let self_ref_kind = Self::self_ref_kind(Some(&node_lifetime));
let node_lifetime = make_lifetime(&node_lifetime);
let visitor_trait_name = Self::visitor_trait_name();
let context = ctx.context_ident();
let error = ctx.error_ident();
Ok(quote! {
impl<P: Params> #node_trait_name<P> for #ty_name #ty_params {
fn accept<#node_lifetime>(
#self_ref_kind self,
c: &mut P::#context,
v: &mut dyn #visitor_trait_name<#node_lifetime, Params = P>
) -> Result<(), P::#error> {
v.#visit_fn(c, self)
}
fn recurse<#node_lifetime>(
#self_ref_kind self,
c: &mut P::#context,
v: &mut dyn #visitor_trait_name<#node_lifetime, Params = P>
) -> Result<(), P::#error> {
#recurse_body
}
}
})
}
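// A minimal sketch (hypothetical type) of the impl emitted above, shown for
// the by-ref case:
//
// impl<P: Params> Node<P> for Expr<P::Ex, P::En,> {
//     fn accept<'node>(
//         &'node self,
//         c: &mut P::Context,
//         v: &mut dyn Visitor<'node, Params = P>,
//     ) -> Result<(), P::Error> {
//         v.visit_expr(c, self)
//     }
//     fn recurse<'node>(/* ... */) -> Result<(), P::Error> {
//         /* accept() on each field; root type params are visited directly */
//     }
// }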
fn try_simple_ty_param(ctx: &Context<'_>, ty: &syn::Type) -> Option<String> {
try_simple_type(ty).filter(|t| ctx.is_root_ty_param(t))
}
fn try_gen_simple_ty_param_visit_call(
ctx: &Context<'_>,
ty: &syn::Type,
last: bool,
accessor: TokenStream,
) -> Option<TokenStream> {
let ref_kind = Self::self_ref_kind(None);
try_simple_type(ty)
.filter(|t| ctx.is_root_ty_param(t))
.map(|ty| {
let fn_name = visitor_trait_generator::gen_visit_fn_name(ty);
if !last {
quote! {v.#fn_name( c, #ref_kind #accessor )?;}
} else {
quote! {v.#fn_name( c, #ref_kind #accessor )}
}
})
}
fn gen_recurse_body(ctx: &Context<'_>, ty_name: &str, ty: &syn::Item) -> Result<TokenStream> {
use syn::Item::*;
use syn::*;
match ty {
Struct(ItemStruct { fields, .. }) => Self::gen_recurse_struct_body(ctx, fields),
Enum(ItemEnum { variants, .. }) => Self::gen_recurse_enum_body(ctx, ty_name, variants),
_ => Ok(quote! {}),
}
}
fn gen_recurse_struct_body(ctx: &Context<'_>, fields: &syn::Fields) -> Result<TokenStream> {
use syn::*;
match fields {
Fields::Named(fields) => {
let last_field = fields.named.len() - 1;
let fields = get_field_and_type_from_named(fields);
let calls = fields.iter().enumerate().map(|(i, (name, ty))| {
let accessor = format_ident!("{}", name);
Self::try_gen_simple_ty_param_visit_call(
ctx,
ty,
i == last_field,
quote! { self.#accessor },
)
.unwrap_or_else(|| {
if i != last_field {
quote! { self.#accessor.accept(c, v)?; }
} else {
quote! { self.#accessor.accept(c, v) }
}
})
});
Ok(quote! { #(#calls)* })
}
Fields::Unnamed(fields) => {
let last_field = fields.unnamed.len() - 1;
let fields = get_field_and_type_from_unnamed(fields);
let calls = fields.map(|(i, ty)| {
let accessor = Index::from(i);
Self::try_gen_simple_ty_param_visit_call(
ctx,
ty,
i == last_field,
quote! { self.#accessor },
)
.unwrap_or_else(|| {
if i != last_field {
quote! {self.#accessor.accept(c, v)?; }
} else {
quote! {self.#accessor.accept(c, v) }
}
})
});
Ok(quote! { #(#calls)* })
}
Fields::Unit => Ok(quote! {}),
}
}
fn gen_recurse_enum_body(
ctx: &Context<'_>,
ty_name: &str,
variants: &syn::punctuated::Punctuated<syn::Variant, syn::token::Comma>,
) -> Result<TokenStream> {
use syn::*;
let node_lifetime = ctx.node_lifetime_ident();
let self_ref_kind = Self::self_ref_kind(Some(&node_lifetime));
let node_lifetime = make_lifetime(&node_lifetime);
let mut arms: Vec<TokenStream> = Vec::new();
let mut helpers: Vec<TokenStream> = Vec::new();
for Variant {
ident: variant_name,
fields,
..
} in variants.iter()
{
let ty_name = format_ident!("{}", ty_name);
match fields {
Fields::Named(_fields) => {
return Err(anyhow!(
"Enum with named fields not supported yet. Enum {:?}",
ty_name
));
}
Fields::Unnamed(fields) => {
let mut pattern = vec![];
let mut calls = vec![];
if let Some((len, tys)) = try_get_types_from_box_tuple(fields) {
let v = format_ident!("a");
let helper_name = format_ident!("helper{}", helpers.len());
pattern.push(quote! {#v});
let mut arm_calls: Vec<TokenStream> = Vec::new();
let mut arm_tys: Vec<&syn::Type> = Vec::new();
for (i, ty) in tys {
let accessor = &Index::from(i);
let call = Self::try_gen_simple_ty_param_visit_call(
ctx,
ty,
i == len - 1,
quote! { #v.#accessor },
)
.unwrap_or_else(|| {
if i != len - 1 {
quote! { #v.#accessor.accept(c, v)?; }
} else {
quote! { #v.#accessor.accept(c, v) }
}
});
arm_calls.push(call);
arm_tys.push(ty);
}
if len > 1 {
// Emit a helper since the match arm has multiple accept() calls.
// This #[inline] attribute doesn't affect debug builds (mode/dbg) but
// reduces stack size and improves perf at higher opt levels.
let visitor_trait_name = Self::visitor_trait_name();
calls.push(quote! { #helper_name(#v, c, v) });
helpers.push(quote! {
#[inline]
fn #helper_name<
#node_lifetime,
P: Params + Params<Ex=Ex> + Params<En=En>,
Ex,
En
>(
a: #self_ref_kind Box<(#(#arm_tys,)*)>,
c: &mut P::Context,
v: &mut dyn #visitor_trait_name<'node, Params = P>,
) -> Result<(), P::Error> {
#(#arm_calls)*
}
});
} else {
calls.extend(arm_calls);
}
} else {
let last_field = fields.unnamed.len() - 1;
for (i, ty) in get_field_and_type_from_unnamed(fields) {
let v = format_ident!("a{}", i);
pattern.push(quote! {#v,});
calls.push(
Self::try_gen_simple_ty_param_visit_call(
ctx,
ty,
i == last_field,
quote! { #v },
)
.unwrap_or_else(|| {
if i != last_field {
quote! { #v.accept(c, v)?; }
} else {
quote! { #v.accept(c, v) }
}
}),
);
}
}
arms.push(quote! {
#ty_name::#variant_name(#(#pattern)*) => {
#(#calls)*
}
});
}
Fields::Unit => arms.push(quote! {
#ty_name::#variant_name => {Ok(())}
}),
}
}
match arms.as_slice() {
[] => Ok(quote! {}),
_ => Ok(quote! {
#(#helpers)*
match self {
#(#arms)*
}
}),
}
}
}
pub struct RefNodeImpl;
impl NodeImpl for RefNodeImpl {
fn filename() -> String {
"node_impl_gen.rs".into()
}
fn node_trait_name() -> syn::Ident {
format_ident!("Node")
}
fn self_ref_kind(lifetime: Option<&Ident>) -> TokenStream {
match lifetime {
Some(l) => {
let l = make_lifetime(l);
quote! {&#l}
}
None => quote! {&},
}
}
fn visitor_trait_name() -> syn::Ident {
format_ident!("Visitor")
}
fn use_node() -> TokenStream {
quote! { use super::node::Node; }
}
fn use_visitor() -> TokenStream {
quote! { use super::visitor::Visitor; }
}
}
impl_generator!(RefNodeImpl, NodeImpl);
pub struct MutNodeImpl;
impl NodeImpl for MutNodeImpl {
fn filename() -> String {
"node_mut_impl_gen.rs".into()
}
fn node_trait_name() -> syn::Ident {
format_ident!("NodeMut")
}
fn self_ref_kind(lifetime: Option<&Ident>) -> TokenStream {
match lifetime {
Some(l) => {
let l = make_lifetime(l);
quote! {&#l mut}
}
None => quote! {&mut},
}
}
fn visitor_trait_name() -> syn::Ident {
format_ident!("VisitorMut")
}
fn use_node() -> TokenStream {
quote! { use super::node_mut::NodeMut; }
}
fn use_visitor() -> TokenStream {
quote! { use super::visitor_mut::VisitorMut; }
}
}
impl_generator!(MutNodeImpl, NodeImpl); |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/node_trait_generator.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::Ident;
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
use super::context::Context;
use super::gen_helper;
use super::generator::Generator;
use crate::common::*;
use crate::impl_generator;
trait NodeTrait {
fn filename() -> String;
fn trait_name() -> syn::Ident;
fn receiver(lifetime: &Ident) -> TokenStream;
fn visitor() -> syn::Ident;
fn use_visitor() -> TokenStream;
fn gen(ctx: &Context<'_>) -> Result<TokenStream> {
let trait_name = Self::trait_name();
let node_lifetime = ctx.node_lifetime_ident();
let receiver = Self::receiver(&node_lifetime);
let node_lifetime = gen_helper::make_lifetime(&node_lifetime);
let visitor = Self::visitor();
let use_visitor = Self::use_visitor();
let context = ctx.context_ident();
let error = ctx.error_ident();
Ok(quote! {
#![allow(unused_variables)]
#use_visitor
use super::type_params::Params;
pub trait #trait_name<P: Params> {
fn accept<#node_lifetime>(
#receiver,
ctx: &mut P::#context,
v: &mut dyn #visitor<#node_lifetime, Params = P>,
) -> Result<(), P::#error> {
self.recurse(ctx, v)
}
fn recurse<#node_lifetime>(
#receiver,
ctx: &mut P::#context,
v: &mut dyn #visitor<#node_lifetime, Params = P>,
) -> Result<(), P::#error> {
Ok(())
}
}
})
}
}
pub struct RefNodeTrait;
impl NodeTrait for RefNodeTrait {
fn filename() -> String {
"node.rs".into()
}
fn trait_name() -> syn::Ident {
format_ident!("Node")
}
fn receiver(lifetime: &Ident) -> TokenStream {
let l = gen_helper::make_lifetime(lifetime);
quote! {&#l self}
}
fn visitor() -> syn::Ident {
format_ident!("Visitor")
}
fn use_visitor() -> TokenStream {
quote! { use super::visitor::Visitor; }
}
}
impl_generator!(RefNodeTrait, NodeTrait);
pub struct MutNodeTrait;
impl NodeTrait for MutNodeTrait {
fn filename() -> String {
"node_mut.rs".into()
}
fn trait_name() -> syn::Ident {
format_ident!("NodeMut")
}
fn receiver(lifetime: &Ident) -> TokenStream {
let l = gen_helper::make_lifetime(lifetime);
quote! {&#l mut self}
}
fn visitor() -> syn::Ident {
format_ident!("VisitorMut")
}
fn use_visitor() -> TokenStream {
quote! { use super::visitor_mut::VisitorMut; }
}
}
impl_generator!(MutNodeTrait, NodeTrait); |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/run.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use anyhow::Result;
use super::context::Context;
use super::generator::Generator;
use super::node_impl_generator::*;
use super::node_trait_generator::*;
use super::type_params_generator::*;
use super::visitor_trait_generator::*;
pub use crate::common::args::CommonArgs as Args;
pub fn run(args: &Args) -> Result<Vec<(PathBuf, String)>> {
let inputs = &args.input;
let output_dir = &args.output;
let root = &args.root;
let files = inputs
.iter()
.map(|file| -> Result<(syn::File, &Path)> {
let file_path = Path::new(file);
let mut file = File::open(file)?;
let mut src = String::new();
file.read_to_string(&mut src)?;
Ok((syn::parse_file(&src)?, file_path))
})
.collect::<Result<Vec<_>>>()?;
let ctx = Context::new(files.as_slice(), root)?;
let generators: Vec<Box<dyn Generator>> = vec![
Box::new(TypeParamGenerator),
Box::new(RefNodeTrait),
Box::new(MutNodeTrait),
Box::new(RefNodeImpl),
Box::new(MutNodeImpl),
Box::new(RefVisitorTrait),
Box::new(MutVisitorTrait),
];
generators
.into_iter()
.map(|g| {
let code = g.gen(&ctx)?;
let filepath = output_dir.join(g.filename());
Ok((filepath, format!("{}", code)))
})
.collect::<Result<Vec<_>>>()
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/syn_helper.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use anyhow::anyhow;
use anyhow::Result;
use quote::format_ident;
use syn::*;
pub use crate::common::syn_helpers::get_dep_tys;
pub use crate::common::syn_helpers::get_ty_def_name;
pub fn is_alias(i: &Item) -> bool {
match i {
Item::Type(_) => true,
_ => false,
}
}
pub fn get_ty_params(i: &Item) -> Result<Vec<String>> {
use Item::*;
match i {
Enum(ItemEnum { generics, .. })
| Struct(ItemStruct { generics, .. })
| Type(ItemType { generics, .. }) => Ok(TypeParamCollector::on_generics(generics)),
_ => Err(anyhow!("Not supported {:?}", i)),
}
}
pub fn get_ty_param_idents(i: &Item) -> Result<impl Iterator<Item = Ident>> {
Ok(get_ty_params(i)?
.into_iter()
.map(|t| format_ident!("{}", t)))
}
pub fn get_field_and_type_from_named<'a>(
FieldsNamed { named, .. }: &'a FieldsNamed,
) -> Vec<(String, &'a Type)> {
named
.iter()
.map(|f| (f.ident.as_ref().unwrap().to_string(), &f.ty))
.collect()
}
pub fn get_field_and_type_from_unnamed(
FieldsUnnamed { unnamed, .. }: &FieldsUnnamed,
) -> impl Iterator<Item = (usize, &Type)> {
unnamed.into_iter().map(|f| &f.ty).enumerate()
}
struct TypeParamCollector(Vec<String>);
impl TypeParamCollector {
pub fn on_generics(g: &Generics) -> Vec<String> {
let mut collector = Self(vec![]);
visit::visit_generics(&mut collector, g);
collector.0
}
}
impl<'ast> visit::Visit<'ast> for TypeParamCollector {
fn visit_type_param(&mut self, node: &'ast TypeParam) {
self.0.push(node.ident.to_string())
}
}
pub fn try_get_types_from_box_tuple(
FieldsUnnamed { unnamed, .. }: &FieldsUnnamed,
) -> Option<(usize, impl Iterator<Item = (usize, &Type)>)> {
let fields = unnamed.into_iter().collect::<Vec<_>>();
if fields.len() == 1 {
if let Type::Path(TypePath { path, .. }) = &fields[0].ty {
if let Some(path_seg) = path.segments.first() {
if path_seg.ident == "Box" {
if let syn::PathArguments::AngleBracketed(args) = &path_seg.arguments {
if let Some(GenericArgument::Type(Type::Tuple(syn::TypeTuple {
elems,
..
}))) = args.args.first()
{
return Some((elems.len(), elems.iter().enumerate()));
}
}
}
}
}
}
None
}
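// For example, a single field of type `Box<(A, B, C)>` yields
// Some((3, iterator over [(0, A), (1, B), (2, C)])); any other field shape
// yields None.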
pub fn try_simple_type(ty: &Type) -> Option<String> {
if let Type::Path(TypePath { path, .. }) = ty {
if path.segments.len() == 1 {
let ty = path.segments.first().unwrap();
if ty.arguments.is_empty() {
return Some(ty.ident.to_string());
}
}
}
None
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/type_params_generator.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use proc_macro2::TokenStream;
use quote::quote;
use super::context::Context;
use super::generator::Generator;
use crate::common::*;
pub struct TypeParamGenerator;
impl Generator for TypeParamGenerator {
fn filename(&self) -> String {
"type_params.rs".into()
}
fn gen(&self, ctx: &Context<'_>) -> Result<TokenStream> {
let ty_params = ctx.root_ty_params_with_context();
Ok(quote! {
pub trait Params {
#(type #ty_params;)*
}
})
}
} |
Rust | hhvm/hphp/hack/src/hh_codegen/gen_visitor/visitor_trait_generator.rs | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use anyhow::anyhow;
use anyhow::Result;
use proc_macro2::TokenStream;
use quote::format_ident;
use quote::quote;
use super::context::Context;
use super::gen_helper::*;
use super::generator::Generator;
use super::syn_helper::*;
use crate::common::*;
use crate::impl_generator;
pub trait VisitorTrait {
fn filename() -> String;
fn trait_name() -> syn::Ident;
fn node_ref_kind(lifetime: &TokenStream) -> TokenStream;
fn use_node() -> TokenStream;
fn node_trait_name() -> syn::Ident;
fn gen(ctx: &Context<'_>) -> Result<TokenStream> {
let use_node = Self::use_node();
let trait_name = Self::trait_name();
let node_dispatcher_function = Self::gen_node_dispatcher_function(ctx)?;
let visit_functions = Self::gen_visit_functions(ctx)?;
let visit_ty_params = Self::gen_visit_ty_params(ctx)?;
let uses = gen_module_uses(ctx.modules());
let lifetime = ctx.node_lifetime_ident();
let lifetime = make_lifetime(&lifetime);
Ok(quote! {
#![allow(unused_imports)]
#![allow(unused_variables)]
#uses
#use_node
use super::type_params::Params;
#node_dispatcher_function
pub trait #trait_name<#lifetime> {
type Params: Params;
fn object(&mut self) -> &mut dyn #trait_name<#lifetime, Params = Self::Params>;
#(#visit_ty_params)*
#(#visit_functions)*
}
})
}
fn gen_visit_ty_params(ctx: &Context<'_>) -> Result<Vec<TokenStream>> {
let lifetime = ctx.node_lifetime_ident();
let lifetime = make_lifetime(&lifetime);
let ref_kind = Self::node_ref_kind(&lifetime);
let context = ctx.context_ident();
let error = ctx.error_ident();
Ok(ctx
.root_ty_params_()
.map(|ty| {
let name = gen_visit_fn_name(ty.to_string());
quote! {
fn #name(&mut self, c: &mut <Self::Params as Params>::#context, p: #ref_kind <Self::Params as Params>::#ty) -> Result<(), <Self::Params as Params>::#error> {
Ok(())
}
}
})
.collect())
}
fn gen_visit_functions(ctx: &Context<'_>) -> Result<Vec<TokenStream>> {
let lifetime = ctx.node_lifetime_ident_with_quote();
let ref_kind = Self::node_ref_kind(&lifetime);
let context = ctx.context_ident();
let error = ctx.error_ident();
let mut visit_fn = vec![];
for ty in ctx.non_alias_types() {
let ty = ty.as_ref();
let def = ctx
.defs
.get(ty)
.ok_or_else(|| anyhow!("Type {} not found", ty))?;
let ty_params = get_ty_param_idents(def)?;
let ty_args = gen_ty_params_with_self(ty_params);
let name = gen_visit_fn_name(ty);
let ty = format_ident!("{}", ty);
visit_fn.push(quote! {
fn #name(&mut self, c: &mut <Self::Params as Params>::#context, p: #ref_kind #ty #ty_args) -> Result<(), <Self::Params as Params>::#error> {
p.recurse(c, self.object())
}
});
}
Ok(visit_fn)
}
fn gen_node_dispatcher_function(ctx: &Context<'_>) -> Result<TokenStream> {
let visitor_trait_name = Self::trait_name();
let context = ctx.context_ident();
let error = ctx.error_ident();
let lifetime = ctx.node_lifetime_ident_with_quote();
let node_ref_kind = Self::node_ref_kind(&lifetime);
let node_trait_name = Self::node_trait_name();
Ok(quote! {
pub fn visit<#lifetime, P: Params>(
v: &mut impl #visitor_trait_name<#lifetime, Params = P>,
c: &mut P::#context,
p: #node_ref_kind impl #node_trait_name<P>,
) -> Result<(), P::#error> {
p.accept(c, v)
}
})
}
}
pub fn gen_visit_fn_name(ty: impl AsRef<str>) -> syn::Ident {
format_ident!("visit_{}", to_snake(ty.as_ref()))
}
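// For example, assuming to_snake's usual CamelCase-to-snake_case behavior,
// gen_visit_fn_name("FunDef") yields `visit_fun_def`, matching the per-type
// methods generated on the visitor traits below.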
pub struct RefVisitorTrait;
impl VisitorTrait for RefVisitorTrait {
fn filename() -> String {
"visitor.rs".into()
}
fn trait_name() -> syn::Ident {
format_ident!("Visitor")
}
fn node_ref_kind(lifetime: &TokenStream) -> TokenStream {
quote! { &#lifetime }
}
fn use_node() -> TokenStream {
quote! { use super::node::Node; }
}
fn node_trait_name() -> syn::Ident {
format_ident!("Node")
}
}
impl_generator!(RefVisitorTrait, VisitorTrait);
pub struct MutVisitorTrait;
impl VisitorTrait for MutVisitorTrait {
fn filename() -> String {
"visitor_mut.rs".into()
}
fn trait_name() -> syn::Ident {
format_ident!("VisitorMut")
}
fn node_ref_kind(lifetime: &TokenStream) -> TokenStream {
quote! { &#lifetime mut }
}
fn use_node() -> TokenStream {
quote! { use super::node_mut::NodeMut; }
}
fn node_trait_name() -> syn::Ident {
format_ident!("NodeMut")
}
}
impl_generator!(MutVisitorTrait, VisitorTrait); |
Python | hhvm/hphp/hack/src/hh_codesynthesis/agentGenerator.py | #!/usr/bin/env python3
# pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
#
# A library/binary that takes the output from hh_single_type_check --dump-deps
# and synthesizes code based on the dependency graph.
#
# The synthesized code should produce the same dependency graph as the given input.
#
# This library constructs a logic program from the given dependency graph,
# invokes Clingo to solve the logic program for stable models, and interprets
# the stable models to produce valid code.
#
# Currently, we are synthesizing Hack code only; we will support more
# languages like C# and Java later on.
import argparse
import importlib
import logging
import os
import sys
from typing import List, Optional, Union
import clingo
from hphp.hack.src.hh_codesynthesis.codeGenerator import ClingoContext, CodeGenerator
from hphp.hack.src.hh_codesynthesis.hackGenerator import HackCodeGenerator
# If the libfb.py library exists, we are running in the internal environment.
try:
importlib.util.find_spec("libfb.py")
from libfb.py import parutil
g_internal_run = True
except ModuleNotFoundError:
g_internal_run = False
# Helper classes to handle each dependency edge.
# lhs_parser splits the left hand side symbol into parts based on the
# separator. For example,
# Class::Method with "::" will produce [Class, Method]
# Class with None will produce [Class]
class DependencyEdgeHandler:
def lhs_parser(self, lhs: str, separator: Optional[str]) -> List[str]:
return lhs.split(separator)
# The following rule_writers take the return value from `lhs_parser` along
# with a rhs dep_symbol to output a rule accordingly.
class ExtendEdgeHandler(DependencyEdgeHandler):
def parse(self, lhs: str) -> List[str]:
return self.lhs_parser(lhs, None)
def rule_writer(self, lhs: List[str], rhs: str) -> str:
return f'extends_to("{lhs[0]}", "{rhs}").'
class TypeEdgeHandler(DependencyEdgeHandler):
def parse(self, lhs: str) -> List[str]:
return self.lhs_parser(lhs, None)
def rule_writer(self, lhs: List[str], rhs: str) -> str:
return f'type("{lhs[0]}", "{rhs}").'
class MethodEdgeHandler(DependencyEdgeHandler):
def parse(self, lhs: str) -> List[str]:
return self.lhs_parser(lhs, "::")
def rule_writer(self, lhs: List[str], rhs: str) -> str:
return f'method("{lhs[0]}", "{lhs[1]}", "{rhs}").'
class SMethodEdgeHandler(DependencyEdgeHandler):
def parse(self, lhs: str) -> List[str]:
return self.lhs_parser(lhs, "::")
def rule_writer(self, lhs: List[str], rhs: str) -> str:
return f'static_method("{lhs[0]}", "{lhs[1]}", "{rhs}").'
class FunEdgeHandler(DependencyEdgeHandler):
def parse(self, lhs: str) -> List[str]:
return self.lhs_parser(lhs, None)
def rule_writer(self, lhs: List[str], rhs: str) -> str:
return f'invoked_by("{lhs[0]}", "{rhs}").'
# Generate logic rules based on given parameters.
def generate_logic_rules(
solving_context: ClingoContext, agent_name: str = ""
) -> List[str]:
rules: List[str] = []
if solving_context.number_of_nodes > 0 and (
solving_context.min_depth > solving_context.number_of_nodes
or sum(solving_context.degree_distribution) > solving_context.number_of_nodes
):
raise RuntimeError("Received unreasonable parameters.")
# Creating n symbols.
symbols = []
for i in range(solving_context.number_of_nodes):
# The number part makes it easier for the reasoner to generate the graph.
# We add an "S" prefix to each symbol to construct a string, so that the
# synthesized code will have a valid class/interface name.
symbols.append(f'"{agent_name}S{i}", {i}')
# The actual rule will be like,
# internal_symbols("S0", 0; "S1", 1; "S2", 2).
rules.append("internal_symbols({}).".format(";".join(symbols)))
# Creating a backbone hierarchy with the minimum depth.
# We separated the below part from "graph_generator.lp" to avoid the "grounding bottleneck."
# We use evenly spaced intervals to create a chain of extends_to edges among the n nodes.
interval = solving_context.number_of_nodes // solving_context.min_depth or 1
for i in range(interval, solving_context.number_of_nodes, interval):
rules.append(f'extends_to("S{i-interval}", "S{i}").')
# Creating a node distribution for each degree.
# We separated the below part from "graph_generator.lp" to narrow down the
# search scope.
for degree, minimum_nodes in enumerate(solving_context.degree_distribution):
rules.append(f":- #count{{X : in_degree(X, {degree})}} < {minimum_nodes}.")
return rules
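# A worked example (assuming a context built with number_of_nodes=3,
# min_depth=1, degree_distribution=[1]): interval is 3, so range(3, 3, 3) is
# empty and no extends_to facts are emitted; the result is
#   internal_symbols("S0", 0;"S1", 1;"S2", 2).
#   :- #count{X : in_degree(X, 0)} < 1.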
# Extract logic rules from file format.
def extract_logic_rules(lines: List[str]) -> List[str]:
rules = []
symbols = set()
funcs = set()
handlers = {
"Extends": ExtendEdgeHandler(),
"Type": TypeEdgeHandler(),
"Method": MethodEdgeHandler(),
"SMethod": SMethodEdgeHandler(),
"Fun": FunEdgeHandler(),
}
collectors = {
"Extends": symbols,
"Type": symbols,
"Method": symbols,
"SMethod": symbols,
"Fun": funcs,
}
for line in lines:
# Required input format: "Extends A -> Type B, Type C, Type D".
# Splitting on "->" gives
# lhs = "Extends A"
# rhs = "Type B, Type C, Type D"
# T94428437 Temporary skipping all built-in functions for now.
# T92593014 We do not support namespace at this moment.
# HH\ PHP\ FB\Vec namespace\class etc.
if "\\" in line:
continue
line = line.strip().split("->")
if len(line) != 2:
# ToDo: Add logging if we need to track wrong format on missing "->".
continue
(lhs, rhs) = line
lhs = lhs.split()
# The lhs length must be 2.
if len(lhs) != 2:
# ToDo: Add logging if we need to track wrong format on lhs.
continue
# Dict{"lhs[0]": "handler to convert"}.
if lhs[0] not in handlers:
continue
handler = handlers[lhs[0]]
lhs_tokens = handler.parse(lhs[1])
# Updating collections.
collector = collectors.get(lhs[0], symbols)
collector.add(f'"{lhs_tokens[0]}"')
# Processing each deps ["Type B", "Type C", "Type D", "Fun E"].
for dep in rhs.rstrip("\n").split(","):
# dep = "Type X" / "Fun X".
dep = dep.split()
if len(dep) != 2:
# ToDo: Add logging if we need to track wrong format on rhs.
continue
(dep_type, dep_symbol) = dep
# Right hand side could only be "Type"/"Fun".
if dep_type not in ["Type", "Fun"]:
raise NotImplementedError(
f"Not supported {dep_type} on the right hand side."
)
collector = collectors.get(dep_type, symbols)
collector.add(f'"{dep_symbol}"')
rules.append(handler.rule_writer(lhs_tokens, dep_symbol))
rules.append("symbols({}).".format(";".join(sorted(symbols))))
if len(funcs) != 0:
rules.append("funcs({}).".format(";".join(sorted(funcs))))
return rules
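# A worked example: the input line
#   Extends A -> Type B, Type C
# produces
#   extends_to("A", "B").
#   extends_to("A", "C").
# followed by the collected fact symbols("A";"B";"C").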
# Take in a dependency graph and a code generator to emit code.
def do_reasoning(additional_programs: List[str], generator: CodeGenerator) -> None:
# Logic programs for code synthesis.
asp_files = "hphp/hack/src/hh_codesynthesis"
if g_internal_run:
# Check if we are running in the internal environment.
asp_files = os.path.join(
parutil.get_dir_path("hphp/hack/src/hh_codesynthesis/"), "asp_code"
)
# Clingo interfaces.
ctl = clingo.Control()
# Load LP for code emitting.
ctl.load(asp_files + "/dep_graph_reasoning.lp")
# Load LP for graph generating.
with open(asp_files + "/graph_generator.lp") as fp:
ctl.add("base", [], fp.read())
# Load extra dependency graph given by the user.
ctl.add("base", [], "\n".join(additional_programs))
ctl.ground([("base", [])], context=generator.solving_context)
# ToDo: The number of threads is hardcoded for now; change it to a parameter later.
# Pyre-ignore: [16] Configuration not in pyre stubs since it's dynamic
ctl.configuration.solve.parallel_mode = "4"
# Pyre-ignore: [16] Configuration not in pyre stubs since it's dynamic
ctl.configuration.solve.models = generator.model_count
logging.info("Finished grounding.")
result: Union[clingo.solving.SolveHandle, clingo.solving.SolveResult] = ctl.solve(
on_model=generator.on_model
)
if isinstance(result, clingo.solving.SolveResult):
if result.unsatisfiable:
raise RuntimeError("Unsatisfiable.")
# Read dependency graph from file or stdin.
def read_from_file_or_stdin(filename: Optional[str] = None) -> List[str]:
if filename:
with open(filename) as fp:
return fp.readlines()
# No filename, try stdin.
return sys.stdin.readlines()
# Write code to file or stdout.
def output_to_file_or_stdout(
generator: CodeGenerator, filename: Optional[str] = None
) -> int:
if filename:
with open(filename, "w") as fp:
fp.write(str(generator))
else:
print(generator)
return 0
def main() -> int:
generators = {"raw": CodeGenerator, "hack": HackCodeGenerator}
# Parse the arguments
parser = argparse.ArgumentParser()
parser.add_argument("--input_file", type=os.path.abspath)
parser.add_argument("--target_lang", type=str)
parser.add_argument("--output_file", type=os.path.abspath)
parser.add_argument("--n", type=int, default=0)
parser.add_argument("--min_depth", type=int, default=1)
parser.add_argument("--min_classes", type=int, default=1)
parser.add_argument("--min_interfaces", type=int, default=1)
parser.add_argument("--min_stub_classes", type=int, default=0)
parser.add_argument("--min_stub_interfaces", type=int, default=0)
# Parameters that narrow the search space to speed up the computation.
parser.add_argument("--degree_distribution", nargs="*", default=[], type=int)
parser.add_argument("--lower_bound", type=int, default=1)
parser.add_argument("--higher_bound", type=int, default=1)
parser.add_argument("--log", type=str)
args: argparse.Namespace = parser.parse_args()
# Setup log level and mark the start of our program.
log_level = getattr(logging, args.log.upper(), logging.WARN)
logging.basicConfig(
format="%(asctime)s %(message)s",
datefmt="%Y/%m/%d %I:%M:%S %p",
level=log_level,
)
logging.info("Started.")
# Set graph generating parameters. (If any)
solving_context = ClingoContext(
number_of_nodes=args.n,
min_depth=args.min_depth,
min_classes=args.min_classes,
min_interfaces=args.min_interfaces,
min_stub_classes=args.min_stub_classes,
min_stub_interfaces=args.min_stub_interfaces,
degree_distribution=args.degree_distribution,
lower_bound=args.lower_bound,
higher_bound=args.higher_bound,
)
# Load dependency graph.
lines = read_from_file_or_stdin(filename=args.input_file)
# T92303034 Temporary handling for multi-line entries like:
# Extends A -> Type A,
# Type B,
# Type C,
# Type D
graph = "".join(lines).replace(",\n", ",").split("\n")
# Output target language.
generator = generators.get(args.target_lang, CodeGenerator)(solving_context)
combined_rules = generate_logic_rules(solving_context) + extract_logic_rules(graph)
logging.info("Extracted all rules.")
logging.info(f"Number of depedency edges extracted: {len(combined_rules)}")
do_reasoning(combined_rules, generator)
logging.info("Finished reasoning.")
return output_to_file_or_stdout(generator=generator, filename=args.output_file)
if __name__ == "__main__":
sys.exit(main()) |
Python | hhvm/hphp/hack/src/hh_codesynthesis/agentGraphGenerator.py | #!/usr/bin/env python3
# pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
#
# This library constructs a logic program from the given parameters,
# invokes Clingo to solve the logic program to produce an agent graph,
# and then creates multiple agents in parallel to produce a valid codebase.
import argparse
import sys
from typing import Callable, Dict, List, Optional, Set, Union
import clingo
from clingo.symbol import Number, Symbol
class AgentGraphClingoContext:
"""Context class interact with Python and Clingo.
We can customize each value, range, and function to experiment on
different settings. Refer to the test case `test_small_agent_graph`
for more detail usages."""
def __init__(
self,
number_of_leaves: int,
number_of_infra_agents: int,
number_of_product_agents: int,
infra_agent_profile: Dict[str, List[int]],
product_agent_profile: Dict[str, List[int]],
) -> None:
if (
number_of_leaves <= 0
or number_of_infra_agents <= 0
or number_of_product_agents <= 0
or len(infra_agent_profile) < 2
or len(product_agent_profile) < 2
):
raise RuntimeError("Invalid agent graph metrics.")
self.number_of_leaves = number_of_leaves
self.number_of_infra_agents = number_of_infra_agents
self.number_of_product_agents = number_of_product_agents
self.infra_agent_profile = infra_agent_profile
self.product_agent_profile = product_agent_profile
def nl(self) -> Symbol:
return Number(self.number_of_leaves)
def nia(self) -> Symbol:
return Number(self.number_of_infra_agents)
def npa(self) -> Symbol:
return Number(self.number_of_product_agents)
def infra_agent_out_degree_low(self, agent: Number) -> Symbol:
return Number(self.infra_agent_profile["out_degree"][0])
def infra_agent_out_degree_high(self, agent: Number) -> Symbol:
return Number(self.infra_agent_profile["out_degree"][1])
def infra_agent_in_degree_low(self, agent: Number) -> Symbol:
return Number(self.infra_agent_profile["in_degree"][0])
def infra_agent_in_degree_high(self, agent: Number) -> Symbol:
return Number(self.infra_agent_profile["in_degree"][1])
def product_agent_out_degree_low(self, agent: Number) -> Symbol:
return Number(self.product_agent_profile["out_degree"][0])
def product_agent_out_degree_high(self, agent: Number) -> Symbol:
return Number(self.product_agent_profile["out_degree"][1])
def product_agent_in_degree_low(self, agent: Number) -> Symbol:
return Number(self.product_agent_profile["in_degree"][0])
def product_agent_in_degree_high(self, agent: Number) -> Symbol:
return Number(self.product_agent_profile["in_degree"][1])
# A connector that decides whether two agents can be connected using
# their numbers and levels. We are not connecting two agents here, only adding
# a choice for the solver; the solver can decide how to connect two agents
# based on other constraints.
# @param source_agent, the number of source agent.
# @param target_agent, the number of target agent.
# @param source_level, the level of source agent.
# @param target_level, the level of target agent.
# @return 1 if source_agent can be connected to target_agent.
def agent_connector(
self,
source_agent: Number,
target_agent: Number,
source_level: Number,
target_level: Number,
) -> Symbol:
src_agent_number = source_agent.number
tgt_agent_number = target_agent.number
src_level_number = source_level.number
tgt_level_number = target_level.number
# Since clingo doesn't support Boolean return, we are returning
# Number(1) for true, and Number(0) for false.
if (
src_agent_number < tgt_agent_number + 200
and tgt_agent_number < src_agent_number
and tgt_level_number < src_level_number
):
return Number(1)
else:
return Number(0)
# Similar to `agent_connector`, this connector decides whether a product
# agent can have an edge to an infra agent. We are not connecting
# two agents here, only adding a choice for the solver; the solver can decide
# how to connect two agents based on other constraints.
# @param source_agent, the number of source agent.
# @param target_agent, the number of target agent.
# @return 1 if the product agent can be connected to the infra agent.
def product_and_infra_agent_connector(
self, source_agent: Number, target_agent: Number
) -> Symbol:
src_agent_number = source_agent.number
tgt_agent_number = target_agent.number
# Since clingo doesn't support Boolean return, we are returning
# Number(1) for true, and Number(0) for false.
if tgt_agent_number * 3 + 200 < src_agent_number:
return Number(1)
else:
return Number(0)
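# These connector methods are callable from the logic program as external
# @-terms once this object is passed as the grounding context, e.g. a rule
# can guard a choice with `@agent_connector(S, T, LS, LT) == 1`, which Clingo
# evaluates by calling back into the Python method during grounding.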
class AgentGraphGenerator:
"""A generator that could produce an agent graph. We are providing an
evaluation function along with an actual generate function. So that
the user can do a dry run to evaluate the quality of actual graph.
The user must specify which on_model is going to use."""
def __init__(
self, agent_distribution: List[int], solving_context: AgentGraphClingoContext
) -> None:
self.infra_agents: List[int] = []
self.product_agents: List[int] = []
self.edges: List[Set] = []
self._raw_model = ""
self.agent_distribution = agent_distribution
self.solving_context = solving_context
def add_infra_agent(self, agent_number: int) -> None:
self.infra_agents.append(agent_number)
def add_product_agent(self, agent_number: int) -> None:
self.product_agents.append(agent_number)
def add_edge(self, left_agent: int, right_agent: int) -> None:
self.edges[left_agent].add(right_agent)
def validate_range_in_profile(self, degrees: List[int], direction: str) -> bool:
for node, degree in enumerate(degrees):
# Select a right profile to use.
if node in self.infra_agents:
agent_profile = self.solving_context.infra_agent_profile
elif node in self.product_agents:
agent_profile = self.solving_context.product_agent_profile
else:
raise RuntimeError("Can't find a profile for agent {0}".format(node))
# Check the degree within the range or not.
assert (
degree >= agent_profile[direction][0]
and degree < agent_profile[direction][1]
), "Node {0}'s {1}: {2} is out of range {3}, {4}.".format(
node,
direction,
degree,
agent_profile[direction][0],
agent_profile[direction][1],
)
return True
def validate(
self,
customize_validator: Optional[
Callable[[int, int, List[int], List[int]], bool]
] = None,
) -> bool:
# Graph validation using the constraints specified in the context.
assert (
len(self.infra_agents) >= self.solving_context.number_of_infra_agents
), "Expected to get at least {0}, but only have {1} infra agents.".format(
self.solving_context.number_of_infra_agents, len(self.infra_agents)
)
assert (
len(self.product_agents) >= self.solving_context.number_of_product_agents
), "Expected to get at least {0}, but only have {1} product agents.".format(
self.solving_context.number_of_product_agents, len(self.product_agents)
)
# Compute in/out degree.
in_degrees = [0] * sum(self.agent_distribution)
out_degrees = [0] * sum(self.agent_distribution)
# Iterate through adjacency matrix.
for node, depends_on in enumerate(self.edges):
in_degrees[node] += len(depends_on)
for x in depends_on:
out_degrees[x] += 1
# Validate the degrees are in the range specified by the profile.
self.validate_range_in_profile(in_degrees, "in_degree")
self.validate_range_in_profile(out_degrees, "out_degree")
# Customized range function needs a customized validator.
if customize_validator is not None:
return customize_validator(
self.infra_agents, self.product_agents, in_degrees, out_degrees
)
return True
def evaluate(self, m: clingo.Model) -> None:
self.generate(m)
print("Number of infra agents: {0}".format(len(self.infra_agents)))
print("Number of product agents: {0}".format(len(self.product_agents)))
print("Number of edges: {0}".format(sum([len(x) for x in self.edges])))
similar_agents = 0
checked_agents = set()
similar_relations = []
for outer_agent, outer_dependents in enumerate(self.edges):
# Enumerate starting at outer_agent + 1 so inner_agent is the actual
# index into self.edges, not an offset into the slice.
for inner_agent, inner_dependents in enumerate(
self.edges[outer_agent + 1 :], start=outer_agent + 1
):
if (
outer_dependents == inner_dependents
and len(outer_dependents) != 0
and inner_agent not in checked_agents
):
similar_agents += 1
checked_agents.add(inner_agent)
if inner_dependents not in similar_relations:
similar_relations.append(inner_dependents)
print("There are {0} similar agents.".format(similar_agents))
print("Common dependents set is {0}".format(similar_relations))
def generate(self, m: clingo.Model) -> None:
self._raw_model = str(m)
self.infra_agents = []
self.product_agents = []
predicates = m.symbols(atoms=True)
node_func = {
"infra_agent": self.add_infra_agent,
"product_agent": self.add_product_agent,
}
edge_func = {"depends_on": self.add_edge}
for predicate in predicates:
if predicate.name in node_func:
node_func[predicate.name](predicate.arguments[0].number)
self.edges = [
set() for x in range(len(self.infra_agents) + len(self.product_agents))
]
for predicate in predicates:
if predicate.name in edge_func:
edge_func[predicate.name](
predicate.arguments[0].number, predicate.arguments[1].number
)
print(self._raw_model)
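# A raw model from the solver looks like (illustrative):
# "infra_agent(0) product_agent(3) depends_on(3,0) ...". The two passes
# above pick out the nodes first, then fill in the edges.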
def on_model(self, m: clingo.Model) -> None:
raise RuntimeError("Must specify a valid method.")
# Generate a set of agents using the given distribution. The distribution
# encodes the critical path requirement (levels) as well as how many roots
# are in the graph.
def generating_agent_distribution(agent_distribution: List[int]) -> List[str]:
if not all(i > 0 for i in agent_distribution):
raise RuntimeError("Agent distribution must have all positive integers.")
# The number of levels in the generated agent graph is the length of this list.
return [
"agents({0}..{1}, {2}).".format(
sum(agent_distribution[:level]),
sum(agent_distribution[:level]) + number_of_agents_at_this_level - 1,
level,
)
for level, number_of_agents_at_this_level in enumerate(agent_distribution)
]
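# For example, generating_agent_distribution([2, 4, 4]) returns
# ["agents(0..1, 0).", "agents(2..5, 1).", "agents(6..9, 2)."]:
# two agents at level 0, four at level 1, and four at level 2.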
# Take a generator (which carries the agent distribution) and create an agent graph.
def generating_an_agent_graph(generator: AgentGraphGenerator) -> None:
# Logic programs for code synthesis.
asp_files = "hphp/hack/src/hh_codesynthesis"
# Clingo interfaces.
ctl = clingo.Control()
# Load LP for agent graph generating.
ctl.load(asp_files + "/agent_graph_generator.lp")
# Load LP for agent distribution.
ctl.add(
"base",
[],
"\n".join(generating_agent_distribution(generator.agent_distribution)),
)
ctl.ground([("base", [])], context=generator.solving_context)
result: Union[clingo.solving.SolveHandle, clingo.solving.SolveResult] = ctl.solve(
on_model=generator.on_model
)
if isinstance(result, clingo.solving.SolveResult):
if result.unsatisfiable:
raise RuntimeError("Unsatisfiable.")
def main() -> None:
# Parse the arguments.
parser = argparse.ArgumentParser()
parser.add_argument(
"--agents",
nargs="+",
type=int,
default=[5, 10, 10, 10, 10, 10, 10, 15, 20],
help=(
"A sequence of numbers indicating the number of agents at each level. For"
" example, [2, 4, 4] means the agent graph has three levels in it, and each"
" level has 2, 4, 4 agents respectively."
),
)
parser.add_argument(
"--number_of_infra_agents",
type=int,
default=20,
help="Number of infra agents in the generated agent graph.",
)
parser.add_argument(
"--number_of_product_agents",
type=int,
default=60,
help="Number of product agents in the generated agent graph.",
)
parser.add_argument(
"--number_of_leaves",
type=int,
default=30,
help="Number of leaves in the generated agent graph.",
)
parser.add_argument(
"--infra_agent_indegrees",
nargs=2,
type=int,
default=[0, 10],
help=(
"A boundary for describing one infra agent can dependent on how many other"
" agents."
),
)
parser.add_argument(
"--infra_agent_outdegrees",
nargs=2,
type=int,
default=[5, 100],
help=(
"A boundary for describing how many other agents can depent on one infra"
" agent."
),
)
parser.add_argument(
"--product_agent_indegrees",
nargs=2,
type=int,
default=[5, 20],
help=(
"A boundary for describing one product agent can dependent on how many"
" other agents."
),
)
parser.add_argument(
"--product_agent_outdegrees",
nargs=2,
type=int,
default=[0, 5],
help=(
"A boundary for describing how many other agents can depent on one product"
" agent."
),
)
parser.add_argument("--evaluate", action=argparse.BooleanOptionalAction)
args: argparse.Namespace = parser.parse_args()
# Setup generator and context.
agent_graph_generator = AgentGraphGenerator(
agent_distribution=args.agents,
solving_context=AgentGraphClingoContext(
number_of_infra_agents=args.number_of_infra_agents,
number_of_product_agents=args.number_of_product_agents,
number_of_leaves=args.number_of_leaves,
infra_agent_profile={
"in_degree": args.infra_agent_indegrees,
"out_degree": args.infra_agent_outdegrees,
},
product_agent_profile={
"in_degree": args.product_agent_indegrees,
"out_degree": args.product_agent_outdegrees,
},
),
)
agent_graph_generator.on_model = agent_graph_generator.generate
if args.evaluate:
agent_graph_generator.on_model = agent_graph_generator.evaluate
generating_an_agent_graph(agent_graph_generator)
if __name__ == "__main__":
sys.exit(main()) |
hhvm/hphp/hack/src/hh_codesynthesis/agent_graph_generator.lp | % agent_graph_generator.lp -> Agent Graph generating algorithm in Clingo.
% Can be combined with Clingo Python/Lua interface
% Some placeholders in this file can be run from the command line with the section below.
% Or "clingo agent_graph_generator.lp -c number_of_infra_agents=100
% -c number_of_product_agents=100 ", etc
% =============================================================================
#const number_of_leaves = @nl().
#const number_of_infra_agents = @nia().
#const number_of_product_agents = @npa().
% We organize the agents into layers, and an agent on level X can only depend
% on agents on level Y, where Y < X. These rules are important for making the
% construction realistic.
% Grounding this in ASP is very slow, so we instead write the levels down
% explicitly in the Python interface, `generating_agent_distribution`.
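% For example, with an agent distribution of [2, 4, 4], the Python interface
% supplies the facts:
% agents(0..1, 0). agents(2..5, 1). agents(6..9, 2).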
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Generating n candidate agents.
% =============================================================================
% candidate_agents(0..number_of_agents-1).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Critical path requirement.
% =============================================================================
% levels(0..number_of_levels-1).
% 1 {agents(X, L): levels(L)} 1 :- candidate_agents(X).
% Constraint on number of agents in each level.
% number_of_agents_at_each_level(Total, L) :- Total = #count{ X : agents(X, L) }, levels(L).
% :- number_of_agents_at_each_level(Total, L), Total < @agents_at_each_level(L).
% An agent X could be an infra agent or a product agent.
1 {infra_agent(X); product_agent(X)} 1 :- agents(X, L).
% Constraint on number of infra agents.
:- #count{ X : infra_agent(X) } < number_of_infra_agents.
% Constraint on number of product agents.
:- #count{ X : product_agent(X) } < number_of_product_agents.
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Degree requirement.
% =============================================================================
% Calculate the in/out degree of each agent.
in_degree_of_agent(X, InDegree) :- agents(X, _), InDegree = #count{ Y : depends_on(X, Y) }.
out_degree_of_agent(X, OutDegree) :- agents(X, _), OutDegree = #count{ Y : depends_on(Y, X) }.
% Constraint on number of roots in the agent graph.
% This is the same as the agent at level 0, so we don't need this constraint.
% :- #count{ X : in_degree_of_agent(X, 0) } < number_of_roots.
% Constraint on number of leaves in the agent graph.
:- #count{ X : out_degree_of_agent(X, 0) } < number_of_leaves.
% Constraint on root agent, must be at level 0.
:- in_degree_of_agent(X, 0), agents(X, Level), Level != 0.
% Constraint on root agent, in degree must be 0.
:- in_degree_of_agent(X, Degree), agents(X, 0), Degree != 0.
% Constraint on each type of the agent.
% If we adjust the function defined in Python, we could specify not only for
% each agent type, but also for individual agent.
:- out_degree_of_agent(X, D), infra_agent(X), D < @infra_agent_out_degree_low(X).
:- out_degree_of_agent(X, D), infra_agent(X), D >= @infra_agent_out_degree_high(X).
:- in_degree_of_agent(X, D), infra_agent(X), D < @infra_agent_in_degree_low(X).
:- in_degree_of_agent(X, D), infra_agent(X), D >= @infra_agent_in_degree_high(X).
:- out_degree_of_agent(X, D), product_agent(X), D < @product_agent_out_degree_low(X).
:- out_degree_of_agent(X, D), product_agent(X), D >= @product_agent_out_degree_high(X).
:- in_degree_of_agent(X, D), product_agent(X), D < @product_agent_in_degree_low(X).
:- in_degree_of_agent(X, D), product_agent(X), D >= @product_agent_in_degree_high(X).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Edge (Agent relationships) requirement.
% =============================================================================
% An agent_connector and product_and_infra_agent_connector can decide whether
% or not the two agents could have a dependency.
% This is a choice rule, so in the Python interface, @agent_connector decides
% whether this rule is enabled or disabled for agent pair <X, Y>. If it's
% enabled, the solver can choose to form an edge or not. The edge is
% constrained by other requirements. For example, say agents 2, 3, 4, 5 could
% all depend on agent 1 (by our @agent_connector), and we also have a
% constraint that says "at most two other agents can depend on agent 1". The
% solver then has to pick one pair of agents from the combinations {2, 3},
% {2, 4}, {2, 5}, {3, 4}, {3, 5}, {4, 5}. We can adjust @agent_connector to
% add more randomness. For instance, instead of using consecutive numbers
% 2, 3, 4, 5, we could use a set of random agent numbers like 17, 29, 42, 75.
{depends_on(Y, X) : @agent_connector(Y, X, YLevel, XLevel) != 0} :-
agents(X, XLevel), agents(Y, YLevel).
{depends_on(Y, X) : @product_and_infra_agent_connector(Y, X) != 0} :-
product_agent(Y), infra_agent(X), agents(X, XLevel), agents(Y, YLevel), XLevel < YLevel.
% Constraint on agent relationship.
% An infra agent shouldn't depend on any product agent.
:- depends_on(Y, X), infra_agent(Y), product_agent(X).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% General Constraints
% =============================================================================
% Print SumOfAllDegree.
% allSD(SD) :- SD = #sum{D : out_degree_of_agent(X, D), agents(X)}.
#show product_agent/1.
#show infra_agent/1.
#show depends_on/2. |
|
Python | hhvm/hphp/hack/src/hh_codesynthesis/codeGenerator.py | #!/usr/bin/env python3
# pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
#
# A family of code generators:
# CodeGenerator just emits LP code (the raw stable model);
# the HackCodeGenerator class extends CodeGenerator to emit Hack code.
from typing import List, Optional
import clingo
from clingo import Number, Symbol
class ClingoContext:
"""Context class interact with Python and Clingo."""
def __init__(
self,
number_of_nodes: int = 0,
min_depth: int = 1,
min_classes: int = 1,
min_interfaces: int = 1,
lower_bound: int = 1,
higher_bound: int = 1,
min_stub_classes: int = 0,
min_stub_interfaces: int = 0,
degree_distribution: Optional[List[int]] = None,
) -> None:
self.number_of_nodes = number_of_nodes
self.min_depth = min_depth
self.min_classes = min_classes
self.min_interfaces = min_interfaces
self.lower_bound = lower_bound
self.higher_bound = higher_bound
self.min_stub_classes = min_stub_classes
self.min_stub_interfaces = min_stub_interfaces
self.degree_distribution: List[int] = (
[] if not degree_distribution else degree_distribution
)
def n(self) -> Symbol:
return Number(self.number_of_nodes)
def d(self) -> Symbol:
return Number(self.min_depth)
def c(self) -> Symbol:
return Number(self.min_classes)
def i(self) -> Symbol:
return Number(self.min_interfaces)
def lb(self) -> Symbol:
return Number(self.lower_bound)
def hb(self) -> Symbol:
return Number(self.higher_bound)
def sc(self) -> Symbol:
return Number(self.min_stub_classes)
def si(self) -> Symbol:
return Number(self.min_stub_interfaces)
class CodeGenerator(object):
"""A base generator to emit raw model from Clingo output only
The children classes can extend the functionality to produce
corresponding Hack/Java/C# code.
"""
def __init__(
self, solving_context: Optional[ClingoContext] = None, model_count: int = 1
) -> None:
super(CodeGenerator, self).__init__()
self._raw_model = ""
self.solving_context: ClingoContext = (
ClingoContext() if not solving_context else solving_context
)
self.model_count = model_count
def __str__(self) -> str:
return self._raw_model
"""
Callback function for Clingo on_model event.
"""
def on_model(self, m: clingo.Model) -> bool:
# The same set of parameters and search algorithm will produce the same
# result set. To make sure two different agents using the same settings
# can produce different output, we count models in the result set. The
# first agent using the same configuration gets the first one, the second
# agent using the same configuration gets the second one, and so forth.
self.model_count -= 1
if self.model_count > 0:
return True
self._raw_model = str(m)
return False |
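# A minimal usage sketch (illustrative; the toy logic program is made up):
# ctl = clingo.Control()
# ctl.add("base", [], "1 {a; b} 1.")
# ctl.ground([("base", [])])
# ctl.configuration.solve.models = 0 # enumerate until on_model returns False
# generator = CodeGenerator(model_count=2) # keep the second model found
# ctl.solve(on_model=generator.on_model)
# print(generator) # prints the captured raw model, e.g. "a" or "b"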
hhvm/hphp/hack/src/hh_codesynthesis/dep_graph_reasoning.lp | % Given input from "hh_single_type_check --dump-deps --no-builtins"
% We represent them as logic rules in Python interface.
%
% Or we can construct sample input.lp like:
% extends_to(b, a).
% extends_to(b, i1).
% extends_to(b, i2).
% extends_to(i1, i).
% extends_to(i2, i).
% symbols(a; b; i; i1; i2).
% reasoning.lp
%%%%%%%%
% Starter 2/Milestone 1.
%%%%%%%%
% Quickly make sure there is no circular dependencies.
indirect_extends_to(X, Y) :- extends_to(X, Y).
indirect_extends_to(X, Y) :- extends_to(X, Z), indirect_extends_to(Z, Y).
:- extends_to(X, Y), indirect_extends_to(Y, X).
% A symbol could be either a class or an interface.
1 {class(X); interface(X)} 1 :- symbols(X).
% A symbol is unique; it can't be a class/interface name and a function name
% at the same time.
:- symbols(X), funcs(X).
% Later on, we can adjust these parameters to balance the number of classes
% and the number of interfaces we expect to synthesize.
#const min_classes = @c().
#const min_interfaces = @i().
:- #count{X: class(X)} < min_classes.
:- #count{X: interface(X)} < min_interfaces.
% If X is an interface, it can only have extends_to edges from other
% interfaces. So all such Y must be interfaces.
interface(Y) :- interface(X), extends_to(Y, X).
% If X is a class, it can have extends_to edges from either a class or an interface.
1 {class(Y); interface(Y)} 1 :- class(X), extends_to(Y, X).
% But, at most 1 parent class is allowed.
:- class(X), #count{Y : class(Y), extends_to(Y, X)} > 1.
% Output, convert to Hack code keywords. Send back to Clingo Python/Lua interface.
% From there, we are using these keywords to produce valid Hack code.
implements(X, Y) :- extends_to(Y, X), class(X), interface(Y).
extends(X, Y) :- extends_to(Y, X), interface(X), interface(Y).
extends(X, Y) :- extends_to(Y, X), class(X), class(Y).
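% For example (illustrative), given the facts extends_to(b, a). symbols(a; b).
% one stable model assigns class(a) and interface(b), which yields
% implements(a, b) for the code emitter.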
%%%%%%%%
% Milestone 2.
%%%%%%%%
% We represent "Type" and "Method" as logic rules in Python interface.
%
% Or we can construct sample input.lp like:
% extends_to("B", "C").
% method("B", "Foo", "C").
% type("B", "C").
% symbols("B"; "C").
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Type Dependency Edge <Type, Type>.
% =============================================================================
% If TypeX needs "Type" check TypeY, but no "Extend" relationship presents, so
% the "Type" edge is coming from a parameter TypeX in a TypeY method call.
has_method_with_parameter(TypeY, TypeX) :-
type(TypeX, TypeY),
not indirect_extends_to(TypeX, TypeY),
symbols(TypeY).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Method Dependency Edge <Method, Type>.
% =============================================================================
% If we see "Method" dependency edge, TypeX must have one method with MethodName.
add_method(TypeX, MethodName) :- method(TypeX, MethodName, _).
% Then the "Method" dependency is either coming from
% 1. overriding TypeX::MethodName in TypeZ. (except TypeZ is an interface,
% and TypeX is a class. See Constraint 2.1.a)
% 2. invoking TypeX->MethodName in TypeZ. (except TypeZ is an interface,
% and it can't invoke. See Constraint 2.2.a)
%
% #############################################################################
% Using some greedy approach in the model to reduce the usage of "choice rule".
% Rule 2.1.1: We say that if there is an "Extend" edge and a "Method" edge,
% then TypeZ overrides TypeX::MethodName.
% override(TypeZ, TypeX, MethodName) :-
% method(TypeX, MethodName, TypeZ), extends_to(TypeX, TypeZ).
% Rule 2.1.2: We could say that if there is no "Extend" edge but a "Method"
% edge shows up, then TypeZ invokes TypeX->MethodName in some method of TypeZ::__.
% invokes_in_method(TypeZ, TypeX, MethodName) :-
% method(TypeX, MethodName, TypeZ), not extends_to(TypeX, TypeZ).
% #############################################################################
% Rule 2.1.3: However, we give complete flexibility to choose between
% overriding and invoking a method.
1 {invokes_in_method(TypeZ, TypeX, MethodName);
override(TypeZ, TypeX, MethodName)} 1 :-
method(TypeX, MethodName, TypeZ), symbols(TypeZ).
%
% Currently, the choice is "non-deterministic" for "Method" edges no matter
% how the "Extends" edges are laid out. If there is a performance issue, feel
% free to mix in Rules 2.1.1-2.1.3. The performance impact depends on the size
% of the intersection of "Method" and "Extends". If "Method" and "Extends"
% overlap, we can deterministically decide "override" using Rule 2.1.1 and
% save some time. We can also enable Rule 2.1.2, so that we deterministically
% decide "invoke" and save more time. Remember, any combination is valid under
% Declarative Programming.
% As a consequence, if we chose 'override' without an "Extend" edge, we need
% to supplement one "Extend" edge; as a result, the synthesized code will be a
% superset of the given dependency graph, which is totally fine.
extends_to(TypeX, TypeZ) :- override(TypeZ, TypeX, _).
% If TypeZ overrides TypeX::MethodName, we add a method Methodname to TypeZ.
add_method(TypeZ, MethodName) :- override(TypeZ, _, MethodName).
% Or we chose to invoke TypeX->MethodName in a method of TypeZ, we must pass
% TypeX in as a parameter.
has_method_with_parameter(TypeZ, TypeX) :- invokes_in_method(TypeZ, TypeX, _).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% General Constraints
% =============================================================================
% Constraint 2.1.a: TypeZ overrides TypeX::_, TypeZ can't be an
% interface to override a class TypeX.
:- override(TypeZ, TypeX, _), interface(TypeZ), class(TypeX).
% Constraint 2.2.a: Interface TypeZ can't invoke any method inside.
:- invokes_in_method(TypeZ, _, _), interface(TypeZ).
%%%%%%%%
% Milestone 3.
%%%%%%%%
% We represent "FUN" and "SMethod" as logic rules in Python interface.
%
% Or we can construct sample input.lp like:
% extends_to("A", "B").
% method("A", "foo", "B").
% type("A", "B").
% type("B", "C").
% type("C", "B").
%
% invoked_by("fn_bar", "B").
% invoked_by("fn_foo", "B").
% method("C", "call", "fn_foo").
% method("B", "foo", "fn_bar").
% static_method("B", "Bar", "fn_bar").
% static_method("B", "Bar", "C").
%
% type("B", "fn_bar").
% type("C", "fn_foo").
%
% funcs("fn_foo"; "fn_bar").
% symbols("A"; "B"; "C").
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% SMethod Dependency Edge <SMethod, Type>, <SMethod, Fun>.
% =============================================================================
% If TypeX has a static method, TypeX must be a class.
class(TypeX) :- static_method(TypeX, _, _).
% Let's add a static method to the class.
add_static_method(ClassName, MethodName) :- static_method(ClassName, MethodName, _).
% If TypeX is a class or function, and we see a static method edge, then TypeX
% invokes ClassName::MethodName.
invokes_static_method(TypeX, ClassName, MethodName) :-
static_method(ClassName, MethodName, TypeX).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% SMethod Constraints
% =============================================================================
% If TypeX is an interface, it can't invoke a static method.
:- static_method(_, _, TypeX), interface(TypeX).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Fun Dependency Edge <Fun, Type>, <Fun, Fun>.
% =============================================================================
% TypeX could be either a function or a class to invoke FnName.
invokes_function(TypeX, FnName) :- invoked_by(FnName, TypeX).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Fun Constraints
% =============================================================================
% If TypeX is an interface, it can't invoke a function.
:- invoked_by(_, TypeX), interface(TypeX).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Method Dependency Edge <Method, Fun>. Extended Milestone 2 <Method, Type>.
% =============================================================================
% If FnX is a function, and we see a "Method" edge, then TypeY could be a
% class or an interface.
invokes_in_body(FnX, TypeY, MethodName) :-
method(TypeY, MethodName, FnX), funcs(FnX).
% If TypeY is a class, we create an object inside the function body.
creates_in_body(FnX, TypeY) :- method(TypeY, _, FnX), funcs(FnX), class(TypeY).
% If TypeY is an interface, we can only pass it by parameter, and the caller
% must create a TypeZ object, where TypeZ implements TypeY, as the argument
% before calling this function.
1 {has_parameter_and_argument(FnX, TypeY, ClassZ)} 1 :-
method(TypeY, MethodName, FnX), funcs(FnX), interface(TypeY),
extends_to(TypeY, ClassZ), class(ClassZ).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Type Dependency Edge <Type, Fun>. Extended Milestone 2 <Type, Type>.
% =============================================================================
% If TypeY needs "Type" check FnX, so the "Type" edge is coming from a return
% value, a parameter or an object creation in the function definition.
% We created an object or added a parameter if we see a <Method, Fun> edge,
% which will naturally introduce <Type, Fun> edge. Therefore, here we are dealing
% with those cases where no <Method, Fun> edge shows up.
%
% If TypeY is a class, we create an object inside the function body.
creates_in_body(FnX, TypeY) :-
type(TypeY, FnX), funcs(FnX), class(TypeY),
not method(TypeY, _, FnX), not static_method(TypeY, _, FnX).
% If TypeY is an interface, we can only pass it by parameter, and the caller
% must create a TypeZ object, where TypeZ implements TypeY, as the argument
% before calling this function.
1 {has_parameter_and_argument(FnX, TypeY, ClassZ)} 1 :-
type(TypeY, FnX), funcs(FnX), not method(TypeY, _, FnX),
interface(TypeY), extends_to(TypeY, ClassZ), class(ClassZ).
% Here is an example for how 'has_parameter_and_argument' handles the
% <"Interface", "Fun"> scenario.
% <?hh
% interface I {
% public function interface_only_method(): void;
% }
% class C implements I {
% public function interface_only_method(): void {}
% public function class_only_method(): void {}
% }
% function foo(I $I_obj): void {
% $I_obj->interface_only_method();
% }
% function bar(): void {
% $C_obj = new C();
% foo($C_obj)
% }
#show class/1.
#show interface/1.
#show funcs/1.
#show extends/2.
#show implements/2.
#show has_method_with_parameter/2.
#show invokes_function/2.
#show add_method/2.
#show add_static_method/2.
#show creates_in_body/2.
#show invokes_in_body/3.
#show has_parameter_and_argument/3.
#show invokes_in_method/3.
#show invokes_static_method/3. |
|
hhvm/hphp/hack/src/hh_codesynthesis/graph_generator.lp | % generate_gen.lp -> Graph generating algorithm in Clingo.
% Can be combined with Clingo Python/Lua interface, dep_graph_reasoning.lp
% Some placeholders in this file can be run in command line with below section.
% Or "clingo generate_gen.lp -c n=10 -c avg_width=1", etc
% =============================================================================
#const n = @n().
#const number_of_stub_classes = @sc().
#const number_of_stub_interfaces = @si().
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Generating n symbols.
% =============================================================================
% The actual symbol created in the Python interface is internal_symbols/2.
% The first argument is a string, the second argument is a number,
% e.g., internal_symbols("S0", 0).
% We convert it to symbols/1 here, simply ignoring the second argument with "_".
symbols(X) :- internal_symbols(X, _).
% Uncomment this for an easy prototyping without Python interface.
% symbols(1..n).
% internal_symbols(_, X) :- symbols(X).
% We moved the part below to the Python interface to avoid the "grounding
% bottleneck," and we use a normal distribution to create a sequence of
% "extends_to" facts among n nodes. For n = 100, we can use
% extends_to(1, 11).
% extends_to(11, 21).
% extends_to(21, 31).
% extends_to(31, 41).
% extends_to(41, 51).
% extends_to(51, 61).
% extends_to(61, 71).
% extends_to(71, 81).
% extends_to(81, 91).
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Minimum depth requirement.
% =============================================================================
% #const depth = @d().
% path(X, X, 0) :- symbols(X).
% path(X, Y, L + 1) :- extends_to(X, Z), path(Z, Y, L), L < depth + 1.
% :- #max{L : path(X, Y, L)} < depth.
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Average width requirement.
% =============================================================================
% Calculate the in/out degree of each symbol.
in_degree(X, D) :- symbols(X), D = #count{ Y : extends_to(Y, X) }.
out_degree(X, D) :- symbols(X), D = #count{ Y : extends_to(X, Y) }.
% One class or interface can have at most 10 extends relationships.
% We can adjust this later, based on profiling of actual software.
% We believe code with more than 10 extends/implements is badly designed.
% Therefore, we added this constraint to prevent synthesizing bad code.
:- in_degree(X, D), D > 10.
% Constraint on a symbol can't be extended by more than 30 other symbols.
:- out_degree(X, D), D > 30.
% Average constraint.
% Instead of computing the sum of all degrees and dividing by n, we changed the
% average degree requirement to a degree distribution in our Python interface.
% If class X has no parent class, we consider it a "stub" class, which can
% serve as a "cut" node to connect this agent with other agents.
has_parent_class(X) :- extends_to(Y, X), class(Y), class(X).
:- #count{X: class(X), not has_parent_class(X)} < number_of_stub_classes.
% If interface X has no parent interfaces, we consider it a "stub" interface,
% which can serve as a "cut" node to connect this agent with others.
:- #count{X: in_degree(X, 0), interface(X)} < number_of_stub_interfaces.
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% Spanning requirement.
% =============================================================================
{extends_to(Y, X) : XN + @lb() < YN, YN < XN + @hb()} :-
internal_symbols(X, XN), internal_symbols(Y, YN).
% If X, Y are numbers, not strings, we can simply write this without XN, YN.
% {extends_to(X, Y) : X + 10 < Y, Y < X + 100} :- symbols(X), symbols(Y).
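% Illustrative grounding (assuming @lb() = 1 and @hb() = 5): with
% internal_symbols("S0", 0) and internal_symbols("S3", 3), we have
% 0 + 1 < 3 < 0 + 5, so the solver may choose to add extends_to("S3", "S0").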
% $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
% General Constraints
% =============================================================================
% Print SumOfAllDegree.
% allSD(SD) :- SD = #sum{D : degree(X, D), symbols(X)}. |
|
Python | hhvm/hphp/hack/src/hh_codesynthesis/hackGenerator.py | # pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
#
# A family of generators to create Hack constructs like class, function, interface, etc
#
# We assume all classes/interfaces/functions are properly defined before
# creating any implements/extends dependency. This is guaranteed by the
# preceding step: the logic rules checked all constraints for us, so the
# reasoning result from Clingo can be converted to code straightforwardly.
#
# _HackClassGenerator maintains each class definition.
# _HackInterfaceGenerator maintains each interface definition.
# _HackFunctionGenerator maintains each function definition.
# HackCodeGenerator extends CodeGenerator and combines all _Hack*Generator
# instances to emit Hack code from Clingo output.
import functools
from collections import deque
from typing import Any, Dict, List, Optional, Set, Tuple
import clingo
from hphp.hack.src.hh_codesynthesis.codeGenerator import ClingoContext, CodeGenerator
class _HackBaseGenerator(object):
"""
_HackBaseGenerator holds the shared part of _HackInterfaceGenerator and
_HackClassGenerator, in this case the body of each class or interface
definition. We extend this to support method declaration/definition.
"""
def __init__(self) -> None:
super(_HackBaseGenerator, self).__init__()
self.name = "Base"
# A set of methods in this class/interface.
self.methods: Set[str] = set()
# A set of parameters invoked in dummy method.
self.parameter_set: Set[str] = set()
# A set of functions to invoke in dummy method.
self.invoke_funcs_set: Set["_HackFunctionGenerator"] = set()
# A set of parents this symbol has.
self.parents: Set[str] = set()
def add_method(self, method_name: str) -> None:
self.methods.add(method_name)
def add_parameter(self, parameter_type: str) -> None:
self.parameter_set.add(parameter_type)
def add_parent(self, parent_name: str) -> None:
self.parents.add(parent_name)
def _print_dummy_method_body(self) -> str:
return ";"
def _print_dummy_method(self) -> str:
parameter_list = ", ".join(
(map(lambda x: f"{x} ${x}_obj", sorted(self.parameter_set)))
)
if parameter_list == "" and len(self.invoke_funcs_set) == 0:
return ""
dummy_name = f"dummy_{self.name}_method"
# We define a unique dummy method name among all the methods defined by
# the user. If there is a naming conflict, we simply extend it with "_".
while dummy_name in self.methods:
dummy_name += "_"
return (
f"\npublic function {dummy_name}({parameter_list}):"
f" void{self._print_dummy_method_body()}\n"
)
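# For example (illustrative), with self.name = "Base" and
# parameter_set = {"A"}, _print_dummy_method() emits:
# "\npublic function dummy_Base_method(A $A_obj): void;\n"
# (interfaces keep the ";" body; _HackClassGenerator overrides it with "{...}").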
def _print_method_body(self) -> str:
return ";"
def _print_method(self, method_name: str, static_keyword: str = " ") -> str:
return (
f"\npublic{static_keyword}function {method_name}():"
f" void{self._print_method_body()}\n"
)
def _print_methods(self) -> str:
return "".join(list(map(self._print_method, sorted(self.methods))))
def _print_static_methods(self) -> str:
return ""
def _print_body(self) -> str:
return (
"{"
+ self._print_static_methods()
+ self._print_dummy_method()
+ self._print_methods()
+ "}"
)
class _HackInterfaceGenerator(_HackBaseGenerator):
"""A generator to emit Hack Interface definition."""
def __init__(self, name: str, **kwargs: Dict[str, Any]) -> None:
super(_HackInterfaceGenerator, self).__init__()
self.name = name
# A set of extends relationship in this interface.
self.extends: Set[str] = set()
def add_extend(self, extend_from: str) -> None:
self.extends.add(extend_from)
def _print_extends(self) -> str:
if len(self.extends) == 0:
return ""
return "extends {}".format(",".join(sorted(self.extends)))
def __str__(self) -> str:
return f"interface {self.name} {self._print_extends()} {self._print_body()}"
class _HackClassGenerator(_HackBaseGenerator):
"""A generator to emit Hack Class definition."""
def __init__(self, name: str, **kwargs: Dict[str, Any]) -> None:
super(_HackClassGenerator, self).__init__()
self.name = name
# The extends relationship can only point to one parent class.
self.extend: str = ""
# A set of implements relationship in this class.
self.implements: Set[str] = set()
# A set of static methods in this class.
self.static_methods: Set[str] = set()
# A set of methods to invoke in dummy method.
self.invoke_set: Set[Tuple[str, str]] = set()
# A set of static methods to invoke in dummy method.
self.invoke_static_set: Set[Tuple[str, str]] = set()
def set_extend(self, extend_from: str) -> None:
self.extend = extend_from
def add_implement(self, implement: str) -> None:
self.implements.add(implement)
def add_static_method(self, method_name: str) -> None:
self.static_methods.add(method_name)
def add_invoke(self, object_type: str, method_name: str) -> None:
if object_type in self.parameter_set:
self.invoke_set.add((object_type, method_name))
def add_invoke_static_method(self, class_name: str, method_name: str) -> None:
self.invoke_static_set.add((class_name, method_name))
def add_invoke_function(self, fn_obj: "_HackFunctionGenerator") -> None:
self.invoke_funcs_set.add(fn_obj)
def _print_extend(self) -> str:
if self.extend == "":
return ""
return "extends {}".format(self.extend)
def _print_implements(self) -> str:
if len(self.implements) == 0:
return ""
return "implements {}".format(",".join(sorted(self.implements)))
def _print_static_methods(self) -> str:
return "".join(
[self._print_method(x, " static ") for x in sorted(self.static_methods)]
)
def _print_dummy_method_body(self) -> str:
return (
"{"
+ "".join([f"\n${x[0]}_obj->{x[1]}();\n" for x in sorted(self.invoke_set)])
+ "".join(
[f"\n{x[0]}::{x[1]}();\n" for x in sorted(self.invoke_static_set)]
)
+ "".join(
[
f"\n{x._print_callee()}\n"
for x in sorted(self.invoke_funcs_set, key=lambda x: x.name)
]
)
+ "}"
)
def _print_method_body(self) -> str:
return "{}"
def __str__(self) -> str:
return (
f"class {self.name} {self._print_extend()} "
+ f"{self._print_implements()} {self._print_body()}"
)
class _HackFunctionGenerator:
"""A generator to emit Hack Function definition."""
def __init__(self, name: str, **kwargs: Dict[str, Any]) -> None:
self.name = name
# A set of static methods to invoke in the function.
# A tuple with (class_name, static_method_name) added to the set.
self.invoke_static_set: Set[Tuple[str, str]] = set()
# A set of type(class/interface) methods to invoke in the function.
# A tuple with (type_name, method_name) added to the set.
self.type_method_set: Set[Tuple[str, str]] = set()
# A set of functions to invoke in the function.
self.invoke_funcs_set: Set["_HackFunctionGenerator"] = set()
# A set of class objects to create in the function.
# To invoke a class method, we create objects from this set prior to
# invoking the methods in `type_method_set`.
# A string with class_name added to the set.
self.class_obj_set: Set[str] = set()
# A list of parameter and argument pairs.
# To invoke an interface method, we add this list to the function
# parameters; the objects are created by the caller, and later the
# function body invokes the methods in `type_method_set`.
# A tuple with (parameter_type, argument_type) appended to the list.
self.parameter_list: List[Tuple[str, str]] = []
def add_invoke_static_method(self, class_name: str, method_name: str) -> None:
self.invoke_static_set.add((class_name, method_name))
def add_invoke_function(self, fn_obj: "_HackFunctionGenerator") -> None:
self.invoke_funcs_set.add(fn_obj)
def add_class_obj(self, class_name: str) -> None:
self.class_obj_set.add(class_name)
def add_class_method(self, class_name: str, method_name: str) -> None:
self.type_method_set.add((class_name, method_name))
def add_parameter(self, parameter_type: str, argument_type: str) -> None:
self.parameter_list.append((parameter_type, argument_type))
def _create_arguments(self) -> str:
return "".join(
[
f"${argument_type}_obj = new {argument_type}();\n"
for (_, argument_type) in self.parameter_list
]
)
def _print_parameters(self) -> str:
return ", ".join(
[
f"{parameter_type} ${parameter_type}_obj"
for (parameter_type, _) in self.parameter_list
]
)
def _print_arguments(self) -> str:
return ", ".join(
[f"${argument_type}_obj" for (_, argument_type) in self.parameter_list]
)
def _print_callee(self) -> str:
return self._create_arguments() + f"{self.name}({self._print_arguments()});"
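# For example (illustrative), with self.name = "foo" and
# parameter_list = [("I", "C")], _print_callee() emits:
# "$C_obj = new C();\nfoo($C_obj);"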
def _print_body(self) -> str:
return (
"{"
+ "".join(
[f"\n{x[0]}::{x[1]}();\n" for x in sorted(self.invoke_static_set)]
)
+ "".join(
[
f"\n{x._print_callee()}\n"
for x in sorted(self.invoke_funcs_set, key=lambda x: x.name)
]
)
+ "".join(
[
f"\n${class_name}_obj = new {class_name}();\n"
for class_name in sorted(self.class_obj_set)
]
)
+ "".join(
[
f"\n${type_name}_obj->{method}();\n"
for (type_name, method) in sorted(self.type_method_set)
]
)
+ "}"
)
def __str__(self) -> str:
return (
f"function {self.name}({self._print_parameters()}): void"
f" {self._print_body()}"
)
class HackCodeGenerator(CodeGenerator):
"""A wrapper generator encapsulates each _Hack*Generator to emit Hack Code"""
def __init__(
self, solving_context: Optional[ClingoContext] = None, model_count: int = 1
) -> None:
super(HackCodeGenerator, self).__init__(solving_context, model_count)
self.class_objs: Dict[str, _HackClassGenerator] = {}
self.interface_objs: Dict[str, _HackInterfaceGenerator] = {}
self.function_objs: Dict[str, _HackFunctionGenerator] = {}
# A subset of the class/interface objects are stub classes/interfaces.
self.stub_classes: List[str] = []
self.stub_interfaces: List[str] = []
def _look_up_object_by_symbol(self, symbol: str) -> "_HackBaseGenerator":
if symbol in self.class_objs:
return self.class_objs[symbol]
elif symbol in self.interface_objs:
return self.interface_objs[symbol]
raise RuntimeError("No object with symbol name {0}".format(symbol))
def _add_class(self, name: str) -> None:
self.class_objs[name] = _HackClassGenerator(name)
def _add_interface(self, name: str) -> None:
self.interface_objs[name] = _HackInterfaceGenerator(name)
def _add_function(self, name: str) -> None:
self.function_objs[name] = _HackFunctionGenerator(name)
def _add_extend(self, name: str, extend: str) -> None:
if name in self.class_objs:
self.class_objs[name].set_extend(extend)
self.class_objs[name].add_parent(extend)
if name in self.interface_objs:
self.interface_objs[name].add_extend(extend)
self.interface_objs[name].add_parent(extend)
def _add_implement(self, name: str, implement: str) -> None:
if name in self.class_objs:
self.class_objs[name].add_implement(implement)
self.class_objs[name].add_parent(implement)
def _add_method(self, name: str, method_name: str) -> None:
if name in self.class_objs:
self.class_objs[name].add_method(method_name)
if name in self.interface_objs:
self.interface_objs[name].add_method(method_name)
def _add_static_method(self, name: str, method_name: str) -> None:
if name in self.class_objs:
self.class_objs[name].add_static_method(method_name)
def _add_to_parameter_set(self, name: str, parameter_type: str) -> None:
if name in self.class_objs:
self.class_objs[name].add_parameter(parameter_type)
elif name in self.interface_objs:
self.interface_objs[name].add_parameter(parameter_type)
def _add_invoke_function(self, name: str, function_name: str) -> None:
# The function isn't passed purely as a name; we pass a reference to the
# function object. The reason is that the function is going to have
# parameters inside, so keeping a reference to the function object makes
# code synthesis easier later.
if function_name not in self.function_objs:
return
if name in self.class_objs:
self.class_objs[name].add_invoke_function(self.function_objs[function_name])
if name in self.function_objs:
self.function_objs[name].add_invoke_function(
self.function_objs[function_name]
)
def _add_object_in_function(self, name: str, class_name: str) -> None:
if name in self.function_objs:
self.function_objs[name].add_class_obj(class_name)
def _add_invoke(self, name: str, object_type: str, method_name: str) -> None:
if name in self.class_objs:
self.class_objs[name].add_invoke(object_type, method_name)
def _add_invoke_static_method(
self, name: str, class_name: str, method_name: str
) -> None:
if name in self.class_objs:
self.class_objs[name].add_invoke_static_method(class_name, method_name)
if name in self.function_objs:
self.function_objs[name].add_invoke_static_method(class_name, method_name)
def _add_invoke_in_function(
self, name: str, type_name: str, method_name: str
) -> None:
if name in self.function_objs:
self.function_objs[name].add_class_method(type_name, method_name)
def _add_parameter_to_function(
self, name: str, parameter_type: str, argument_type: str
) -> None:
if name in self.function_objs:
self.function_objs[name].add_parameter(parameter_type, argument_type)
def _find_stubs(self) -> None:
for name, node in self.class_objs.items():
if node.extend == "":
self.stub_classes.append(name)
for name, node in self.interface_objs.items():
if len(node.extends) == 0:
self.stub_interfaces.append(name)
def validate_nodes(self) -> None:
# Graph validation using the constraints specified in the context.
assert (
len(self.class_objs) >= self.solving_context.min_classes
), "Expected to get at least {0}, but only have {1} classes.".format(
self.solving_context.min_classes, len(self.class_objs)
)
assert (
len(self.interface_objs) >= self.solving_context.min_interfaces
), "Expected to get at least {0}, but only have {1} interfaces.".format(
self.solving_context.min_interfaces, len(self.interface_objs)
)
assert (
len(self.class_objs) + len(self.interface_objs)
>= self.solving_context.number_of_nodes
), "Expected to get at least {0}, but only have {1} symbols.".format(
self.solving_context.number_of_nodes,
len(self.class_objs) + len(self.interface_objs),
)
def validate_stubs(self) -> None:
# Check number of stub nodes.
assert (
len(self.stub_classes) >= self.solving_context.min_stub_classes
), "Expected to get at least {0}, but only have {1} stub classes.".format(
self.solving_context.min_stub_classes, len(self.stub_classes)
)
assert (
len(self.stub_interfaces) >= self.solving_context.min_stub_interfaces
), "Expected to get at least {0}, but only have {1} stub interfaces.".format(
self.solving_context.min_stub_interfaces, len(self.stub_interfaces)
)
def validate_depth(self) -> None:
# Validate the depth requirement by propagating depths from each symbol
# to its ancestors (a BFS over the parent sets).
symbols = list(self.class_objs.keys()) + list(self.interface_objs.keys())
depth: Dict[str, int] = dict.fromkeys(symbols, 1)
for symbol in symbols:
ancestors = deque(
[
(symbol, parent)
for parent in self._look_up_object_by_symbol(symbol).parents
]
)
while len(ancestors) != 0:
(child, ancestor) = ancestors.popleft()
depth[ancestor] = max(depth[ancestor], depth[child] + 1)
ancestors.extend(
[
(ancestor, parent)
for parent in self._look_up_object_by_symbol(ancestor).parents
]
)
assert (
max(depth.values()) >= self.solving_context.min_depth
), "Expected to get at least {0} depth, but the max depth is {1}.".format(
self.solving_context.min_depth, max(depth.values())
)
def validate_degree(self) -> None:
# In-degree can be at most 10 per the LP constraint, so 11 buckets (0..10).
in_degrees = [0] * 11
# Iterate through class_objs and interface_objs.
for node in self.class_objs.values():
degree = len(node.implements)
degree += 1 if node.extend != "" else 0
in_degrees[degree] += 1
for node in self.interface_objs.values():
degree = len(node.extends)
in_degrees[degree] += 1
# Validate the degrees are greater than the specified distribution.
assert functools.reduce(
lambda x, y: x and y,
map(
lambda actual, expected: actual >= expected,
in_degrees,
self.solving_context.degree_distribution,
),
True,
), "Expected degree distribution {0}, but got {1}".format(
self.solving_context.degree_distribution, in_degrees
)
def validate(self) -> bool:
self.validate_nodes()
self.validate_stubs()
self.validate_depth()
self.validate_degree()
return True
def __str__(self) -> str:
return (
"<?hh\n"
+ "\n".join(str(x) for x in self.class_objs.values())
+ "\n"
+ "\n".join(str(x) for x in self.interface_objs.values())
+ "\n"
+ "".join([str(x) + "\n" for x in self.function_objs.values()])
)
def on_model(self, m: clingo.Model) -> bool:
# The same set of parameters and search algorithm will produce the same
# result set. To make sure two different agents using the same settings
# can produce different output, we count models in the result set. The
# first agent using the same configuration gets the first one, the second
# agent using the same configuration gets the second one, and so forth.
self.model_count -= 1
if self.model_count > 0:
return True
# Separate into 'class(?)', 'interface(?)', 'funcs(?)',
# 'implements(?, ?)', 'extends(?, ?)', 'add_method(?, ?)',
# 'add_static_method(?, ?)', 'has_method_with_parameter(?, ?)'
# 'invokes_function(?, ?)', 'creates_in_body(?, ?)'
# 'invokes_in_method(?, ?, ?)', 'invokes_static_method(?, ?, ?)'
# 'invokes_in_body(?, ?, ?)', 'has_parameter_and_argument(?, ?, ?)'
predicates = m.symbols(atoms=True)
node_func = {
"class": self._add_class,
"interface": self._add_interface,
"funcs": self._add_function,
}
edge_func = {
"extends": self._add_extend,
"implements": self._add_implement,
"add_method": self._add_method,
"add_static_method": self._add_static_method,
"has_method_with_parameter": self._add_to_parameter_set,
"invokes_function": self._add_invoke_function,
"creates_in_body": self._add_object_in_function,
}
trip_func = {
"invokes_in_method": self._add_invoke,
"invokes_static_method": self._add_invoke_static_method,
"invokes_in_body": self._add_invoke_in_function,
"has_parameter_and_argument": self._add_parameter_to_function,
}
# Three passes,
# First pass creates individual nodes like class, interface, function.
for predicate in predicates:
if predicate.name in node_func:
node_func[predicate.name](predicate.arguments[0].string)
# Second pass creates edge between two nodes.
for predicate in predicates:
if predicate.name in edge_func:
edge_func[predicate.name](
predicate.arguments[0].string, predicate.arguments[1].string
)
# Third pass creates relationships between three nodes.
for predicate in predicates:
if predicate.name in trip_func:
trip_func[predicate.name](
predicate.arguments[0].string,
predicate.arguments[1].string,
predicate.arguments[2].string,
)
self._find_stubs()
return False |
Python | hhvm/hphp/hack/src/hh_codesynthesis/hh_codesynthesis.py | #!/usr/bin/env python3
# pyre-strict
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the "hack" directory of this source tree.
#
# This library constructs a logic program using the given parameters,
# invokes Clingo to solve the logic program to produce an agent graph,
# and then creates multiple agents in parallel to produce a valid codebase.
import sys
from typing import Any, Dict, List
from hphp.hack.src.hh_codesynthesis import agentGenerator
from hphp.hack.src.hh_codesynthesis.agentGraphGenerator import (
AgentGraphClingoContext,
AgentGraphGenerator,
generating_an_agent_graph,
)
from hphp.hack.src.hh_codesynthesis.codeGenerator import ClingoContext, CodeGenerator
from hphp.hack.src.hh_codesynthesis.hackGenerator import HackCodeGenerator
class Agent(object):
"""To hold all information belongs to one agent."""
def __init__(self, generator: CodeGenerator, solving_context: ClingoContext) -> None:
super(Agent, self).__init__()
self.generator = generator
self.solving_context = solving_context
def create_agent(
agents: List[Agent], agent_numbers: List[int], profiles: List[Dict[str, Any]]
) -> None:
for index, agent_number in enumerate(agent_numbers):
# The number of agents may be greater than the number of profiles,
# so we round-robin through the profiles for the agents here.
profile = profiles[index % len(profiles)]
solving_context = ClingoContext(
number_of_nodes=profile["number_of_nodes"],
min_depth=profile["min_depth"],
min_classes=profile["min_classes"],
min_interfaces=profile["min_interfaces"],
lower_bound=profile["lower_bound"],
higher_bound=profile["higher_bound"],
min_stub_classes=profile["min_stub_classes"],
min_stub_interfaces=profile["min_stub_interfaces"],
degree_distribution=profile["degree_distribution"],
)
generator = HackCodeGenerator(solving_context)
# To avoid two agents using the same profile producing the same output,
# we use model_count to enumerate the next solution with this profile.
# on_model keeps the model_count-th model, so the count is 1-based.
generator.model_count = index // len(profiles) + 1
combined_rules = agentGenerator.generate_logic_rules(
solving_context, f"A{agent_number}"
)
agents[agent_number] = Agent(generator, solving_context)
agentGenerator.do_reasoning(combined_rules, generator)
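# For example (illustrative): with agent_numbers = [3, 7, 9] and two profiles,
# agent 3 gets profile 0, agent 7 gets profile 1, and agent 9 wraps around to
# profile 0 again but with a later model_count, so it enumerates a different
# solution from agent 3 instead of duplicating its output.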
def main() -> None:
# [ToDo] Parse the JSON configuration file later. Mock an object for now.
config = {
"number_of_agents": 100,
"number_of_infra_agents": 20,
"number_of_product_agents": 40,
"number_of_leaves": 30,
"agent_distribution": [5, 10, 10, 10, 10, 10, 10, 15, 20],
"infra_agent_indegrees": [0, 10],
"infra_agent_outdegrees": [1, 10],
"product_agent_indegrees": [1, 10],
"product_agent_outdegrees": [0, 5],
"infra_agent_profiles": [
{
"number_of_nodes": 12,
"min_depth": 3,
"min_classes": 3,
"min_interfaces": 4,
"lower_bound": 1,
"higher_bound": 5,
"min_stub_classes": 4,
"min_stub_interfaces": 1,
"degree_distribution": [1, 3, 5],
},
{
"number_of_nodes": 12,
"min_depth": 3,
"min_classes": 3,
"min_interfaces": 4,
"lower_bound": 1,
"higher_bound": 5,
"min_stub_classes": 4,
"min_stub_interfaces": 1,
"degree_distribution": [1, 3, 5],
},
],
"product_agent_profiles": [
{
"number_of_nodes": 12,
"min_depth": 3,
"min_classes": 3,
"min_interfaces": 4,
"lower_bound": 1,
"higher_bound": 5,
"min_stub_classes": 4,
"min_stub_interfaces": 1,
"degree_distribution": [1, 3, 5],
},
{
"number_of_nodes": 12,
"min_depth": 3,
"min_classes": 3,
"min_interfaces": 4,
"lower_bound": 1,
"higher_bound": 5,
"min_stub_classes": 4,
"min_stub_interfaces": 1,
"degree_distribution": [1, 3, 5],
},
{
"number_of_nodes": 12,
"min_depth": 3,
"min_classes": 3,
"min_interfaces": 4,
"lower_bound": 1,
"higher_bound": 5,
"min_stub_classes": 4,
"min_stub_interfaces": 1,
"degree_distribution": [1, 3, 5],
},
],
}
# Setup agent graph generator and context.
agent_graph_generator = AgentGraphGenerator(
agent_distribution=config["agent_distribution"],
solving_context=AgentGraphClingoContext(
number_of_infra_agents=config["number_of_infra_agents"],
number_of_product_agents=config["number_of_product_agents"],
number_of_leaves=config["number_of_leaves"],
infra_agent_profile={
"in_degree": config["infra_agent_indegrees"],
"out_degree": config["infra_agent_outdegrees"],
},
product_agent_profile={
"in_degree": config["product_agent_indegrees"],
"out_degree": config["product_agent_outdegrees"],
},
),
)
agent_graph_generator.on_model = agent_graph_generator.generate
# Creating agent graph.
generating_an_agent_graph(agent_graph_generator)
# Using agent graph to create each agent.
agents: List[Agent] = [None] * config["number_of_agents"]
# [ToDo] Parallel run this section.
create_agent(
agents, agent_graph_generator.infra_agents, config["infra_agent_profiles"]
)
create_agent(
agents, agent_graph_generator.product_agents, config["product_agent_profiles"]
)
# Connecting each agent.
# Using the edge relationship in the agent graph, we are connecting the
# stub classes/interfaces in each agent to the parent agent.
for agent_number, edge in enumerate(agent_graph_generator.edges):
agent = agents[agent_number]
number_of_parent_agents = len(edge)
# If this agent is a root agent, no parents exist.
if number_of_parent_agents == 0:
continue
# We are using uniform distribution to handle the case where one agent
# could depend on multiple agents. This can be changed later.
parents = list(edge)
for index, stub in enumerate(agent.generator.stub_classes):
# Choose a class in the parent agent.
parent = parents[index % number_of_parent_agents]
parent_class = list(agents[parent].generator.class_objs)[
index
// number_of_parent_agents
% len(agents[parent].generator.class_objs)
]
# Stub class extends the chosen one.
agent.generator._add_extend(stub, parent_class)
for index, stub in enumerate(agent.generator.stub_interfaces):
# Choose an interface in the parent agent.
parent = parents[index % number_of_parent_agents]
parent_interface = list(agents[parent].generator.interface_objs)[
index
// number_of_parent_agents
% len(agents[parent].generator.interface_objs)
]
# Stub interface extends the chosen one.
agent.generator._add_extend(stub, parent_interface)
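# For example (illustrative): with two parent agents [p0, p1] and five stub
# classes, stubs 0, 2, 4 extend classes in p0 and stubs 1, 3 extend classes
# in p1; within p0, stub 0 picks class 0, stub 2 picks class 1, and stub 4
# picks class 2 (wrapping around via the modulo).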
# Output agents to each file.
for agent_number, agent in enumerate(agents):
agentGenerator.output_to_file_or_stdout(
agent.generator, f"agent_{agent_number}.php"
)
if __name__ == "__main__":
sys.exit(main()) |
OCaml | hhvm/hphp/hack/src/hh_fanout/build.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
external hh_fanout_build_main :
bool -> string option -> string option -> string option -> string -> unit
= "hh_fanout_build_main"
let go ~allow_empty ~incremental ~edges_dir ~delta_file ~output =
hh_fanout_build_main allow_empty incremental edges_dir delta_file output;
Lwt.return_unit |
OCaml | hhvm/hphp/hack/src/hh_fanout/calculate_fanout.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
module Detail_level = struct
type t =
| Low
| High
end
type symbol_edge = {
symbol_type: FileInfo.name_type;
symbol_name: string;
symbol_dep: Typing_deps.Dep.dependency Typing_deps.Dep.variant;
}
type changed_symbol = {
symbol_edge: symbol_edge;
num_outgoing_edges: int option;
outgoing_files: Relative_path.Set.t option;
}
type explanation = {
removed_symbols: changed_symbol list;
modified_symbols: changed_symbol list;
added_symbols: changed_symbol list;
}
type result = {
fanout_dependents: Typing_deps.DepSet.t;
fanout_files: Relative_path.Set.t;
explanations: explanation Relative_path.Map.t;
telemetry: Telemetry.t;
}
let explanation_to_json (explanation : explanation) : Hh_json.json =
let changed_symbol_to_json
{
symbol_edge = { symbol_type; symbol_name; _ };
num_outgoing_edges;
outgoing_files;
} =
Hh_json.JSON_Object
[
("type", Hh_json.JSON_String (FileInfo.show_name_type symbol_type));
("name", Hh_json.JSON_String symbol_name);
( "num_outgoing_edges",
Option.value_map
num_outgoing_edges
~f:Hh_json.int_
~default:Hh_json.JSON_Null );
( "outgoing_files",
Option.value_map
outgoing_files
~default:Hh_json.JSON_Null
~f:(fun outgoing_files ->
Hh_json.JSON_Array
(Relative_path.Set.fold
outgoing_files
~init:[]
~f:(fun path acc ->
let path = Relative_path.suffix path in
Hh_json.JSON_String path :: acc))) );
]
in
Hh_json.JSON_Object
[
( "added_symbols",
Hh_json.JSON_Array
(List.map ~f:changed_symbol_to_json explanation.added_symbols) );
( "modified_symbols",
Hh_json.JSON_Array
(List.map ~f:changed_symbol_to_json explanation.modified_symbols) );
( "removed_symbols",
Hh_json.JSON_Array
(List.map ~f:changed_symbol_to_json explanation.removed_symbols) );
]
let get_symbol_edges_for_file_info (file_info : FileInfo.t) : symbol_edge list =
let make_edges ~symbol_type ~ids ~f =
List.map ids ~f:(fun (_, symbol_name, _) ->
{ symbol_type; symbol_name; symbol_dep = f symbol_name })
in
List.concat
[
make_edges
~symbol_type:FileInfo.Class
~ids:file_info.FileInfo.classes
~f:(fun name -> Typing_deps.Dep.Type name);
make_edges
~symbol_type:FileInfo.Fun
~ids:file_info.FileInfo.funs
~f:(fun name -> Typing_deps.Dep.Fun name);
make_edges
~symbol_type:FileInfo.Const
~ids:file_info.FileInfo.consts
~f:(fun name -> Typing_deps.Dep.GConst name);
make_edges
~symbol_type:FileInfo.Typedef
~ids:file_info.FileInfo.typedefs
~f:(fun name -> Typing_deps.Dep.Type name);
]
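(** Convert every symbol in [file_info] into its dependency-graph node,
    returning the combined dep-set plus a per-symbol debug record. With
    [Detail_level.High], each symbol's outgoing edges are additionally
    traversed to count them and resolve them to files (potentially
    expensive); with [Detail_level.Low], those fields are left as [None]. *)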
let file_info_to_dep_set
~(detail_level : Detail_level.t)
~(deps_mode : Typing_deps_mode.t)
(naming_table : Naming_table.t)
(file_info : FileInfo.t) : Typing_deps.DepSet.t * changed_symbol list =
List.fold
(get_symbol_edges_for_file_info file_info)
~init:(Typing_deps.(DepSet.make ()), [])
~f:(fun (dep_set, changed_symbols) symbol_edge ->
let symbol_dep = Typing_deps.(Dep.make symbol_edge.symbol_dep) in
let dep_set = Typing_deps.DepSet.add dep_set symbol_dep in
let changed_symbol =
match detail_level with
| Detail_level.Low ->
{ symbol_edge; num_outgoing_edges = None; outgoing_files = None }
| Detail_level.High ->
let outgoing_edges =
symbol_dep
|> Typing_deps.DepSet.singleton
|> Typing_deps.add_all_deps deps_mode
in
{
symbol_edge;
num_outgoing_edges =
Some (Typing_deps.DepSet.cardinal outgoing_edges);
outgoing_files =
Some
(Naming_table.get_64bit_dep_set_files
naming_table
outgoing_edges);
}
in
let changed_symbols = changed_symbol :: changed_symbols in
(dep_set, changed_symbols))
(** Given a file and the symbols that used to be contained in that file, find
the symbols that are currently in the file and construct a set of
dependencies that can be traversed to find the fanout of the changes to those
symbols. *)
let calculate_dep_set_for_path
~(detail_level : Detail_level.t)
~(deps_mode : Typing_deps_mode.t)
~(old_naming_table : Naming_table.t)
~(new_naming_table : Naming_table.t)
~(path : Relative_path.t)
~(delta : 'a Naming_sqlite.forward_naming_table_delta) :
Typing_deps.DepSet.t * explanation =
let (old_deps, old_symbols) =
Naming_table.get_file_info old_naming_table path
|> Option.map
~f:(file_info_to_dep_set ~detail_level ~deps_mode old_naming_table)
|> Option.value ~default:(Typing_deps.(DepSet.make ()), [])
in
let (new_deps, new_symbols) =
match delta with
| Naming_sqlite.Modified new_file_info ->
file_info_to_dep_set
~detail_level
~deps_mode
new_naming_table
new_file_info
| Naming_sqlite.Deleted -> (Typing_deps.(DepSet.make ()), [])
in
(* NB: could be optimized by constructing sets or by not using polymorphic
equality. *)
let (modified_symbols, removed_symbols) =
List.partition_tf old_symbols ~f:(fun old_symbol ->
List.exists new_symbols ~f:(fun new_symbol ->
Poly.(old_symbol.symbol_edge = new_symbol.symbol_edge)))
in
let added_symbols =
List.filter new_symbols ~f:(fun new_symbol ->
not
(List.exists old_symbols ~f:(fun old_symbol ->
Poly.(old_symbol.symbol_edge = new_symbol.symbol_edge))))
in
let explanation = { removed_symbols; modified_symbols; added_symbols } in
(Typing_deps.DepSet.union old_deps new_deps, explanation)
let go
~(detail_level : Detail_level.t)
~(deps_mode : Typing_deps_mode.t)
~(old_naming_table : Naming_table.t)
~(new_naming_table : Naming_table.t)
~(file_deltas : Naming_sqlite.file_deltas)
~(input_files : Relative_path.Set.t) : result =
let calculate_dep_set_telemetry = Telemetry.create () in
let start_time = Unix.gettimeofday () in
let (fanout_dependencies, explanations) =
Relative_path.Set.fold
input_files
~init:(Typing_deps.(DepSet.make ()), Relative_path.Map.empty)
~f:(fun path (fanout_dependencies, explanations) ->
let delta =
match Relative_path.Map.find_opt file_deltas path with
| Some delta -> delta
| None ->
failwith
  (Printf.sprintf
     ("Input path %s was not in the map of `file_deltas`. "
     ^^ "This is an internal invariant failure -- please report it. "
     ^^ "This means that we can't process it, "
     ^^ "as we haven't calculated its `FileInfo.t`. "
     ^^ "The caller should have included any elements in `input_files` "
     ^^ "when performing the calculation of `file_deltas`.")
     (Relative_path.to_absolute path))
in
let (file_deps, explanation) =
calculate_dep_set_for_path
~detail_level
~deps_mode
~old_naming_table
~new_naming_table
~path
~delta
in
let fanout_dependencies =
Typing_deps.DepSet.union fanout_dependencies file_deps
in
( fanout_dependencies,
Relative_path.Map.add explanations ~key:path ~data:explanation ))
in
(* We have the dependencies -- now traverse the dependency graph to get
their dependents. *)
let fanout_dependents =
Typing_deps.add_all_deps deps_mode fanout_dependencies
in
let calculate_dep_set_telemetry =
Telemetry.duration ~start_time calculate_dep_set_telemetry
in
let calculate_fanout_telemetry = Telemetry.create () in
let start_time = Unix.gettimeofday () in
let fanout_files =
Naming_table.get_64bit_dep_set_files new_naming_table fanout_dependents
in
let calculate_fanout_telemetry =
Telemetry.duration ~start_time calculate_fanout_telemetry
in
let telemetry = Telemetry.create () in
let telemetry =
Telemetry.object_
telemetry
~key:"calculate_dep_set"
~value:calculate_dep_set_telemetry
in
let telemetry =
Telemetry.object_
telemetry
~key:"calculate_fanout"
~value:calculate_fanout_telemetry
in
{ fanout_dependents; fanout_files; explanations; telemetry } |
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/calculate_fanout.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module Detail_level : sig
type t =
| Low
| High
end
type symbol_edge = {
symbol_type: FileInfo.name_type;
symbol_name: string;
symbol_dep: Typing_deps.Dep.dependency Typing_deps.Dep.variant;
}
type changed_symbol = {
symbol_edge: symbol_edge;
num_outgoing_edges: int option;
outgoing_files: Relative_path.Set.t option;
}
type explanation = {
removed_symbols: changed_symbol list;
modified_symbols: changed_symbol list;
added_symbols: changed_symbol list;
}
type result = {
fanout_dependents: Typing_deps.DepSet.t;
(** The set of dependents in the fanout. *)
fanout_files: Relative_path.Set.t;
(** The set of files that are in the fanout of the changed files, based on
how the symbols in those files changed. *)
explanations: explanation Relative_path.Map.t;
(** Explanations of why each changed file produced its fanout files. *)
telemetry: Telemetry.t; (** Telemetry. *)
}
val explanation_to_json : explanation -> Hh_json.json
val go :
detail_level:Detail_level.t ->
deps_mode:Typing_deps_mode.t ->
old_naming_table:Naming_table.t ->
new_naming_table:Naming_table.t ->
file_deltas:Naming_sqlite.file_deltas ->
input_files:Relative_path.Set.t ->
result
val get_symbol_edges_for_file_info : FileInfo.t -> symbol_edge list
val file_info_to_dep_set :
detail_level:Detail_level.t ->
deps_mode:Typing_deps_mode.t ->
Naming_table.t ->
FileInfo.t ->
Typing_deps.DepSet.t * changed_symbol list |
OCaml | hhvm/hphp/hack/src/hh_fanout/debug_fanout.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module Hh_bucket = Bucket
open Hh_prelude
type dependent = Typing_deps.Dep.dependent Typing_deps.Dep.variant
type dependency = Typing_deps.Dep.dependency Typing_deps.Dep.variant
let compare_dependency = Typing_deps.Dep.compare_variant
let compare_dependent = Typing_deps.Dep.compare_variant
module DepEdge = struct
type t = {
dependent: dependent;
dependency: dependency;
}
[@@deriving ord]
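(* Core's [Set.Make] requires sexp converters alongside [compare]; the two
   functions below are deliberate placeholders, since [DepEdge] values are
   never actually round-tripped through sexps. *)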
let t_of_sexp _ =
{
dependency = Typing_deps.Dep.Fun "_";
dependent = Typing_deps.Dep.Fun "_";
}
let sexp_of_t _ = Sexp.Atom ""
end
module DepEdgeSet = Set.Make (DepEdge)
type result = {
dependencies: dependency list;
fanout_dependents: Typing_deps.DepSet.t;
relevant_dep_edges: DepEdgeSet.t;
}
let result_to_json (result : result) : Hh_json.json =
let dep_to_json dep =
Hh_json.JSON_Object
[
("variant", Hh_json.JSON_String (Typing_deps.Dep.variant_to_string dep));
( "hash",
Hh_json.JSON_String
(Typing_deps.(Dep.make dep) |> Typing_deps.Dep.to_debug_string) );
]
in
let dep_edge_to_json { DepEdge.dependent; dependency } =
Hh_json.JSON_Object
[
("dependent", dep_to_json dependent);
("dependency", dep_to_json dependency);
]
in
let { dependencies; fanout_dependents; relevant_dep_edges } = result in
Hh_json.JSON_Object
[
("dependencies", Hh_json.JSON_Array (List.map dependencies ~f:dep_to_json));
( "fanout_dependents",
Hh_json.JSON_Array
(fanout_dependents
|> Typing_deps.DepSet.elements
|> List.map ~f:Typing_deps.Dep.to_debug_string
|> List.sort ~compare:String.compare
|> List.map ~f:(fun dep -> Hh_json.JSON_String dep)) );
( "relevant_dep_edges",
Hh_json.JSON_Array
(DepEdgeSet.elements relevant_dep_edges
|> List.map ~f:dep_edge_to_json) );
]
let calculate_dep_edges
~(ctx : Provider_context.t) _acc (paths : Relative_path.t list) :
DepEdge.t HashSet.t list =
List.map paths ~f:(fun path ->
let dep_edges = HashSet.create () in
Typing_deps.add_dependency_callback
~name:"hh_fanout debug collect deps"
(fun dependent dependency ->
HashSet.add dep_edges { DepEdge.dependent; dependency });
let (ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
(match Provider_context.read_file_contents entry with
| Some _ ->
let _result : Tast_provider.Compute_tast_and_errors.t =
Tast_provider.compute_tast_and_errors_unquarantined ~ctx ~entry
in
()
| None -> ());
dep_edges)
let go
~(ctx : Provider_context.t)
~(workers : MultiWorker.worker list)
~(old_naming_table : Naming_table.t)
~(new_naming_table : Naming_table.t)
~(file_deltas : Naming_sqlite.file_deltas)
~(path : Relative_path.t) : result =
let deps_mode = Provider_context.get_deps_mode ctx in
let { Calculate_fanout.fanout_dependents; fanout_files; explanations; _ } =
Calculate_fanout.go
~deps_mode
~detail_level:Calculate_fanout.Detail_level.Low
~old_naming_table
~new_naming_table
~file_deltas
~input_files:(Relative_path.Set.singleton path)
in
let explanation = Relative_path.Map.find explanations path in
let { Calculate_fanout.removed_symbols; modified_symbols; added_symbols } =
explanation
in
let dependencies =
List.map
removed_symbols
~f:(fun Calculate_fanout.{ symbol_edge = { symbol_dep; _ }; _ } ->
symbol_dep)
@ List.map
modified_symbols
~f:(fun Calculate_fanout.{ symbol_edge = { symbol_dep; _ }; _ } ->
symbol_dep)
@ List.map
added_symbols
~f:(fun Calculate_fanout.{ symbol_edge = { symbol_dep; _ }; _ } ->
symbol_dep)
in
let relevant_dep_edges =
MultiWorker.call
(Some workers)
~job:(calculate_dep_edges ~ctx)
~neutral:(HashSet.create ())
~merge:(fun dependencies acc ->
List.iter dependencies ~f:(fun dependency_set ->
HashSet.union acc ~other:dependency_set);
acc)
~next:
(Hh_bucket.make
(Relative_path.Set.elements fanout_files)
~num_workers:(List.length workers))
in
HashSet.filter relevant_dep_edges ~f:(fun { DepEdge.dependent; _ } ->
let open Typing_deps in
DepSet.mem fanout_dependents (Dep.make dependent));
let relevant_dep_edges =
HashSet.to_list relevant_dep_edges |> DepEdgeSet.of_list
in
{ dependencies; fanout_dependents; relevant_dep_edges } |
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/debug_fanout.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type result
val go :
ctx:Provider_context.t ->
workers:MultiWorker.worker list ->
old_naming_table:Naming_table.t ->
new_naming_table:Naming_table.t ->
file_deltas:Naming_sqlite.file_deltas ->
path:Relative_path.t ->
result
val result_to_json : result -> Hh_json.json |
OCaml | hhvm/hphp/hack/src/hh_fanout/dep_graph_is_subgraph.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
external hh_fanout_dep_graph_is_subgraph_main : string -> string -> unit
= "hh_fanout_dep_graph_is_subgraph_main"
let go ~sub ~super =
hh_fanout_dep_graph_is_subgraph_main sub super;
Lwt.return_unit |
OCaml | hhvm/hphp/hack/src/hh_fanout/dep_graph_stats.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
external hh_fanout_dep_graph_stats_main : string -> unit
= "hh_fanout_dep_graph_stats_main"
let go ~dep_graph =
hh_fanout_dep_graph_stats_main dep_graph;
Lwt.return_unit |
hhvm/hphp/hack/src/hh_fanout/dune | (executable
(name hh_fanout)
(link_flags
(:standard
(:include ../dune_config/ld-opts.sexp)))
(modes exe byte_complete)
(libraries
batch_init
client_ide_service
cmdliner
default_injector_config
hh_fanout_build_rust
hh_fanout_dep_graph_is_subgraph_rust
hh_fanout_dep_graph_stats_rust
hhi
lwt
provider_utils
relative_path
server
server_command_types
server_env
server_env_build
sys_utils
temp_file
utils_core)
(preprocess
(pps lwt_ppx ppx_let ppx_deriving.std)))
(data_only_dirs
cargo
hh_fanout_build_rust
hh_fanout_dep_graph_is_subgraph_rust
hh_fanout_dep_graph_stats_rust)
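; Each Rust helper below follows the same pattern: an empty wrapper library
; exposing a foreign archive, plus a rule that builds that archive by
; invoking cargo through invoke_cargo.sh.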
(library
(name hh_fanout_build_rust)
(modules)
(wrapped false)
(foreign_archives hh_fanout_build_rust))
(rule
(targets libhh_fanout_build_rust.a)
(deps
(source_tree %{workspace_root}/hack/src))
(locks /cargo)
(action
(run
%{workspace_root}/hack/scripts/invoke_cargo.sh
hh_fanout_build_rust
hh_fanout_build_rust)))
(library
(name hh_fanout_dep_graph_is_subgraph_rust)
(modules)
(wrapped false)
(foreign_archives hh_fanout_dep_graph_is_subgraph_rust))
(rule
(targets libhh_fanout_dep_graph_is_subgraph_rust.a)
(deps
(source_tree %{workspace_root}/hack/src))
(locks /cargo)
(action
(run
%{workspace_root}/hack/scripts/invoke_cargo.sh
hh_fanout_dep_graph_is_subgraph_rust
hh_fanout_dep_graph_is_subgraph_rust)))
(library
(name hh_fanout_dep_graph_stats_rust)
(modules)
(wrapped false)
(foreign_archives hh_fanout_dep_graph_stats_rust))
(rule
(targets libhh_fanout_dep_graph_stats_rust.a)
(deps
(source_tree %{workspace_root}/hack/src))
(locks /cargo)
(action
(run
%{workspace_root}/hack/scripts/invoke_cargo.sh
hh_fanout_dep_graph_stats_rust
hh_fanout_dep_graph_stats_rust))) |
OCaml | hhvm/hphp/hack/src/hh_fanout/hh_fanout.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type env = {
client_id: string;
root: Path.t;
ignore_hh_version: bool;
detail_level: Calculate_fanout.Detail_level.t;
naming_table_path: Path.t option;
dep_table_path: Path.t option;
watchman_sockname: Path.t option;
changed_files: Relative_path.Set.t;
state_path: Path.t option;
}
type setup_result = {
workers: MultiWorker.worker list;
ctx: Provider_context.t;
}
type saved_state_result = {
naming_table: Naming_table.t;
naming_table_path: Path.t;
dep_table_path: Path.t;
errors_path: Path.t;
saved_state_changed_files: Relative_path.Set.t;
setup_result: setup_result;
}
type cursor_reference =
| Cursor_reference_from_saved_state of saved_state_result
| Cursor_reference_id of string
let create_global_env (env : env) : ServerEnv.genv =
let server_args =
ServerArgs.default_options_with_check_mode ~root:(Path.to_string env.root)
in
let (server_config, server_local_config) =
ServerConfig.load ~silent:false server_args
in
ServerEnvBuild.make_genv server_args server_config server_local_config []
let set_up_global_environment (env : env) ~(deps_mode : Typing_deps_mode.t) :
setup_result =
let genv = create_global_env env (* no workers *) in
let server_config = genv.ServerEnv.config in
let popt = ServerConfig.parser_options genv.ServerEnv.config in
let tcopt = ServerConfig.typechecker_options genv.ServerEnv.config in
let (ctx, workers, _time_taken) =
Batch_init.init
~root:env.root
~shmem_config:(ServerConfig.sharedmem_config server_config)
~popt
~tcopt
~deps_mode
(Unix.gettimeofday ())
in
{ workers; ctx }
let load_saved_state ~(env : env) : saved_state_result Lwt.t =
let genv = create_global_env env in
let ssopt =
genv.ServerEnv.local_config.ServerLocalConfig.saved_state
|> GlobalOptions.with_log_saved_state_age_and_distance false
in
let%lwt ( naming_table_path,
naming_table_changed_files,
dep_table_path,
errors_path,
dep_table_changed_files ) =
match (env.naming_table_path, env.dep_table_path) with
| (Some naming_table_path, Some dep_table_path) ->
let errors_path =
dep_table_path
|> Path.to_string
|> Filename.split_extension
|> fst
|> SaveStateService.get_errors_filename
|> Path.make
in
Lwt.return (naming_table_path, [], dep_table_path, errors_path, [])
| (Some naming_table_path, None) ->
let%lwt dep_table_saved_state =
State_loader_lwt.load
~ssopt
~progress_callback:(fun _ -> ())
~watchman_opts:
Saved_state_loader.Watchman_options.
{ root = env.root; sockname = env.watchman_sockname }
~ignore_hh_version:env.ignore_hh_version
~saved_state_type:Saved_state_loader.Naming_and_dep_table_distc
in
(match dep_table_saved_state with
| Error load_error ->
failwith
(Printf.sprintf
"Failed to load dep-table saved-state, and saved-state files were not manually provided on command-line: %s"
(Saved_state_loader.LoadError.debug_details_of_error load_error))
| Ok { Saved_state_loader.main_artifacts; changed_files; _ } ->
let open Saved_state_loader.Naming_and_dep_table_info in
Lwt.return
( naming_table_path,
[],
main_artifacts.dep_table_path,
main_artifacts.errors_path,
changed_files ))
| (None, Some dep_table_path) ->
let%lwt naming_table_saved_state =
State_loader_lwt.load
~ssopt
~progress_callback:(fun _ -> ())
~watchman_opts:
Saved_state_loader.Watchman_options.
{ root = env.root; sockname = env.watchman_sockname }
~ignore_hh_version:env.ignore_hh_version
~saved_state_type:Saved_state_loader.Naming_and_dep_table_distc
in
(match naming_table_saved_state with
| Error load_error ->
failwith
(Printf.sprintf
"Failed to load naming-table saved-state, and saved-state files were not manually provided on command-line: %s"
(Saved_state_loader.LoadError.debug_details_of_error load_error))
| Ok { Saved_state_loader.main_artifacts; changed_files; _ } ->
let errors_path =
dep_table_path
|> Path.to_string
|> Filename.split_extension
|> fst
|> SaveStateService.get_errors_filename
|> Path.make
in
Lwt.return
( main_artifacts
.Saved_state_loader.Naming_and_dep_table_info
.naming_sqlite_table_path,
changed_files,
dep_table_path,
errors_path,
[] ))
| (None, None) ->
let%lwt saved_state =
State_loader_lwt.load
~ssopt
~progress_callback:(fun _ -> ())
~watchman_opts:
Saved_state_loader.Watchman_options.
{ root = env.root; sockname = env.watchman_sockname }
~ignore_hh_version:env.ignore_hh_version
~saved_state_type:Saved_state_loader.Naming_and_dep_table_distc
in
(match saved_state with
| Error load_error ->
failwith
(Printf.sprintf
"Failed to load naming-table saved-state, and saved-state files were not manually provided on command-line: %s"
(Saved_state_loader.LoadError.debug_details_of_error load_error))
| Ok { Saved_state_loader.main_artifacts; changed_files; _ } ->
let open Saved_state_loader.Naming_and_dep_table_info in
Lwt.return
( main_artifacts.naming_sqlite_table_path,
changed_files,
main_artifacts.dep_table_path,
main_artifacts.errors_path,
changed_files ))
in
let changed_files =
Relative_path.Set.union
(Relative_path.Set.of_list naming_table_changed_files)
(Relative_path.Set.of_list dep_table_changed_files)
in
let changed_files =
Relative_path.Set.filter changed_files ~f:(fun path ->
FindUtils.file_filter (Relative_path.to_absolute path))
in
let deps_mode =
Typing_deps_mode.InMemoryMode (Some (Path.to_string dep_table_path))
in
let setup_result = set_up_global_environment env ~deps_mode in
let naming_table =
Naming_table.load_from_sqlite
setup_result.ctx
(Path.to_string naming_table_path)
in
let naming_table =
Relative_path.Set.fold
changed_files
~init:naming_table
~f:(fun path naming_table ->
let { ClientIdeIncremental.naming_table; _ } =
ClientIdeIncremental.update_naming_tables_for_changed_file
~ctx:setup_result.ctx
~naming_table
~sienv:SearchUtils.quiet_si_env
~path
in
naming_table)
in
Lwt.return
{
naming_table;
naming_table_path;
dep_table_path;
errors_path;
saved_state_changed_files = changed_files;
setup_result;
}
let get_state_path ~(env : env) : Path.t =
match env.state_path with
| Some state_path -> state_path
| None ->
let state_path = Path.make "/tmp/hh_fanout" in
let state_path =
Path.concat state_path (Path.slash_escaped_string_of_path env.root)
in
let state_path =
Path.concat
state_path
(match Build_banner.banner with
| Some banner -> banner
| None -> "development")
in
let state_path = Path.concat state_path "hh_fanout_state" in
state_path
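(* For example (hypothetical root and banner), the default state path looks
   like: /tmp/hh_fanout/<slash-escaped-root>/<banner-or-"development">/hh_fanout_state *)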
let make_incremental_state ~(env : env) : Incremental.state =
let state_path = get_state_path ~env in
Hh_logger.log "State path: %s" (Path.to_string state_path);
Incremental.make_reference_implementation state_path
let resolve_cursor_reference
~(env : env)
~(incremental_state : Incremental.state)
~(previous_cursor_reference : cursor_reference) :
Incremental.cursor * Relative_path.Set.t =
match previous_cursor_reference with
| Cursor_reference_from_saved_state saved_state_result ->
let client_id =
incremental_state#make_client_id
{
Incremental.client_id = env.client_id;
ignore_hh_version = env.ignore_hh_version;
dep_table_saved_state_path = saved_state_result.dep_table_path;
dep_table_errors_saved_state_path = saved_state_result.errors_path;
naming_table_saved_state_path =
Naming_sqlite.Db_path
(Path.to_string saved_state_result.naming_table_path);
deps_mode =
Provider_context.get_deps_mode saved_state_result.setup_result.ctx;
}
in
let cursor =
incremental_state#make_default_cursor client_id |> Result.ok_or_failwith
in
(cursor, saved_state_result.saved_state_changed_files)
| Cursor_reference_id cursor_id ->
let cursor =
incremental_state#look_up_cursor
~client_id:(Some (Incremental.Client_id env.client_id))
~cursor_id
|> Result.ok_or_failwith
in
(cursor, Relative_path.Set.empty)
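(* Advance [previous_cursor] by the union of (a) the files passed on the
   command line via `--changed-file`, (b) the files already recorded as
   changed on the previous cursor, and (c) the input files themselves, so
   the fanout is computed against a naming table reflecting every known
   change. *)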
let advance_cursor
~(env : env)
~(setup_result : setup_result)
~(previous_cursor : Incremental.cursor)
~(previous_changed_files : Relative_path.Set.t)
~(input_files : Relative_path.Set.t) : Incremental.cursor =
let cursor_changed_files =
previous_changed_files
|> Relative_path.Set.union env.changed_files
|> Relative_path.Set.union input_files
in
previous_cursor#advance
~detail_level:env.detail_level
setup_result.ctx
setup_result.workers
cursor_changed_files
let mode_calculate
~(env : env) ~(input_files : Path.Set.t) ~(cursor_id : string option) :
unit Lwt.t =
let telemetry = Telemetry.create () in
let incremental_state = make_incremental_state ~env in
let%lwt (previous_cursor, previous_changed_files, setup_result) =
match cursor_id with
| None ->
let%lwt saved_state_result = load_saved_state ~env in
let previous_cursor_reference =
Cursor_reference_from_saved_state saved_state_result
in
let (previous_cursor, previous_changed_files) =
resolve_cursor_reference
~env
~incremental_state
~previous_cursor_reference
in
Lwt.return
( previous_cursor,
previous_changed_files,
saved_state_result.setup_result )
| Some cursor_id ->
let previous_cursor_reference = Cursor_reference_id cursor_id in
let (previous_cursor, previous_changed_files) =
resolve_cursor_reference
~env
~incremental_state
~previous_cursor_reference
in
let deps_mode = previous_cursor#get_deps_mode in
let setup_result = set_up_global_environment env ~deps_mode in
Lwt.return (previous_cursor, previous_changed_files, setup_result)
in
let input_files =
Path.Set.fold input_files ~init:Relative_path.Set.empty ~f:(fun path acc ->
let path = Relative_path.create_detect_prefix (Path.to_string path) in
Relative_path.Set.add acc path)
in
let cursor =
advance_cursor
~env
~setup_result
~previous_cursor
~previous_changed_files
~input_files
in
let calculate_fanout_result = cursor#get_calculate_fanout_result in
let {
Calculate_fanout.fanout_dependents = _;
fanout_files;
explanations;
telemetry = calculate_fanout_telemetry;
} =
Option.value_exn
calculate_fanout_result
~message:
("Internal invariant failure -- "
^ "produced cursor did not have an associated `Calculate_fanout.result`"
)
in
let telemetry =
Telemetry.object_
telemetry
~key:"calculate_fanout"
~value:calculate_fanout_telemetry
in
let telemetry =
Telemetry.int_
telemetry
~key:"num_input_files"
~value:(Relative_path.Set.cardinal input_files)
in
let telemetry =
Telemetry.int_
telemetry
~key:"num_fanout_files"
~value:(Relative_path.Set.cardinal fanout_files)
in
let cursor_id = incremental_state#add_cursor cursor in
let json =
Hh_json.JSON_Object
[
( "files",
Hh_json.JSON_Array
(fanout_files
|> Relative_path.Set.elements
|> List.map ~f:Relative_path.to_absolute
|> List.map ~f:Hh_json.string_) );
( "explanations",
Hh_json.JSON_Object
(Relative_path.Map.fold explanations ~init:[] ~f:(fun k v acc ->
let path = Relative_path.suffix k in
let explanation = Calculate_fanout.explanation_to_json v in
(path, explanation) :: acc)) );
( "cursor",
let (Incremental.Cursor_id cursor_id) = cursor_id in
Hh_json.JSON_String cursor_id );
("telemetry", Telemetry.to_json telemetry);
]
in
Hh_json.json_to_multiline_output Out_channel.stdout json;
Lwt.return_unit
let mode_calculate_errors
~(env : env) ~(cursor_id : string option) ~(pretty_print : bool) :
unit Lwt.t =
let incremental_state = make_incremental_state ~env in
let cursor =
match cursor_id with
| Some cursor_id ->
incremental_state#look_up_cursor ~client_id:None ~cursor_id
| None ->
incremental_state#make_default_cursor
(Incremental.Client_id env.client_id)
in
let cursor =
match cursor with
| Error message -> failwith ("Cursor not found: " ^ message)
| Ok cursor -> cursor
in
let { ctx; workers } =
set_up_global_environment env ~deps_mode:cursor#get_deps_mode
in
let (errors, cursor) = cursor#calculate_errors ctx workers in
let cursor_id =
match cursor with
| Some cursor -> incremental_state#add_cursor cursor
| None ->
let cursor_id =
Option.value_exn
cursor_id
~message:
("Internal invariant failure -- "
^ "expected a new cursor to be generated, "
^ "given that no cursor was passed in.")
in
Incremental.Cursor_id cursor_id
in
let error_list =
errors |> Errors.get_sorted_error_list |> List.map ~f:User_error.to_absolute
in
(if pretty_print then
ServerError.print_error_list
stdout
~error_list
~stale_msg:None
~output_json:false
~save_state_result:None
~recheck_stats:None
else
let json =
ServerError.get_error_list_json
error_list
~save_state_result:None
~recheck_stats:None
in
let json =
match json with
| Hh_json.JSON_Object props ->
let props =
[
( "cursor",
let (Incremental.Cursor_id cursor_id) = cursor_id in
Hh_json.JSON_String cursor_id );
]
@ props
in
Hh_json.JSON_Object props
| _ -> failwith "Expected error JSON to be an object"
in
Hh_json.json_to_multiline_output Out_channel.stdout json);
Lwt.return_unit
let detail_level_arg =
Cmdliner.Arg.enum
[
("low", Calculate_fanout.Detail_level.Low);
("high", Calculate_fanout.Detail_level.High);
]
let env
from
client_id
root
detail_level
ignore_hh_version
naming_table_path
dep_table_path
watchman_sockname
changed_files
state_path =
let root =
Wwwroot.interpret_command_line_root_parameter (Option.to_list root)
in
(* Interpret relative paths with respect to the root from here on. That way,
we can write `hh_fanout --root ~/www foo/bar.php` and it will work regardless
of the directory that we invoked this executable from. *)
Sys.chdir (Path.to_string root);
Relative_path.set_path_prefix Relative_path.Root root;
Relative_path.set_path_prefix Relative_path.Hhi (Hhi.get_hhi_root ());
Relative_path.set_path_prefix Relative_path.Tmp (Path.make "/tmp");
let changed_files =
changed_files
|> Sys_utils.parse_path_list
|> List.filter ~f:FindUtils.file_filter
|> List.map ~f:(fun path -> Relative_path.create_detect_prefix path)
|> Relative_path.Set.of_list
in
let client_id =
(* We always require 'from'. We don't want to make the user write out a
client ID multiple times when they're using/debugging `hh_fanout`
interactively, so provide a default value in that case.
Most of the time, `from` and `client_id` will be the same anyway. An
example of reuse might occur when the IDE service wants to take advantage
of any work that the bulk typechecker has already done with regards to
updating the dependency graph. *)
Option.value client_id ~default:from
in
let naming_table_path = Option.map ~f:Path.make naming_table_path in
let dep_table_path = Option.map ~f:Path.make dep_table_path in
let watchman_sockname = Option.map ~f:Path.make watchman_sockname in
let state_path = Option.map ~f:Path.make state_path in
{
client_id;
root;
ignore_hh_version;
detail_level;
naming_table_path;
dep_table_path;
watchman_sockname;
changed_files;
state_path;
}
let env_t =
let open Cmdliner in
let open Cmdliner.Arg in
let from =
let doc = "A descriptive string indicating the caller of this program." in
required & opt (some string) None & info ["from"] ~doc ~docv:"FROM"
in
let client_id =
let doc =
String.strip
{|
A string identifying the caller of this program.
Use the same string across multiple callers to reuse hh_fanout cursors and intermediate results.
If not provided, defaults to the value for 'from'.
|}
in
let docv = "CLIENT-ID" in
value & opt (some string) None & info ["client-id"] ~doc ~docv
in
let root =
let doc =
"The root directory to run in. If not set, will attempt to locate one by searching upwards for an `.hhconfig` file."
in
let docv = "DIR" in
value & opt (some string) None & info ["root"] ~doc ~docv
in
let detail_level =
let doc =
"How much debugging output to include in the result. May slow down the query. The values are `low` or `high`."
in
let docv = "VERBOSITY" in
value
& opt detail_level_arg Calculate_fanout.Detail_level.Low
& info ["detail-level"] ~doc ~docv
in
let ignore_hh_version =
let doc =
"Skip the consistency check for the version that this program was built with versus the version of the server that built the saved-state."
in
value & flag & info ["ignore-hh-version"] ~doc
in
let naming_table_path =
let doc = "The path to the naming table SQLite saved-state." in
let docv = "PATH" in
value & opt (some string) None & info ["naming-table-path"] ~doc ~docv
in
let dep_table_path =
let doc = "The path to the dependency table saved-state." in
let docv = "PATH" in
value & opt (some string) None & info ["dep-table-path"] ~doc ~docv
in
let watchman_sockname =
let doc = "The path to the Watchman socket to use." in
let docv = "PATH" in
value & opt (some string) None & info ["watchman-sockname"] ~doc ~docv
in
let changed_files =
let doc =
String.strip
{|
A file which has changed since last time `hh_fanout` was invoked.
May be specified multiple times.
Not necessary for the caller to pass unless Watchman is unavailable.
|}
in
let docv = "PATH" in
(* Note: I think the following can be `file` as opposed to `string`, but
I'm staying faithful to the original CLI. *)
value & opt_all string [] & info ["changed-file"] ~doc ~docv
in
let state_path =
let doc =
String.strip
{|
The path to the persistent state on disk.
If not provided, will use the default path for the repository.
|}
in
let docv = "PATH" in
value & opt (some string) None & info ["state-path"] ~doc ~docv
in
Term.(
const env
$ from
$ client_id
$ root
$ detail_level
$ ignore_hh_version
$ naming_table_path
$ dep_table_path
$ watchman_sockname
$ changed_files
$ state_path)
let clean_subcommand =
let open Cmdliner in
let doc = "Delete any state files which hh_fanout uses from disk." in
let run env =
let state_path = get_state_path ~env in
Hh_logger.log "Deleting %s" (Path.to_string state_path);
Sys_utils.rm_dir_tree (Path.to_string state_path)
in
let info = Cmd.info "clean" ~doc ~sdocs:Manpage.s_common_options in
let term = Term.(const run $ env_t) in
Cmd.v info term
let calculate_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc = "Determines which files must be rechecked after a change." in
let input_files = value & pos_all string [] & info [] ~docv:"FILENAME" in
let cursor_id =
let doc = "The cursor that the previous request returned." in
value & opt (some string) None & info ["cursor"] ~doc ~docv:"CURSOR"
in
let run env input_files cursor_id =
let input_files =
input_files
|> Sys_utils.parse_path_list
|> List.filter ~f:FindUtils.file_filter
|> List.map ~f:Path.make
|> Path.Set.of_list
in
if Path.Set.is_empty input_files then
Hh_logger.warn "Warning: list of input files is empty.";
Lwt_utils.run_main (fun () -> mode_calculate ~env ~input_files ~cursor_id)
in
let info = Cmd.info "calculate" ~doc ~sdocs:Manpage.s_common_options in
let term = Term.(const run $ env_t $ input_files $ cursor_id) in
Cmd.v info term
let calculate_errors_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc = "Produce typechecking errors for the codebase." in
let cursor_id =
let doc =
String.strip
{|
The cursor returned by a previous request to `calculate`.
If not provided, uses the cursor corresponding to the saved-state.
|}
in
value & opt (some string) None & info ["cursor"] ~doc ~docv:"CURSOR"
in
let pretty_print =
let doc =
"Pretty-print the errors to stdout, rather than returning a JSON object."
in
value & flag & info ["pretty-print"] ~doc
in
let run env cursor_id pretty_print =
Lwt_utils.run_main (fun () ->
mode_calculate_errors ~env ~cursor_id ~pretty_print)
in
let info = Cmd.info "calculate-errors" ~doc ~sdocs:Manpage.s_common_options in
let term = Term.(const run $ env_t $ cursor_id $ pretty_print) in
Cmd.v info term
let mode_debug ~(env : env) ~(path : Path.t) ~(cursor_id : string option) :
unit Lwt.t =
let%lwt saved_state_result = load_saved_state ~env in
let previous_cursor_reference =
match cursor_id with
| Some _ ->
Hh_logger.warn
("A cursor ID was passed to `debug`, "
^^ "but loading from a previous cursor is not yet implemented.");
Cursor_reference_from_saved_state saved_state_result
| None -> Cursor_reference_from_saved_state saved_state_result
in
let path = Relative_path.create_detect_prefix (Path.to_string path) in
let input_files = Relative_path.Set.singleton path in
let incremental_state = make_incremental_state ~env in
let (previous_cursor, previous_changed_files) =
resolve_cursor_reference ~env ~incremental_state ~previous_cursor_reference
in
let cursor =
advance_cursor
~env
~setup_result:saved_state_result.setup_result
~previous_cursor
~previous_changed_files
~input_files
in
let file_deltas = cursor#get_file_deltas in
let new_naming_table =
Naming_table.update_from_deltas saved_state_result.naming_table file_deltas
in
let cursor_id = incremental_state#add_cursor cursor in
let json =
Debug_fanout.go
~ctx:saved_state_result.setup_result.ctx
~workers:saved_state_result.setup_result.workers
~old_naming_table:saved_state_result.naming_table
~new_naming_table
~file_deltas
~path
|> Debug_fanout.result_to_json
in
let json =
Hh_json.JSON_Object
[
( "cursor",
let (Incremental.Cursor_id cursor_id) = cursor_id in
Hh_json.JSON_String cursor_id );
("debug", json);
]
in
Hh_json.json_to_multiline_output Out_channel.stdout json;
Lwt.return_unit
let debug_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc =
"Produces debugging information about the fanout of a certain file."
in
let path = required & pos 0 (some string) None & info [] ~docv:"PATH" in
let cursor_id =
let doc = "The cursor that the previous request returned." in
value & opt (some string) None & info ["cursor"] ~doc ~docv:"CURSOR"
in
let run env path cursor_id =
let path = Path.make path in
Lwt_utils.run_main (fun () -> mode_debug ~env ~path ~cursor_id)
in
let info = Cmd.info "debug" ~doc ~sdocs:Manpage.s_common_options in
let term = Term.(const run $ env_t $ path $ cursor_id) in
Cmd.v info term
let mode_status ~(env : env) ~(cursor_id : string) : unit Lwt.t =
let incremental_state = make_incremental_state ~env in
let cursor =
incremental_state#look_up_cursor ~client_id:None ~cursor_id
|> Result.ok_or_failwith
in
let fanout_calculations =
cursor#get_calculate_fanout_results_since_last_typecheck
in
let%lwt () = Status.go fanout_calculations in
Lwt.return_unit
let status_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc =
"EXPERIMENTAL: Shows details about the files that need to be re-typechecked on the next `calculate-errors` call."
in
let cursor_id =
let doc = "The cursor that the previous request returned." in
required & opt (some string) None & info ["cursor"] ~doc ~docv:"CURSOR"
in
let run env cursor_id =
Lwt_utils.run_main (fun () -> mode_status ~env ~cursor_id)
in
let info = Cmd.info "status" ~doc ~sdocs:Manpage.s_common_options in
let term = Term.(const run $ env_t $ cursor_id) in
Cmd.v info term
let mode_query
~(env : env) ~(dep_hash : Typing_deps.Dep.t) ~(include_extends : bool) :
unit Lwt.t =
let%lwt (saved_state_result : saved_state_result) = load_saved_state ~env in
let json =
Query_fanout.go
~ctx:saved_state_result.setup_result.ctx
~dep_hash
~include_extends
|> Query_fanout.result_to_json
in
let json = Hh_json.JSON_Object [("result", json)] in
Hh_json.json_to_multiline_output Out_channel.stdout json;
Lwt.return_unit
let query_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc = "Get the edges for which the given input node is a dependency." in
let include_extends =
let doc =
"Traverse the extends dependencies for this node and include them in the output as well."
in
value & flag & info ["include-extends"] ~doc
in
let dep_hash = required & pos 0 (some string) None & info [] ~docv:"HASH" in
let run env include_extends dep_hash =
let dep_hash = Typing_deps.Dep.of_debug_string dep_hash in
Lwt_utils.run_main (fun () -> mode_query ~env ~dep_hash ~include_extends)
in
let info = Cmd.info "query" ~doc ~sdocs:Manpage.s_common_options in
let term = Term.(const run $ env_t $ include_extends $ dep_hash) in
Cmd.v info term
let mode_query_path
~(env : env) ~(source : Typing_deps.Dep.t) ~(dest : Typing_deps.Dep.t) :
unit Lwt.t =
let%lwt (saved_state_result : saved_state_result) = load_saved_state ~env in
let json =
Query_path.go ~ctx:saved_state_result.setup_result.ctx ~source ~dest
|> Query_path.result_to_json
in
let json = Hh_json.JSON_Object [("result", json)] in
Hh_json.json_to_multiline_output Out_channel.stdout json;
Lwt.return_unit
let query_path_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc =
"Find a path of dependencies edges leading from one node to another."
in
let man =
[
`S Manpage.s_description;
`P
(String.strip
{|
Produces a list of nodes in the dependency graph connected by typing- or
extends-dependency edges. This is a list of n nodes, where the leading pairs
are connected by extends-dependency edges, and the last pair is connected by
a typing-dependency edge.
|});
]
in
let source =
required & pos 0 (some string) None & info [] ~docv:"SOURCE-HASH"
in
let dest = required & pos 1 (some string) None & info [] ~docv:"DEST-HASH" in
let run env source dest =
let source = Typing_deps.Dep.of_debug_string source in
let dest = Typing_deps.Dep.of_debug_string dest in
Lwt_utils.run_main (fun () -> mode_query_path ~env ~source ~dest)
in
let info = Cmd.info "query-path" ~doc ~sdocs:Manpage.s_common_options ~man in
let term = Term.(const run $ env_t $ source $ dest) in
Cmd.v info term
let mode_build = Build.go
let build_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc = "Build the 64-bit graph from a collection of edges" in
let man =
[
`S Manpage.s_description;
`P
(String.strip
{|
Produces the 64-bit dependency graph from a collection of edges stored in a
set of binary files. The files containing the dependency graph edges are meant
to be produced by hh_server.
|});
]
in
let allow_empty =
let doc =
"Do not fail when produced dependency graph is empty. By default, the tool"
^ " exits with a non-zero exit code when trying to produce an empty graph,"
^ " as most likely, this only happens when something has gone wrong (a bug)."
^ " However, producing empty graphs can still be useful! (E.g. in tests)"
in
value & flag & info ["allow-empty"] ~doc
in
let incremental =
let doc =
"Use the provided dependency graph as a base. Build a new dependency graph"
^ " by adding the edges in EDGES_DIR or DELTA_FILE to this graph."
in
value
& opt (some string) None
& info ["incremental"] ~doc ~docv:"INCREMENTAL_HHDG"
in
let edges_dir =
let doc = "A directory containing the .bin files with all the edges." in
value & opt (some string) None & info ["edges-dir"] ~doc ~docv:"EDGES_DIR"
in
let delta_file =
let doc =
"A file containing a dependency graph delta in binary format."
^ " The files should contain edges as produced by calling"
^ " `hh --save-state /path/to/file` (which is a special binary format)."
in
value & opt (some string) None & info ["delta-file"] ~doc ~docv:"DELTA_FILE"
in
let output =
let doc = "Where to put the 64-bit dependency graph." in
required & opt (some string) None & info ["output"] ~doc ~docv:"OUTPUT"
in
let run allow_empty incremental edges_dir delta_file output =
Lwt_utils.run_main (fun () ->
mode_build ~allow_empty ~incremental ~edges_dir ~delta_file ~output)
in
let info = Cmd.info "build" ~doc ~sdocs:Manpage.s_common_options ~man in
let term =
Term.(
const run $ allow_empty $ incremental $ edges_dir $ delta_file $ output)
in
Cmd.v info term
let mode_dep_graph_stats = Dep_graph_stats.go
let dep_graph_stats_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc = "Calculate some statistics for the 64-bit dependency graph" in
let man =
[
`S Manpage.s_description;
`P
(String.strip
{|
Calculate a bunch of statistics for a given 64-bit dependency graph.
|});
]
in
let dep_graph =
let doc = "Path to a 64-bit dependency graph." in
required
& opt (some string) None
& info ["dep-graph"] ~doc ~docv:"DEP_GRAPH"
in
let run dep_graph =
Lwt_utils.run_main (fun () -> mode_dep_graph_stats ~dep_graph)
in
let info =
Cmd.info "dep-graph-stats" ~doc ~sdocs:Manpage.s_common_options ~man
in
let term = Term.(const run $ dep_graph) in
Cmd.v info term
let mode_dep_graph_is_subgraph = Dep_graph_is_subgraph.go
let dep_graph_is_subgraph_subcommand =
let open Cmdliner in
let open Cmdliner.Arg in
let doc = "Check whether SUB is a subgraph of SUPER" in
let man =
[
`S Manpage.s_description;
`P
(String.strip
{|
Check whether a 64-bit dependency graph is a subgraph of another graph.
|});
]
in
let dep_graph_sub =
let doc = "Path to smallest 64-bit dependency graph." in
required & opt (some string) None & info ["sub"] ~doc ~docv:"SUB"
in
let dep_graph_super =
let doc = "Path to largest 64-bit dependency graph." in
required & opt (some string) None & info ["super"] ~doc ~docv:"SUPER"
in
let run sub super =
Lwt_utils.run_main (fun () -> mode_dep_graph_is_subgraph ~sub ~super)
in
let info =
Cmd.info "dep-graph-is-subgraph" ~doc ~sdocs:Manpage.s_common_options ~man
in
let term = Term.(const run $ dep_graph_sub $ dep_graph_super) in
Cmd.v info term
let default_subcommand =
let open Cmdliner in
let sdocs = Manpage.s_common_options in
Term.(ret (const (`Help (`Pager, None))), Cmd.info "hh_fanout" ~sdocs)
let () =
EventLogger.init EventLogger.Event_logger_fake 0.0;
Daemon.check_entry_point ();
Folly.ensure_folly_init ();
let cmds =
[
build_subcommand;
calculate_subcommand;
calculate_errors_subcommand;
clean_subcommand;
debug_subcommand;
dep_graph_is_subgraph_subcommand;
dep_graph_stats_subcommand;
query_subcommand;
query_path_subcommand;
status_subcommand;
]
in
let (default, default_info) = default_subcommand in
let group = Cmdliner.Cmd.group ~default default_info cmds in
Stdlib.exit (Cmdliner.Cmd.eval group) |
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/hh_fanout.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Main entry point. *) |
OCaml | hhvm/hphp/hack/src/hh_fanout/incremental.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module Hh_bucket = Bucket
open Hh_prelude
type client_id = Client_id of string
type cursor_id = Cursor_id of string
type dep_graph_delta = (Typing_deps.Dep.t * Typing_deps.Dep.t) HashSet.t
type client_config = {
client_id: string;
ignore_hh_version: bool;
dep_table_saved_state_path: Path.t;
dep_table_errors_saved_state_path: Path.t;
naming_table_saved_state_path: Naming_sqlite.db_path;
deps_mode: Typing_deps_mode.t;
}
type typecheck_result = {
errors: Errors.t;
(** The errors in the codebase at this point in time. This field is
cumulative, so previous cursors need not be consulted. TODO: is that
true, or should this be a `Relative_path.Map.t Errors.t`? *)
}
type cursor_state =
| Saved_state of client_config
| Saved_state_delta of {
previous: cursor_state; (** The cursor before this one. *)
changed_files: Naming_sqlite.file_deltas;
(** The files that have changed since the saved-state. This field
is cumulative, so previous cursors need not be consulted. *)
fanout_result: Calculate_fanout.result;
(** The result of calculating
the fanout for the changed files at the given point in time. *)
}
| Typecheck_result of {
previous: cursor_state; (** The cursor before this one. *)
typecheck_result: typecheck_result;
(** The result of typechecking the fanout. *)
}
(** Construct the cursor ID exposed to the user.
For debugging purposes, the `from` and `client_config` fields are also
included in the cursor, even though we could store them in the state and
recover them from the ID.
For convenience during debugging, we try to ensure that cursors are
lexicographically-orderable by the time ordering. For that reason, it's
important that the first field in the cursor ID is the
monotonically-increasing ID.
The choice of `,` as a delimiter is important. Watchman uses `:`, which is
inappropriate for this goal, because the ASCII value of `,` is less than that
of all the numerals, while `:` is greater than that of all the numerals.
Using this delimiter ensures that a string like `cursor,1,foo` is less than a
string like `cursor,10,foo` by the ASCII lexicographical ordering, which is
not true for `cursor:1:foo` vs. `cursor:10:foo`.
Some reasoning about delimiter choice:
* `-` is likely to appear in `from` strings.
* `+` would contrast strangely with `-` in `from` strings.
* `#` is interpreted as a comment in the shell.
* `$` and `!` may accidentally interpolate values in the shell.
* `&` launches background processes in Bash.
* `(`, `)`, `'`, and `"` are usually paired, and have special meaning in
the shell. Also, in this OCaml comment I have to write this " to close the
previous double-quote, or this comment is a syntax-error.
* '/' suggests a hierarchical relationship or an actual file.
* `%` and `*` look a little strange in my opinion.
* `.` and `,` are fine.
*)
let make_cursor_id (id : int) (client_config : client_config) : cursor_id =
let cursor_id =
Printf.sprintf
"cursor,%d,%s,%d"
id
client_config.client_id
(Hashtbl.hash client_config)
in
Cursor_id cursor_id
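(* A quick sketch of the ordering property described above (hypothetical
   IDs): "cursor,1,foo" < "cursor,10,foo" because ',' (0x2C) sorts before
   '0' (0x30), whereas "cursor:1:foo" > "cursor:10:foo" because ':' (0x3A)
   sorts after '9' (0x39). *)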
let typecheck_and_get_deps_and_errors_job
(ctx : Provider_context.t) _acc (paths : Relative_path.t list) :
Errors.t * dep_graph_delta =
List.fold
paths
~init:(Errors.empty, HashSet.create ())
~f:(fun acc path ->
let (ctx, entry) = Provider_context.add_entry_if_missing ~ctx ~path in
match Provider_context.read_file_contents entry with
| Some _ ->
let deps = HashSet.create () in
Typing_deps.add_dependency_callback
~name:"typecheck_and_get_deps_and_errors_job"
(fun dependent dependency ->
let dependent = Typing_deps.Dep.make dependent in
let dependency = Typing_deps.Dep.make dependency in
HashSet.add deps (dependent, dependency));
let { Tast_provider.Compute_tast_and_errors.errors; _ } =
Tast_provider.compute_tast_and_errors_unquarantined ~ctx ~entry
in
let (acc_errors, acc_deps) = acc in
let acc_errors = Errors.merge errors acc_errors in
HashSet.union acc_deps ~other:deps;
(acc_errors, acc_deps)
| None -> acc)
let get_state_file_path (state_dir : Path.t) : Path.t =
Path.concat state_dir "ocaml.state"
class cursor ~client_id ~cursor_state =
object (self)
val client_id : client_id = client_id
val cursor_state : cursor_state = cursor_state
method get_file_deltas : Naming_sqlite.file_deltas =
let rec helper cursor_state =
match cursor_state with
| Saved_state _ -> Relative_path.Map.empty
| Typecheck_result { previous; _ } -> helper previous
| Saved_state_delta { changed_files; _ } -> changed_files
in
helper cursor_state
method get_calculate_fanout_result : Calculate_fanout.result option =
match cursor_state with
| Saved_state _
| Typecheck_result _ ->
None
| Saved_state_delta { fanout_result; _ } -> Some fanout_result
method get_calculate_fanout_results_since_last_typecheck
: Calculate_fanout.result list =
let rec helper cursor_state =
match cursor_state with
| Saved_state _
| Typecheck_result _ ->
[]
| Saved_state_delta { fanout_result; previous; _ } ->
fanout_result :: helper previous
in
helper cursor_state
method private load_naming_table (ctx : Provider_context.t) : Naming_table.t
=
let rec get_naming_table_path (state : cursor_state) :
Naming_sqlite.db_path =
match state with
| Saved_state { naming_table_saved_state_path; _ } ->
naming_table_saved_state_path
| Saved_state_delta { previous; _ }
| Typecheck_result { previous; _ } ->
get_naming_table_path previous
in
let (Naming_sqlite.Db_path naming_table_path) =
get_naming_table_path cursor_state
in
let changed_file_infos =
self#get_file_deltas
|> Relative_path.Map.fold ~init:[] ~f:(fun path delta acc ->
let file_info =
match delta with
| Naming_sqlite.Modified file_info -> Some file_info
| Naming_sqlite.Deleted -> None
in
(path, file_info) :: acc)
in
Naming_table.load_from_sqlite_with_changed_file_infos
ctx
changed_file_infos
naming_table_path
method get_client_id : client_id = client_id
method get_client_config : client_config =
let rec helper = function
| Saved_state client_config -> client_config
| Saved_state_delta { previous; _ }
| Typecheck_result { previous; _ } ->
helper previous
in
helper cursor_state
method get_deps_mode : Typing_deps_mode.t = self#get_client_config.deps_mode
method private load_dep_table : unit =
let rec helper cursor_state =
match cursor_state with
| Saved_state _ -> ()
| Saved_state_delta { previous; _ } -> helper previous
| Typecheck_result _ -> failwith "not implemented"
in
helper cursor_state
method private get_files_to_typecheck : Relative_path.Set.t =
let rec helper cursor_state acc =
match cursor_state with
| Typecheck_result _ ->
(* Don't need to typecheck any previous cursors. The fanout of
the files that have changed before this typecheck has already
been processed. Stop recursion here. *)
acc
| Saved_state { dep_table_errors_saved_state_path; _ } ->
if Sys.file_exists (Path.to_string dep_table_errors_saved_state_path)
then
let errors : SaveStateServiceTypes.saved_state_errors =
In_channel.with_file
~binary:true
(Path.to_string dep_table_errors_saved_state_path)
~f:(fun ic -> Marshal.from_channel ic)
in
errors
else
Relative_path.Set.empty
| Saved_state_delta { previous; fanout_result; _ } ->
let acc =
Relative_path.Set.union
acc
fanout_result.Calculate_fanout.fanout_files
in
helper previous acc
in
helper cursor_state Relative_path.Set.empty
method advance
~(detail_level : Calculate_fanout.Detail_level.t)
(ctx : Provider_context.t)
(_workers : MultiWorker.worker list)
(changed_paths : Relative_path.Set.t) : cursor =
let changed_files =
Relative_path.Set.fold
changed_paths
~init:
(match cursor_state with
| Saved_state _
| Typecheck_result _ ->
Relative_path.Map.empty
| Saved_state_delta { changed_files; _ } -> changed_files)
~f:(fun path acc ->
let (ctx, entry) =
Provider_context.add_entry_if_missing ~ctx ~path
in
match Provider_context.read_file_contents entry with
| None ->
Relative_path.Map.add acc ~key:path ~data:Naming_sqlite.Deleted
| Some _ ->
let file_info =
Ast_provider.compute_file_info
~popt:(Provider_context.get_popt ctx)
~entry
in
Relative_path.Map.add
acc
~key:path
~data:(Naming_sqlite.Modified file_info))
in
let old_naming_table = self#load_naming_table ctx in
let new_naming_table =
Naming_table.update_from_deltas old_naming_table changed_files
in
let () = self#load_dep_table in
let fanout_result =
Calculate_fanout.go
~detail_level
~deps_mode:self#get_deps_mode
~old_naming_table
~new_naming_table
~file_deltas:changed_files
~input_files:changed_paths
in
let cursor_state =
Saved_state_delta
{ previous = cursor_state; changed_files; fanout_result }
in
new cursor ~client_id ~cursor_state
method calculate_errors
(ctx : Provider_context.t) (workers : MultiWorker.worker list)
: Errors.t * cursor option =
match cursor_state with
| Typecheck_result { typecheck_result = { errors; _ }; _ } ->
(errors, None)
| (Saved_state _ | Saved_state_delta _) as current_cursor ->
(* The global reverse naming table is updated by calling this
function. We can discard the forward naming table returned to us. *)
let (_naming_table : Naming_table.t) = self#load_naming_table ctx in
let files_to_typecheck = self#get_files_to_typecheck in
let (errors, fanout_files_deps) =
MultiWorker.call
(Some workers)
~job:(typecheck_and_get_deps_and_errors_job ctx)
~neutral:(Errors.empty, HashSet.create ())
~merge:(fun (errors, deps) (acc_errors, acc_deps) ->
let acc_errors = Errors.merge acc_errors errors in
HashSet.union acc_deps ~other:deps;
(acc_errors, acc_deps))
~next:
(Hh_bucket.make
(Relative_path.Set.elements files_to_typecheck)
~num_workers:(List.length workers))
in
Hh_logger.log
"Got %d new dependency edges as a result of typechecking %d files"
(HashSet.length fanout_files_deps)
(Relative_path.Set.cardinal files_to_typecheck);
let typecheck_result = { errors } in
let cursor =
new cursor
~client_id
~cursor_state:
(Typecheck_result { previous = current_cursor; typecheck_result })
in
(errors, Some cursor)
end
type persistent_state = {
max_cursor_id: int ref;
cursors: (cursor_id, client_id * cursor) Hashtbl.t;
clients: (client_id, client_config) Hashtbl.t;
}
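(* The persistent state contains [cursor] objects, whose method tables
   involve closures, so marshalling needs [Marshal.Closures]; a consequence
   is that the state file is generally only readable by the same binary
   that wrote it, which is why `hh_fanout clean` exists as an escape
   hatch. *)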
let save_state ~(state_path : Path.t) ~(persistent_state : persistent_state) :
unit =
Out_channel.with_file ~binary:true (Path.to_string state_path) ~f:(fun oc ->
Marshal.to_channel oc persistent_state [Marshal.Closures])
class state ~state_path ~persistent_state =
object
val state_path : Path.t = state_path
val persistent_state : persistent_state = persistent_state
method make_client_id (client_config : client_config) : client_id =
let client_id = Client_id client_config.client_id in
Hashtbl.set persistent_state.clients ~key:client_id ~data:client_config;
client_id
method make_default_cursor (client_id : client_id) : (cursor, string) result
=
match Hashtbl.find persistent_state.clients client_id with
| Some client_config ->
Ok (new cursor ~client_id ~cursor_state:(Saved_state client_config))
| None ->
let (Client_id client_id) = client_id in
Error (Printf.sprintf "Client ID %s could not be found" client_id)
method look_up_cursor ~(client_id : client_id option) ~(cursor_id : string)
: (cursor, string) result =
let cursor_opt =
Hashtbl.find persistent_state.cursors (Cursor_id cursor_id)
in
match (client_id, cursor_opt) with
| (None, Some (Client_id _existing_client_id, cursor)) -> Ok cursor
| (Some (Client_id client_id), Some (Client_id existing_client_id, cursor))
when String.equal client_id existing_client_id ->
Ok cursor
| ( Some (Client_id client_id),
Some (Client_id existing_client_id, _cursor) ) ->
Error
(Printf.sprintf
"Client ID %s was provided, but cursor %s is associated with client ID %s"
client_id
cursor_id
existing_client_id)
| (Some (Client_id client_id), None) ->
Error
(Printf.sprintf
"Cursor with ID %s not found (for client ID %s)"
cursor_id
client_id)
| (None, None) ->
Error (Printf.sprintf "Cursor with ID %s not found" cursor_id)
method add_cursor (cursor : cursor) : cursor_id =
let client_id = cursor#get_client_id in
let client_config = Hashtbl.find_exn persistent_state.clients client_id in
let cursor_id =
make_cursor_id !(persistent_state.max_cursor_id) client_config
in
incr persistent_state.max_cursor_id;
Hashtbl.set
persistent_state.cursors
~key:cursor_id
~data:(client_id, cursor);
save_state ~state_path ~persistent_state;
cursor_id
end
let init_state_dir (state_dir : Path.t) ~(populate_dir : Path.t -> unit) : unit
=
Disk.mkdir_p (state_dir |> Path.dirname |> Path.to_string);
if not (Path.file_exists state_dir) then
Tempfile.with_tempdir (fun temp_dir ->
populate_dir temp_dir;
try
Disk.rename (Path.to_string temp_dir) (Path.to_string state_dir)
with
| Disk.Rename_target_already_exists _
| Disk.Rename_target_dir_not_empty _ ->
(* Assume that the directory was initialized by another process
before us, so we don't need to do anything further. *)
())
let make_reference_implementation (state_dir : Path.t) : state =
init_state_dir state_dir ~populate_dir:(fun temp_dir ->
let temp_state_path = get_state_file_path temp_dir in
if not (Path.file_exists temp_state_path) then
save_state
~state_path:temp_state_path
~persistent_state:
{
max_cursor_id = ref 0;
cursors = Hashtbl.Poly.create ();
clients = Hashtbl.Poly.create ();
});
let state_path = get_state_file_path state_dir in
let (persistent_state : persistent_state) =
try
In_channel.with_file
~binary:true
(Path.to_string state_path)
~f:(fun ic -> Marshal.from_channel ic)
with
| e ->
let e = Exception.wrap e in
Hh_logger.warn
("HINT: An error occurred while loading hh_fanout state. "
^^ "If it is corrupted, "
^^ "try running `hh_fanout clean` to delete the state, "
^^ "then try your query again.");
Exception.reraise e
in
let state = new state ~state_path ~persistent_state in
(state :> state) |
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/incremental.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** An identifier representing one of the users of `hh_fanout`. Clients with
different options will have different IDs. *)
type client_id = Client_id of string
(** An identifier representing a `cursor`. *)
type cursor_id = Cursor_id of string
type dep_graph_delta = (Typing_deps.Dep.t * Typing_deps.Dep.t) HashSet.t
(** Any options which would affect the results of the returned fanouts. *)
type client_config = {
client_id: string;
ignore_hh_version: bool;
dep_table_saved_state_path: Path.t;
dep_table_errors_saved_state_path: Path.t;
naming_table_saved_state_path: Naming_sqlite.db_path;
deps_mode: Typing_deps_mode.t;
}
(** A "cursor" represents a pointer to a state in time of the repository with
respect to the dependency graph and typechecking errors.
`hh_fanout` operations take a cursor representing the previous state of the
world, and return a new cursor representing the current state of the world.
*)
class type cursor =
object
(** Get the cumulative file deltas that have occurred since the
saved-state for the point in time represented by this cursor. *)
method get_file_deltas : Naming_sqlite.file_deltas
(** Get the fanout calculated for the files changed in this cursor.
Returns `None` if inapplicable for this type of cursor. *)
method get_calculate_fanout_result : Calculate_fanout.result option
(** Get the fanout that needs to be re-typechecked (i.e. the fanout of
the files changed since the last typecheck).
There is one element in the returned list per cursor-advance since the
last typecheck. The newest elements are first. *)
method get_calculate_fanout_results_since_last_typecheck :
Calculate_fanout.result list
(** Get the client ID that owns this cursor. *)
method get_client_id : client_id
(** Get the client configuration associated with this cursor.
In particular, this contains information about the saved-state that this
cursor was initialized with. *)
method get_client_config : client_config
(** Helper to get deps_mode out of client_config *)
method get_deps_mode : Typing_deps_mode.t
(** Process the provided set of changed files and advance the cursor.
This involves typechecking the changed files and updating the dependency
graph.
The resulting cursor is NOT persisted to disk. The caller is responsible
for persisting it using `State.add_cursor`. *)
method advance :
detail_level:Calculate_fanout.Detail_level.t ->
Provider_context.t ->
MultiWorker.worker list ->
Relative_path.Set.t ->
cursor
(** Typecheck the files in the fanout for this cursor.
Returns a cursor with the typechecking errors cached. The resulting
cursor is NOT persisted to disk. The caller is responsible for persisting
it using `State.add_cursor`.
If no new cursor was created (because no additional work needed to be
performed, i.e. the typechecking errors were already cached for this
cursor), then returns `None` instead of `Some cursor`.
*)
method calculate_errors :
Provider_context.t -> MultiWorker.worker list -> Errors.t * cursor option
end
class type state =
object
(** Look up the ID of the client corresponding to the provided
`client_config`.
If no such client already exists, one is created. This operation is
idempotent. *)
method make_client_id : client_config -> client_id
(** Construct the cursor corresponding to the saved-state.
The caller should immediately advance it with any files that have changed
since the saved-state.
Returns `Error` if the given `client_id` doesn't exist. *)
method make_default_cursor : client_id -> (cursor, string) result
(** Look up the given cursor.
    If `client_id` is provided, it is used as a sanity check: the cursor
    must belong to the provided client.
* Returns `Error` if the cursor does not exist.
* Returns `Error` if the cursor exists, but doesn't belong to the
provided client. *)
method look_up_cursor :
client_id:client_id option -> cursor_id:string -> (cursor, string) result
(** Add the given cursor to the state, committing any changes to disk. *)
method add_cursor : cursor -> cursor_id
end
(** Reference implementation using OCaml blobs to store state. Loads the
state from the given path on disk.
If the path does not exist, it is atomically created and loaded.
Currently not production-usable -- it is not safe for concurrent consumers.
*)
val make_reference_implementation : Path.t -> state |
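(* A minimal usage sketch (illustrative only; [ctx], [workers],
   [detail_level], [changed_files], and [client_config] are assumed to be
   supplied by the caller):
     let typecheck_changed state ctx workers ~detail_level changed_files
         client_config =
       let client_id = state#make_client_id client_config in
       match state#make_default_cursor client_id with
       | Error msg -> failwith msg
       | Ok cursor ->
         let cursor = cursor#advance ~detail_level ctx workers changed_files in
         let _cursor_id = state#add_cursor cursor in
         cursor#calculate_errors ctx workers
*)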
OCaml | hhvm/hphp/hack/src/hh_fanout/query_fanout.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type single_result = {
hash: Typing_deps.Dep.t;
paths: Relative_path.Set.t;
}
type result = single_result list
let go
~(ctx : Provider_context.t)
~(dep_hash : Typing_deps.Dep.t)
~(include_extends : bool) : result =
let deps_mode = Provider_context.get_deps_mode ctx in
let dep_set = Typing_deps.get_ideps_from_hash deps_mode dep_hash in
let dep_set =
if include_extends then
Typing_deps.add_all_deps deps_mode dep_set
else
Typing_deps.add_typing_deps deps_mode dep_set
in
dep_set
|> Typing_deps.DepSet.elements
|> List.map ~f:(fun hash ->
let paths =
Naming_provider.get_files ctx (Typing_deps.DepSet.singleton hash)
in
{ hash; paths })
let result_to_json result : Hh_json.json =
Hh_json.JSON_Object
(List.map result ~f:(fun { hash; paths } ->
( Typing_deps.Dep.to_debug_string hash,
Hh_json.JSON_Array
(paths
|> Relative_path.Set.elements
|> List.map ~f:(fun path ->
Hh_json.JSON_String (Relative_path.to_absolute path))) ))) |
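(* Sample JSON shape produced by [result_to_json] (keys and paths are
   illustrative; each key is the [Typing_deps.Dep.to_debug_string] of a
   dependent hash, and each value lists the absolute paths of the files
   that define it):
     {
       "<dep-hash-1>": ["/www/foo.php", "/www/bar.php"],
       "<dep-hash-2>": ["/www/bar.php"]
     }
*)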
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/query_fanout.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type result
val go :
ctx:Provider_context.t ->
dep_hash:Typing_deps.Dep.t ->
include_extends:bool ->
result
val result_to_json : result -> Hh_json.json |
OCaml | hhvm/hphp/hack/src/hh_fanout/query_path.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type dep_path_acc = Typing_deps.Dep.t list
type result_node = {
dep: Typing_deps.Dep.t;
paths: Relative_path.Set.t;
}
type result = result_node list option
(** Find a path from [current] to [dest] using a depth-first search. Only the
first result is returned.
A "path" is always zero or more extends-dependency edges followed a single
typing-dependency edge. Extends-dependencies are transitively traversed,
while typing-dependencies don't need to be transitively traversed since they
always end in a terminal node.
*)
let rec search
~(deps_mode : Typing_deps_mode.t)
~(dep_path_acc : dep_path_acc)
~(seen_acc : Typing_deps.DepSet.t)
~(current : Typing_deps.Dep.t)
~(dest : Typing_deps.Dep.t) : dep_path_acc option =
let current_direct_deps =
current
|> Typing_deps.DepSet.singleton
|> Typing_deps.add_typing_deps deps_mode
in
if Typing_deps.DepSet.mem current_direct_deps dest then
Some (dest :: dep_path_acc)
else if not (Typing_deps.Dep.is_class current) then
(* There are no further extends deps to follow for this node. *)
None
else
let dep_path_acc = current :: dep_path_acc in
let extends_deps =
current
|> Typing_deps.Dep.extends_of_class
|> Typing_deps.DepSet.singleton
|> Typing_deps.add_typing_deps deps_mode
|> Typing_deps.DepSet.elements
in
List.fold_until
extends_deps
~init:seen_acc
~f:(fun seen_acc extends_dep ->
let seen_acc = Typing_deps.DepSet.add seen_acc extends_dep in
let result =
search ~deps_mode ~dep_path_acc ~seen_acc ~current:extends_dep ~dest
in
match result with
| Some result -> Container.Continue_or_stop.Stop (Some result)
| None -> Container.Continue_or_stop.Continue seen_acc)
~finish:(fun _seen_acc -> None)
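(* Illustration (hypothetical graph): searching from source [A] to
   destination [D] might traverse
     A --extends--> B --extends--> C --typing--> D
   accumulating [A; B; C; D] in reverse, i.e. a chain of extends edges capped
   by exactly one typing edge, matching the shape described above. *)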
let go
~(ctx : Provider_context.t)
~(source : Typing_deps.Dep.t)
~(dest : Typing_deps.Dep.t) : result =
let deps_mode = Provider_context.get_deps_mode ctx in
search
~deps_mode
~dep_path_acc:[]
~seen_acc:Typing_deps.(DepSet.make ())
~current:source
~dest
|> Option.map ~f:(fun dep_path ->
List.rev_map dep_path ~f:(fun dep ->
let paths =
dep
|> Typing_deps.DepSet.singleton
|> Naming_provider.get_files ctx
in
{ dep; paths }))
let result_to_json (result : result) : Hh_json.json =
match result with
| None -> Hh_json.JSON_Null
| Some dep_path ->
Hh_json.JSON_Array
(List.map dep_path ~f:(fun { dep; paths } ->
Hh_json.JSON_Object
[
( "hash",
Hh_json.JSON_String (Typing_deps.Dep.to_debug_string dep) );
( "paths",
Hh_json.JSON_Array
(paths
|> Relative_path.Set.elements
|> List.map ~f:(fun path ->
Hh_json.JSON_String (Relative_path.to_absolute path))
) );
])) |
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/query_path.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
type result
val go :
ctx:Provider_context.t ->
source:Typing_deps.Dep.t ->
dest:Typing_deps.Dep.t ->
result
val result_to_json : result -> Hh_json.json |
OCaml | hhvm/hphp/hack/src/hh_fanout/status.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
let go (fanout_calculations : Calculate_fanout.result list) : unit Lwt.t =
let (fanout_files, explanations) =
List.fold
fanout_calculations
~init:(Relative_path.Set.empty, Relative_path.Map.empty)
~f:(fun acc { Calculate_fanout.fanout_files; explanations; _ } ->
let (acc_fanout_files, acc_explanations) = acc in
let acc_fanout_files =
Relative_path.Set.union acc_fanout_files fanout_files
in
let acc_explanations =
Relative_path.Map.union acc_explanations explanations
in
(acc_fanout_files, acc_explanations))
in
Relative_path.Map.iter explanations ~f:(fun path explanation ->
let open Calculate_fanout in
Tty.cprintf (Tty.Bold Tty.Default) "%s\n" (Relative_path.suffix path);
let get_symbol_num_files symbol =
match symbol.outgoing_files with
| Some outgoing_files ->
(match Relative_path.Set.cardinal outgoing_files with
| 1 -> "(1 file)"
| n -> Printf.sprintf "(%d files)" n)
| None -> Printf.sprintf "(? files)"
in
List.iter explanation.added_symbols ~f:(fun added_symbol ->
Tty.cprintf
(Tty.Bold Tty.Green)
" A %s"
added_symbol.symbol_edge.symbol_name;
Printf.printf " %s\n" (get_symbol_num_files added_symbol));
List.iter explanation.removed_symbols ~f:(fun removed_symbol ->
Tty.cprintf
(Tty.Bold Tty.Red)
" D %s"
removed_symbol.symbol_edge.symbol_name;
Printf.printf " %s\n" (get_symbol_num_files removed_symbol));
List.iter explanation.modified_symbols ~f:(fun modified_symbol ->
Tty.cprintf
(Tty.Bold Tty.Blue)
" M %s"
modified_symbol.symbol_edge.symbol_name;
Printf.printf " %s\n" (get_symbol_num_files modified_symbol));
());
Printf.printf "Total files to typecheck: ";
Tty.cprintf
(Tty.Bold Tty.Default)
"%d\n"
(Relative_path.Set.cardinal fanout_files);
Lwt.return_unit |
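(* Example of the rendered output (symbol names and counts illustrative):
     foo/bar.php
       A NewClass (2 files)
       D RemovedFunction (1 file)
       M ChangedType (? files)
     Total files to typecheck: 42
*)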
OCaml Interface | hhvm/hphp/hack/src/hh_fanout/status.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Render the details about the fanout to be checked in a nice, colorized,
user-readable format. Prints to stdout. *)
val go : Calculate_fanout.result list -> unit Lwt.t |
TOML | hhvm/hphp/hack/src/hh_fanout/cargo/hh_fanout_build/Cargo.toml | # @generated by autocargo
[package]
name = "hh_fanout_build"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hh_fanout_build_rust/build.rs"
test = false
doctest = false
[dependencies]
bytemuck = { version = "1.12.3", features = ["derive"] }
dashmap = { version = "5.4", features = ["rayon", "serde"] }
dep_graph_delta = { version = "0.0.0", path = "../../../deps" }
depgraph_compress = { version = "0.0.0", path = "../../../depgraph/depgraph_compress/cargo/depgraph_compress" }
depgraph_reader = { version = "0.0.0", path = "../../../depgraph/cargo/depgraph_reader" }
depgraph_writer = { version = "0.0.0", path = "../../../depgraph/cargo/depgraph_writer" }
hash = { version = "0.0.0", path = "../../../utils/hash" }
libc = "0.2.139"
log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] }
memmap2 = "0.5.10"
newtype = { version = "0.0.0", path = "../../../utils/newtype" }
parking_lot = { version = "0.12.1", features = ["send_guard"] }
rayon = "1.2"
smallvec = { version = "1.6.1", features = ["serde", "specialization", "union"] } |
TOML | hhvm/hphp/hack/src/hh_fanout/cargo/hh_fanout_build_rust/Cargo.toml | # @generated by autocargo
[package]
name = "hh_fanout_build_rust"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hh_fanout_build_rust/ffi.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]
[dependencies]
delta_log = { version = "0.0.0", path = "../../../utils/rust/delta_log" }
env_logger = "0.10"
hh_fanout_build = { version = "0.0.0", path = "../hh_fanout_build" }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" } |
TOML | hhvm/hphp/hack/src/hh_fanout/cargo/hh_fanout_dep_graph_is_subgraph_rust/Cargo.toml | # @generated by autocargo
[package]
name = "hh_fanout_dep_graph_is_subgraph_rust"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hh_fanout_dep_graph_is_subgraph_rust/dep_graph_is_subgraph.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]
[dependencies]
depgraph_reader = { version = "0.0.0", path = "../../../depgraph/cargo/depgraph_reader" }
env_logger = "0.10"
log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" } |
TOML | hhvm/hphp/hack/src/hh_fanout/cargo/hh_fanout_dep_graph_stats_rust/Cargo.toml | # @generated by autocargo
[package]
name = "hh_fanout_dep_graph_stats_rust"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../hh_fanout_dep_graph_stats_rust/dep_graph_stats.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]
[dependencies]
depgraph_reader = { version = "0.0.0", path = "../../../depgraph/cargo/depgraph_reader" }
env_logger = "0.10"
json = "0.12.1"
log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" } |
Rust | hhvm/hphp/hack/src/hh_fanout/hh_fanout_build_rust/build.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::ffi::OsString;
use std::fs;
use std::fs::File;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use dep_graph_delta::DepGraphDeltaIterator;
use depgraph_compress::OptimizeConfig;
use depgraph_compress::WriteConfig;
use depgraph_reader::Dep;
use depgraph_reader::DepGraph;
use depgraph_writer::HashIndex;
use depgraph_writer::HashIndexSet;
use depgraph_writer::HashListIndex;
use depgraph_writer::MemDepGraph;
use hash::DashMap;
use log::info;
use newtype::IdVec;
use parking_lot::Mutex;
use rayon::prelude::*;
use smallvec::SmallVec;
struct EdgesDir {
    // For remote typechecks, we output edges in a serialized Rust structure format.
struct_handles: Vec<PathBuf>,
}
impl EdgesDir {
fn open<P: AsRef<Path>>(dir: P) -> io::Result<EdgesDir> {
let struct_handles = fs::read_dir(&dir)?
.map(|entry| {
let path = entry?.path();
if matches!(
path.extension().and_then(|x| x.to_str()),
Some("bin") | Some("hhdg_delta")
) {
Ok(Some(path))
} else {
Ok(None)
}
})
.filter_map(|x| x.transpose())
.collect::<io::Result<Vec<PathBuf>>>()?;
Ok(EdgesDir { struct_handles })
}
fn struct_handle_count(&self) -> usize {
self.struct_handles.len()
}
fn read_all_edges(self) -> io::Result<Edges> {
let mut acc = Edges::default();
register_dep_graph_delta_files(&self.struct_handles, &mut acc)?;
Ok(acc)
}
}
/// Read in DepGraphDelta files and add their contents to `edges`.
fn register_dep_graph_delta_files(all_paths: &[PathBuf], edges: &mut Edges) -> io::Result<()> {
let num_files_read = AtomicUsize::new(0);
all_paths
.par_iter()
.with_min_len(1)
.with_max_len(1)
.try_for_each(|path| -> io::Result<()> {
            // Log progress in a human-readable way no matter how rayon parallelizes things.
// It's not required that these messages be strictly in order (no need to lock here just for this).
let i = num_files_read.fetch_add(1, Ordering::Relaxed);
if all_paths.len() < 100 || (i + 1) % 100 == 0 {
info!("Reading in struct file {}/{}", i + 1, all_paths.len());
}
if std::fs::metadata(path)?.len() == 0 {
// We can't mmap empty files, but they have no data anyway so ignore.
return Ok(());
}
// Memory-map the .hhdg_delta file and tell Linux we're going to need its bytes.
let mmap = {
let file = File::open(path)?;
let mmap = unsafe { memmap2::Mmap::map(&file)? };
unsafe {
libc::madvise(
mmap.as_ptr() as *mut libc::c_void,
mmap.len(),
libc::MADV_WILLNEED,
)
};
mmap
};
// Turn mmap raw bytes into &[u64], which we know it contains.
let contents: &[u64] = bytemuck::cast_slice(&mmap as &[u8]);
// Dump edges into the graph.
for (dependency, dependents) in DepGraphDeltaIterator::new(contents) {
edges.register_many(dependency, dependents.iter().copied());
}
Ok(())
})
}
/// We see high DashMap lock contention with the default shard count (4x the
/// thread count), so use 16x the thread count instead, i.e. 4x the default.
fn high_dashmap_shard_count() -> usize {
std::cmp::max(1, rayon::current_num_threads() * 16).next_power_of_two()
}
/// Maps a Dep to a densely numbered 32-bit newtype.
///
/// This mapping is not deterministic; it's designed to hand out numbers
/// quickly. Later, once we have seen all Deps, we renumber everything to use
/// a final deterministic numbering based on Dep order.
#[derive(Debug)]
struct DepToHashIndex {
    /// The Dep -> temporary HashIndex mappings assigned so far.
deps: DashMap<Dep, HashIndex>,
/// The next index to hand out when something is added to `deps`.
next_index: AtomicUsize,
}
impl DepToHashIndex {
fn new() -> Self {
let deps = dashmap::DashMap::with_hasher_and_shard_amount(
hash::BuildHasher::default(),
high_dashmap_shard_count(),
);
Self {
deps,
next_index: AtomicUsize::new(0),
}
}
fn get_or_allocate(&self, dep: Dep) -> HashIndex {
*self.deps.entry(dep).or_insert_with(|| {
HashIndex::from_usize(self.next_index.fetch_add(1, Ordering::Relaxed))
})
}
fn finish(self) -> DashMap<Dep, HashIndex> {
// Check for overflow here.
assert!(self.next_index.into_inner() <= 1 + !0u32 as usize);
self.deps
}
}
/// Structure used to read in all edges in parallel
#[derive(Debug)]
pub struct Edges {
    /// This table maps each dependency's HashIndex to the set of its dependents' HashIndexes.
///
/// For scalability it's sharded. The low bits of the HashIndex pick which shard
/// to use, and remaining bits index into that shard's Vec.
shards: Box<[Mutex<Vec<HashIndexSet>>; Self::NUM_SHARDS]>,
/// Assigns new, temporary Dep -> HashIndex mappings.
dep_to_temp_index: DepToHashIndex,
}
impl Default for Edges {
fn default() -> Self {
Self {
shards: Box::new(std::array::from_fn(|_| Default::default())),
dep_to_temp_index: DepToHashIndex::new(),
}
}
}
impl Edges {
const NUM_SHARDS: usize = 2048;
/// Register many dependents for one dependency in a single shot.
pub fn register_many<T>(&self, dependency: Dep, dependents: T)
where
T: Iterator<Item = Dep>,
{
let dependency_index = self.dep_to_temp_index.get_or_allocate(dependency);
let dependency_num = dependency_index.as_usize();
let dependents: Vec<HashIndex> = dependents
.map(|dep| self.dep_to_temp_index.get_or_allocate(dep))
.collect();
let shard = &mut self.shards[dependency_num % Self::NUM_SHARDS].lock();
let index = dependency_num / Self::NUM_SHARDS;
if shard.len() <= index {
shard.resize_with(index + 1, Default::default);
}
shard[index].extend(dependents);
}
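    // Worked example of the shard math above (illustrative): with
    // NUM_SHARDS = 2048, a dependency whose temporary index is 4097 lands in
    // shard 4097 % 2048 = 1 at slot 4097 / 2048 = 2, so consecutive indices
    // round-robin across shards, spreading lock contention.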
pub fn finish(self) -> MemDepGraph {
// Sort the hashes.
info!("Sorting all hashes");
let mut dep_to_temp_index: Vec<_> = self.dep_to_temp_index.finish().into_iter().collect();
dep_to_temp_index.par_sort_unstable_by_key(|(dep, _)| *dep);
info!("Sorting all hashes done");
// Create the vec of hashes & inverse mapping so we can update all HashIndex values.
info!("Creating remap table");
let mut remap_table: Vec<HashIndex> = vec![HashIndex(0); dep_to_temp_index.len()];
let hashes: IdVec<HashIndex, Dep> = dep_to_temp_index
.into_iter()
.enumerate()
.map(|(i, (dep, old_index))| {
// While we're here, update `remap_table`.
//
                // This could be done in parallel if remap_table held atomics.
remap_table[old_index.as_usize()] = HashIndex::from_usize(i);
dep
})
.collect();
info!("Creating remap table done");
info!("Interning edge lists");
// Collect up all sorted edge lists. They had 32 bit HashIndex values, but they were
// using the old, unsorted numbering scheme so we need to update them as we go.
let edge_list_interner: DashMap<Box<[HashIndex]>, SmallVec<[HashIndex; 4]>> =
dashmap::DashMap::with_hasher_and_shard_amount(
hash::BuildHasher::default(),
high_dashmap_shard_count(),
);
        // Guarantee there exists an entry for the empty edge list, as it's the default we use later.
edge_list_interner.insert(Default::default(), Default::default());
self.shards
.into_par_iter()
.enumerate()
.for_each(|(shard_lo_index, shard)| {
shard
.into_inner()
.into_par_iter()
.enumerate()
.for_each(|(i, dependents_set)| {
let shard_hi_index = i * Self::NUM_SHARDS;
let dependency = HashIndex::from_usize(shard_hi_index + shard_lo_index);
// Sort both to canonicalize, and so interning works.
let mut dependents: Box<[HashIndex]> = dependents_set
.into_iter()
.map(|d| remap_table[d.as_usize()])
.collect();
dependents.sort_unstable();
edge_list_interner
.entry(dependents)
.or_default()
.push(remap_table[dependency.as_usize()]);
})
});
drop(remap_table);
        // Sort the edge lists into canonical order, placing the empty list first.
let mut interned_edge_lists: Vec<_> = edge_list_interner.into_iter().collect();
interned_edge_lists.par_sort_unstable_by(|(d1, _), (d2, _)| {
let key1 = (d1.len(), d1);
let key2 = (d2.len(), d2);
key1.cmp(&key2)
});
let empty_edge_list_index = HashListIndex(0);
assert!(
interned_edge_lists[empty_edge_list_index.0 as usize]
.0
.is_empty()
);
info!("Interning edge lists done");
info!("Building edge lists");
// Here we give each Dep an index into a vec of shared edge lists. It's common for multiple
// Deps to share the same edge list.
// Everything defaults to having an empty edge list unless we see otherwise.
let mut edge_list_indices = IdVec::new_from_vec(vec![empty_edge_list_index; hashes.len()]);
// Build up a Vec of the edge lists, and remember which edge list each HashIndex uses.
let edge_lists = interned_edge_lists
.into_iter()
.enumerate()
.map(|(i, (dependents, dependencies))| {
// Allocate an edge list index for this edge list.
let hash_list_index = HashListIndex::from_usize(i);
// Tell all of the dependencies with this edge list that they are using it.
for dependency in dependencies {
edge_list_indices[dependency] = hash_list_index;
}
// Remember the edge list.
dependents
})
.collect();
info!("Building edge lists done");
MemDepGraph {
hashes,
edge_list_indices,
edge_lists,
}
}
fn count_edges(&self) -> usize {
self.shards
.par_iter()
.with_min_len(1)
.with_max_len(1)
.map(|e| e.lock().iter().map(|s| s.len()).sum::<usize>())
.sum()
}
}
/// Extend a collection of edges by adding all edges from the given
/// dependency graph.
fn extend_edges_from_dep_graph(all_edges: &Edges, graph: &DepGraph) {
graph.par_all_hashes().for_each(|dependency| {
if let Some(hash_list) = graph.hash_list_for(dependency) {
all_edges.register_many(dependency, graph.hash_list_hashes(hash_list));
}
});
}
/// Reads all the edges from the edge files in `edges_dir` and collects them
/// into a `MemDepGraph` mapping each dependency to its list of dependents.
pub fn edges_from_dir(edges_dir: &Path) -> io::Result<MemDepGraph> {
let edges_dir = EdgesDir::open(edges_dir)?;
let edges = edges_dir.read_all_edges()?;
Ok(edges.finish())
}
pub fn build(
allow_empty: bool,
incremental: Option<OsString>,
new_edges_dir: Option<OsString>,
delta_file: Option<OsString>,
output: &Path,
) -> io::Result<()> {
let all_edges = match (new_edges_dir, delta_file) {
(None, None) => {
panic!("build: at least one of --edges-dir or --delta-file flags should be passed")
}
(Some(_), Some(_)) => {
panic!("build: cannot specify both --edges-dir and --delta-file")
}
(Some(new_edges_dir), None) => {
info!("Opening binary files in {:?}", new_edges_dir);
let new_edges_dir = EdgesDir::open(new_edges_dir)?;
info!(
"Discovered {} struct files with edges",
new_edges_dir.struct_handle_count()
);
let all_edges = new_edges_dir.read_all_edges()?;
info!(
"All binary files loaded ({} unique edges)",
all_edges.count_edges()
);
all_edges
}
(None, Some(delta_file)) => {
info!("Opening dep graph delta at {:?}", delta_file);
let mut all_edges = Edges::default();
register_dep_graph_delta_files(&[delta_file.into()], &mut all_edges)?;
info!("Delta loaded with {} edges", all_edges.count_edges());
all_edges
}
};
match incremental {
None => info!("Not reading in edges from previous dependency graph (incremental=None)"),
Some(incremental) => {
info!(
"Reading in edges from previous dependency graph at {:?}",
incremental
);
let old_dep_graph = DepGraph::from_path(&incremental)?;
extend_edges_from_dep_graph(&all_edges, &old_dep_graph);
info!("Done reading in old edges");
}
}
info!("Done reading Edges");
info!("Converting to structured_edges & unique hashes");
let mem_dep_graph = all_edges.finish();
info!("Converting to structured_edges & unique hashes done");
if !allow_empty && mem_dep_graph.edge_lists.iter().all(|list| list.is_empty()) {
panic!("No input edges. Refusing to build as --allow-empty not set.");
}
info!("Registering {} unique hashes", mem_dep_graph.hashes.len());
if output.extension().and_then(|x| x.to_str()) == Some("zhhdg") {
let write_config = WriteConfig::default();
let optimize_config = OptimizeConfig::default();
depgraph_compress::write_dep_graph(output, mem_dep_graph, &write_config, &optimize_config)?;
} else {
depgraph_writer::write_dep_graph(output, &mem_dep_graph)?;
}
info!("Done");
Ok(())
} |
Rust | hhvm/hphp/hack/src/hh_fanout/hh_fanout_build_rust/ffi.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::ffi::OsString;
use ocamlrep_ocamlpool::ocaml_ffi;
ocaml_ffi! {
fn hh_fanout_build_main(
allow_empty: bool,
incremental: Option<OsString>,
new_edges_dir: Option<OsString>,
delta_file: Option<OsString>,
output: OsString,
) {
if std::env::var("HH_LOG_RELATIVE").is_ok() {
delta_log::init_delta_logger();
} else {
env_logger::init_from_env(
env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
);
}
hh_fanout_build::build(allow_empty, incremental, new_edges_dir, delta_file, output.as_ref()).unwrap();
std::process::exit(0)
}
} |
Rust | hhvm/hphp/hack/src/hh_fanout/hh_fanout_dep_graph_is_subgraph_rust/dep_graph_is_subgraph.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::HashSet;
use std::ffi::OsString;
use std::io;
use depgraph_reader::Dep;
use depgraph_reader::DepGraph;
use log::info;
use ocamlrep_ocamlpool::ocaml_ffi;
struct MissingEdge {
dependent: Dep,
dependency: Dep,
}
fn find_missing_edge(sub_graph: &DepGraph, super_graph: &DepGraph) -> Option<MissingEdge> {
// TODO: This could be much faster.
//
// 1. Use rayon.
// 2. Use `hash_list_id_for_dep` instead of `hash_list_for`. That returns a token that uniquely identifies
// the physical `HashList`, and identical rows share the same `HashListId`. Duplicates are common.
// Then use a table (say, `DashSet`) to remember which pairs of `HashListId` values have already been
// compared, and only check each pair once. If not in the table, `hash_list_for_id` can efficiently map
// the `HashListId` to a `HashList`.
let sub_hashes = sub_graph.all_hashes();
for dependency in sub_hashes {
if let Some(sub_hash_list) = sub_graph.hash_list_for(dependency) {
let super_hashes: HashSet<Dep> = super_graph
.hash_list_for(dependency)
.map_or_else(HashSet::default, |hl| {
super_graph.hash_list_hashes(hl).collect()
});
for dependent in sub_graph.hash_list_hashes(sub_hash_list) {
if !super_hashes.contains(&dependent) {
return Some(MissingEdge {
dependent,
dependency,
});
}
}
}
}
None
}
fn main(sub_graph: OsString, super_graph: OsString) -> io::Result<()> {
env_logger::init_from_env(
env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
);
info!("Opening sub-graph at {:?}", sub_graph);
let sub_graph = DepGraph::from_path(&sub_graph)?;
info!("Opening super-graph at {:?}", super_graph);
let super_graph = DepGraph::from_path(&super_graph)?;
match find_missing_edge(&sub_graph, &super_graph) {
None => println!("OK"),
Some(MissingEdge {
dependent,
dependency,
}) => {
let dependent: u64 = dependent.into();
let dependency: u64 = dependency.into();
println!("FAIL: the following dependent -> dependency edge is missing:");
println!(" {:x} -> {:x}", dependent, dependency);
std::process::exit(1)
}
}
Ok(())
}
ocaml_ffi! {
fn hh_fanout_dep_graph_is_subgraph_main(
sub_graph: OsString,
super_graph: OsString,
) {
main(sub_graph, super_graph).unwrap();
}
} |
Rust | hhvm/hphp/hack/src/hh_fanout/hh_fanout_dep_graph_stats_rust/dep_graph_stats.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::HashSet;
use std::ffi::OsString;
use depgraph_reader::DepGraph;
use log::info;
use ocamlrep_ocamlpool::ocaml_ffi;
fn main(dep_graph: OsString) {
env_logger::init_from_env(
env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
);
info!("Opening dependency graph at {:?}", dep_graph);
let depgraph = DepGraph::from_path(&dep_graph).unwrap();
info!("Validating integrity of dependency graph");
depgraph.validate_hash_lists().unwrap();
let num_unique_hashes = depgraph.all_hashes().len();
info!(
"Gathering hash list stats for {} unique hashes",
num_unique_hashes
);
let mut all_hash_list_ids = HashSet::new();
let mut num_total_edges = 0;
let mut num_stored_edges = 0;
for hash in depgraph.all_hashes() {
if let Some(hash_list_id) = depgraph.hash_list_id_for_dep(hash) {
let num_hash_indices = depgraph.hash_list_for_id(hash_list_id).len() as u64;
num_total_edges += num_hash_indices;
if all_hash_list_ids.insert(hash_list_id) {
num_stored_edges += num_hash_indices;
}
}
}
let num_unique_hash_lists = all_hash_list_ids.len();
let obj = json::object! {
unique_hashes: num_unique_hashes,
total_edges: num_total_edges,
unique_hash_lists: num_unique_hash_lists,
stored_edges: num_stored_edges,
};
println!("{}", json::stringify_pretty(obj, 4));
}
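// Sample output shape (values illustrative):
//
// {
//     "unique_hashes": 1234567,
//     "total_edges": 89012345,
//     "unique_hash_lists": 45678,
//     "stored_edges": 2345678
// }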
ocaml_ffi! {
fn hh_fanout_dep_graph_stats_main(dep_graph: OsString) {
main(dep_graph);
}
} |
TOML | hhvm/hphp/hack/src/hh_manual/Cargo.toml | # @generated by autocargo
[package]
name = "hh_manual"
version = "0.0.0"
edition = "2021"
[dependencies]
anyhow = "1.0.71"
clap = { version = "3.2.25", features = ["derive", "env", "regex", "unicode", "wrap_help"] }
pulldown-cmark = "0.9.1" |
Rust | hhvm/hphp/hack/src/hh_manual/src/main.rs | use std::collections::HashMap;
use std::ffi::OsString;
use std::io::ErrorKind;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use anyhow::Context;
use anyhow::Result;
use clap::Parser;
use clap::Subcommand;
use pulldown_cmark::CodeBlockKind;
use pulldown_cmark::Event;
use pulldown_cmark::Options;
use pulldown_cmark::Parser as MarkdownParser;
use pulldown_cmark::Tag;
/// Handles code samples in markdown files from the Hack manual.
///
/// Quickstart:
///
/// $ cd ~/fbsource/fbcode
/// $ buck run //hphp/hack/src/hh_manual:hh_manual extract hphp/hack/manual/hack/
#[derive(Parser, Debug)]
#[clap(verbatim_doc_comment)]
struct Cli {
#[clap(subcommand)]
command: Commands,
}
#[derive(Subcommand, Debug)]
enum Commands {
/// Extract Hack code samples as standalone files suitable for hh_single_type_check.
///
/// For every markdown file in the directory (recursively), extract any example of the
/// form:
///
/// ```hack
/// your_sample_code_here();
/// ```
///
/// Additionally, the following syntax is recognized:
///
/// ```hack no-extract
/// // This is highlighted as Hack in the manual, but
/// // not extracted as a standalone file for testing.
/// ```
///
/// ```hack error
/// // This is extracted as a .hack_error file, and tests will ensure
/// // that it actually produces an error.
/// ```
///
/// ```hack file:foo.hack
/// // This is extracted as a file named foo.hack. Multiple code blocks
/// // can use file:foo.hack, and the result will be concatenated.
/// ```
///
/// Each code block is wrapped in a toplevel function. If the block contains any
/// toplevel definitions (function, classes, etc), the block is extracted unchanged.
///
/// ```hack
/// function foo(): void {
/// // This code block is extracted as-is.
/// }
/// ```
/// Any previous *.hack, *.hack_error or *.php files are deleted from the destination
/// directory.
///
/// Other files are untouched, so you can create HH_FLAGS files for any chapters in
/// the manual.
#[clap(verbatim_doc_comment)]
Extract {
/// The directory containing the chapters of the Hack manual.
path: PathBuf,
},
}
#[derive(Debug, Clone)]
struct CodeBlock {
filename: Option<String>,
content: String,
error: bool,
}
/// Does `src` look like toplevel code, such as a class or function
/// definition? If not, it's just a snippet.
fn looks_like_toplevel_code(src: &str) -> bool {
let toplevel_prefixes = [
// Functions can start with these.
"function",
"async",
// Classish types.
"class",
"trait",
"interface",
"enum",
"abstract",
"final",
// Type aliases
"type",
"newtype",
// Constants
"const",
// Modules
"module",
"new module",
"internal",
"public",
// Using types/namespaces.
"namespace",
"use",
];
src.lines().any(|line| {
for prefix in toplevel_prefixes {
if line.starts_with(&format!("{} ", prefix)) {
return true;
}
}
false
})
}
/// Wrap snippet `src` in a function definition, so it's a valid Hack
/// program.
///
/// If `i` is provided, append it to the function name, so we can have
/// multiple functions in the same file.
fn wrap_snippet(src: &str, i: Option<usize>) -> String {
let mut res = String::new();
res.push_str(&format!(
"async function example_snippet_wrapper{}(): Awaitable<void> {{\n",
match i {
Some(i) => format!("{}", i),
None => "".to_owned(),
},
));
let can_indent = !src.contains("<<<");
for line in src.lines() {
res.push_str(&format!("{}{}\n", if can_indent { " " } else { "" }, line));
}
res.push_str("}\n");
res
}
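// For example (illustrative), wrap_snippet("foo();", Some(2)) yields roughly:
//
//   async function example_snippet_wrapper2(): Awaitable<void> {
//     foo();
//   }
//
// If the snippet contains a heredoc (`<<<`), the body is left unindented,
// presumably so the heredoc terminator can stay in column zero.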
/// Given markdown text `src`, extract all the triple-backtick code
/// blocks that are marked as `hack`.
fn extract_hack_blocks(src: &str) -> Result<Vec<CodeBlock>> {
let options = Options::empty();
let parser = MarkdownParser::new_ext(src, options);
let mut res = vec![];
let mut block_info: Option<String> = None;
let mut file_snippet_count: HashMap<String, usize> = HashMap::new();
for event in parser {
match event {
Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(info))) => {
block_info = Some(info.into_string());
}
Event::Text(t) => {
if let Some(info) = &block_info {
if let Some(hack_info) = info.to_lowercase().trim().strip_prefix("hack") {
let mut filename = None;
let mut should_extract = true;
let mut error = false;
for part in hack_info.trim().split(' ') {
if part.is_empty() {
// No metadata after the triple backticks.
} else if part == "no-extract" {
// Highlighted as Hack, but not extracted
should_extract = false;
break;
} else if part == "error" {
error = true;
} else if part.starts_with("file:") {
filename = Some(part.trim_start_matches("file:").to_owned());
} else {
return Err(anyhow::anyhow!(
"Invalid code block metadata '{}'",
part,
));
}
}
let content = if looks_like_toplevel_code(&t) {
t.into_string()
} else {
let snippet_i = if let Some(filename) = &filename {
*file_snippet_count.entry(filename.to_string()).or_insert(0) += 1;
Some(file_snippet_count[filename])
} else {
None
};
wrap_snippet(&t, snippet_i)
};
if should_extract {
res.push(CodeBlock {
filename,
content,
error,
});
}
}
}
}
Event::End(_) => {
block_info = None;
}
_ => {}
}
}
Ok(res)
}
/// Concatenate all code blocks with the same `file:foo.hack` filename
/// into a single block.
fn merge_by_filename(code_blocks: &[CodeBlock]) -> Vec<CodeBlock> {
let mut res: Vec<CodeBlock> = vec![];
let mut named_blocks: HashMap<String, CodeBlock> = HashMap::new();
for code_block in code_blocks {
if let Some(filename) = &code_block.filename {
named_blocks
.entry(filename.to_owned())
.and_modify(|b| b.content = format!("{}\n{}", b.content, code_block.content))
.or_insert_with(|| code_block.clone());
} else {
res.push(code_block.clone());
}
}
res.extend(named_blocks.values().cloned());
res
}
/// Write `content` as an extracted file to `out_path`.
fn write_example(out_path: &Path, content: &str, page_rel_path: &Path) -> Result<()> {
let mut out_f = std::fs::File::create(out_path)?;
let content_lines: Vec<_> = content.lines().collect();
// Generally we want the first line to be `// generated`. However, hh_single_type_check
// requires the `//// multifile.hack` comment to be the first line.
//
    // If we have a multifile comment or shebang, put `// generated` on the second line.
let first_line_multifile = if let Some(line) = content_lines.first() {
line.starts_with("////") || line.starts_with("#!")
} else {
false
};
for (i, line) in content_lines.iter().enumerate() {
if (i == 0 && !first_line_multifile) || (i == 1 && first_line_multifile) {
write!(out_f, "// @")?;
writeln!(
out_f,
"generated by hh_manual from {}",
page_rel_path.display()
)?;
writeln!(
out_f,
"// @codegen-command : buck run fbcode//hphp/hack/src/hh_manual:hh_manual extract fbcode/hphp/hack/manual/hack/"
)?;
}
writeln!(out_f, "{}", line)?;
}
Ok(())
}
/// Write all the extracted examples from page `page_name` to
/// `out_dir`.
fn write_extracted_examples(
out_dir: &Path,
page_rel_path: &Path,
page_name: &str,
code_blocks: &[CodeBlock],
) -> Result<()> {
std::fs::create_dir_all(out_dir)?;
let mut i = 0;
for code_block in code_blocks {
let out_name: String = if let Some(filename) = &code_block.filename {
format!("{}-{}", page_name, filename)
} else {
i += 1;
if code_block.error {
format!("{}-{:02}.hack_error", page_name, i)
} else {
format!("{}-{:02}.hack", page_name, i)
}
};
let out_path = out_dir.join(out_name);
write_example(&out_path, &code_block.content, page_rel_path)
.with_context(|| format!("Failed to write examples to {}", out_path.display()))?;
}
Ok(())
}
fn is_hidden(path: &Path) -> bool {
let name = path.file_name().unwrap_or_default();
name.to_string_lossy().starts_with('.')
}
/// Write all the extracted examples from `chapter_dir` to `test_dir`.
fn write_chapter_examples(chapter_dir: &Path, test_dir: &Path, hack_dir: &Path) -> Result<()> {
let chapter_name = chapter_dir.file_name().unwrap().to_string_lossy();
let out_dir = test_dir.join(&*chapter_name);
let rel_chapter_dir = chapter_dir.strip_prefix(hack_dir).unwrap_or(chapter_dir);
let rel_out_dir = out_dir.strip_prefix(hack_dir).unwrap_or(&out_dir);
println!(
"{:<45} -> {}",
rel_chapter_dir.display(),
rel_out_dir.display()
);
remove_existing_examples(&out_dir).with_context(|| {
format!(
"Failed to remove previously generated examples in {}",
chapter_name
)
})?;
for page_name in std::fs::read_dir(chapter_dir)? {
let page_path = page_name?.path();
if page_path.extension() == Some(&OsString::from("md")) && !is_hidden(&page_path) {
let src_bytes = std::fs::read(&page_path)
.with_context(|| format!("Could not read {}", page_path.display()))?;
let src = String::from_utf8_lossy(&src_bytes);
let code_blocks = extract_hack_blocks(&src)
.with_context(|| format!("Page: {}", page_path.display()))?;
let code_blocks = merge_by_filename(&code_blocks);
let page_name = page_path.file_stem().unwrap().to_string_lossy();
let page_rel_path = page_path.strip_prefix(hack_dir).unwrap_or(&page_path);
write_extracted_examples(&out_dir, page_rel_path, &page_name, &code_blocks)?;
}
}
Ok(())
}
/// Remove any *.php, *.hack or *.hack_error files from `path`, so we
/// don't have leftover extracted files that are no longer in the
/// markdown.
fn remove_existing_examples(path: &Path) -> Result<()> {
if let Ok(dir_entries) = std::fs::read_dir(path) {
for file_name in dir_entries {
let file_path = file_name?.path();
if file_path.extension() == Some(&OsString::from("php"))
|| (file_path.extension() == Some(&OsString::from("hack")))
|| (file_path.extension() == Some(&OsString::from("hack_error")))
{
std::fs::remove_file(file_path)?;
}
}
}
Ok(())
}
fn main() -> Result<()> {
let cli = Cli::parse();
let guide_dir = match cli.command {
Commands::Extract { path } => path,
};
let abs_guide_dir = match guide_dir.canonicalize() {
Ok(d) => d,
Err(e) => match e.kind() {
ErrorKind::NotFound => {
return Err(anyhow::format_err!(
"Path does not exist: {}",
guide_dir.display()
));
}
_ => {
return Err(anyhow::format_err!("{}", e.to_string()));
}
},
};
let hack_dir = abs_guide_dir.parent().unwrap().parent().unwrap();
let test_dir = hack_dir.join("test").join("extracted_from_manual");
for chapter_name in std::fs::read_dir(&abs_guide_dir)? {
let chapter_path = chapter_name?.path();
write_chapter_examples(&chapter_path, &test_dir, hack_dir)?;
}
Ok(())
} |
TOML | hhvm/hphp/hack/src/hh_naming_table_builder/Cargo.toml | # @generated by autocargo
[package]
name = "naming_table_builder"
version = "0.0.0"
edition = "2021"
[lib]
path = "naming_table_builder.rs"
[[bin]]
name = "hh_naming_table_builder"
path = "hh_naming_table_builder.rs"
[dependencies]
anyhow = "1.0.71"
bumpalo = { version = "3.11.1", features = ["collections"] }
clap = { version = "3.2.25", features = ["derive", "env", "regex", "unicode", "wrap_help"] }
direct_decl_parser = { version = "0.0.0", path = "../parser/api/cargo/direct_decl_parser" }
files_to_ignore = { version = "0.0.0", path = "../utils/files_to_ignore" }
find_utils = { version = "0.0.0", path = "../utils/find_utils" }
hh_config = { version = "0.0.0", path = "../utils/hh_config/cargo" }
hh_slog = { version = "0.0.0", path = "../utils/hh_slog/cargo/hh_slog" }
hhi = { version = "0.0.0", path = "../hhi/rust" }
names = { version = "0.0.0", path = "../naming/names_rust" }
oxidized = { version = "0.0.0", path = "../oxidized" }
oxidized_by_ref = { version = "0.0.0", path = "../oxidized_by_ref" }
rayon = "1.2"
relative_path = { version = "0.0.0", path = "../utils/rust/relative_path" }
si_addendum = { version = "0.0.0", path = "../utils/cargo/si_addendum" }
slog = { version = "2.7", features = ["max_level_trace", "nested-values"] }
tempdir = "0.3" |
hhvm/hphp/hack/src/hh_naming_table_builder/dune | (rule
(targets libnaming_table_builder_ffi.a)
(deps
(source_tree %{workspace_root}/hack/src))
(locks /cargo)
(action
(run
%{workspace_root}/hack/scripts/invoke_cargo.sh
naming_table_builder_ffi
naming_table_builder_ffi)))
(library
(name naming_table_builder_ffi)
(wrapped false)
(modules)
(foreign_archives naming_table_builder_ffi))
(library
(name naming_table_builder_ffi_externs)
(wrapped false)
(modules naming_table_builder_ffi_externs)
(libraries
naming_table_builder_ffi
relative_path
search_utils
sys_utils)) |
|
Rust | hhvm/hphp/hack/src/hh_naming_table_builder/hh_naming_table_builder.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
/// Generates a naming table saved state and outputs it to the given file
fn main() -> anyhow::Result<()> {
std::env::set_var("RUST_BACKTRACE", "1");
std::env::set_var("RUST_LIB_BACKTRACE", "1");
if matches!(std::env::var("RUST_LOG").ok().as_deref(), None | Some("")) {
std::env::set_var("RUST_LOG", "INFO");
}
let args = <naming_table_builder::Args as clap::Parser>::parse();
let status = naming_table_builder::build_naming_table(args)?;
std::process::exit(status.as_code());
} |
Rust | hhvm/hphp/hack/src/hh_naming_table_builder/naming_table_builder.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::Path;
use std::path::PathBuf;
use files_to_ignore::FilesToIgnore;
use oxidized::decl_parser_options::DeclParserOptions;
use oxidized::parser_options::ParserOptions;
use oxidized::search_types::SiAddendum;
use oxidized_by_ref::direct_decl_parser::ParsedFileWithHashes;
use rayon::prelude::*;
use relative_path::RelativePath;
#[derive(Debug, clap::Parser)]
pub struct Args {
/// The root of the repository, where .hhconfig is, e.g., ~/www
#[clap(long)]
pub www: PathBuf,
/// Filename to save naming table to
#[clap(long)]
pub output: PathBuf,
/// Indicates that the output file should be overwritten, if present
#[clap(long)]
pub overwrite: bool,
/// Provide a directory containing custom HHIs to use in place of the `hhi` crate
#[clap(long)]
pub custom_hhi_path: Option<PathBuf>,
/// By default, this program exits with an error status code if the
/// generated naming table contains duplicate symbols (stored in the
/// NAMING_SYMBOLS_OVERFLOW table). If this option is provided, exit with a
/// success code instead. hh_server is not designed to tolerate duplicates
/// or read the overflow table, but the rearchitecture is.
#[clap(long)]
pub allow_collisions: bool,
/// Write to the DB concurrently with parsing. Saves time but makes assigned
/// file info IDs nondeterministic (by default, file info IDs are assigned
/// so that RelativePaths are in sorted order).
#[clap(long)]
pub unsorted: bool,
}
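// Hypothetical invocation (paths are examples only):
//
//   hh_naming_table_builder --www ~/www --output /tmp/naming_table.sql --overwrite
//
// Without --unsorted, summaries are sorted by RelativePath before writing so
// the assigned file info IDs are deterministic; with it, parsing streams into
// the DB concurrently.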
pub fn build_naming_table(args: Args) -> anyhow::Result<ExitStatus> {
let (log, _guard) = hh_slog::init_term_envlogger("");
let hhconfig = hh_config::HhConfig::from_root(&args.www, &Default::default())?;
if args.output.exists() {
let output = args.output.display();
if args.output.is_dir() {
eprintln!("Cannot write to {output}; is a directory");
return Ok(ExitStatus::InputError);
}
if args.overwrite {
slog::warn!(log, "Deleting {output}, since --overwrite was passed",);
std::fs::remove_file(&args.output)?;
} else {
eprintln!("{output} exists; if you wish to overwrite it, use the --overwrite flag",);
return Ok(ExitStatus::InputError);
}
}
let (hhi_path, _hhi_tmpdir) = if let Some(path) = args.custom_hhi_path {
if path.exists() && path.is_dir() {
slog::info!(log, "Using HHI files in {}", path.display());
(path, None)
} else {
eprintln!("Custom HHI directory {} not found", path.display());
return Ok(ExitStatus::InputError);
}
} else {
let tmpdir = tempdir::TempDir::new("hh_naming_table_builder_hhi")?;
let path = tmpdir.path().to_owned();
slog::info!(log, "Extracting HHI files to {}", path.display());
hhi::write_hhi_files(&path)?;
(path, Some(tmpdir))
};
slog::info!(log, "Walking WWW...");
let files_to_ignore = FilesToIgnore::new(&hhconfig.ignored_paths)?;
let walk = |root, prefix| -> anyhow::Result<Vec<_>> {
find_utils::find_hack_files(&files_to_ignore, root, prefix).collect()
};
let mut filenames = walk(&args.www, relative_path::Prefix::Root)?;
filenames.extend(walk(&hhi_path, relative_path::Prefix::Hhi)?);
let decl_opts = &DeclParserOptions::from_parser_options(&hhconfig.opts);
let parse = |path| parse_file(&hhconfig.opts, decl_opts, &args.www, &hhi_path, path);
let save_result = if args.unsorted {
slog::info!(log, "Parsing files and writing to DB...");
names::Names::build(&args.output, |tx| {
filenames
.into_par_iter()
.try_for_each(|path| -> anyhow::Result<_> { Ok(tx.send(parse(path)?)?) })
})?
} else {
slog::info!(log, "Parsing files...");
let mut summaries: Vec<(RelativePath, names::FileSummary)> = filenames
.into_par_iter()
.map(parse)
.collect::<anyhow::Result<_>>()?;
summaries.par_sort_by(|a, b| a.0.cmp(&b.0));
slog::info!(log, "Writing to DB...");
names::Names::build_from_iterator(&args.output, summaries.into_iter())?
};
slog::info!(
log,
"Inserted symbols into the naming table: {:?}",
&save_result
);
if !save_result.collisions.is_empty() && !args.allow_collisions {
slog::error!(
log,
"Failed due to name collisions: {:?}",
save_result.collisions,
);
return Ok(ExitStatus::SqlAssertionFailure);
}
slog::info!(log, "Finished saving naming table with 0 errors");
Ok(ExitStatus::NoError)
}
/// Functionally similar to `build_naming_table` with certain args set. The main differences are:
/// - `unsorted` set to true, `allow_collisions` set to false.
/// - Symbol index addenda are derived from decl parsing and returned.
pub fn build_naming_table_ide(
www: &Path,
hhi_path: &Path,
output: &Path,
) -> anyhow::Result<(ExitStatus, Vec<(RelativePath, Vec<SiAddendum>)>)> {
let hhconfig = hh_config::HhConfig::from_root(www, &Default::default())?;
let files_to_ignore = FilesToIgnore::new(&hhconfig.ignored_paths)?;
let walk = |root, prefix| -> anyhow::Result<Vec<_>> {
find_utils::find_hack_files(&files_to_ignore, root, prefix).collect()
};
let mut filenames = walk(www, relative_path::Prefix::Root)?;
filenames.extend(walk(hhi_path, relative_path::Prefix::Hhi)?);
let decl_opts = &DeclParserOptions::from_parser_options(&hhconfig.opts);
// Parse each file in parallel to get the file summary and a list of symbol
// index addenda. The addenda are used to update the symbol index DB (e.g.
// autocomplete, workspace symbol).
let parse_results: Vec<(
(RelativePath, names::FileSummary),
(RelativePath, Vec<SiAddendum>),
)> = filenames
.into_par_iter()
.map(|path| {
let (path, summary, addenda) =
parse_file_with_addenda(&hhconfig.opts, decl_opts, www, hhi_path, path)?;
Ok(((path.clone(), summary), (path, addenda)))
})
.collect::<anyhow::Result<_>>()?;
let (summaries, addenda): (Vec<_>, Vec<_>) = parse_results.into_iter().unzip();
let save_result = names::Names::build_from_iterator(output, summaries.into_iter())?;
if !save_result.collisions.is_empty() {
// TODO(toyang): should we allow collisions at this point? I'm not sure if a duplicate naming error should be reported here vs. somewhere else.
return Ok((ExitStatus::SqlAssertionFailure, addenda));
}
Ok((ExitStatus::NoError, addenda))
}
#[derive(Copy, Clone, Debug)]
#[repr(u8)]
pub enum ExitStatus {
NoError = 0,
InputError = 10,
SqlAssertionFailure = 212,
}
impl ExitStatus {
pub fn as_code(self) -> i32 {
self as u8 as i32
}
}
fn parse_file_with_hashes<'a>(
text: &'a [u8],
arena: &'a bumpalo::Bump,
opts: &ParserOptions,
decl_opts: &DeclParserOptions,
path: RelativePath,
) -> anyhow::Result<ParsedFileWithHashes<'a>> {
let prefix = path.prefix();
let parsed_file =
direct_decl_parser::parse_decls_for_typechecking(decl_opts, path, text, arena);
let with_hashes =
ParsedFileWithHashes::new(parsed_file, opts.po_deregister_php_stdlib, prefix, arena);
Ok(with_hashes)
}
fn parse_file(
opts: &ParserOptions,
decl_opts: &DeclParserOptions,
root: &Path,
hhi_path: &Path,
path: RelativePath,
) -> anyhow::Result<(RelativePath, names::FileSummary)> {
let text = std::fs::read(match path.prefix() {
relative_path::Prefix::Root => root.join(path.path()),
relative_path::Prefix::Hhi => hhi_path.join(path.path()),
prefix => panic!("Unexpected RelativePath prefix: {prefix}"),
})?;
let arena = bumpalo::Bump::new();
let with_hashes = parse_file_with_hashes(&text, &arena, opts, decl_opts, path.clone())?;
let summary = names::FileSummary::new(&with_hashes);
Ok((path, summary))
}
fn parse_file_with_addenda(
opts: &ParserOptions,
decl_opts: &DeclParserOptions,
root: &Path,
hhi_path: &Path,
path: RelativePath,
) -> anyhow::Result<(RelativePath, names::FileSummary, Vec<SiAddendum>)> {
let text = std::fs::read(match path.prefix() {
relative_path::Prefix::Root => root.join(path.path()),
relative_path::Prefix::Hhi => hhi_path.join(path.path()),
prefix => panic!("Unexpected RelativePath prefix: {prefix}"),
})?;
let arena = bumpalo::Bump::new();
let with_hashes = parse_file_with_hashes(&text, &arena, opts, decl_opts, path.clone())?;
let summary = names::FileSummary::new(&with_hashes);
let addenda = si_addendum::get_si_addenda(&with_hashes);
Ok((path, summary, addenda))
} |
Rust | hhvm/hphp/hack/src/hh_naming_table_builder/naming_table_builder_ffi.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Instant;
use ocamlrep_custom::Custom;
use oxidized::search_types::SiAddendum;
use relative_path::RelativePath;
use unwrap_ocaml::UnwrapOcaml;
/// A future representing a naming table build. Like Rust's standard futures,
/// once it has been polled to completion it shouldn't be polled again.
#[derive(Clone, Default)]
struct BuildProgress(Arc<Mutex<Option<BuildResultWithTelemetry>>>);
struct BuildResultWithTelemetry {
pub result: anyhow::Result<BuildResult>,
pub time_elapsed_secs: f64,
}
struct BuildResult {
pub exit_status: naming_table_builder::ExitStatus,
pub si_addenda: Vec<(RelativePath, Vec<SiAddendum>)>,
}
impl BuildProgress {
pub fn poll_and_take(&self) -> Option<BuildResultWithTelemetry> {
let mut guard = self.0.lock().unwrap();
guard.take()
}
pub fn set(&self, result: BuildResultWithTelemetry) {
let mut guard = self.0.lock().unwrap();
assert!(guard.is_none());
*guard = Some(result);
}
}
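// Caller-side sketch (hypothetical): `poll_and_take` removes the completed
// result, so a finished build yields `Some(..)` exactly once and `None` on
// every later poll; callers must treat the first `Some` as final.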
impl ocamlrep_custom::CamlSerialize for BuildProgress {
ocamlrep_custom::caml_serialize_default_impls!();
}
fn spawn_build(www: PathBuf, hhi_path: PathBuf, output: PathBuf) -> BuildProgress {
let progress = BuildProgress::default();
let progress_in_builder_thread = progress.clone();
let _handle = std::thread::spawn(move || {
let build_benchmark_start = Instant::now();
// TODO: an atomic rename would probably be more robust here.
if output.exists() {
if let Err(err) = std::fs::remove_file(&output) {
progress_in_builder_thread.set(BuildResultWithTelemetry {
result: Err(anyhow::anyhow!(err)),
time_elapsed_secs: build_benchmark_start.elapsed().as_secs_f64(),
});
return;
}
}
let result = naming_table_builder::build_naming_table_ide(&www, &hhi_path, &output).map(
|(exit_status, si_addenda)| BuildResult {
exit_status,
si_addenda,
},
);
let time_elapsed_secs = build_benchmark_start.elapsed().as_secs_f64();
progress_in_builder_thread.set(BuildResultWithTelemetry {
result,
time_elapsed_secs,
});
});
progress
// thread is detached
}
ocamlrep_ocamlpool::ocaml_ffi! {
fn naming_table_builder_ffi_build(www: PathBuf, custom_hhi_path: PathBuf, output: PathBuf) -> Custom<BuildProgress> {
let progress = spawn_build(www, custom_hhi_path, output);
Custom::from(progress)
}
// Returns (exit_status, si_addenda, time_taken_ms)
fn naming_table_builder_ffi_poll(progress: Custom<BuildProgress>) -> Option<(i32, Vec<(RelativePath, Vec<SiAddendum>)>, f64)> {
progress.poll_and_take().map(
|BuildResultWithTelemetry {
result,
time_elapsed_secs,
}| {
let result = result.unwrap_ocaml();
(result.exit_status.as_code(), result.si_addenda, time_elapsed_secs)
}
)
}
} |
OCaml | hhvm/hphp/hack/src/hh_naming_table_builder/naming_table_builder_ffi_externs.ml | (*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Hh_prelude
type build_progress
type si_addenda = (Relative_path.t * SearchTypes.si_addendum list) list
type build_result = {
exit_status: int;
si_addenda: si_addenda;
time_taken_secs: float;
}
external build :
www:string -> custom_hhi_path:string -> output:string -> build_progress
= "naming_table_builder_ffi_build"
(** The naming table build represents failure in one of two ways: by
throwing an OCaml exception (we unwrap the result and obtain a call
stack), or by returning a non-zero exit status. The exit statuses are
described by `naming_table_builder::ExitStatus`. All other failures are
represented by exceptions. *)
external poll_exn :
build_progress ->
(int * (Relative_path.t * SearchTypes.si_addendum list) list * float) option
= "naming_table_builder_ffi_poll"
let build ~(www : Path.t) ~(custom_hhi_path : Path.t) ~(output : Path.t) :
build_progress =
build
~www:(Path.to_string www)
~custom_hhi_path:(Path.to_string custom_hhi_path)
~output:(Path.to_string output)
let poll_exn (progress : build_progress) : build_result option =
poll_exn progress
|> Option.map ~f:(fun (exit_status, si_addenda, time_taken_secs) ->
{ exit_status; si_addenda; time_taken_secs }) |
TOML | hhvm/hphp/hack/src/hh_naming_table_builder/cargo/naming_table_builder_ffi/Cargo.toml | # @generated by autocargo
[package]
name = "naming_table_builder_ffi"
version = "0.0.0"
edition = "2021"
[lib]
path = "../../naming_table_builder_ffi.rs"
test = false
doctest = false
crate-type = ["lib", "staticlib"]
[dependencies]
anyhow = "1.0.71"
naming_table_builder = { version = "0.0.0", path = "../.." }
ocamlrep_custom = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
ocamlrep_ocamlpool = { version = "0.1.0", git = "https://github.com/facebook/ocamlrep/", branch = "main" }
oxidized = { version = "0.0.0", path = "../../../oxidized" }
relative_path = { version = "0.0.0", path = "../../../utils/rust/relative_path" }
unwrap_ocaml = { version = "0.0.0", path = "../../../utils/unwrap_ocaml" } |
OCaml | hhvm/hphp/hack/src/hh_oxidize/configuration.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
open Reordered_argument_collections
type mode =
| ByBox
| ByRef
type t = {
mode: mode;
extern_types: string SMap.t;
owned_types: SSet.t;
copy_types: SSet.t option;
}
let default =
{
extern_types = SMap.empty;
mode = ByBox;
owned_types = SSet.empty;
copy_types = None;
}
let config : t option ref = ref None
let set t =
if Option.is_some !config then failwith "Config already set";
config := Some t
let mode () = (Option.value_exn !config).mode
let extern_type type_name =
"" :: State.curr_module_name () :: Output.glob_uses ()
|> List.find_map ~f:(fun mod_name ->
let maybe_qualified_type =
if String.equal mod_name "" then
type_name
else
mod_name ^ "::" ^ type_name
in
SMap.find_opt
(Option.value_exn !config).extern_types
maybe_qualified_type)
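(* Worked example (names are hypothetical): suppose extern_types maps
   "bar::Bar" to "foo::bar::Bar". While converting module [bar], a use of
   [Bar] is tried as "Bar", then "bar::Bar", then "<glob_use>::Bar" for
   each opened module; the "bar::Bar" candidate matches, so the emitted
   type is the fully-qualified "foo::bar::Bar". *)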
let owned_type type_name =
"" :: State.curr_module_name () :: Output.glob_uses ()
|> List.exists ~f:(fun mod_name ->
let maybe_qualified_type =
if String.equal mod_name "" then
type_name
else
mod_name ^ "::" ^ type_name
in
SSet.mem (Option.value_exn !config).owned_types maybe_qualified_type)
let copy_type type_name =
match (Option.value_exn !config).copy_types with
| None -> `Unknown
| Some copy_types ->
`Known
("" :: State.curr_module_name () :: Output.glob_uses ()
|> List.exists ~f:(fun mod_name ->
let maybe_qualified_type =
if String.equal mod_name "" then
type_name
else
mod_name ^ "::" ^ type_name
in
SSet.mem copy_types maybe_qualified_type))
let is_known v b =
match v with
| `Known k -> Bool.equal b k
| _ -> false |
OCaml Interface | hhvm/hphp/hack/src/hh_oxidize/configuration.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
(** Config settings which apply to the conversion of all modules. The global
    configuration is set once at startup and never changed afterwards. *)
type mode =
| ByBox (** Emit recursive definitions using Boxes. Use Vecs and Strings. *)
| ByRef
(** Emit type definitions containing references, slices, and
&strs rather than Boxes, Vecs, and Strings. The emitted type
definitions are intended to be suitable for arena-allocation
(i.e., all types have a no-op implementation of Drop). *)
type t = {
mode: mode;
(** The mode to use when emitting recursive types, vectors, and strings. *)
extern_types: string SMap.t;
(** The extern_types setting allows for the importing of types defined
outside the set of modules to be oxidized. If our extern_types map has
an entry mapping ["bar::Bar"] to ["foo::bar::Bar"], then instances of
[Bar.t] in the OCaml source will be converted to [foo::bar::Bar]
rather than [bar::Bar]. All extern_types are assumed to take no
lifetime parameter. *)
owned_types: SSet.t;
(** The owned_types setting allows specifying a set of types which do not
need a lifetime parameter (so that hh_oxidize need not use global
knowledge of all types being converted to track which do and do not
need lifetime parameters). *)
copy_types: SSet.t option;
(** The copy_types setting allows specifying a set of types which
implement Copy, and should not be put behind a reference (so that
hh_oxidize need not use global knowledge of all types being converted
to track which do and do not implement Copy). *)
}
val default : t
(** Set the global config. To be invoked at startup. Raises an exception if
invoked more than once. *)
val set : t -> unit
(** Return the emitter's [mode]. Raises an exception if invoked before [set]. *)
val mode : unit -> mode
(** If the given type name was set to be imported from an extern types file,
return its fully-qualified name, else None. Raises an exception if invoked
before [set]. *)
val extern_type : string -> string option
(** If the given type name does not need a lifetime parameter, return true.
Raises an exception if invoked before [set]. *)
val owned_type : string -> bool
(** If the given type name implements (or should implement) Copy, return
[`Known true]. If no list of copy types was provided, return [`Unknown].
Raises an exception if invoked before [set]. *)
val copy_type : string -> [ `Known of bool | `Unknown ]
(** Test if the given value is the right kind of `Known *)
val is_known : [> `Known of bool ] -> bool -> bool |
OCaml | hhvm/hphp/hack/src/hh_oxidize/convert_longident.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
open Longident
let strip_decl_prefix =
SSet.of_list
[
"DeclFunArity";
"DeclFunParam";
"DeclFunParams";
"DeclFunType";
"DeclPossiblyEnforcedTy";
"DeclTparam";
"DeclTy";
"DeclWhereConstraint";
]
type flattened_longident =
| FLident of string list
| FLdot of flattened_longident * string list
| FLapply of flattened_longident list
let rec flatten_longident = function
| Lident str -> FLident [str]
| Lapply (id1, id2) ->
let ids =
match flatten_longident id1 with
| FLapply ids -> ids
| id -> [id]
in
FLapply (ids @ [flatten_longident id2])
| Ldot (id, str) ->
(match flatten_longident id with
| FLident strs -> FLident (str :: strs)
| FLdot (id, strs) -> FLdot (id, str :: strs)
| FLapply _ as id -> FLdot (id, [str]))
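(* Worked example: [Ldot (Ldot (Lident "Foo", "Bar"), "t")] flattens to
   [FLident ["t"; "Bar"; "Foo"]] -- components accumulate most-recent-first,
   which is why [to_string] below reverses the lists before joining. *)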
let to_string for_open id =
let rec to_string id =
match id with
| FLident []
| FLdot (_, [])
| FLapply [] ->
assert false
| FLident (ty :: modules) ->
let ty =
match (ty, modules) with
| _ when for_open -> Utils.convert_module_name ty
| ("t", m :: _) -> Utils.convert_type_name m
| _ -> Utils.convert_type_name ty
in
(* HACK: The oxidized version of `ty` has no phase. *)
let ty =
if String.equal ty "LoclTy" then
"Ty"
else if String.equal ty "LoclPossiblyEnforcedTy" then
"PossiblyEnforcedTy"
else if SSet.mem ty strip_decl_prefix then
String.chop_prefix_exn ty ~prefix:"Decl"
else
ty
in
let modules = List.map modules ~f:Utils.convert_module_name in
ty :: modules |> List.rev |> String.concat ~sep:"::"
| FLdot (id, assoc_tys) ->
let id = to_string id in
let assoc_tys = List.map assoc_tys ~f:Utils.convert_type_name in
assoc_tys |> List.rev |> List.cons id |> String.concat ~sep:"::"
| FLapply (ftor :: args) ->
let ftor = to_string ftor in
let args = args |> List.map ~f:to_string |> String.concat ~sep:", " in
sprintf "%s<%s>" ftor args
in
to_string id
let longident_to_string ?(for_open = false) id =
flatten_longident id |> to_string for_open |
OCaml Interface | hhvm/hphp/hack/src/hh_oxidize/convert_longident.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val longident_to_string : ?for_open:bool -> Longident.t -> string |
OCaml | hhvm/hphp/hack/src/hh_oxidize/convert_toplevel_phrase.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
open Asttypes
open Parsetree
open Utils
open Output
open Convert_longident
module Env : sig
type t
val empty : t
val add_defined_module : t -> string -> t
val is_defined_submodule : t -> string -> bool
end = struct
type t = { defined_submodules: SSet.t }
let empty = { defined_submodules = SSet.empty }
let add_defined_module (env : t) (module_name : string) : t =
let module_name = String.uncapitalize module_name in
{ defined_submodules = SSet.add module_name env.defined_submodules }
let is_defined_submodule (env : t) (module_name : string) : bool =
let module_name = String.uncapitalize module_name in
SSet.mem module_name env.defined_submodules
end
(* HACK: These modules are not used in any type declarations, so importing them
will result in an "unused import" warning or an "unresolved import" error in
Rust. *)
let module_blacklist =
[
(* nast.ml opens Aast, but doesn't use it in type declarations. *)
"aast";
"aast_defs_visitors_ancestors";
"ast_defs_visitors_ancestors";
"base::export";
"core_kernel";
"common";
"hh_core";
"hh_json";
"hh_prelude";
"naming_special_names";
"pp_type";
"reordered_argument_collections";
"sexplib::std";
"string_utils";
"utils";
]
(* HACK: These submodules are defined inline in another module. We don't convert
nested modules (I think it would be a bit of work to handle imports
properly), so we convert them manually and re-export them when we see their
OCaml definition. *)
let nested_modules =
[("ast_defs", "ShapeMap"); ("typing_defs_core", "TShapeMap")]
let blacklisted = List.mem module_blacklist ~equal:String.equal
(* HACK: These submodules are defined inline in another module solely to provide
scoping for the variant constructors. Since Rust enums define their own
namespace for their variants, there's no need to use submodules for this on
the Rust side. *)
let enum_modules =
[
("error_codes", "Parsing");
("error_codes", "Naming");
("error_codes", "NastCheck");
("error_codes", "Typing");
("error_codes", "Init");
(* An optional error set that runs only for arg --enable-global-access-check. *)
("error_codes", "GlobalAccessCheck");
]
let is_manually_converted_nested_module mod_name =
List.mem
nested_modules
(State.curr_module_name (), mod_name)
~equal:(Tuple.T2.equal ~eq1:String.equal ~eq2:String.equal)
let is_enum_module mod_name =
List.mem
enum_modules
(State.curr_module_name (), mod_name)
~equal:(Tuple.T2.equal ~eq1:String.equal ~eq2:String.equal)
let is_enum_module_import id =
match id with
| Longident.(Ldot (Lident mod_name, enum_type_name)) ->
List.mem
enum_modules
(convert_module_name mod_name, convert_type_name enum_type_name)
~equal:(Tuple.T2.equal ~eq1:String.equal ~eq2:String.equal)
| _ -> false
let string_of_module_desc = function
| Pmod_structure _ -> "Pmod_structure"
| Pmod_functor _ -> "Pmod_functor"
| Pmod_apply _ -> "Pmod_apply"
| Pmod_constraint _ -> "Pmod_constraint"
| Pmod_unpack _ -> "Pmod_unpack"
| Pmod_extension _ -> "Pmod_extension"
| Pmod_ident _ -> "Pmod_ident"
let structure_item (env : Env.t) (si : structure_item) : Env.t =
match si.pstr_desc with
(* A type declaration. The [type_decls] list is non-empty in the case of
   mutual recursion, i.e., `type ... and`. *)
| Pstr_type (_, type_decl :: type_decls) ->
Convert_type_decl.type_declaration type_decl;
List.iter
type_decls
~f:(Convert_type_decl.type_declaration ~mutual_rec:true);
env
| Pstr_type (_, []) -> failwith "unexpected parse tree: empty Pstr_type"
(* Convert `open Foo` to `use crate::foo::*;` *)
| Pstr_open { popen_expr; _ } ->
let id =
match popen_expr.pmod_desc with
| Pmod_ident id -> id
| _ -> failwith "unsupported 'open' statement"
in
let mod_name = longident_to_string id.txt ~for_open:true in
if blacklisted mod_name then
log "Not opening %s: it is blacklisted" mod_name
else
add_glob_use mod_name;
env
(* Convert `module F = Foo` to `use crate::foo as f;` *)
| Pstr_module
{
pmb_name = { txt = Some alias; _ };
pmb_expr = { pmod_desc = Pmod_ident id; _ };
_;
} ->
let mod_name =
longident_to_string id.txt ~for_open:(not (is_enum_module_import id.txt))
in
if blacklisted mod_name then
log "Not aliasing %s: it is blacklisted" mod_name
else
add_alias
mod_name
(if is_enum_module_import id.txt then
alias
else
convert_module_name alias);
env
(* Convert `include Foo` to explicit re-exports (`pub use`) for every type
exported by Foo (see {!Stringify.get_includes}). *)
| Pstr_include { pincl_mod = { pmod_desc = Pmod_ident id; _ }; _ } ->
let mod_name = longident_to_string id.txt ~for_open:true in
if blacklisted mod_name then
log "Not including %s: it is blacklisted" mod_name
else if Env.is_defined_submodule env mod_name then
log
"Not including %s: its definition is local and hasn't been converted."
mod_name
else
add_include mod_name;
env
| Pstr_module
{
pmb_name = { txt = Some mod_name; _ };
pmb_expr = { pmod_desc = Pmod_structure _; _ };
_;
}
when is_manually_converted_nested_module mod_name ->
let rust_mod_name = convert_module_name mod_name in
log
"Not converting submodule %s: importing crate::%s instead"
mod_name
rust_mod_name;
add_alias ("crate::" ^ rust_mod_name) rust_mod_name;
env
| Pstr_module
{
pmb_name = { txt = Some mod_name; _ };
pmb_expr =
{
pmod_desc =
Pmod_structure
({
pstr_desc =
Pstr_type
(_, [({ ptype_name = { txt = "t"; _ }; _ } as enum_type)]);
_;
}
:: _);
_;
};
_;
}
when is_enum_module mod_name ->
log "Converting submodule %s to enum type" mod_name;
let enum_type =
{
enum_type with
ptype_name = { enum_type.ptype_name with txt = mod_name };
}
in
Convert_type_decl.type_declaration enum_type;
env
| Pstr_module
{ pmb_name = { txt = Some name; _ }; pmb_expr = { pmod_desc; _ }; _ } ->
let kind = string_of_module_desc pmod_desc in
log "Not converting submodule %s: %s not supported" name kind;
let env = Env.add_defined_module env name in
env
| Pstr_include { pincl_mod = { pmod_desc; _ }; _ } ->
let kind = string_of_module_desc pmod_desc in
log "Not converting include: %s not supported" kind;
env
| Pstr_exception
{ ptyexn_constructor = { pext_name = { txt = name; _ }; _ }; _ } ->
log "Not converting exception %s" name;
env
| Pstr_eval _ ->
log "Not converting Pstr_eval";
env
| Pstr_primitive _ ->
log "Not converting Pstr_primitive";
env
| Pstr_typext _ ->
log "Not converting Pstr_typext";
env
| Pstr_recmodule _ ->
log "Not converting Pstr_recmodule";
env
| Pstr_module { pmb_name = { txt = None; _ }; _ } ->
log "Not converting unnamed Pstr_module";
env
| Pstr_modtype _ ->
log "Not converting Pstr_modtype";
env
| Pstr_class _ ->
log "Not converting Pstr_class";
env
| Pstr_class_type _ ->
log "Not converting Pstr_class_type";
env
| Pstr_extension _ ->
log "Not converting Pstr_extension";
env
(* Doc comments on files are represented with Pstr_attribute, so silently
ignore them. *)
| Pstr_attribute _ -> env
(* Our goal is to convert types only, so silently ignore values. *)
| Pstr_value _ -> env
let toplevel_phrase : Env.t -> toplevel_phrase -> Env.t =
fun env -> function
| Ptop_def items -> List.fold items ~f:structure_item ~init:env
| Ptop_dir _ ->
log "Not converting toplevel directive";
env |
OCaml Interface | hhvm/hphp/hack/src/hh_oxidize/convert_toplevel_phrase.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
module Env : sig
type t
val empty : t
end
val toplevel_phrase : Env.t -> Parsetree.toplevel_phrase -> Env.t |
OCaml | hhvm/hphp/hack/src/hh_oxidize/convert_type.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
open Asttypes
open Longident
open Parsetree
open Reordered_argument_collections
open Utils
open State
open Rust_type
let primitives =
[ "()"; "isize"; "usize"; "i64"; "u64"; "i32"; "u32"; "i16"; "u16"; "i8";
"u8"; "f32"; "f64"; "char"; "bool" ]
[@@ocamlformat "disable"]
let is_primitive ty args =
List.length args = 0 && List.mem primitives ty ~equal:String.equal
let rec is_copy ty =
if is_ref ty then
true
else
let (ty, targs) = type_name_and_params ty in
Configuration.is_known (Configuration.copy_type ty) true
|| is_primitive ty targs
|| (String.equal ty "Option"
|| String.equal ty "std::cell::Cell"
|| String.equal ty "std::cell::RefCell")
&& is_copy (List.hd_exn targs)
(* A list of (<module>, <ty1>, <ty2>) tuples where we need to add indirection.
In the definition of <module>::<ty1>, instances of <ty2> need to be boxed
(for instances of mutual recursion where we would otherwise define types of
infinite size). *)
let add_indirection_between () =
[
("typing_defs_core", "ConstraintType", "ConstraintType_");
("aast_defs", "Hint", "Hint_");
("patt_locl_ty", "PattLoclTy", "Shape");
("patt_locl_ty", "PattLoclTy", "Params");
("patt_locl_ty", "ShapeField", "PattLoclTy");
("patt_error", "PattError", "Secondary");
]
@
match Configuration.mode () with
| Configuration.ByBox -> [("typing_defs_core", "Ty", "Ty_")]
| Configuration.ByRef ->
[
("aast", "Expr_", "Expr");
("aast", "Expr_", "Afield");
("aast", "Expr_", "AssertExpr");
("typing_defs_core", "Ty_", "Ty");
]
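(* For instance, in ByBox mode [typing_defs_core::Ty] contains [Ty_], whose
   variants in turn contain [Ty]; boxing [Ty_] where it occurs inside [Ty]
   breaks the cycle, so the generated Rust type has a finite size. *)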
let equal_s3 = [%derive.eq: string * string * string]
let should_add_indirection ~seen_indirection (ty : Rust_type.t) =
match Configuration.mode () with
| Configuration.ByRef ->
if not (is_copy ty) then
true
else if seen_indirection then
false
else if String.equal (self ()) (type_name_and_params ty |> fst) then
true
else
let (ty, _) = type_name_and_params ty in
List.mem
(add_indirection_between ())
(curr_module_name (), self (), ty)
~equal:equal_s3
| Configuration.ByBox ->
let (ty, _) = type_name_and_params ty in
(not seen_indirection)
&& (String.equal (self ()) ty
|| List.mem
(add_indirection_between ())
(curr_module_name (), self (), ty)
~equal:equal_s3)
let add_rcoc_between = [("file_info", "Pos", "relative_path::RelativePath")]
let should_add_rcoc ty =
match Configuration.mode () with
| Configuration.ByRef -> false
| Configuration.ByBox ->
List.mem add_rcoc_between (curr_module_name (), self (), ty) ~equal:equal_s3
(* These types inherently add an indirection, so we don't need to box instances
of recursion in their type arguments. *)
let indirection_types = SSet.of_list ["Vec"]
(* When oxidizing by-reference, do not add a lifetime parameter to these builtins. *)
let owned_builtins =
SSet.of_list
(["Option"; "std::cell::RefCell"; "std::cell::Cell"; "Int64"] @ primitives)
let is_owned_builtin = SSet.mem owned_builtins
let rec core_type ?(seen_indirection = false) (ct : core_type) : Rust_type.t =
let (is_by_box, is_by_ref) =
match Configuration.mode () with
| Configuration.ByBox -> (true, false)
| Configuration.ByRef -> (false, true)
in
match ct.ptyp_desc with
| Ptyp_var "ty" when is_by_ref ->
rust_ref (lifetime "a") (rust_type "Ty" [lifetime "a"] [])
| Ptyp_var name -> convert_type_name name |> rust_type_var
| Ptyp_alias (_, name) -> rust_type (convert_type_name name) [] []
| Ptyp_tuple tys -> tuple tys
| Ptyp_arrow _ -> raise (Skip_type_decl "it contains an arrow type")
| Ptyp_constr ({ txt = Lident "list"; _ }, [arg]) when is_by_ref ->
let arg = core_type ~seen_indirection:true arg in
rust_ref (lifetime "a") (rust_type "[]" [] [arg])
| Ptyp_constr ({ txt = Lident "string"; _ }, []) when is_by_ref ->
rust_ref (lifetime "a") (rust_simple_type "str")
| Ptyp_constr ({ txt = Lident "byte_string"; _ }, []) when is_by_box ->
rust_type "bstr::BString" [] []
| Ptyp_constr ({ txt = Lident "t_byte_string"; _ }, []) when is_by_box ->
rust_type "bstr::BString" [] []
| Ptyp_constr ({ txt = Lident "byte_string"; _ }, []) when is_by_ref ->
rust_ref (lifetime "a") (rust_simple_type "bstr::BStr")
| Ptyp_constr ({ txt = Lident "t_byte_string"; _ }, []) when is_by_ref ->
rust_ref (lifetime "a") (rust_simple_type "bstr::BStr")
| Ptyp_constr ({ txt = Ldot (Lident "Path", "t"); _ }, []) ->
(* Path.t *)
if is_by_ref then
rust_ref (lifetime "a") (rust_simple_type "std::path::Path")
else
rust_simple_type "std::path::PathBuf"
| Ptyp_constr ({ txt = Ldot (Lident "Hash", "hash_value"); _ }, []) ->
(* Hash.hash_value *)
rust_type "isize" [] []
| Ptyp_constr (id, args) ->
let id =
match id.txt with
| Lident "unit" -> "()"
| Lident "int" -> "isize"
| Lident "bool" -> "bool"
| Lident "float" -> "f64"
| Lident "list" -> "Vec"
| Lident "ref" -> begin
match Configuration.mode () with
| Configuration.ByRef -> "std::cell::Cell"
| Configuration.ByBox -> "std::cell::RefCell"
end
| Ldot (Lident "Int64", "t") ->
Output.add_extern_use "ocamlrep_caml_builtins::Int64";
"Int64"
| id -> Convert_longident.longident_to_string id
in
let id =
if String.equal id "T" then
convert_type_name @@ curr_module_name ()
else
id
in
let extern_type = Configuration.extern_type id in
let id = Option.value extern_type ~default:id in
let seen_indirection = seen_indirection || SSet.mem indirection_types id in
let args =
(* HACK: eliminate phase type arguments *)
match args with
| [{ ptyp_desc = Ptyp_var "phase"; _ }]
| [{ ptyp_desc = Ptyp_var "ty"; _ }]
| [{ ptyp_desc = Ptyp_constr ({ txt = Lident "decl_phase"; _ }, _); _ }]
| [{ ptyp_desc = Ptyp_constr ({ txt = Lident "locl_phase"; _ }, _); _ }]
->
[]
| _ when String.equal id "FunType" -> []
| _ -> args
in
let add_lifetime =
is_by_ref
&& Option.is_none extern_type
&& (not (is_owned_builtin id))
&& not (Configuration.owned_type id)
in
let lifetime =
if add_lifetime then
[lifetime "a"]
else
[]
in
let args = List.map args ~f:(core_type ~seen_indirection) in
if should_add_rcoc id then
rust_type "std::sync::Arc" [] [rust_type id lifetime args]
(* Direct or indirect recursion *)
else if should_add_indirection ~seen_indirection (rust_type id [] args) then
match Configuration.mode () with
| Configuration.ByRef ->
if String.equal id "Option" then
rust_type
"Option"
[]
[rust_ref (Rust_type.lifetime "a") (List.hd_exn args)]
else
rust_ref (Rust_type.lifetime "a") (rust_type id lifetime args)
| Configuration.ByBox -> rust_type "Box" [] [rust_type id [] args]
else
rust_type id lifetime args
| Ptyp_any -> raise (Skip_type_decl "cannot convert type Ptyp_any")
| Ptyp_object _ -> raise (Skip_type_decl "cannot convert type Ptyp_object")
| Ptyp_class _ -> raise (Skip_type_decl "cannot convert type Ptyp_class")
| Ptyp_variant _ -> raise (Skip_type_decl "cannot convert type Ptyp_variant")
| Ptyp_poly _ -> raise (Skip_type_decl "cannot convert type Ptyp_poly")
| Ptyp_package _ -> raise (Skip_type_decl "cannot convert type Ptyp_package")
| Ptyp_extension _ ->
raise (Skip_type_decl "cannot convert type Ptyp_extension")
and tuple ?(seen_indirection = false) types =
List.map ~f:(core_type ~seen_indirection) types |> rust_type "()" []
let core_type = core_type ~seen_indirection:false
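(* Rough examples of the mapping above in the two modes:
   - [int list] becomes [Vec<isize>] by box and [&'a [isize]] by ref;
   - [string] becomes [String] by box and [&'a str] by ref;
   - [Path.t] becomes [std::path::PathBuf] by box and
     [&'a std::path::Path] by ref. *)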
let is_primitive ty = is_primitive ty [] |
OCaml Interface | hhvm/hphp/hack/src/hh_oxidize/convert_type.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val core_type : Parsetree.core_type -> Rust_type.t
val tuple : ?seen_indirection:bool -> Parsetree.core_type list -> Rust_type.t
val is_copy : Rust_type.t -> bool
val is_primitive : string -> bool |
OCaml | hhvm/hphp/hack/src/hh_oxidize/convert_type_decl.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
open Asttypes
open Longident
open Parsetree
open Printf
open Utils
open Output
open State
open Convert_longident
open Rust_type
let is_by_ref () =
match Configuration.mode () with
| Configuration.ByRef -> true
| Configuration.ByBox -> false
let stringify_attribute { attr_name; attr_payload; _ } =
match (attr_name, attr_payload) with
| ({ txt = "ocaml.doc" | "value"; _ }, _) -> None
| ({ txt; _ }, PStr []) -> Some txt
| ({ txt; _ }, PStr [structure_item]) ->
let item =
structure_item
|> Format.asprintf "%a" Pprintast.structure_item
|> String.strip ~drop:(function
| ' '
| '\t'
| ';' ->
true
| _ -> false)
in
Some (txt ^ " " ^ item)
| _ -> None
let add_default_attr_if_ocaml_yojson_drop_if attributes acc_attr_list =
let contains_yojson_drop_if attr =
match stringify_attribute attr with
| None -> false
| Some attr -> String.is_prefix attr ~prefix:"yojson_drop_if"
in
if List.exists attributes ~f:contains_yojson_drop_if then
"default" :: acc_attr_list
else
acc_attr_list
let add_deserialize_with_arena tys acc_attr_list =
let contains_ref = List.exists ~f:Rust_type.contains_ref tys in
if contains_ref || (is_by_ref () && List.exists ~f:Rust_type.is_var tys) then
(* Deserializing a type that contains any Cell causes a compilation error; see T90211775. *)
let contains_cell =
List.exists
~f:(fun t ->
Rust_type.type_name_and_params t
|> fst
|> String.is_suffix ~suffix:"::Cell")
tys
in
if contains_cell then
"skip" :: acc_attr_list
else
let acc_attr_list =
if contains_ref then
"borrow" :: acc_attr_list
else
acc_attr_list
in
"deserialize_with = \"arena_deserializer::arena\"" :: acc_attr_list
else
acc_attr_list
let rust_de_field_attr (tys : Rust_type.t list) (attributes : attributes) :
string =
let serde_attr_list =
[]
|> add_default_attr_if_ocaml_yojson_drop_if attributes
|> add_deserialize_with_arena tys
in
if List.is_empty serde_attr_list then
""
else
sprintf "#[serde(%s)]" @@ String.concat ~sep:", " serde_attr_list
let default_implements () =
match Configuration.mode () with
| Configuration.ByRef -> [(Some "arena_trait", "TrivialDrop")]
| Configuration.ByBox -> []
let implements_traits _name = default_implements ()
let default_derives () =
(match Configuration.mode () with
| Configuration.ByBox ->
[(Some "ocamlrep", "FromOcamlRep"); (Some "serde", "Deserialize")]
| Configuration.ByRef -> [(Some "ocamlrep", "FromOcamlRepIn")])
@ [
(None, "Clone");
(None, "Debug");
(None, "Eq");
(None, "Hash");
(None, "Ord");
(None, "PartialEq");
(None, "PartialOrd");
(Some "no_pos_hash", "NoPosHash");
(Some "eq_modulo_pos", "EqModuloPos");
(Some "ocamlrep", "ToOcamlRep");
(Some "serde", "Serialize");
(Some "serde", "Deserialize");
]
let derive_copy ty = Convert_type.is_copy (Rust_type.rust_simple_type ty)
let is_by_box () = not (is_by_ref ())
let additional_derives ty : (string option * string) list =
if derive_copy ty then
[(None, "Copy"); (Some "ocamlrep", "FromOcamlRepIn")]
else
[]
module DeriveSkipLists : sig
val skip_derive : ty:string -> trait:string -> bool
end = struct
let skip_list_for_ty ty =
let is_by_ref = is_by_ref () in
match ty with
(* A custom implementation of Ord for Error_ matches the sorting behavior of
errors in OCaml. *)
| "user_error::UserError" -> ["Ord"; "PartialOrd"]
(* GlobalOptions contains a couple floats, which only implement PartialEq
and PartialOrd, and do not implement Hash. *)
| "global_options::GlobalOptions" ->
["Eq"; "EqModuloPos"; "Hash"; "NoPosHash"; "Ord"]
(* And GlobalOptions is used in Genv which is used in Env. We
* don't care about comparison or hashing on environments *)
| "typing_env_types::Env" ->
["Eq"; "EqModuloPos"; "Hash"; "NoPosHash"; "Ord"]
| "typing_env_types::Genv" ->
["Eq"; "EqModuloPos"; "Hash"; "NoPosHash"; "Ord"]
(* And GlobalOptions is used in SavedEnv. *)
| "tast::SavedEnv" -> ["Eq"; "EqModuloPos"; "Hash"; "NoPosHash"; "Ord"]
| "tast::ByNames" -> ["Eq"; "EqModuloPos"; "Hash"; "NoPosHash"; "Ord"]
| "ast_defs::Id" -> ["Debug"]
| "errors::Errors" when is_by_ref -> ["Debug"]
| "typing_reason::T_" when is_by_ref -> ["Debug"]
| "typing_defs_core::Ty" when is_by_ref ->
["Eq"; "PartialEq"; "Ord"; "PartialOrd"]
| "typing_defs_core::Ty_" -> ["Debug"]
| "typing_defs_core::ConstraintType" when is_by_ref ->
["Eq"; "PartialEq"; "Ord"; "PartialOrd"]
| "typing_defs_core::TshapeFieldName" when is_by_ref -> ["Debug"]
| _ -> []
let skip_list_for_trait trait =
match trait with
| "EqModuloPos" ->
[
"scoured_comments::*";
"pos_or_decl::*";
"namespace_env::*";
"file_info::NameType";
"file_info::Pos";
"file_info::Id";
"file_info::FileInfo";
"file_info::Names";
"file_info::SavedNames";
"file_info::Saved";
"file_info::Diff";
"aast_defs::*";
"nast::*";
"tast::*";
"full_fidelity_parser_env::*";
"lints_core::*";
"typing_env_types::*";
"typing_tyvar_occurrences::*";
"typing_per_cont_env::*";
"typing_inference_env::*";
"typing_kinding_defs::*";
"type_parameter_env::*";
"typing_fake_members::*";
"typing_defs_core::HasMember";
"typing_defs_core::Destructure";
"typing_defs_core::DestructureKind";
"typing_defs_core::ConstraintType_";
"typing_defs_core::ConstraintType";
"typing_defs_core::InternalType";
]
| _ -> []
let is_in_ty_skip_list ~ty ~trait =
List.mem (skip_list_for_ty ty) trait ~equal:String.equal
let is_in_trait_skip_list ~ty ~trait =
let path_ty = String.split ty ~on:':' in
List.exists (skip_list_for_trait trait) ~f:(fun skip_ty ->
(* If skip_ty is unqualified, like "SomeTy", skip any type whose
 * last path component is "SomeTy" (e.g. "some_path::SomeTy").
 * Otherwise, compare the fully-qualified paths component-wise,
 * where "*" in the skip list matches any component. *)
match String.split skip_ty ~on:':' with
| [skip_ty] ->
(match List.last path_ty with
| None -> false
| Some ty -> String.equal ty skip_ty)
| path_skip_ty ->
List.equal
(fun node skip_node ->
String.equal node skip_node || String.equal "*" skip_node)
path_ty
path_skip_ty)
let skip_derive ~ty ~trait =
is_in_ty_skip_list ~ty ~trait || is_in_trait_skip_list ~ty ~trait
end
let derived_traits ty =
let ty = sprintf "%s::%s" (curr_module_name ()) ty in
default_derives ()
|> List.filter ~f:(fun (_, trait) ->
not (DeriveSkipLists.skip_derive ~ty ~trait))
|> List.append (additional_derives ty)
let denylisted_types () =
(match Configuration.mode () with
| Configuration.ByRef ->
[
("typing_defs_core", "CanIndex");
("typing_defs_core", "CanTraverse");
("typing_defs_core", "ConstraintType_");
("typing_defs_core", "ConstraintType");
("typing_defs_core", "Destructure");
("typing_defs_core", "DestructureKind");
("typing_defs_core", "HasMember");
("typing_defs_core", "HasTypeMember");
("typing_defs_core", "InternalType");
("nast", "Defs");
]
| Configuration.ByBox -> [])
@ [
("aast_defs", "LocalIdMap");
("aast_defs", "ByteString");
("errors", "FinalizedError");
("errors", "Marker");
("errors", "MarkedMessage");
("errors", "PositionGroup");
("file_info", "Saved");
("typing_defs", "ExpandEnv");
("typing_defs", "PhaseTy");
("typing_defs", "WildcardAction");
("typing_reason", "DeclPhase");
("typing_reason", "LoclPhase");
]
(* HACK: ignore anything beginning with the "decl" or "locl" prefix, since the
oxidized version of Ty does not have a phase. *)
let denylisted_type_prefixes =
[
("typing_defs", "Decl");
("typing_defs_core", "Decl");
("typing_defs", "Locl");
("typing_defs_core", "Locl");
]
(* HACK: Typing_reason is usually aliased to Reason, so we have lots of
instances of Reason.t. Since we usually convert an identifier like Reason.t
to reason::Reason, the actual type needs to be renamed to the common alias.
This looks nicer anyway. *)
let renamed_types = [(("typing_reason", "TypingReason"), "Reason")]
(* By default, when we see an alias to a tuple type, we will assume the alias
adds some meaning, and generate a new tuple struct type named after the
alias. In some cases, the alias adds no meaning and we should also use an
alias in Rust. *)
let tuple_aliases =
[
("ast_defs", "Pstring");
("ast_defs", "PositionedByteString");
("errors", "Message");
("typing_reason", "PosId");
]
let newtypes =
[
("aast_defs", "Block");
("aast_defs", "FinallyBlock");
("aast_defs", "Program");
("aast_defs", "UserAttributes");
("file_info", "HashType");
]
(* A list of (<module>, <ty1>) pairs where ty1 is an enum whose non-empty
   variant fields should be wrapped in Box to keep the size of ty1 down. *)
let box_variant () =
(match Configuration.mode () with
| Configuration.ByRef -> [("typing_defs_core", "Ty_")]
| Configuration.ByBox -> [])
@ [
("aast_defs", "Expr_");
("aast_defs", "Stmt_");
("aast_defs", "Def");
("aast_defs", "Pattern");
]
let equal_s2 = [%derive.eq: string * string]
let should_box_variant ty =
List.mem (box_variant ()) (curr_module_name (), ty) ~equal:equal_s2
(* When should_box_variant returns true, we will switch to boxing the fields of
each variant by default. Some fields are small enough not to need boxing,
though, so we opt out of the boxing behavior for them here to avoid
unnecessary indirections. The rule of thumb I'm using here is that the size
should be two words or less (the size of a slice). *)
let unbox_field ty =
let open String in
let is_copy = Convert_type.is_copy ty in
let ty = Rust_type.rust_type_to_string ty in
ty = "String"
|| ty = "bstr::BString"
|| is_prefix ty ~prefix:"Vec<"
|| is_prefix ty ~prefix:"Block<"
|| is_prefix ty ~prefix:"&'a "
|| is_prefix ty ~prefix:"Option<&'a "
|| is_prefix ty ~prefix:"std::cell::Cell<&'a "
|| is_prefix ty ~prefix:"std::cell::RefCell<&'a "
||
match Configuration.mode () with
| Configuration.ByRef ->
ty = "tany_sentinel::TanySentinel"
|| ty = "ident::Ident"
|| ty = "ConditionTypeName<'a>"
|| ty = "ConstraintType<'a>"
|| (is_prefix ty ~prefix:"Option<" && is_copy)
|| (is_prefix ty ~prefix:"std::cell::Cell<" && is_copy)
|| (is_prefix ty ~prefix:"std::cell::RefCell<" && is_copy)
|| Convert_type.is_primitive ty
| Configuration.ByBox -> false
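(* For example, [String], [Vec<...>], and [&'a ...] fields stay unboxed:
   they already carry their payload behind their own indirection, so
   boxing them again would only add a pointless extra hop. *)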
let add_rcoc = [("aast_defs", "Nsenv"); ("aast", "Nsenv")]
let should_add_rcoc ty =
match Configuration.mode () with
| Configuration.ByRef -> false
| Configuration.ByBox ->
List.mem add_rcoc (curr_module_name (), ty) ~equal:equal_s2
let denylisted ty_name =
let ty = (curr_module_name (), ty_name) in
List.mem (denylisted_types ()) ty ~equal:equal_s2
|| List.exists denylisted_type_prefixes ~f:(fun (mod_name, prefix) ->
String.equal mod_name (curr_module_name ())
&& String.is_prefix ty_name ~prefix)
let rename ty_name =
List.find renamed_types ~f:(fun (x, _) ->
equal_s2 x (curr_module_name (), ty_name))
|> Option.value_map ~f:snd ~default:ty_name
let should_use_alias_instead_of_tuple_struct ty_name =
let equal = [%derive.eq: string * string] in
List.mem tuple_aliases (curr_module_name (), ty_name) ~equal
let should_be_newtype ty_name =
let equal = [%derive.eq: string * string] in
List.mem newtypes (curr_module_name (), ty_name) ~equal
let doc_comment_of_attribute { attr_name; attr_payload; _ } =
match (attr_name, attr_payload) with
| ({ txt = "ocaml.doc"; _ }, PStr structure_items) ->
List.find_map structure_items ~f:(fun structure_item ->
match structure_item.pstr_desc with
| Pstr_eval
({ pexp_desc = Pexp_constant (Pconst_string (doc, _, _)); _ }, _) ->
Some doc
| _ -> None)
| _ -> None
let convert_doc_comment doc =
doc
|> String.strip ~drop:(function
| '*'
| ' '
| '\n'
| '\t' ->
true
| _ -> false)
|> String.split ~on:'\n'
|> List.fold
~init:(false, [])
~f:(fun (was_in_code_block, lines) original_line ->
(* Remove leading whitespace *)
let lstripped = String.lstrip original_line in
let maybe_chop_prefix prefix s =
String.chop_prefix s ~prefix |> Option.value ~default:s
in
(* Remove leading asterisk and one space after, if present *)
let no_asterisk =
lstripped |> maybe_chop_prefix "*" |> maybe_chop_prefix " "
in
let now_in_code_block =
if String.is_prefix ~prefix:"```" (String.lstrip no_asterisk) then
not was_in_code_block
else
was_in_code_block
in
let line =
if
now_in_code_block
&& was_in_code_block
&& String.equal lstripped no_asterisk
then
sprintf "///%s\n" original_line
else
sprintf "/// %s\n" no_asterisk
in
(now_in_code_block, line :: lines))
|> (fun (_, l) -> List.rev l)
|> String.concat
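(* E.g. a doc line " * Some docs" becomes "/// Some docs\n": the leading
   asterisk and one space of padding are stripped. Inside ```-fenced code
   blocks, lines without an asterisk prefix keep their original
   indentation so the code renders verbatim. *)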
let doc_comment_of_attribute_list attrs =
attrs
|> List.find_map ~f:doc_comment_of_attribute
|> Option.map ~f:convert_doc_comment
|> Option.value ~default:""
let ocaml_attr attrs =
attrs
|> List.filter_map ~f:stringify_attribute
|> List.map ~f:(fun attr ->
if String.contains attr '"' then
Printf.sprintf "#[rust_to_ocaml(attr = r#\"%s\"#)]\n" attr
else
Printf.sprintf "#[rust_to_ocaml(attr = \"%s\")]\n" attr)
|> String.concat ~sep:""
let type_param (ct, _) = Convert_type.core_type ct
let type_params name params =
let params = List.map ~f:type_param params in
let lifetime =
match Configuration.mode () with
| Configuration.ByRef ->
if Configuration.owned_type name then
[]
else
[Rust_type.lifetime "a"]
| Configuration.ByBox -> []
in
(lifetime, params)
let record_label_declaration
?(pub = false) ?(prefix = "") (ld : label_declaration) : label =
let doc = doc_comment_of_attribute_list ld.pld_attributes in
let attr = ocaml_attr ld.pld_attributes in
let pub =
if pub then
"pub "
else
""
in
let name =
ld.pld_name.txt |> String.chop_prefix_exn ~prefix |> convert_field_name
in
let ty = Convert_type.core_type ld.pld_type in
sprintf
"%s%s%s%s %s: %s,\n"
doc
(rust_de_field_attr [ty] ld.pld_attributes)
attr
pub
name
(rust_type_to_string ty)
let find_record_label_prefix labels =
let prefix =
labels |> List.map ~f:(fun ld -> ld.pld_name.txt) |> common_prefix_of_list
in
(* Only remove a common prefix up to the last underscore (if a record has
fields x_bar and x_baz, we want to remove x_, not x_ba). *)
let idx = ref (String.length prefix) in
while !idx > 0 && Char.(prefix.[!idx - 1] <> '_') do
idx := !idx - 1
done;
String.sub prefix ~pos:0 ~len:!idx
let record_prefix_attr prefix =
if String.is_empty prefix then
""
else
sprintf "#[rust_to_ocaml(prefix = \"%s\")]\n" prefix
let declare_record_arguments ?(pub = false) ~prefix labels =
labels
|> map_and_concat ~f:(record_label_declaration ~pub ~prefix)
|> sprintf "{\n%s}"
let declare_constructor_arguments ?(box_fields = false) types : Rust_type.t list
=
if not box_fields then
if List.is_empty types then
[]
else
List.map ~f:Convert_type.core_type types
else
match types with
| [] -> []
| [ty] ->
let ty = Convert_type.core_type ty in
let ty =
if unbox_field ty then
ty
else
match Configuration.mode () with
| Configuration.ByRef -> rust_ref (lifetime "a") ty
| Configuration.ByBox -> rust_type "Box" [] [ty]
in
[ty]
| _ ->
(match Configuration.mode () with
| Configuration.ByRef ->
let tys = Convert_type.tuple ~seen_indirection:true types in
[rust_ref (lifetime "a") tys]
| Configuration.ByBox -> [rust_type "Box" [] [Convert_type.tuple types]])
let variant_constructor_value cd =
(* If we see the [@value 42] attribute, assume it's for ppx_deriving enum,
and that all the variants are zero-argument (i.e., assume this is a
C-like enum and provide custom discriminant values). *)
List.find_map cd.pcd_attributes ~f:(fun { attr_name; attr_payload; _ } ->
match (attr_name, attr_payload) with
| ( { txt = "value"; _ },
PStr
[
{
pstr_desc =
Pstr_eval
( {
pexp_desc =
Pexp_constant (Pconst_integer (discriminant, None));
_;
},
_ );
_;
};
] ) ->
Some discriminant
| _ -> None)
let variant_constructor_declaration ?(box_fields = false) cd =
let doc = doc_comment_of_attribute_list cd.pcd_attributes in
let attr = ocaml_attr cd.pcd_attributes in
let name = convert_type_name cd.pcd_name.txt in
let name_attr =
if String.equal name cd.pcd_name.txt then
""
else
sprintf "#[rust_to_ocaml(name = \"%s\")]\n" cd.pcd_name.txt
in
let value =
variant_constructor_value cd
|> Option.value_map ~f:(( ^ ) " = ") ~default:""
in
match cd.pcd_args with
| Pcstr_tuple types ->
let tys = declare_constructor_arguments ~box_fields types in
sprintf
"%s%s%s%s%s%s%s%s,\n"
doc
(rust_de_field_attr tys cd.pcd_attributes)
attr
name_attr
(if box_fields && List.length types > 1 then
"#[rust_to_ocaml(inline_tuple)]"
else
"")
name
(if List.is_empty tys then
""
else
map_and_concat ~sep:"," ~f:rust_type_to_string tys |> sprintf "(%s)")
value
| Pcstr_record labels ->
let prefix = find_record_label_prefix labels in
sprintf
"%s%s%s%s%s%s%s,\n"
doc
attr
(record_prefix_attr prefix)
name_attr
name
(declare_record_arguments labels ~prefix)
value
let ctor_arg_len (ctor_args : constructor_arguments) : int =
match ctor_args with
| Pcstr_tuple x -> List.length x
| Pcstr_record x -> List.length x
(* When converting a variant type to a Rust enum, consider whether the enum will
be "C-like" (i.e., a type where all variants take no arguments), and if so,
what the maximum [@value] annotation was. *)
type enum_kind =
| C_like of {
max_value: int;
num_variants: int;
}
| Sum_type of { num_variants: int }
| Not_an_enum
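(* Worked example (hypothetical OCaml input): a declaration like
     type t = A [@value 1] | B [@value 5]
   yields [C_like { max_value = 5; num_variants = 2 }] and is emitted as a
   #[repr(u8)] Rust enum with explicit discriminants (A = 1, B = 5). *)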
let type_declaration ~mutual_rec name td =
let tparam_list =
match (td.ptype_params, td.ptype_name.txt) with
(* HACK: eliminate tparam from `type _ ty_` and phase-parameterized types *)
| ([({ ptyp_desc = Ptyp_any; _ }, _)], "ty_")
| ([({ ptyp_desc = Ptyp_var "phase"; _ }, _)], _)
| ([({ ptyp_desc = Ptyp_var "ty"; _ }, _)], _)
when String.(
curr_module_name () = "typing_defs_core"
|| curr_module_name () = "typing_defs") ->
[]
| ([({ ptyp_desc = Ptyp_any; _ }, _)], "t_")
when String.(curr_module_name () = "typing_reason") ->
[]
| (tparams, _) -> tparams
in
let (lifetime, tparams) = type_params name tparam_list in
let serde_attr =
if List.is_empty lifetime || List.is_empty tparams then
""
else
let bounds =
map_and_concat
~sep:", "
~f:(fun v ->
sprintf
"%s: 'de + arena_deserializer::DeserializeInArena<'de>"
(Rust_type.rust_type_to_string v))
tparams
in
sprintf "#[serde(bound(deserialize = \"%s\" ))]" bounds
in
let doc = doc_comment_of_attribute_list td.ptype_attributes in
let attr = ocaml_attr td.ptype_attributes in
let attr =
if mutual_rec then
"#[rust_to_ocaml(and)]\n" ^ attr
else
attr
in
let attrs_and_vis ?(additional_attrs = "") enum_kind ~force_derive_copy =
if
force_derive_copy
&& Configuration.is_known (Configuration.copy_type name) false
then
failwith
(Printf.sprintf
"Type %s::%s can implement Copy but is not specified in the copy_types file. Please add it."
(curr_module_name ())
name);
let additional_derives =
if force_derive_copy then
[(None, "Copy")]
else
[]
in
let derive_attr =
let traits = derived_traits name @ additional_derives in
let traits =
match enum_kind with
| C_like _ -> (Some "ocamlrep", "FromOcamlRep") :: traits
| _ -> traits
in
let traits =
if force_derive_copy then
(Some "ocamlrep", "FromOcamlRepIn") :: traits
else
traits
in
traits
|> List.dedup_and_sort ~compare:(fun (_, t1) (_, t2) ->
String.compare t1 t2)
|> List.map ~f:(fun (m, trait) ->
Option.iter m ~f:(fun m -> add_extern_use (m ^ "::" ^ trait));
trait)
|> String.concat ~sep:", "
|> sprintf "#[derive(%s)]"
in
let repr =
match enum_kind with
| C_like { max_value; num_variants }
when max num_variants (max_value + 1) <= 256 ->
"\n#[repr(u8)]"
| Sum_type { num_variants } when num_variants <= 256 -> "\n#[repr(C, u8)]"
| _ -> "\n#[repr(C)]"
in
doc ^ derive_attr ^ serde_attr ^ attr ^ additional_attrs ^ repr ^ "\npub"
in
let deserialize_in_arena_macro ~force_derive_copy =
if is_by_ref () || force_derive_copy || String.equal name "EmitId" then
let lts = List.map lifetime ~f:(fun _ -> Rust_type.lifetime "arena") in
sprintf
"arena_deserializer::impl_deserialize_in_arena!(%s%s);\n"
name
(type_params_to_string lts tparams)
else
""
in
let implements ~force_derive_copy =
let traits = implements_traits name in
let traits =
if force_derive_copy then
(Some "arena_trait", "TrivialDrop") :: traits
else
traits
in
traits
|> List.dedup_and_sort ~compare:(fun (_, t1) (_, t2) ->
String.compare t1 t2)
|> List.map ~f:(fun (m, trait) ->
Option.iter m ~f:(fun m -> add_extern_use (m ^ "::" ^ trait));
trait)
|> List.map ~f:(fun trait ->
sprintf
"\nimpl%s %s for %s%s {}"
(type_params_to_string ~bound:trait lifetime tparams)
trait
name
(type_params_to_string lifetime tparams))
|> String.concat ~sep:""
in
match (td.ptype_kind, td.ptype_manifest) with
| (_, Some ty) ->
(* The manifest represents a `= <some_type>` clause. When td.ptype_kind is
Ptype_abstract, this is a simple type alias:
type foo = Other_module.bar
In this case, the manifest contains the type Other_module.bar.
The ptype_kind can also be a full type definition. It is Ptype_variant in
a declaration like this:
type foo = Other_module.foo =
| Bar
| Baz
For these declarations, the OCaml compiler verifies that the variants in
Other_module.foo are equivalent to the ones we define in this
Ptype_variant.
I don't think there's a direct equivalent to this in Rust, or any reason
to try to reproduce it. If we see a manifest, we can ignore the
ptype_kind and just alias, re-export, or define a newtype for
Other_module.foo. *)
(match ty.ptyp_desc with
(* Polymorphic variants. *)
| Ptyp_variant _ ->
raise (Skip_type_decl "polymorphic variants not supported")
| Ptyp_constr ({ txt = Lident "t"; _ }, []) ->
(* In the case of `type t = prefix * string ;; type relative_path = t`, we
have already defined a RelativePath type because we renamed t in the
first declaration to the name of the module. We can just skip the second
declaration introducing the alias. *)
let mod_name_as_type = convert_type_name (curr_module_name ()) in
if String.equal name mod_name_as_type then
raise
(Skip_type_decl
("it is an alias to type t, which was already renamed to "
^ mod_name_as_type))
else
sprintf
"%s%spub type %s = %s;"
doc
attr
(rust_type name lifetime tparams |> rust_type_to_string)
mod_name_as_type
| Ptyp_constr ({ txt = id; _ }, targs) ->
let id = longident_to_string id in
let ty_name = id |> String.split ~on:':' |> List.last_exn in
if
List.length td.ptype_params = List.length targs
&& String.(self () = ty_name)
&& not mutual_rec
then (
add_ty_reexport id;
raise (Skip_type_decl ("it is a re-export of " ^ id))
) else
let ty = Convert_type.core_type ty in
if should_add_rcoc name then
sprintf
"%s%spub type %s = std::sync::Arc<%s>;"
doc
attr
(rust_type name lifetime tparams |> rust_type_to_string)
(rust_type_to_string ty)
else if should_be_newtype name then
sprintf
"%s struct %s (%s pub %s);%s\n%s"
(attrs_and_vis Not_an_enum ~force_derive_copy:false)
(rust_type name lifetime tparams |> rust_type_to_string)
(rust_de_field_attr [ty] td.ptype_attributes)
(rust_type_to_string ty)
(implements ~force_derive_copy:false)
(deserialize_in_arena_macro ~force_derive_copy:false)
else
sprintf
"%s%spub type %s = %s;"
doc
attr
(rust_type name lifetime tparams |> rust_type_to_string)
(deref ty |> rust_type_to_string)
| _ ->
if should_use_alias_instead_of_tuple_struct name then
let ty = Convert_type.core_type ty |> deref |> rust_type_to_string in
sprintf
"%s%spub type %s = %s;"
doc
attr
(rust_type name lifetime tparams |> rust_type_to_string)
ty
else
let ty =
match ty.ptyp_desc with
| Ptyp_tuple tys ->
map_and_concat
~f:(fun ty ->
Convert_type.core_type ty |> fun t ->
sprintf
"%s pub %s"
(rust_de_field_attr [t] td.ptype_attributes)
(rust_type_to_string t))
~sep:","
tys
|> sprintf "(%s)"
| _ ->
Convert_type.core_type ty
|> rust_type_to_string
|> sprintf "(pub %s)"
in
sprintf
"%s struct %s %s;%s\n%s"
(attrs_and_vis Not_an_enum ~force_derive_copy:false)
(rust_type name lifetime tparams |> rust_type_to_string)
ty
(implements ~force_derive_copy:false)
(deserialize_in_arena_macro ~force_derive_copy:false))
(* Variant types, including GADTs. *)
| (Ptype_variant ctors, None) ->
let all_nullary =
List.for_all ctors ~f:(fun c -> 0 = ctor_arg_len c.pcd_args)
in
let force_derive_copy =
if is_by_ref () then
true
else
all_nullary
in
let box_fields =
if is_by_ref () then
true
else
should_box_variant name
in
let num_variants = List.length ctors in
let enum_kind =
if not all_nullary then
Sum_type { num_variants }
else
let max_value =
ctors
|> List.filter_map ~f:variant_constructor_value
|> List.map ~f:int_of_string
|> List.fold ~init:0 ~f:max
in
C_like { max_value; num_variants }
in
let ctors =
map_and_concat ctors ~f:(variant_constructor_declaration ~box_fields)
in
sprintf
"%s enum %s {\n%s}%s\n%s"
(attrs_and_vis enum_kind ~force_derive_copy)
(rust_type name lifetime tparams |> rust_type_to_string)
ctors
(implements ~force_derive_copy)
(deserialize_in_arena_macro ~force_derive_copy)
(* Record types. *)
| (Ptype_record labels, None) ->
let prefix = find_record_label_prefix labels in
let labels = declare_record_arguments labels ~pub:true ~prefix in
sprintf
"%s struct %s %s%s\n%s"
(attrs_and_vis
Not_an_enum
~force_derive_copy:false
~additional_attrs:(record_prefix_attr prefix))
(rust_type name lifetime tparams |> rust_type_to_string)
labels
(implements ~force_derive_copy:false)
(deserialize_in_arena_macro ~force_derive_copy:false)
(* `type foo`; an abstract type with no specified implementation. This doesn't
mean much outside of an .mli, I don't think. *)
| (Ptype_abstract, None) ->
raise (Skip_type_decl "Abstract types without manifest not supported")
(* type foo += A, e.g. the exn type. *)
| (Ptype_open, None) -> raise (Skip_type_decl "Open types not supported")
let type_declaration ?(mutual_rec = false) td =
let name = td.ptype_name.txt in
let name =
if String.equal name "t" then
curr_module_name ()
else
name
in
let name = convert_type_name name in
let name = rename name in
let mod_name = curr_module_name () in
if denylisted name then
log "Not converting type %s::%s: it was denylisted" mod_name name
else
match Configuration.extern_type name with
| Some extern_type ->
log "Not converting type %s::%s: re-exporting instead" mod_name name;
add_decl name (sprintf "pub use %s;" extern_type)
| None ->
(try
with_self name (fun () ->
add_decl name (type_declaration ~mutual_rec name td))
with
| Skip_type_decl reason ->
log "Not converting type %s::%s: %s" mod_name name reason) |
hhvm/hphp/hack/src/hh_oxidize/dune | (executable
(name hh_oxidize)
(link_flags
(:standard
(:include ../dune_config/ld-opts.sexp)))
(modes exe byte_complete)
(modules
configuration
convert_longident
convert_toplevel_phrase
convert_type
convert_type_decl
output
hh_oxidize
oxidized_module
rust_type
state
stringify
utils)
(preprocess
(pps ppx_deriving.std))
(libraries
core_kernel
core_kernel.caml_unix
signed_source
collections
ocaml-compiler-libs.common)) |
|
OCaml | hhvm/hphp/hack/src/hh_oxidize/hh_oxidize.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
module Sys = Stdlib.Sys
open Printf
open Reordered_argument_collections
open Utils
type env = { rustfmt: string }
let header =
"// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the \"hack\" directory of this source tree.
//
// "
^ Signed_source.signing_token
let regen_instructions = "
//
// To regenerate this file, run:
// "
let parse filename =
let ic = In_channel.create filename in
let lexbuf = Lexing.from_channel ic in
let phrases = Parse.use_file lexbuf in
In_channel.close ic;
phrases
let oxidize filename =
let phrases = parse filename in
let in_basename = Filename.basename filename in
let module_name = String.chop_suffix_exn in_basename ~suffix:".ml" in
let module_name = convert_module_name module_name in
log "Converting %s" module_name;
let oxidized_module =
Utils.with_log_indent (fun () ->
Output.with_output_context ~module_name (fun () ->
let _env =
Convert_toplevel_phrase.(
List.fold phrases ~f:toplevel_phrase ~init:Env.empty)
in
()))
in
(module_name, oxidized_module)
let read filename =
let ic = In_channel.create filename in
let contents = In_channel.input_all ic in
In_channel.close ic;
contents
let write filename contents =
let oc = Out_channel.create filename in
fprintf oc "%s%!" contents;
Out_channel.close oc
let write_format_and_sign env filename contents =
write filename contents;
if Sys.command (sprintf "%S %S" env.rustfmt filename) <> 0 then
failwith ("Could not format Rust output in " ^ filename);
let contents = read filename in
let contents =
try Signed_source.sign_file contents with
| Signed_source.Token_not_found -> contents
in
write filename contents
let make_header regen_command =
match regen_command with
| None -> header
| Some cmd -> header ^ regen_instructions ^ cmd
let convert_files env out_dir files regen_command =
ignore (Sys.command (sprintf "rm -f %S/*.rs" out_dir));
let header = make_header regen_command in
let modules = files |> List.map ~f:oxidize |> SMap.of_list in
let () =
modules
|> SMap.map ~f:Stringify.stringify
|> SMap.iter ~f:(fun name src ->
let src = sprintf "%s\n\n%s" header src in
let out_filename = Filename.concat out_dir (name ^ ".rs") in
write_format_and_sign env out_filename src)
in
let manifest_filename = Filename.concat out_dir "mod.rs" in
let module_names = SMap.ordered_keys modules in
let manifest_mods =
map_and_concat module_names ~f:(sprintf "pub mod %s;") ~sep:"\n"
in
let manifest = header ^ "\n\n" ^ manifest_mods in
write_format_and_sign env manifest_filename manifest
let convert_single_file env filename regen_command =
with_tempfile @@ fun out_filename ->
let (_, oxidized_module) = oxidize filename in
let src = Stringify.stringify oxidized_module in
write_format_and_sign env out_filename src;
let header = make_header regen_command in
printf "%s\n%s" header (read out_filename)
let parse_types_file filename =
let lines = ref [] in
let ic = Caml.open_in filename in
(try
while true do
lines := Caml.input_line ic :: !lines
done;
Caml.close_in ic
with
| End_of_file -> Caml.close_in ic);
List.filter_map !lines ~f:(fun name ->
(* Ignore comments beginning with '#' *)
let name =
match String.index name '#' with
| Some idx -> String.sub name ~pos:0 ~len:idx
| None -> name
in
(* Strip whitespace *)
let name = String.strip name in
if String.is_substring name ~substring:"::" then
Some name
else (
if String.(name <> "") then
failwith
(Printf.sprintf
"Failed to parse line in types file %S: %S"
filename
name);
None
))
let parse_extern_types_file filename =
parse_types_file filename
|> List.fold ~init:SMap.empty ~f:(fun map name ->
try
(* Map the name with the crate prefix stripped (since we do not expect to see
the crate name in our OCaml source) to the fully-qualified name. *)
let coloncolon_idx = String.substr_index_exn name ~pattern:"::" in
let after_coloncolon_idx = coloncolon_idx + 2 in
assert (Char.(name.[after_coloncolon_idx] <> ':'));
let name_without_crate =
String.subo name ~pos:after_coloncolon_idx
in
SMap.add map ~key:name_without_crate ~data:name
with
| _ ->
if String.(name <> "") then
failwith
(Printf.sprintf
"Failed to parse line in extern types file %S: %S"
filename
name);
map)
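(* Worked example: a line "foo::bar::Bar" in the extern types file yields
   the map entry "bar::Bar" -> "foo::bar::Bar"; only the leading crate
   segment is stripped from the key. *)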
let parse_owned_types_file filename = SSet.of_list (parse_types_file filename)
let parse_copy_types_file filename = SSet.of_list (parse_types_file filename)
let usage =
"Usage: buck run hphp/hack/src/hh_oxidize -- [out_directory] [target_files]
buck run hphp/hack/src/hh_oxidize -- [target_file]"
type mode =
| File of {
file: string;
regen_command: string option;
}
| Files of {
out_dir: string;
files: string list;
regen_command: string option;
}
type options = {
mode: mode;
rustfmt_path: string;
}
let parse_args () =
let out_dir = ref None in
let regen_command = ref None in
let rustfmt_path = ref None in
let files = ref [] in
let mode = ref Configuration.ByBox in
let extern_types_file = ref None in
let owned_types_file = ref None in
let copy_types_file = ref None in
let options =
[
( "--out-dir",
Arg.String (fun s -> out_dir := Some s),
" Output directory for conversion of multiple files" );
( "--regen-command",
Arg.String (fun s -> regen_command := Some s),
" Include this command in file headers" );
( "--rustfmt-path",
Arg.String (fun s -> rustfmt_path := Some s),
" Path to rustfmt binary used to format output" );
( "--by-ref",
Arg.Unit (fun () -> mode := Configuration.ByRef),
" Use references instead of Box, slices instead of Vec and String" );
( "--extern-types-file",
Arg.String (fun s -> extern_types_file := Some s),
" Use the types listed in this file rather than assuming all types"
^ " are defined within the set of files being oxidized" );
( "--owned-types-file",
Arg.String (fun s -> owned_types_file := Some s),
" Do not add a lifetime parameter to the types listend in this file"
^ " (when --by-ref is enabled)" );
( "--copy-types-file",
Arg.String (fun s -> copy_types_file := Some s),
" Do not use references for the types listed in this file"
^ " (when --by-ref is enabled)" );
]
in
Arg.parse options (fun file -> files := file :: !files) usage;
let extern_types =
match !extern_types_file with
| None -> Configuration.(default.extern_types)
| Some filename -> parse_extern_types_file filename
in
let owned_types =
match !owned_types_file with
| None -> Configuration.(default.owned_types)
| Some filename -> parse_owned_types_file filename
in
let copy_types = Option.map !copy_types_file ~f:parse_copy_types_file in
Configuration.set
{ Configuration.mode = !mode; extern_types; owned_types; copy_types };
let rustfmt_path = Option.value !rustfmt_path ~default:"rustfmt" in
let regen_command = !regen_command in
match !files with
| [] ->
eprintf "%s\n" usage;
exit 1
| [file] ->
if Option.is_some !out_dir then
failwith "Cannot set output directory in single-file mode";
{ mode = File { file; regen_command }; rustfmt_path }
| files ->
let out_dir =
match !out_dir with
| Some d -> d
| None ->
failwith "Cannot convert multiple files without output directory"
in
{ mode = Files { out_dir; files; regen_command }; rustfmt_path }
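(* Example invocations (a sketch; the flags match the [options] list above,
   and the file names are hypothetical):
hh_oxidize foo.ml
hh_oxidize --out-dir oxidized --by-ref --extern-types-file extern.txt \
  foo.ml bar.ml
   Single-file mode rejects --out-dir; multi-file mode requires it. *)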
let () =
let { mode; rustfmt_path } = parse_args () in
let env = { rustfmt = rustfmt_path } in
match mode with
| File { file; regen_command } -> convert_single_file env file regen_command
| Files { out_dir; files; regen_command } ->
convert_files env out_dir files regen_command |
OCaml | hhvm/hphp/hack/src/hh_oxidize/output.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Core
open Reordered_argument_collections
open Oxidized_module
let output = ref Oxidized_module.empty
let with_output_context ~module_name f =
State.with_module_name module_name (fun () ->
output := Oxidized_module.empty;
let () = f () in
let oxidized_module = !output in
output := Oxidized_module.empty;
oxidized_module)
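(* Sketch of intended use (module name and declaration are hypothetical):
   conversion code runs inside [with_output_context], accumulating into
   [output] via the [add_*] functions below, and receives the finished
   module back:
let m =
  Output.with_output_context ~module_name:"foo" (fun () ->
      Output.add_decl "Foo" "pub struct Foo;")
   Resetting [output] on entry and exit keeps successive module conversions
   from leaking state into one another. *)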
let add_extern_use ty =
output := { !output with extern_uses = SSet.add !output.extern_uses ty }
let add_glob_use mod_name =
output := { !output with glob_uses = SSet.add !output.glob_uses mod_name }
let add_alias mod_name alias =
output := { !output with aliases = (mod_name, alias) :: !output.aliases }
let add_include mod_name =
output := { !output with includes = SSet.add !output.includes mod_name }
let add_ty_reexport ty =
output := { !output with ty_reexports = ty :: !output.ty_reexports }
let add_decl name decl =
output := { !output with decls = (name, decl) :: !output.decls }
let glob_uses () = SSet.elements !output.glob_uses |
OCaml Interface | hhvm/hphp/hack/src/hh_oxidize/output.mli | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
val with_output_context :
module_name:string -> (unit -> unit) -> Oxidized_module.t
val add_extern_use : string -> unit
val add_glob_use : string -> unit
val add_alias : string -> string -> unit
val add_include : string -> unit
val add_ty_reexport : string -> unit
val add_decl : string -> string -> unit
val glob_uses : unit -> string list |
OCaml | hhvm/hphp/hack/src/hh_oxidize/oxidized_module.ml | (*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*)
open Reordered_argument_collections
(** This type is mostly strings for the sake of making conversion easy, but we
retain some structure for the postprocessing and formatting we do in
{!Stringify}. *)
type t = {
extern_uses: SSet.t;
(* names of types (or derive macros) to import from other Rust crates *)
glob_uses: SSet.t;
(* names of opened modules (to convert to glob-imports in Rust) *)
aliases: (string * string) list;
(* (module_name, alias) pairs *)
includes: SSet.t;
(* names of directly-included modules *)
ty_reexports: string list;
(* fully-qualified type names to be re-exported *)
decls: (string * string) list; (* (name, rust_syntax_for_entire_declaration) *)
}
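(* An illustrative value (contents hypothetical): a module that glob-imports
   [ast_defs], re-exports one type, and defines one declaration would look
   roughly like
{ empty with
  glob_uses = SSet.of_list ["ast_defs"];
  ty_reexports = ["ast_defs::Pos"];
  decls = [("Foo", "pub struct Foo;")] }
*)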
let empty =
{
extern_uses = SSet.empty;
glob_uses = SSet.empty;
aliases = [];
includes = SSet.empty;
ty_reexports = [];
decls = [];
} |