CMake
hhvm/build/fbcode_builder/CMake/FindZstd.cmake
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# - Try to find Facebook zstd library
# This will define
# ZSTD_FOUND
# ZSTD_INCLUDE_DIR
# ZSTD_LIBRARY
#

find_path(ZSTD_INCLUDE_DIR NAMES zstd.h)

find_library(ZSTD_LIBRARY_DEBUG NAMES zstdd zstd_staticd)
find_library(ZSTD_LIBRARY_RELEASE NAMES zstd zstd_static)

include(SelectLibraryConfigurations)
SELECT_LIBRARY_CONFIGURATIONS(ZSTD)

include(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(
  ZSTD DEFAULT_MSG
  ZSTD_LIBRARY ZSTD_INCLUDE_DIR
)

if (ZSTD_FOUND)
  message(STATUS "Found Zstd: ${ZSTD_LIBRARY}")
endif()

mark_as_advanced(ZSTD_INCLUDE_DIR ZSTD_LIBRARY)
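For orientation, this is a classic CMake "find module": a consumer adds the module's directory to `CMAKE_MODULE_PATH` and calls `find_package`. A minimal, hypothetical consumer fragment follows (the `my_tool` target and the `CMake/` layout are stand-ins, not part of this repo):

```cmake
# Hypothetical consumer CMakeLists.txt fragment.
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMake")

find_package(Zstd REQUIRED)  # dispatches to FindZstd.cmake above

add_executable(my_tool main.cpp)
target_include_directories(my_tool PRIVATE ${ZSTD_INCLUDE_DIR})
target_link_libraries(my_tool PRIVATE ${ZSTD_LIBRARY})
```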
Python
hhvm/build/fbcode_builder/CMake/make_fbpy_archive.py
#!/usr/bin/env python3
#
# Copyright (c) Facebook, Inc. and its affiliates.
#

import argparse
import collections
import errno
import os
import shutil
import sys
import tempfile
import zipapp

MANIFEST_SEPARATOR = " :: "
MANIFEST_HEADER_V1 = "FBPY_MANIFEST 1\n"


class UsageError(Exception):
    def __init__(self, message):
        self.message = message

    def __str__(self):
        return self.message


class BadManifestError(UsageError):
    def __init__(self, path, line_num, message):
        full_msg = "%s:%s: %s" % (path, line_num, message)
        super().__init__(full_msg)
        self.path = path
        self.line_num = line_num
        self.raw_message = message


PathInfo = collections.namedtuple(
    "PathInfo", ("src", "dest", "manifest_path", "manifest_line")
)


def parse_manifest(manifest, path_map):
    bad_prefix = ".." + os.path.sep
    manifest_dir = os.path.dirname(manifest)
    with open(manifest, "r") as f:
        line_num = 1
        line = f.readline()
        if line != MANIFEST_HEADER_V1:
            raise BadManifestError(
                manifest, line_num, "Unexpected manifest file header"
            )

        for line in f:
            line_num += 1
            if line.startswith("#"):
                continue
            line = line.rstrip("\n")
            parts = line.split(MANIFEST_SEPARATOR)
            if len(parts) != 2:
                msg = "line must be of the form SRC %s DEST" % MANIFEST_SEPARATOR
                raise BadManifestError(manifest, line_num, msg)
            src, dest = parts
            dest = os.path.normpath(dest)
            if dest.startswith(bad_prefix):
                msg = "destination path starts with %s: %s" % (bad_prefix, dest)
                raise BadManifestError(manifest, line_num, msg)

            if not os.path.isabs(src):
                src = os.path.normpath(os.path.join(manifest_dir, src))

            if dest in path_map:
                prev_info = path_map[dest]
                msg = (
                    "multiple source paths specified for destination "
                    "path %s. Previous source was %s from %s:%s"
                    % (
                        dest,
                        prev_info.src,
                        prev_info.manifest_path,
                        prev_info.manifest_line,
                    )
                )
                raise BadManifestError(manifest, line_num, msg)

            info = PathInfo(
                src=src,
                dest=dest,
                manifest_path=manifest,
                manifest_line=line_num,
            )
            path_map[dest] = info


def populate_install_tree(inst_dir, path_map):
    os.mkdir(inst_dir)

    dest_dirs = {"": False}

    def make_dest_dir(path):
        if path in dest_dirs:
            return
        parent = os.path.dirname(path)
        make_dest_dir(parent)
        abs_path = os.path.join(inst_dir, path)
        os.mkdir(abs_path)
        dest_dirs[path] = False

    def install_file(info):
        dir_name, base_name = os.path.split(info.dest)
        make_dest_dir(dir_name)
        if base_name == "__init__.py":
            dest_dirs[dir_name] = True
        abs_dest = os.path.join(inst_dir, info.dest)
        shutil.copy2(info.src, abs_dest)

    # Copy all of the destination files
    for info in path_map.values():
        install_file(info)

    # Create __init__ files in any directories that don't have them.
    for dir_path, has_init in dest_dirs.items():
        if has_init:
            continue
        init_path = os.path.join(inst_dir, dir_path, "__init__.py")
        with open(init_path, "w"):
            pass


def build_zipapp(args, path_map):
    """Create a self executing python binary using Python 3's built-in
    zipapp module.

    This type of Python binary is relatively simple, as zipapp is part of
    the standard library, but it does not support native language extensions
    (.so/.dll files).
    """
    dest_dir = os.path.dirname(args.output)
    with tempfile.TemporaryDirectory(prefix="make_fbpy.", dir=dest_dir) as tmpdir:
        inst_dir = os.path.join(tmpdir, "tree")
        populate_install_tree(inst_dir, path_map)

        tmp_output = os.path.join(tmpdir, "output.exe")
        zipapp.create_archive(
            inst_dir, target=tmp_output, interpreter=args.python, main=args.main
        )
        os.replace(tmp_output, args.output)


def create_main_module(args, inst_dir, path_map):
    if not args.main:
        assert "__main__.py" in path_map
        return

    dest_path = os.path.join(inst_dir, "__main__.py")

    main_module, main_fn = args.main.split(":")
    main_contents = """\
#!{python}

if __name__ == "__main__":
    import {main_module}

    {main_module}.{main_fn}()
""".format(
        python=args.python, main_module=main_module, main_fn=main_fn
    )
    with open(dest_path, "w") as f:
        f.write(main_contents)
    os.chmod(dest_path, 0o755)


def build_install_dir(args, path_map):
    """Create a directory that contains all of the sources, with a __main__
    module to run the program.
    """
    # Populate a temporary directory first, then rename to the destination
    # location. This ensures that we don't ever leave a halfway-built
    # directory behind at the output path if something goes wrong.
    dest_dir = os.path.dirname(args.output)
    with tempfile.TemporaryDirectory(prefix="make_fbpy.", dir=dest_dir) as tmpdir:
        inst_dir = os.path.join(tmpdir, "tree")
        populate_install_tree(inst_dir, path_map)
        create_main_module(args, inst_dir, path_map)
        os.rename(inst_dir, args.output)


def ensure_directory(path):
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno != errno.EEXIST:
            raise


def install_library(args, path_map):
    """Create an installation directory containing a python library."""
    out_dir = args.output
    out_manifest = args.output + ".manifest"

    install_dir = args.install_dir
    if not install_dir:
        install_dir = out_dir

    os.makedirs(out_dir)
    with open(out_manifest, "w") as manifest:
        manifest.write(MANIFEST_HEADER_V1)
        for info in path_map.values():
            abs_dest = os.path.join(out_dir, info.dest)
            ensure_directory(os.path.dirname(abs_dest))
            print("copy %r --> %r" % (info.src, abs_dest))
            shutil.copy2(info.src, abs_dest)
            installed_dest = os.path.join(install_dir, info.dest)
            manifest.write("%s%s%s\n" % (installed_dest, MANIFEST_SEPARATOR, info.dest))


def parse_manifests(args):
    # Process args.manifest_separator to help support older versions of CMake
    if args.manifest_separator:
        manifests = []
        for manifest_arg in args.manifests:
            split_arg = manifest_arg.split(args.manifest_separator)
            manifests.extend(split_arg)
        args.manifests = manifests

    path_map = {}
    for manifest in args.manifests:
        parse_manifest(manifest, path_map)

    return path_map


def check_main_module(args, path_map):
    # Translate an empty string in the --main argument to None,
    # just to allow the CMake logic to be slightly simpler and pass in an
    # empty string when it really wants the default __main__.py module to be
    # used.
    if args.main == "":
        args.main = None

    if args.type == "lib-install":
        if args.main is not None:
            raise UsageError("cannot specify a --main argument with --type=lib-install")
        return

    main_info = path_map.get("__main__.py")
    if args.main:
        if main_info is not None:
            msg = (
                "specified an explicit main module with --main, "
                "but the file listing already includes __main__.py"
            )
            raise BadManifestError(
                main_info.manifest_path, main_info.manifest_line, msg
            )
        parts = args.main.split(":")
        if len(parts) != 2:
            raise UsageError(
                "argument to --main must be of the form MODULE:CALLABLE "
                "(received %s)" % (args.main,)
            )
    else:
        if main_info is None:
            raise UsageError(
                "no main module specified with --main, "
                "and no __main__.py module present"
            )


BUILD_TYPES = {
    "zipapp": build_zipapp,
    "dir": build_install_dir,
    "lib-install": install_library,
}


def main():
    ap = argparse.ArgumentParser()
    ap.add_argument("-o", "--output", required=True, help="The output file path")
    ap.add_argument(
        "--install-dir",
        help="When used with --type=lib-install, this parameter specifies the "
        "final location where the library will be installed. This can be "
        "used to generate the library in one directory first, when you plan "
        "to move or copy it to another final location later.",
    )
    ap.add_argument(
        "--manifest-separator",
        help="Split manifest arguments around this separator. This is used "
        "to support older versions of CMake that cannot supply the manifests "
        "as separate arguments.",
    )
    ap.add_argument(
        "--main",
        help="The main module to run, specified as <module>:<callable>. "
        "This must be specified if and only if the archive does not contain "
        "a __main__.py file.",
    )
    ap.add_argument(
        "--python",
        help="Explicitly specify the python interpreter to use for the "
        "executable.",
    )
    ap.add_argument(
        "--type", choices=BUILD_TYPES.keys(), help="The type of output to build."
    )
    ap.add_argument(
        "manifests",
        nargs="+",
        help="The manifest files specifying how to construct the archive",
    )
    args = ap.parse_args()

    if args.python is None:
        args.python = sys.executable

    if args.type is None:
        # In the future we might want different default output types
        # for different platforms.
        args.type = "zipapp"
    build_fn = BUILD_TYPES[args.type]

    try:
        path_map = parse_manifests(args)
        check_main_module(args, path_map)
    except UsageError as ex:
        print("error: %s" % (ex,), file=sys.stderr)
        sys.exit(1)

    build_fn(args, path_map)


if __name__ == "__main__":
    main()
CMake
hhvm/build/fbcode_builder/CMake/RustStaticLibrary.cmake
# Copyright (c) Meta Platforms, Inc. and affiliates.

include(FBCMakeParseArgs)

set(
  USE_CARGO_VENDOR AUTO CACHE STRING
  "Download Rust Crates from an internally vendored location"
)
set_property(CACHE USE_CARGO_VENDOR PROPERTY STRINGS AUTO ON OFF)

set(
  GENERATE_CARGO_VENDOR_CONFIG AUTO CACHE STRING
  "Whether to generate Rust cargo vendor config or use existing"
)
set_property(CACHE GENERATE_CARGO_VENDOR_CONFIG PROPERTY STRINGS AUTO ON OFF)

set(RUST_VENDORED_CRATES_DIR "$ENV{RUST_VENDORED_CRATES_DIR}")

if("${USE_CARGO_VENDOR}" STREQUAL "AUTO")
  if(EXISTS "${RUST_VENDORED_CRATES_DIR}")
    set(USE_CARGO_VENDOR ON)
  else()
    set(USE_CARGO_VENDOR OFF)
  endif()
endif()

if("${GENERATE_CARGO_VENDOR_CONFIG}" STREQUAL "AUTO")
  set(GENERATE_CARGO_VENDOR_CONFIG "${USE_CARGO_VENDOR}")
endif()

if(GENERATE_CARGO_VENDOR_CONFIG)
  if(NOT EXISTS "${RUST_VENDORED_CRATES_DIR}")
    message(
      FATAL_ERROR
      "vendored rust crates not present: "
      "${RUST_VENDORED_CRATES_DIR}"
    )
  endif()

  set(RUST_CARGO_HOME "${CMAKE_BINARY_DIR}/_cargo_home")
  file(MAKE_DIRECTORY "${RUST_CARGO_HOME}")

  file(
    TO_NATIVE_PATH "${RUST_VENDORED_CRATES_DIR}"
    ESCAPED_RUST_VENDORED_CRATES_DIR
  )
  string(
    REPLACE "\\" "\\\\"
    ESCAPED_RUST_VENDORED_CRATES_DIR
    "${ESCAPED_RUST_VENDORED_CRATES_DIR}"
  )
  file(
    WRITE "${RUST_CARGO_HOME}/config"
    "[source.crates-io]\n"
    "replace-with = \"vendored-sources\"\n"
    "\n"
    "[source.vendored-sources]\n"
    "directory = \"${ESCAPED_RUST_VENDORED_CRATES_DIR}\"\n"
  )
endif()

find_program(CARGO_COMMAND cargo REQUIRED)

# Cargo is a build system in itself, and thus will try to take advantage of all
# the cores on the system. Unfortunately, this conflicts with Ninja, since it
# also tries to utilize all the cores. This can lead to a system that is
# completely overloaded with compile jobs to the point where nothing else can
# be achieved on the system.
#
# Let's inform Ninja of this fact so it won't try to spawn other jobs while
# Rust is being compiled.
set_property(GLOBAL APPEND PROPERTY JOB_POOLS rust_job_pool=1)

# This function creates an interface library target based on the static library
# built by Cargo. It will call Cargo to build a staticlib and generate a CMake
# interface library with it.
#
# This function requires `find_package(Python COMPONENTS Interpreter)`.
#
# You need to set `crate-type = ["staticlib"]` in the `[lib]` section of your
# Cargo.toml to make Cargo build a static library.
#
# ```cmake
# rust_static_library(<TARGET> [CRATE <CRATE_NAME>] [FEATURES <FEATURE_NAME>])
# ```
#
# Parameters:
# - TARGET:
#   Name of the target. This function will create an interface library
#   target with this name.
# - CRATE_NAME:
#   Name of the crate. This parameter is optional; if unspecified, it will
#   fall back to `${TARGET}`.
# - FEATURE_NAME:
#   Name of the Rust feature to enable.
#
# This function creates two targets:
# - "${TARGET}": an interface library target that contains the static library
#   built from Cargo.
# - "${TARGET}.cargo": an internal custom target that invokes Cargo.
#
# If you are going to use this static library from C/C++, you will need to
# write header files for the library (or generate them with cbindgen) and bind
# these headers with the interface library.
#
function(rust_static_library TARGET)
  fb_cmake_parse_args(ARG "" "CRATE;FEATURES" "" "${ARGN}")

  if(DEFINED ARG_CRATE)
    set(crate_name "${ARG_CRATE}")
  else()
    set(crate_name "${TARGET}")
  endif()
  if(DEFINED ARG_FEATURES)
    set(features --features ${ARG_FEATURES})
  else()
    set(features )
  endif()

  set(cargo_target "${TARGET}.cargo")
  set(target_dir $<IF:$<CONFIG:Debug>,debug,release>)
  set(staticlib_name "${CMAKE_STATIC_LIBRARY_PREFIX}${crate_name}${CMAKE_STATIC_LIBRARY_SUFFIX}")
  set(rust_staticlib "${CMAKE_CURRENT_BINARY_DIR}/${target_dir}/${staticlib_name}")

  if(DEFINED ARG_FEATURES)
    set(cargo_flags build $<IF:$<CONFIG:Debug>,,--release> -p ${crate_name} --features ${ARG_FEATURES})
  else()
    set(cargo_flags build $<IF:$<CONFIG:Debug>,,--release> -p ${crate_name})
  endif()

  if(USE_CARGO_VENDOR)
    set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}")
    set(cargo_flags ${cargo_flags})
  endif()

  add_custom_target(
    ${cargo_target}
    COMMAND
      "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock"
    COMMAND
      "${CMAKE_COMMAND}" -E env
      "CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR}"
      ${extra_cargo_env}
      ${CARGO_COMMAND}
      ${cargo_flags}
    COMMENT "Building Rust crate '${crate_name}'..."
    JOB_POOL rust_job_pool
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
    BYPRODUCTS
      "${CMAKE_CURRENT_BINARY_DIR}/debug/${staticlib_name}"
      "${CMAKE_CURRENT_BINARY_DIR}/release/${staticlib_name}"
  )

  add_library(${TARGET} INTERFACE)
  add_dependencies(${TARGET} ${cargo_target})
  set_target_properties(
    ${TARGET}
    PROPERTIES
      INTERFACE_STATICLIB_OUTPUT_PATH "${rust_staticlib}"
      INTERFACE_INSTALL_LIBNAME
      "${CMAKE_STATIC_LIBRARY_PREFIX}${crate_name}_rs${CMAKE_STATIC_LIBRARY_SUFFIX}"
  )
  target_link_libraries(
    ${TARGET}
    INTERFACE "$<BUILD_INTERFACE:${rust_staticlib}>"
  )
endfunction()

# This function instructs CMake to define a target that will use `cargo build`
# to build a bin crate referenced by the Cargo.toml file in the current source
# directory.
# It accepts a single `TARGET` parameter which will be passed as the package
# name to `cargo build -p TARGET`. If the binary has a different name than the
# package, use the optional BINARY_NAME parameter to override it.
# It also accepts a `FEATURES` parameter if you want to enable certain features
# in your Rust binary.
# The CMake target will be registered to build by default as part of the
# ALL target.
function(rust_executable TARGET)
  fb_cmake_parse_args(ARG "" "BINARY_NAME;FEATURES" "" "${ARGN}")

  set(crate_name "${TARGET}")
  set(cargo_target "${TARGET}.cargo")
  set(target_dir $<IF:$<CONFIG:Debug>,debug,release>)

  if(DEFINED ARG_BINARY_NAME)
    set(executable_name "${ARG_BINARY_NAME}${CMAKE_EXECUTABLE_SUFFIX}")
  else()
    set(executable_name "${crate_name}${CMAKE_EXECUTABLE_SUFFIX}")
  endif()
  if(DEFINED ARG_FEATURES)
    set(features --features ${ARG_FEATURES})
  else()
    set(features )
  endif()

  if(DEFINED ARG_FEATURES)
    set(cargo_flags build $<IF:$<CONFIG:Debug>,,--release> -p ${crate_name} --features ${ARG_FEATURES})
  else()
    set(cargo_flags build $<IF:$<CONFIG:Debug>,,--release> -p ${crate_name})
  endif()

  if(USE_CARGO_VENDOR)
    set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}")
    set(cargo_flags ${cargo_flags})
  endif()

  add_custom_target(
    ${cargo_target}
    ALL
    COMMAND
      "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock"
    COMMAND
      "${CMAKE_COMMAND}" -E env
      "CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR}"
      ${extra_cargo_env}
      ${CARGO_COMMAND}
      ${cargo_flags}
    COMMENT "Building Rust executable '${crate_name}'..."
    JOB_POOL rust_job_pool
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
    BYPRODUCTS
      "${CMAKE_CURRENT_BINARY_DIR}/debug/${executable_name}"
      "${CMAKE_CURRENT_BINARY_DIR}/release/${executable_name}"
  )

  set_property(
    TARGET "${cargo_target}"
    PROPERTY EXECUTABLE "${CMAKE_CURRENT_BINARY_DIR}/${target_dir}/${executable_name}"
  )
endfunction()

# This function can be used to install the executable generated by a prior
# call to the `rust_executable` function.
# It requires a `TARGET` parameter to identify the target to be installed,
# and an optional `DESTINATION` parameter to specify the installation
# directory. If DESTINATION is not specified then the `bin` directory
# will be assumed.
function(install_rust_executable TARGET)
  # Parse the arguments
  set(one_value_args DESTINATION)
  set(multi_value_args)
  fb_cmake_parse_args(
    ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
  )

  if(NOT DEFINED ARG_DESTINATION)
    set(ARG_DESTINATION bin)
  endif()

  get_target_property(foo "${TARGET}.cargo" EXECUTABLE)

  install(
    PROGRAMS "${foo}"
    DESTINATION "${ARG_DESTINATION}"
  )
endfunction()

# This function installs the interface target generated from the function
# `rust_static_library`. Use this function if you want to export your Rust
# target to external CMake targets.
#
# ```cmake
# install_rust_static_library(
#   <TARGET>
#   INSTALL_DIR <INSTALL_DIR>
#   [EXPORT <EXPORT_NAME>]
# )
# ```
#
# Parameters:
# - TARGET: Name of the Rust static library target.
# - EXPORT_NAME: Name of the exported target.
# - INSTALL_DIR: Path to the directory where this library will be installed.
#
function(install_rust_static_library TARGET)
  fb_cmake_parse_args(ARG "" "EXPORT;INSTALL_DIR" "" "${ARGN}")

  get_property(
    staticlib_output_path
    TARGET "${TARGET}"
    PROPERTY INTERFACE_STATICLIB_OUTPUT_PATH
  )
  get_property(
    staticlib_output_name
    TARGET "${TARGET}"
    PROPERTY INTERFACE_INSTALL_LIBNAME
  )

  if(NOT DEFINED staticlib_output_path)
    message(FATAL_ERROR "Not a rust_static_library target.")
  endif()

  if(NOT DEFINED ARG_INSTALL_DIR)
    message(FATAL_ERROR "Missing required argument INSTALL_DIR.")
  endif()

  if(DEFINED ARG_EXPORT)
    set(install_export_args EXPORT "${ARG_EXPORT}")
  endif()

  set(install_interface_dir "${ARG_INSTALL_DIR}")
  if(NOT IS_ABSOLUTE "${install_interface_dir}")
    set(install_interface_dir "\${_IMPORT_PREFIX}/${install_interface_dir}")
  endif()

  target_link_libraries(
    ${TARGET} INTERFACE
    "$<INSTALL_INTERFACE:${install_interface_dir}/${staticlib_output_name}>"
  )

  install(
    TARGETS ${TARGET}
    ${install_export_args}
    LIBRARY DESTINATION ${ARG_INSTALL_DIR}
  )
  install(
    FILES ${staticlib_output_path}
    RENAME ${staticlib_output_name}
    DESTINATION ${ARG_INSTALL_DIR}
  )
endfunction()
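As a usage illustration, a hypothetical consumer might wire a staticlib crate into a C++ target like this (the `my_crate` and `my_app` names are stand-ins; this assumes a Cargo.toml with `crate-type = ["staticlib"]` next to the CMakeLists.txt):

```cmake
# Hypothetical consumer fragment; not part of this module.
include(RustStaticLibrary)

# Builds lib<my_crate>.a via `cargo build -p my_crate` and wraps it in an
# INTERFACE target that C/C++ targets can link against.
rust_static_library(my_crate FEATURES some_feature)

add_executable(my_app main.cpp)
target_link_libraries(my_app PRIVATE my_crate)

# Optionally export/install the Rust library for downstream CMake projects.
install_rust_static_library(my_crate INSTALL_DIR lib EXPORT my_crate-exports)
```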
Python
hhvm/build/fbcode_builder/getdeps/builder.py
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import glob import json import os import pathlib import shutil import stat import subprocess import sys import typing from typing import Optional from .dyndeps import create_dyn_dep_munger from .envfuncs import add_path_entry, Env, path_search from .fetcher import copy_if_different from .runcmd import run_cmd if typing.TYPE_CHECKING: from .buildopts import BuildOptions class BuilderBase(object): def __init__( self, build_opts: "BuildOptions", ctx, manifest, src_dir, build_dir, inst_dir, env=None, final_install_prefix=None, ) -> None: self.env = Env() if env: self.env.update(env) subdir = manifest.get("build", "subdir", ctx=ctx) if subdir: src_dir = os.path.join(src_dir, subdir) self.patchfile = manifest.get("build", "patchfile", ctx=ctx) self.patchfile_opts = manifest.get("build", "patchfile_opts", ctx=ctx) or "" self.ctx = ctx self.src_dir = src_dir self.build_dir = build_dir or src_dir self.inst_dir = inst_dir self.build_opts = build_opts self.manifest = manifest self.final_install_prefix = final_install_prefix def _get_cmd_prefix(self): if self.build_opts.is_windows(): vcvarsall = self.build_opts.get_vcvars_path() if vcvarsall is not None: # Since it sets rather a large number of variables we mildly abuse # the cmd quoting rules to assemble a command that calls the script # to prep the environment and then triggers the actual command that # we wanted to run. return [vcvarsall, "amd64", "&&"] return [] def _run_cmd( self, cmd, cwd=None, env=None, use_cmd_prefix: bool = True, allow_fail: bool = False, ) -> int: if env: e = self.env.copy() e.update(env) env = e else: env = self.env if use_cmd_prefix: cmd_prefix = self._get_cmd_prefix() if cmd_prefix: cmd = cmd_prefix + cmd log_file = os.path.join(self.build_dir, "getdeps_build.log") return run_cmd( cmd=cmd, env=env, cwd=cwd or self.build_dir, log_file=log_file, allow_fail=allow_fail, ) def _reconfigure(self, reconfigure: bool) -> bool: if self.build_dir is not None: if not os.path.isdir(self.build_dir): os.makedirs(self.build_dir) reconfigure = True return reconfigure def _apply_patchfile(self) -> None: if self.patchfile is None: return patched_sentinel_file = pathlib.Path(self.src_dir + "/.getdeps_patched") if patched_sentinel_file.exists(): return old_wd = os.getcwd() os.chdir(self.src_dir) print(f"Patching {self.manifest.name} with {self.patchfile} in {self.src_dir}") patchfile = os.path.join( self.build_opts.fbcode_builder_dir, "patches", self.patchfile ) patchcmd = ["git", "apply"] if self.patchfile_opts: patchcmd.append(self.patchfile_opts) try: subprocess.check_call(patchcmd + [patchfile]) except subprocess.CalledProcessError: raise ValueError(f"Failed to apply patch to {self.manifest.name}") os.chdir(old_wd) patched_sentinel_file.touch() def prepare(self, install_dirs, reconfigure: bool) -> None: print("Preparing %s..." % self.manifest.name) reconfigure = self._reconfigure(reconfigure) self._apply_patchfile() self._prepare(install_dirs=install_dirs, reconfigure=reconfigure) def build(self, install_dirs, reconfigure: bool) -> None: print("Building %s..." 
% self.manifest.name) reconfigure = self._reconfigure(reconfigure) self._apply_patchfile() self._prepare(install_dirs=install_dirs, reconfigure=reconfigure) self._build(install_dirs=install_dirs, reconfigure=reconfigure) # On Windows, emit a wrapper script that can be used to run build artifacts # directly from the build directory, without installing them. On Windows $PATH # needs to be updated to include all of the directories containing the runtime # library dependencies in order to run the binaries. if self.build_opts.is_windows(): script_path = self.get_dev_run_script_path() dep_munger = create_dyn_dep_munger(self.build_opts, install_dirs) dep_dirs = self.get_dev_run_extra_path_dirs(install_dirs, dep_munger) # pyre-fixme[16]: Optional type has no attribute `emit_dev_run_script`. dep_munger.emit_dev_run_script(script_path, dep_dirs) @property def num_jobs(self) -> int: # This is a hack, but we don't have a "defaults manifest" that we can # customize per platform. # TODO: Introduce some sort of defaults config that can select by # platform, just like manifest contexts. if sys.platform.startswith("freebsd"): # clang on FreeBSD is quite memory-efficient. default_job_weight = 512 else: # 1.5 GiB is a lot to assume, but it's typical of Facebook-style C++. # Some manifests are even heavier and should override. default_job_weight = 1536 return self.build_opts.get_num_jobs( int( self.manifest.get( "build", "job_weight_mib", default_job_weight, ctx=self.ctx ) ) ) def run_tests( self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot ) -> None: """Execute any tests that we know how to run. If they fail, raise an exception.""" pass def _prepare(self, install_dirs, reconfigure) -> None: """Prepare the build. Useful when need to generate config, but builder is not the primary build system. e.g. cargo when called from cmake""" pass def _build(self, install_dirs, reconfigure) -> None: """Perform the build. install_dirs contains the list of installation directories for the dependencies of this project. 
reconfigure will be set to true if the fetcher determined that the sources have changed in such a way that the build system needs to regenerate its rules.""" pass def _compute_env(self, install_dirs): # CMAKE_PREFIX_PATH is only respected when passed through the # environment, so we construct an appropriate path to pass down return self.build_opts.compute_env_for_install_dirs( install_dirs, env=self.env, manifest=self.manifest ) def get_dev_run_script_path(self): assert self.build_opts.is_windows() return os.path.join(self.build_dir, "run.ps1") def get_dev_run_extra_path_dirs(self, install_dirs, dep_munger=None): assert self.build_opts.is_windows() if dep_munger is None: dep_munger = create_dyn_dep_munger(self.build_opts, install_dirs) return dep_munger.compute_dependency_paths(self.build_dir) class MakeBuilder(BuilderBase): def __init__( self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, build_args, install_args, test_args, ) -> None: super(MakeBuilder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) self.build_args = build_args or [] self.install_args = install_args or [] self.test_args = test_args @property def _make_binary(self): return self.manifest.get("build", "make_binary", "make", ctx=self.ctx) def _get_prefix(self): return ["PREFIX=" + self.inst_dir, "prefix=" + self.inst_dir] def _build(self, install_dirs, reconfigure) -> None: env = self._compute_env(install_dirs) # Need to ensure that PREFIX is set prior to install because # libbpf uses it when generating its pkg-config file. # The lowercase prefix is used by some projects. cmd = ( [self._make_binary, "-j%s" % self.num_jobs] + self.build_args + self._get_prefix() ) self._run_cmd(cmd, env=env) install_cmd = [self._make_binary] + self.install_args + self._get_prefix() self._run_cmd(install_cmd, env=env) # bz2's Makefile doesn't install its .so properly if self.manifest and self.manifest.name == "bz2": libdir = os.path.join(self.inst_dir, "lib") srcpattern = os.path.join(self.src_dir, "lib*.so.*") print(f"copying to {libdir} from {srcpattern}") for file in glob.glob(srcpattern): shutil.copy(file, libdir) def run_tests( self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot ) -> None: if not self.test_args: return env = self._compute_env(install_dirs) cmd = [self._make_binary] + self.test_args + self._get_prefix() self._run_cmd(cmd, env=env) class CMakeBootStrapBuilder(MakeBuilder): def _build(self, install_dirs, reconfigure) -> None: self._run_cmd( [ "./bootstrap", "--prefix=" + self.inst_dir, f"--parallel={self.num_jobs}", ] ) super(CMakeBootStrapBuilder, self)._build(install_dirs, reconfigure) class AutoconfBuilder(BuilderBase): def __init__( self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, args, conf_env_args, ) -> None: super(AutoconfBuilder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) self.args = args or [] self.conf_env_args = conf_env_args or {} @property def _make_binary(self): return self.manifest.get("build", "make_binary", "make", ctx=self.ctx) def _build(self, install_dirs, reconfigure) -> None: configure_path = os.path.join(self.src_dir, "configure") autogen_path = os.path.join(self.src_dir, "autogen.sh") env = self._compute_env(install_dirs) # Some configure scripts need additional env values passed derived from cmds for (k, cmd_args) in self.conf_env_args.items(): out = ( subprocess.check_output(cmd_args, env=dict(env.items())) .decode("utf-8") .strip() ) if out: env.set(k, out) if not 
os.path.exists(configure_path): print("%s doesn't exist, so reconfiguring" % configure_path) # This libtoolize call is a bit gross; the issue is that # `autoreconf` as invoked by libsodium's `autogen.sh` doesn't # seem to realize that it should invoke libtoolize and then # error out when the configure script references a libtool # related symbol. self._run_cmd(["libtoolize"], cwd=self.src_dir, env=env) # We generally prefer to call the `autogen.sh` script provided # by the project on the basis that it may know more than plain # autoreconf does. if os.path.exists(autogen_path): self._run_cmd(["bash", autogen_path], cwd=self.src_dir, env=env) else: self._run_cmd(["autoreconf", "-ivf"], cwd=self.src_dir, env=env) configure_cmd = [configure_path, "--prefix=" + self.inst_dir] + self.args self._run_cmd(configure_cmd, env=env) self._run_cmd([self._make_binary, "-j%s" % self.num_jobs], env=env) self._run_cmd([self._make_binary, "install"], env=env) class Iproute2Builder(BuilderBase): # ./configure --prefix does not work for iproute2. # Thus, explicitly copy sources from src_dir to build_dir, build, # and then install to inst_dir using DESTDIR # lastly, also copy include from build_dir to inst_dir def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir) -> None: super(Iproute2Builder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) def _patch(self) -> None: # FBOSS build currently depends on an old version of iproute2 (commit # 7ca63aef7d1b0c808da0040c6b366ef7a61f38c1). This is missing a commit # (ae717baf15fb4d30749ada3948d9445892bac239) needed to build iproute2 # successfully. Apply it viz.: include stdint.h # Reference: https://fburl.com/ilx9g5xm with open(self.build_dir + "/tc/tc_core.c", "r") as f: data = f.read() with open(self.build_dir + "/tc/tc_core.c", "w") as f: f.write("#include <stdint.h>\n") f.write(data) def _build(self, install_dirs, reconfigure) -> None: configure_path = os.path.join(self.src_dir, "configure") env = self.env.copy() self._run_cmd([configure_path], env=env) shutil.rmtree(self.build_dir) shutil.copytree(self.src_dir, self.build_dir) self._patch() self._run_cmd(["make", "-j%s" % self.num_jobs], env=env) install_cmd = ["make", "install", "DESTDIR=" + self.inst_dir] for d in ["include", "lib"]: if not os.path.isdir(os.path.join(self.inst_dir, d)): shutil.copytree( os.path.join(self.build_dir, d), os.path.join(self.inst_dir, d) ) self._run_cmd(install_cmd, env=env) class CMakeBuilder(BuilderBase): MANUAL_BUILD_SCRIPT = """\ #!{sys.executable} import argparse import subprocess import sys CMAKE = {cmake!r} CTEST = {ctest!r} SRC_DIR = {src_dir!r} BUILD_DIR = {build_dir!r} INSTALL_DIR = {install_dir!r} CMD_PREFIX = {cmd_prefix!r} CMAKE_ENV = {env_str} CMAKE_DEFINE_ARGS = {define_args_str} def get_jobs_argument(num_jobs_arg: int) -> str: if num_jobs_arg > 0: return "-j" + str(num_jobs_arg) import multiprocessing num_jobs = multiprocessing.cpu_count() // 2 return "-j" + str(num_jobs) def main(): ap = argparse.ArgumentParser() ap.add_argument( "cmake_args", nargs=argparse.REMAINDER, help='Any extra arguments after an "--" argument will be passed ' "directly to CMake." ) ap.add_argument( "--mode", choices=["configure", "build", "install", "test"], default="configure", help="The mode to run: configure, build, or install. 
" "Defaults to configure", ) ap.add_argument( "--build", action="store_const", const="build", dest="mode", help="An alias for --mode=build", ) ap.add_argument( "-j", "--num-jobs", action="store", type=int, default=0, help="Run the build or tests with the specified number of parallel jobs", ) ap.add_argument( "--install", action="store_const", const="install", dest="mode", help="An alias for --mode=install", ) ap.add_argument( "--test", action="store_const", const="test", dest="mode", help="An alias for --mode=test", ) args = ap.parse_args() # Strip off a leading "--" from the additional CMake arguments if args.cmake_args and args.cmake_args[0] == "--": args.cmake_args = args.cmake_args[1:] env = CMAKE_ENV if args.mode == "configure": full_cmd = CMD_PREFIX + [CMAKE, SRC_DIR] + CMAKE_DEFINE_ARGS + args.cmake_args elif args.mode in ("build", "install"): target = "all" if args.mode == "build" else "install" full_cmd = CMD_PREFIX + [ CMAKE, "--build", BUILD_DIR, "--target", target, "--config", "Release", get_jobs_argument(args.num_jobs), ] + args.cmake_args elif args.mode == "test": full_cmd = CMD_PREFIX + [ {dev_run_script}CTEST, "--output-on-failure", get_jobs_argument(args.num_jobs), ] + args.cmake_args else: ap.error("unknown invocation mode: %s" % (args.mode,)) cmd_str = " ".join(full_cmd) print("Running: %r" % (cmd_str,)) proc = subprocess.run(full_cmd, env=env, cwd=BUILD_DIR) sys.exit(proc.returncode) if __name__ == "__main__": main() """ def __init__( self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, defines, loader=None, final_install_prefix=None, extra_cmake_defines=None, cmake_target="install", ) -> None: super(CMakeBuilder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir, final_install_prefix=final_install_prefix, ) self.defines = defines or {} if extra_cmake_defines: self.defines.update(extra_cmake_defines) self.cmake_target = cmake_target try: from .facebook.vcvarsall import extra_vc_cmake_defines except ImportError: pass else: self.defines.update(extra_vc_cmake_defines) self.loader = loader if build_opts.shared_libs: self.defines["BUILD_SHARED_LIBS"] = "ON" def _invalidate_cache(self) -> None: for name in [ "CMakeCache.txt", "CMakeFiles/CMakeError.log", "CMakeFiles/CMakeOutput.log", ]: name = os.path.join(self.build_dir, name) if os.path.isdir(name): shutil.rmtree(name) elif os.path.exists(name): os.unlink(name) def _needs_reconfigure(self) -> bool: for name in ["CMakeCache.txt", "build.ninja"]: name = os.path.join(self.build_dir, name) if not os.path.exists(name): return True return False def _write_build_script(self, **kwargs) -> None: env_lines = [" {!r}: {!r},".format(k, v) for k, v in kwargs["env"].items()] kwargs["env_str"] = "\n".join(["{"] + env_lines + ["}"]) if self.build_opts.is_windows(): kwargs["dev_run_script"] = '"powershell.exe", {!r}, '.format( self.get_dev_run_script_path() ) else: kwargs["dev_run_script"] = "" define_arg_lines = ["["] for arg in kwargs["define_args"]: # Replace the CMAKE_INSTALL_PREFIX argument to use the INSTALL_DIR # variable that we define in the MANUAL_BUILD_SCRIPT code. if arg.startswith("-DCMAKE_INSTALL_PREFIX="): value = " {!r}.format(INSTALL_DIR),".format( "-DCMAKE_INSTALL_PREFIX={}" ) else: value = " {!r},".format(arg) define_arg_lines.append(value) define_arg_lines.append("]") kwargs["define_args_str"] = "\n".join(define_arg_lines) # In order to make it easier for developers to manually run builds for # CMake-based projects, write out some build scripts that can be used to invoke # CMake manually. 
build_script_path = os.path.join(self.build_dir, "run_cmake.py") script_contents = self.MANUAL_BUILD_SCRIPT.format(**kwargs) with open(build_script_path, "wb") as f: f.write(script_contents.encode()) os.chmod(build_script_path, 0o755) def _compute_cmake_define_args(self, env): defines = { "CMAKE_INSTALL_PREFIX": self.final_install_prefix or self.inst_dir, "BUILD_SHARED_LIBS": "OFF", # Some of the deps (rsocket) default to UBSAN enabled if left # unspecified. Some of the deps fail to compile in release mode # due to warning->error promotion. RelWithDebInfo is the happy # medium. "CMAKE_BUILD_TYPE": "RelWithDebInfo", } if "SANDCASTLE" not in os.environ: # We sometimes see intermittent ccache related breakages on some # of the FB internal CI hosts, so we prefer to disable ccache # when running in that environment. ccache = path_search(env, "ccache") if ccache: defines["CMAKE_CXX_COMPILER_LAUNCHER"] = ccache else: # rocksdb does its own probing for ccache. # Ensure that it is disabled on sandcastle env["CCACHE_DISABLE"] = "1" # Some sandcastle hosts have broken ccache related dirs, and # even though we've asked for it to be disabled ccache is # still invoked by rocksdb's cmake. # Redirect its config directory to somewhere that is guaranteed # fresh to us, and that won't have any ccache data inside. env["CCACHE_DIR"] = f"{self.build_opts.scratch_dir}/ccache" if "GITHUB_ACTIONS" in os.environ and self.build_opts.is_windows(): # GitHub actions: the host has both gcc and msvc installed, and # the default behavior of cmake is to prefer gcc. # Instruct cmake that we want it to use cl.exe; this is important # because Boost prefers cl.exe and the mismatch results in cmake # with gcc not being able to find boost built with cl.exe. defines["CMAKE_C_COMPILER"] = "cl.exe" defines["CMAKE_CXX_COMPILER"] = "cl.exe" if self.build_opts.is_darwin(): # Try to persuade cmake to set the rpath to match the lib # dirs of the dependencies. This isn't automatic, and to # make things more interesting, cmake uses `;` as the path # separator, so translate the runtime path to something # that cmake will parse defines["CMAKE_INSTALL_RPATH"] = ";".join( env.get("DYLD_LIBRARY_PATH", "").split(":") ) # Tell cmake that we want to set the rpath in the tree # at build time. Without this the rpath is only set # at the moment that the binaries are installed. That # default is problematic for example when using the # gtest integration in cmake which runs the built test # executables during the build to discover the set of # tests. 
defines["CMAKE_BUILD_WITH_INSTALL_RPATH"] = "ON" boost_169_is_required = False if self.loader: for m in self.loader.manifests_in_dependency_order(): preinstalled = m.get_section_as_dict("preinstalled.env", self.ctx) boost_169_is_required = "BOOST_ROOT_1_69_0" in preinstalled.keys() if boost_169_is_required: break if ( boost_169_is_required and self.build_opts.allow_system_packages and self.build_opts.host_type.get_package_manager() and self.build_opts.host_type.get_package_manager() == "rpm" ): # Boost 1.69 rpms don't install cmake config to the system, so to point to them explicitly defines["BOOST_INCLUDEDIR"] = "/usr/include/boost169" defines["BOOST_LIBRARYDIR"] = "/usr/lib64/boost169" defines.update(self.defines) define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()] # if self.build_opts.is_windows(): # define_args += ["-G", "Visual Studio 15 2017 Win64"] define_args += ["-G", "Ninja"] return define_args def _build(self, install_dirs, reconfigure: bool) -> None: reconfigure = reconfigure or self._needs_reconfigure() env = self._compute_env(install_dirs) if not self.build_opts.is_windows() and self.final_install_prefix: env["DESTDIR"] = self.inst_dir # Resolve the cmake that we installed cmake = path_search(env, "cmake") if cmake is None: raise Exception("Failed to find CMake") if reconfigure: define_args = self._compute_cmake_define_args(env) self._write_build_script( cmd_prefix=self._get_cmd_prefix(), cmake=cmake, ctest=path_search(env, "ctest"), env=env, define_args=define_args, src_dir=self.src_dir, build_dir=self.build_dir, install_dir=self.inst_dir, sys=sys, ) self._invalidate_cache() self._run_cmd([cmake, self.src_dir] + define_args, env=env) self._run_cmd( [ cmake, "--build", self.build_dir, "--target", self.cmake_target, "--config", "Release", "-j", str(self.num_jobs), ], env=env, ) def run_tests( self, install_dirs, schedule_type, owner, test_filter, retry: int, no_testpilot ) -> None: env = self._compute_env(install_dirs) ctest = path_search(env, "ctest") cmake = path_search(env, "cmake") def require_command(path: Optional[str], name: str) -> str: if path is None: raise RuntimeError("unable to find command `{}`".format(name)) return path # On Windows, we also need to update $PATH to include the directories that # contain runtime library dependencies. This is not needed on other platforms # since CMake will emit RPATH properly in the binary so they can find these # dependencies. if self.build_opts.is_windows(): path_entries = self.get_dev_run_extra_path_dirs(install_dirs) path = env.get("PATH") if path: path_entries.insert(0, path) env["PATH"] = ";".join(path_entries) # Don't use the cmd_prefix when running tests. This is vcvarsall.bat on # Windows. vcvarsall.bat is only needed for the build, not tests. It # unfortunately fails if invoked with a long PATH environment variable when # running the tests. use_cmd_prefix = False def get_property(test, propname, defval=None): """extracts a named property from a cmake test info json blob. The properties look like: [{"name": "WORKING_DIRECTORY"}, {"value": "something"}] We assume that it is invalid for the same named property to be listed more than once. 
""" props = test.get("properties", []) for p in props: if p.get("name", None) == propname: return p.get("value", defval) return defval def list_tests(): output = subprocess.check_output( [require_command(ctest, "ctest"), "--show-only=json-v1"], env=env, cwd=self.build_dir, ) try: data = json.loads(output.decode("utf-8")) except ValueError as exc: raise Exception( "Failed to decode cmake test info using %s: %s. Output was: %r" % (ctest, str(exc), output) ) tests = [] machine_suffix = self.build_opts.host_type.as_tuple_string() for test in data["tests"]: working_dir = get_property(test, "WORKING_DIRECTORY") labels = [] machine_suffix = self.build_opts.host_type.as_tuple_string() labels.append("tpx-fb-test-type=3") labels.append("tpx_test_config::buildsystem=getdeps") labels.append("tpx_test_config::platform={}".format(machine_suffix)) if get_property(test, "DISABLED"): labels.append("disabled") command = test["command"] if working_dir: command = [ require_command(cmake, "cmake"), "-E", "chdir", working_dir, ] + command import os tests.append( { "type": "custom", "target": "%s-%s-getdeps-%s" % (self.manifest.name, test["name"], machine_suffix), "command": command, "labels": labels, "env": {}, "required_paths": [], "contacts": [], "cwd": os.getcwd(), } ) return tests if schedule_type == "continuous" or schedule_type == "testwarden": # for continuous and testwarden runs, disabling retry can give up # better signals for flaky tests. retry = 0 tpx = path_search(env, "tpx") if tpx and not no_testpilot: buck_test_info = list_tests() import os from .facebook.testinfra import start_run buck_test_info_name = os.path.join(self.build_dir, ".buck-test-info.json") with open(buck_test_info_name, "w") as f: json.dump(buck_test_info, f) env.set("http_proxy", "") env.set("https_proxy", "") runs = [] from sys import platform with start_run(env["FBSOURCE_HASH"]) as run_id: testpilot_args = [ tpx, "--force-local-execution", "--buck-test-info", buck_test_info_name, "--retry=%d" % retry, "-j=%s" % str(self.num_jobs), "--print-long-results", ] if owner: testpilot_args += ["--contacts", owner] if env: testpilot_args.append("--env") testpilot_args.extend(f"{key}={val}" for key, val in env.items()) if run_id is not None: testpilot_args += ["--run-id", run_id] if test_filter: testpilot_args += ["--", test_filter] if schedule_type == "diff": runs.append(["--collection", "oss-diff", "--purpose", "diff"]) elif schedule_type == "continuous": runs.append( [ "--tag-new-tests", "--collection", "oss-continuous", "--purpose", "continuous", ] ) elif schedule_type == "testwarden": # One run to assess new tests runs.append( [ "--tag-new-tests", "--collection", "oss-new-test-stress", "--stress-runs", "10", "--purpose", "stress-run-new-test", ] ) # And another for existing tests runs.append( [ "--tag-new-tests", "--collection", "oss-existing-test-stress", "--stress-runs", "10", "--purpose", "stress-run", ] ) else: runs.append([]) for run in runs: self._run_cmd( testpilot_args + run, cwd=self.build_opts.fbcode_builder_dir, env=env, use_cmd_prefix=use_cmd_prefix, ) else: args = [ require_command(ctest, "ctest"), "--output-on-failure", "-j", str(self.num_jobs), ] if test_filter: args += ["-R", test_filter] count = 0 while count <= retry: retcode = self._run_cmd( args, env=env, use_cmd_prefix=use_cmd_prefix, allow_fail=True ) if retcode == 0: break if count == 0: # Only add this option in the second run. args += ["--rerun-failed"] count += 1 # pyre-fixme[61]: `retcode` is undefined, or not always defined. 
if retcode != 0: # Allow except clause in getdeps.main to catch and exit gracefully # This allows non-testpilot runs to fail through the same logic as failed testpilot runs, which may become handy in case if post test processing is needed in the future # pyre-fixme[61]: `retcode` is undefined, or not always defined. raise subprocess.CalledProcessError(retcode, args) class NinjaBootstrap(BuilderBase): def __init__(self, build_opts, ctx, manifest, build_dir, src_dir, inst_dir) -> None: super(NinjaBootstrap, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) def _build(self, install_dirs, reconfigure) -> None: self._run_cmd([sys.executable, "configure.py", "--bootstrap"], cwd=self.src_dir) src_ninja = os.path.join(self.src_dir, "ninja") dest_ninja = os.path.join(self.inst_dir, "bin/ninja") bin_dir = os.path.dirname(dest_ninja) if not os.path.exists(bin_dir): os.makedirs(bin_dir) shutil.copyfile(src_ninja, dest_ninja) shutil.copymode(src_ninja, dest_ninja) class OpenSSLBuilder(BuilderBase): def __init__(self, build_opts, ctx, manifest, build_dir, src_dir, inst_dir) -> None: super(OpenSSLBuilder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) def _build(self, install_dirs, reconfigure) -> None: configure = os.path.join(self.src_dir, "Configure") # prefer to resolve the perl that we installed from # our manifest on windows, but fall back to the system # path on eg: darwin env = self.env.copy() for d in install_dirs: bindir = os.path.join(d, "bin") add_path_entry(env, "PATH", bindir, append=False) perl = typing.cast(str, path_search(env, "perl", "perl")) make_j_args = [] if self.build_opts.is_windows(): make = "nmake.exe" args = ["VC-WIN64A-masm", "-utf-8"] elif self.build_opts.is_darwin(): make = "make" make_j_args = ["-j%s" % self.num_jobs] args = ( ["darwin64-x86_64-cc"] if not self.build_opts.is_arm() else ["darwin64-arm64-cc"] ) elif self.build_opts.is_linux(): make = "make" make_j_args = ["-j%s" % self.num_jobs] args = ( ["linux-x86_64"] if not self.build_opts.is_arm() else ["linux-aarch64"] ) else: raise Exception("don't know how to build openssl for %r" % self.ctx) self._run_cmd( [ perl, configure, "--prefix=%s" % self.inst_dir, "--openssldir=%s" % self.inst_dir, ] + args + [ "enable-static-engine", "enable-capieng", "no-makedepend", "no-unit-test", "no-tests", ] ) make_build = [make] + make_j_args self._run_cmd(make_build) make_install = [make, "install_sw", "install_ssldirs"] self._run_cmd(make_install) class Boost(BuilderBase): def __init__( self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, b2_args ) -> None: children = os.listdir(src_dir) assert len(children) == 1, "expected a single directory entry: %r" % (children,) boost_src = children[0] assert boost_src.startswith("boost") src_dir = os.path.join(src_dir, children[0]) super(Boost, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) self.b2_args = b2_args def _build(self, install_dirs, reconfigure) -> None: env = self._compute_env(install_dirs) linkage = ["static"] if self.build_opts.is_windows() or self.build_opts.shared_libs: linkage.append("shared") args = [] if self.build_opts.is_darwin(): clang = subprocess.check_output(["xcrun", "--find", "clang"]) user_config = os.path.join(self.build_dir, "project-config.jam") with open(user_config, "w") as jamfile: jamfile.write("using clang : : %s ;\n" % clang.decode().strip()) args.append("--user-config=%s" % user_config) for link in linkage: bootstrap_args = self.manifest.get_section_as_args( 
"bootstrap.args", self.ctx ) if self.build_opts.is_windows(): bootstrap = os.path.join(self.src_dir, "bootstrap.bat") self._run_cmd([bootstrap] + bootstrap_args, cwd=self.src_dir, env=env) args += ["address-model=64"] else: bootstrap = os.path.join(self.src_dir, "bootstrap.sh") self._run_cmd( [bootstrap, "--prefix=%s" % self.inst_dir] + bootstrap_args, cwd=self.src_dir, env=env, ) b2 = os.path.join(self.src_dir, "b2") self._run_cmd( [ b2, "-j%s" % self.num_jobs, "--prefix=%s" % self.inst_dir, "--builddir=%s" % self.build_dir, ] + args + self.b2_args + [ "link=%s" % link, "runtime-link=shared", "variant=release", "threading=multi", "debug-symbols=on", "visibility=global", "-d2", "install", ], cwd=self.src_dir, env=env, ) class NopBuilder(BuilderBase): def __init__(self, build_opts, ctx, manifest, src_dir, inst_dir) -> None: super(NopBuilder, self).__init__( build_opts, ctx, manifest, src_dir, None, inst_dir ) def build(self, install_dirs, reconfigure: bool) -> None: print("Installing %s -> %s" % (self.src_dir, self.inst_dir)) parent = os.path.dirname(self.inst_dir) if not os.path.exists(parent): os.makedirs(parent) install_files = self.manifest.get_section_as_ordered_pairs( "install.files", self.ctx ) if install_files: for src_name, dest_name in self.manifest.get_section_as_ordered_pairs( "install.files", self.ctx ): full_dest = os.path.join(self.inst_dir, dest_name) full_src = os.path.join(self.src_dir, src_name) dest_parent = os.path.dirname(full_dest) if not os.path.exists(dest_parent): os.makedirs(dest_parent) if os.path.isdir(full_src): if not os.path.exists(full_dest): shutil.copytree(full_src, full_dest) else: shutil.copyfile(full_src, full_dest) shutil.copymode(full_src, full_dest) # This is a bit gross, but the mac ninja.zip doesn't # give ninja execute permissions, so force them on # for things that look like they live in a bin dir if os.path.dirname(dest_name) == "bin": st = os.lstat(full_dest) os.chmod(full_dest, st.st_mode | stat.S_IXUSR) else: if not os.path.exists(self.inst_dir): shutil.copytree(self.src_dir, self.inst_dir) class SqliteBuilder(BuilderBase): def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir) -> None: super(SqliteBuilder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) def _build(self, install_dirs, reconfigure) -> None: for f in ["sqlite3.c", "sqlite3.h", "sqlite3ext.h"]: src = os.path.join(self.src_dir, f) dest = os.path.join(self.build_dir, f) copy_if_different(src, dest) cmake_lists = """ cmake_minimum_required(VERSION 3.1.3 FATAL_ERROR) project(sqlite3 C) add_library(sqlite3 STATIC sqlite3.c) # These options are taken from the defaults in Makefile.msc in # the sqlite distribution target_compile_definitions(sqlite3 PRIVATE -DSQLITE_ENABLE_COLUMN_METADATA=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_GEOPOLY=1 -DSQLITE_ENABLE_JSON1=1 -DSQLITE_ENABLE_STMTVTAB=1 -DSQLITE_ENABLE_DBPAGE_VTAB=1 -DSQLITE_ENABLE_DBSTAT_VTAB=1 -DSQLITE_INTROSPECTION_PRAGMAS=1 -DSQLITE_ENABLE_DESERIALIZE=1 ) install(TARGETS sqlite3) install(FILES sqlite3.h sqlite3ext.h DESTINATION include) """ with open(os.path.join(self.build_dir, "CMakeLists.txt"), "w") as f: f.write(cmake_lists) defines = { "CMAKE_INSTALL_PREFIX": self.inst_dir, "BUILD_SHARED_LIBS": "ON" if self.build_opts.shared_libs else "OFF", "CMAKE_BUILD_TYPE": "RelWithDebInfo", } define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()] define_args += ["-G", "Ninja"] env = self._compute_env(install_dirs) # Resolve the cmake that we installed cmake = 
path_search(env, "cmake") self._run_cmd([cmake, self.build_dir] + define_args, env=env) self._run_cmd( [ cmake, "--build", self.build_dir, "--target", "install", "--config", "Release", "-j", str(self.num_jobs), ], env=env, )
Python
hhvm/build/fbcode_builder/getdeps/buildopts.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import errno
import glob
import ntpath
import os
import subprocess
import sys
import tempfile
from typing import Mapping, Optional

from .copytree import containing_repo_type
from .envfuncs import add_flag, add_path_entry, Env
from .fetcher import get_fbsource_repo_data, homebrew_package_prefix
from .manifest import ContextGenerator
from .platform import get_available_ram, HostType, is_windows


def detect_project(path):
    repo_type, repo_root = containing_repo_type(path)
    if repo_type is None:
        return None, None

    # Look for a .projectid file. If it exists, read the project name from it.
    project_id_path = os.path.join(repo_root, ".projectid")
    try:
        with open(project_id_path, "r") as f:
            project_name = f.read().strip()
            return repo_root, project_name
    except EnvironmentError as ex:
        if ex.errno != errno.ENOENT:
            raise

    return repo_root, None


class BuildOptions(object):
    def __init__(
        self,
        fbcode_builder_dir,
        scratch_dir,
        host_type,
        install_dir=None,
        num_jobs: int = 0,
        use_shipit: bool = False,
        vcvars_path=None,
        allow_system_packages: bool = False,
        lfs_path=None,
        shared_libs: bool = False,
        facebook_internal=None,
    ) -> None:
        """fbcode_builder_dir - the path to either the in-fbsource
                      fbcode_builder dir, or for shipit-transformed repos,
                      the build dir that has been mapped into that dir.
        scratch_dir - a place where we can store repos and build bits.
                      This path should be stable across runs and ideally
                      should not be in the repo of the project being built,
                      but that is ultimately where we generally fall back
                      for builds outside of FB
        install_dir - where the project will ultimately be installed
        num_jobs - the level of concurrency to use while building
        use_shipit - use real shipit instead of the simple shipit transformer
        vcvars_path - Path to external VS toolchain's vcvarsall.bat
        shared_libs - whether to build shared libraries
        """
        if not install_dir:
            install_dir = os.path.join(scratch_dir, "installed")

        self.project_hashes = None
        for p in ["../deps/github_hashes", "../project_hashes"]:
            hashes = os.path.join(fbcode_builder_dir, p)
            if os.path.exists(hashes):
                self.project_hashes = hashes
                break

        # Detect what repository and project we are being run from.
        self.repo_root, self.repo_project = detect_project(os.getcwd())

        # If we are running from an fbsource repository, set self.fbsource_dir
        # to allow the ShipIt-based fetchers to use it.
        if self.repo_project == "fbsource":
            self.fbsource_dir: Optional[str] = self.repo_root
        else:
            self.fbsource_dir = None

        if facebook_internal is None:
            if self.fbsource_dir:
                facebook_internal = True
            else:
                facebook_internal = False

        self.facebook_internal = facebook_internal
        self.specified_num_jobs = num_jobs
        self.scratch_dir = scratch_dir
        self.install_dir = install_dir
        self.fbcode_builder_dir = fbcode_builder_dir
        self.host_type = host_type
        self.use_shipit = use_shipit
        self.allow_system_packages = allow_system_packages
        self.lfs_path = lfs_path
        self.shared_libs = shared_libs

        lib_path = None
        if self.is_darwin():
            lib_path = "DYLD_LIBRARY_PATH"
        elif self.is_linux():
            lib_path = "LD_LIBRARY_PATH"
        elif self.is_windows():
            lib_path = "PATH"
        else:
            lib_path = None
        self.lib_path = lib_path

        if vcvars_path is None and is_windows():
            try:
                # Allow a site-specific vcvarsall path.
                from .facebook.vcvarsall import build_default_vcvarsall
            except ImportError:
                vcvarsall = []
            else:
                vcvarsall = (
                    build_default_vcvarsall(self.fbsource_dir)
                    if self.fbsource_dir is not None
                    else []
                )

            # On Windows, the compiler is not available in the PATH by
            # default so we need to run the vcvarsall script to populate the
            # environment. We use a glob to find some version of this script
            # as deployed with Visual Studio 2017. This logic can also
            # locate Visual Studio 2019 but note that at the time of writing
            # the version of boost in our manifest cannot be built with
            # VS 2019, so we're effectively tied to VS 2017 until we upgrade
            # the boost dependency.
            for year in ["2017", "2019"]:
                vcvarsall += glob.glob(
                    os.path.join(
                        os.environ["ProgramFiles(x86)"],
                        "Microsoft Visual Studio",
                        year,
                        "*",
                        "VC",
                        "Auxiliary",
                        "Build",
                        "vcvarsall.bat",
                    )
                )
            vcvars_path = vcvarsall[0]

        self.vcvars_path = vcvars_path

    @property
    def manifests_dir(self):
        return os.path.join(self.fbcode_builder_dir, "manifests")

    def is_darwin(self):
        return self.host_type.is_darwin()

    def is_windows(self):
        return self.host_type.is_windows()

    def is_arm(self):
        return self.host_type.is_arm()

    def get_vcvars_path(self):
        return self.vcvars_path

    def is_linux(self):
        return self.host_type.is_linux()

    def is_freebsd(self):
        return self.host_type.is_freebsd()

    def get_num_jobs(self, job_weight: int) -> int:
        """Given an estimated job_weight in MiB, compute a reasonable
        concurrency limit."""
        if self.specified_num_jobs:
            return self.specified_num_jobs

        available_ram = get_available_ram()

        import multiprocessing

        return max(1, min(multiprocessing.cpu_count(), available_ram // job_weight))

    def get_context_generator(self, host_tuple=None):
        """Create a manifest ContextGenerator for the specified target platform."""
        if host_tuple is None:
            host_type = self.host_type
        elif isinstance(host_tuple, HostType):
            host_type = host_tuple
        else:
            host_type = HostType.from_tuple_string(host_tuple)

        return ContextGenerator(
            {
                "os": host_type.ostype,
                "distro": host_type.distro,
                "distro_vers": host_type.distrovers,
                "fb": "on" if self.facebook_internal else "off",
                "fbsource": "on" if self.fbsource_dir else "off",
                "test": "off",
                "shared_libs": "on" if self.shared_libs else "off",
            }
        )

    def compute_env_for_install_dirs(
        self, install_dirs, env=None, manifest=None
    ):  # noqa: C901
        if env is not None:
            env = env.copy()
        else:
            env = Env()

        env["GETDEPS_BUILD_DIR"] = os.path.join(self.scratch_dir, "build")
        env["GETDEPS_INSTALL_DIR"] = self.install_dir

        # Python setuptools attempts to discover a local MSVC for
        # building Python extensions. On Windows, getdeps already
        # supports invoking a vcvarsall prior to compilation.
        #
        # Tell setuptools to bypass its own search. This fixes a bug
        # where setuptools would fail when run from CMake on GitHub
        # Actions with the inscrutable message 'error: Microsoft
        # Visual C++ 14.0 is required. Get it with "Build Tools for
        # Visual Studio"'. I suspect the actual error is that the
        # environment or PATH is overflowing.
        #
        # For extra credit, someone could patch setuptools to
        # propagate the actual error message from vcvarsall, because
        # often it does not mean Visual C++ is not available.
        #
        # Related discussions:
        # - https://github.com/pypa/setuptools/issues/2028
        # - https://github.com/pypa/setuptools/issues/2307
        # - https://developercommunity.visualstudio.com/t/error-microsoft-visual-c-140-is-required/409173
        # - https://github.com/OpenMS/OpenMS/pull/4779
        # - https://github.com/actions/virtual-environments/issues/1484
        if self.is_windows() and self.get_vcvars_path():
            env["DISTUTILS_USE_SDK"] = "1"

        # On macOS we need to set `SDKROOT` when we use clang for system
        # header files.
        if self.is_darwin() and "SDKROOT" not in env:
            sdkroot = subprocess.check_output(["xcrun", "--show-sdk-path"])
            env["SDKROOT"] = sdkroot.decode().strip()

        if (
            self.is_darwin()
            and self.allow_system_packages
            and self.host_type.get_package_manager() == "homebrew"
            and manifest
            and manifest.resolved_system_packages
        ):
            # Homebrew packages may not be on the default PATHs
            brew_packages = manifest.resolved_system_packages.get("homebrew", [])
            for p in brew_packages:
                found = self.add_homebrew_package_to_env(p, env)
                # Try extra hard to find openssl, needed with homebrew on macOS
                if found and p.startswith("openssl"):
                    candidate = homebrew_package_prefix("openssl@1.1")
                    if os.path.exists(candidate):
                        os.environ["OPENSSL_ROOT_DIR"] = candidate
                        env["OPENSSL_ROOT_DIR"] = os.environ["OPENSSL_ROOT_DIR"]

        if self.fbsource_dir:
            env["YARN_YARN_OFFLINE_MIRROR"] = os.path.join(
                self.fbsource_dir, "xplat/third-party/yarn/offline-mirror"
            )
            yarn_exe = "yarn.bat" if self.is_windows() else "yarn"
            env["YARN_PATH"] = os.path.join(
                self.fbsource_dir, "xplat/third-party/yarn/", yarn_exe
            )
            node_exe = "node-win-x64.exe" if self.is_windows() else "node"
            env["NODE_BIN"] = os.path.join(
                self.fbsource_dir, "xplat/third-party/node/bin/", node_exe
            )
            env["RUST_VENDORED_CRATES_DIR"] = os.path.join(
                self.fbsource_dir, "third-party/rust/vendor"
            )
            hash_data = get_fbsource_repo_data(self)
            env["FBSOURCE_HASH"] = hash_data.hash
            env["FBSOURCE_DATE"] = hash_data.date

        # reverse as we are prepending to the PATHs
        for d in reversed(install_dirs):
            self.add_prefix_to_env(d, env, append=False)

        # Linux is always system openssl
        system_openssl = self.is_linux()

        # For other systems lets see if package is requested
        if not system_openssl and manifest and manifest.resolved_system_packages:
            for _pkg_type, pkgs in manifest.resolved_system_packages.items():
                for p in pkgs:
                    if p.startswith("openssl") or p.startswith("libssl"):
                        system_openssl = True
                        break

        # Let openssl know to pick up the system certs if present
        if system_openssl or "OPENSSL_DIR" in env:
            for system_ssl_cfg in ["/etc/pki/tls", "/etc/ssl"]:
                if os.path.isdir(system_ssl_cfg):
                    cert_dir = system_ssl_cfg + "/certs"
                    if os.path.isdir(cert_dir):
                        env["SSL_CERT_DIR"] = cert_dir
                    cert_file = system_ssl_cfg + "/cert.pem"
                    if os.path.isfile(cert_file):
                        env["SSL_CERT_FILE"] = cert_file

        return env

    def add_homebrew_package_to_env(self, package, env) -> bool:
        prefix = homebrew_package_prefix(package)
        if prefix and os.path.exists(prefix):
            return self.add_prefix_to_env(
                prefix, env, append=False, add_library_path=True
            )
        return False

    def add_prefix_to_env(
        self, d, env, append: bool = True, add_library_path: bool = False
    ) -> bool:  # noqa: C901
        bindir = os.path.join(d, "bin")
        found = False
        pkgconfig = os.path.join(d, "lib", "pkgconfig")
        if os.path.exists(pkgconfig):
            found = True
            add_path_entry(env, "PKG_CONFIG_PATH", pkgconfig, append=append)

        pkgconfig = os.path.join(d, "lib64", "pkgconfig")
        if os.path.exists(pkgconfig):
            found = True
            add_path_entry(env, "PKG_CONFIG_PATH", pkgconfig, append=append)

        add_path_entry(env, "CMAKE_PREFIX_PATH", d, append=append)

        # Tell the thrift compiler about includes it needs to consider
        thriftdir = os.path.join(d, "include", "thrift-files")
        if os.path.exists(thriftdir):
            found = True
            add_path_entry(env, "THRIFT_INCLUDE_PATH", thriftdir, append=append)

        # module detection for python is old fashioned and needs flags
        includedir = os.path.join(d, "include")
        if os.path.exists(includedir):
            found = True
            ncursesincludedir = os.path.join(d, "include", "ncurses")
            if os.path.exists(ncursesincludedir):
                add_path_entry(env, "C_INCLUDE_PATH", ncursesincludedir, append=append)
                add_flag(env, "CPPFLAGS", f"-I{includedir}", append=append)
                add_flag(env, "CPPFLAGS", f"-I{ncursesincludedir}", append=append)
            elif "/bz2-" in d:
                add_flag(env, "CPPFLAGS", f"-I{includedir}", append=append)

        # Map from FB python manifests to PYTHONPATH
        pydir = os.path.join(d, "lib", "fb-py-libs")
        if os.path.exists(pydir):
            found = True
            manifest_ext = ".manifest"
            pymanifestfiles = [
                f
                for f in os.listdir(pydir)
                if f.endswith(manifest_ext) and os.path.isfile(os.path.join(pydir, f))
            ]
            for f in pymanifestfiles:
                subdir = f[: -len(manifest_ext)]
                add_path_entry(
                    env, "PYTHONPATH", os.path.join(pydir, subdir), append=append
                )

        # Allow resolving shared objects built earlier (eg: zstd
        # doesn't include the full path to the dylib in its linkage
        # so we need to give it an assist)
        if self.lib_path:
            for lib in ["lib", "lib64"]:
                libdir = os.path.join(d, lib)
                if os.path.exists(libdir):
                    found = True
                    add_path_entry(env, self.lib_path, libdir, append=append)
                    # module detection for python is old fashioned and needs flags
                    if "/ncurses-" in d:
                        add_flag(env, "LDFLAGS", f"-L{libdir}", append=append)
                    elif "/bz2-" in d:
                        add_flag(env, "LDFLAGS", f"-L{libdir}", append=append)
                    if add_library_path:
                        add_path_entry(env, "LIBRARY_PATH", libdir, append=append)

        # Allow resolving binaries (eg: cmake, ninja) and dlls
        # built by earlier steps
        if os.path.exists(bindir):
            found = True
            add_path_entry(env, "PATH", bindir, append=append)

        # If rustc is present in the `bin` directory, set RUSTC to prevent
        # cargo from using the rustc installed on the system.
if self.is_windows(): cargo_path = os.path.join(bindir, "cargo.exe") rustc_path = os.path.join(bindir, "rustc.exe") rustdoc_path = os.path.join(bindir, "rustdoc.exe") else: cargo_path = os.path.join(bindir, "cargo") rustc_path = os.path.join(bindir, "rustc") rustdoc_path = os.path.join(bindir, "rustdoc") if os.path.isfile(rustc_path): env["CARGO_BIN"] = cargo_path env["RUSTC"] = rustc_path env["RUSTDOC"] = rustdoc_path openssl_include = os.path.join(d, "include", "openssl") if os.path.isdir(openssl_include) and any( os.path.isfile(os.path.join(d, "lib", libcrypto)) for libcrypto in ("libcrypto.lib", "libcrypto.so", "libcrypto.a") ): # This must be the openssl library, let Rust know about it env["OPENSSL_DIR"] = d return found def list_win32_subst_letters(): output = subprocess.check_output(["subst"]).decode("utf-8") # The output is a set of lines like: `F:\: => C:\open\some\where` lines = output.strip().split("\r\n") mapping = {} for line in lines: fields = line.split(": => ") if len(fields) != 2: continue letter = fields[0] path = fields[1] mapping[letter] = path return mapping def find_existing_win32_subst_for_path( path: str, subst_mapping: Mapping[str, str], ) -> Optional[str]: path = ntpath.normcase(ntpath.normpath(path)) for letter, target in subst_mapping.items(): if ntpath.normcase(target) == path: return letter return None def find_unused_drive_letter(): import ctypes buffer_len = 256 blen = ctypes.c_uint(buffer_len) rv = ctypes.c_uint() bufs = ctypes.create_string_buffer(buffer_len) rv = ctypes.windll.kernel32.GetLogicalDriveStringsA(blen, bufs) if rv > buffer_len: raise Exception("GetLogicalDriveStringsA result too large for buffer") nul = "\x00".encode("ascii") used = [drive.decode("ascii")[0] for drive in bufs.raw.strip(nul).split(nul)] possible = [c for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"] available = sorted(list(set(possible) - set(used))) if len(available) == 0: return None # Prefer to assign later letters rather than earlier letters return available[-1] def create_subst_path(path: str) -> str: for _attempt in range(0, 24): drive = find_existing_win32_subst_for_path( path, subst_mapping=list_win32_subst_letters() ) if drive: return drive available = find_unused_drive_letter() if available is None: raise Exception( ( "unable to make shorter subst mapping for %s; " "no available drive letters" ) % path ) # Try to set up a subst mapping; note that we may be racing with # other processes on the same host, so this may not succeed. 
try: subprocess.check_call(["subst", "%s:" % available, path]) return "%s:\\" % available except Exception: print("Failed to map %s -> %s" % (available, path)) raise Exception("failed to set up a subst path for %s" % path) def _check_host_type(args, host_type): if host_type is None: host_tuple_string = getattr(args, "host_type", None) if host_tuple_string: host_type = HostType.from_tuple_string(host_tuple_string) else: host_type = HostType() assert isinstance(host_type, HostType) return host_type def setup_build_options(args, host_type=None) -> BuildOptions: """Create a BuildOptions object based on the arguments""" fbcode_builder_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) scratch_dir = args.scratch_path if not scratch_dir: # TODO: `mkscratch` doesn't currently know how best to place things on # sandcastle, so whip up something reasonable-ish if "SANDCASTLE" in os.environ: if "DISK_TEMP" not in os.environ: raise Exception( ( "I need DISK_TEMP to be set in the sandcastle environment " "so that I can store build products somewhere sane" ) ) scratch_dir = os.path.join( os.environ["DISK_TEMP"], "fbcode_builder_getdeps" ) if not scratch_dir: try: scratch_dir = ( subprocess.check_output( ["mkscratch", "path", "--subdir", "fbcode_builder_getdeps"] ) .strip() .decode("utf-8") ) except OSError as exc: if exc.errno != errno.ENOENT: # A legit failure; don't fall back, surface the error raise # This system doesn't have mkscratch so we fall back to # something local. munged = fbcode_builder_dir.replace("Z", "zZ") for s in ["/", "\\", ":"]: munged = munged.replace(s, "Z") if is_windows() and os.path.isdir("c:/open"): temp = "c:/open/scratch" else: temp = tempfile.gettempdir() scratch_dir = os.path.join(temp, "fbcode_builder_getdeps-%s" % munged) if not is_windows() and os.geteuid() == 0: # Running as root; in the case where someone runs # sudo getdeps.py install-system-deps # and then runs as build without privs, we want to avoid creating # a scratch dir that the second stage cannot write to. # So we generate a different path if we are root. scratch_dir += "-root" if not os.path.exists(scratch_dir): os.makedirs(scratch_dir) if is_windows(): subst = create_subst_path(scratch_dir) print( "Mapping scratch dir %s -> %s" % (scratch_dir, subst), file=sys.stderr ) scratch_dir = subst else: if not os.path.exists(scratch_dir): os.makedirs(scratch_dir) # Make sure we normalize the scratch path. This path is used as part of the hash # computation for detecting if projects have been updated, so we need to always # use the exact same string to refer to a given directory. # But! realpath in some combinations of Windows/Python3 versions can expand the # drive substitutions on Windows, so avoid that! if not is_windows(): scratch_dir = os.path.realpath(scratch_dir) # Save these args passed by the user in an env variable, so it # can be used while hashing this build. os.environ["GETDEPS_CMAKE_DEFINES"] = getattr(args, "extra_cmake_defines", "") or "" host_type = _check_host_type(args, host_type) build_args = { k: v for (k, v) in vars(args).items() if k in { "num_jobs", "use_shipit", "vcvars_path", "allow_system_packages", "lfs_path", "shared_libs", } } return BuildOptions( fbcode_builder_dir, scratch_dir, host_type, install_dir=args.install_prefix, facebook_internal=args.facebook_internal, **build_args, )
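

# Minimal usage sketch (illustrative only, not part of getdeps): drive
# setup_build_options() from a hand-built argparse.Namespace. The attribute
# names mirror exactly what this module reads via vars(args) and getattr();
# the values are assumptions chosen for demonstration. Because this module
# uses relative imports, run it as `python -m getdeps.buildopts` rather than
# as a plain script.
if __name__ == "__main__":
    import argparse

    fake_args = argparse.Namespace(
        scratch_path=None,
        install_prefix=None,
        facebook_internal=None,
        num_jobs=0,
        use_shipit=False,
        vcvars_path=None,
        allow_system_packages=False,
        lfs_path=None,
        shared_libs=False,
    )
    opts = setup_build_options(fake_args)
    print("scratch dir:", opts.scratch_dir)
    print("install dir:", opts.install_dir)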
Python
hhvm/build/fbcode_builder/getdeps/cache.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. class ArtifactCache(object): """The ArtifactCache is a small abstraction that allows caching named things in some external storage mechanism. The primary use case is for storing the build products on CI systems to accelerate the build""" def download_to_file(self, name, dest_file_name) -> bool: """If `name` exists in the cache, download it and place it in the specified `dest_file_name` location on the filesystem. If a transient issue was encountered a TransientFailure shall be raised. If `name` doesn't exist in the cache `False` shall be returned. If `dest_file_name` was successfully updated `True` shall be returned. All other conditions shall raise an appropriate exception.""" return False def upload_from_file(self, name, source_file_name) -> None: """Causes `name` to be populated in the cache by uploading the contents of `source_file_name` to the storage system. If a transient issue was encountered a TransientFailure shall be raised. If the upload failed for some other reason, an appropriate exception shall be raised.""" pass def create_cache() -> None: """This function is monkey patchable to provide an actual implementation""" return None
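

# Hedged sketch (illustrative, not shipped with getdeps): a concrete cache
# that satisfies the ArtifactCache contract by storing artifacts as files
# under a local directory. The class name and storage scheme are assumptions
# chosen for demonstration; a real implementation would be wired in by monkey
# patching create_cache as described above.
if __name__ == "__main__":
    import os
    import shutil
    import tempfile

    class LocalDirCache(ArtifactCache):
        def __init__(self, root) -> None:
            self.root = root
            os.makedirs(root, exist_ok=True)

        def download_to_file(self, name, dest_file_name) -> bool:
            src = os.path.join(self.root, name)
            if not os.path.exists(src):
                return False
            shutil.copyfile(src, dest_file_name)
            return True

        def upload_from_file(self, name, source_file_name) -> None:
            shutil.copyfile(source_file_name, os.path.join(self.root, name))

    with tempfile.TemporaryDirectory() as tmp:
        cache = LocalDirCache(os.path.join(tmp, "cache"))
        payload = os.path.join(tmp, "artifact.tar")
        with open(payload, "wb") as f:
            f.write(b"demo")
        cache.upload_from_file("proj-abc123", payload)
        print(cache.download_to_file("proj-abc123", os.path.join(tmp, "out.tar")))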
Python
hhvm/build/fbcode_builder/getdeps/cargo.py
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os import re import shutil import typing from .builder import BuilderBase if typing.TYPE_CHECKING: from .buildopts import BuildOptions class CargoBuilder(BuilderBase): def __init__( self, build_opts: "BuildOptions", ctx, manifest, src_dir, build_dir, inst_dir, build_doc, workspace_dir, manifests_to_build, loader, cargo_config_file, ) -> None: super(CargoBuilder, self).__init__( build_opts, ctx, manifest, src_dir, build_dir, inst_dir ) self.build_doc = build_doc self.ws_dir = workspace_dir self.manifests_to_build = manifests_to_build and manifests_to_build.split(",") self.loader = loader self.cargo_config_file_subdir = cargo_config_file def run_cargo(self, install_dirs, operation, args=None) -> None: args = args or [] env = self._compute_env(install_dirs) # Enable using nightly features with stable compiler env["RUSTC_BOOTSTRAP"] = "1" env["LIBZ_SYS_STATIC"] = "1" cmd = [ "cargo", operation, "--workspace", "-j%s" % self.num_jobs, ] + args self._run_cmd(cmd, cwd=self.workspace_dir(), env=env) def build_source_dir(self): return os.path.join(self.build_dir, "source") def workspace_dir(self): return os.path.join(self.build_source_dir(), self.ws_dir or "") def manifest_dir(self, manifest): return os.path.join(self.build_source_dir(), manifest) def recreate_dir(self, src, dst) -> None: if os.path.isdir(dst): shutil.rmtree(dst) shutil.copytree(src, dst) def cargo_config_file(self): build_source_dir = self.build_dir if self.cargo_config_file_subdir: return os.path.join(build_source_dir, self.cargo_config_file_subdir) else: return os.path.join(build_source_dir, ".cargo", "config") def _create_cargo_config(self): cargo_config_file = self.cargo_config_file() cargo_config_dir = os.path.dirname(cargo_config_file) if not os.path.isdir(cargo_config_dir): os.mkdir(cargo_config_dir) print(f"Writing cargo config for {self.manifest.name} to {cargo_config_file}") with open(cargo_config_file, "w+") as f: f.write( """\ # Generated by getdeps.py [build] target-dir = '''{}''' [profile.dev] debug = false incremental = false """.format( self.build_dir.replace("\\", "\\\\") ) ) # Point to vendored sources from getdeps manifests dep_to_git = self._resolve_dep_to_git() for _dep, git_conf in dep_to_git.items(): if "cargo_vendored_sources" in git_conf: with open(cargo_config_file, "a") as f: vendored_dir = git_conf["cargo_vendored_sources"].replace( "\\", "\\\\" ) f.write( f""" [source."{git_conf["repo_url"]}"] directory = "{vendored_dir}" """ ) if self.build_opts.fbsource_dir: # Point to vendored crates.io if possible try: from .facebook.rust import vendored_crates vendored_crates(self.build_opts.fbsource_dir, cargo_config_file) except ImportError: # This FB internal module isn't shippped to github, # so just rely on cargo downloading crates on it's own pass return dep_to_git def _prepare(self, install_dirs, reconfigure) -> None: build_source_dir = self.build_source_dir() self.recreate_dir(self.src_dir, build_source_dir) dep_to_git = self._create_cargo_config() if self.ws_dir is not None: self._patchup_workspace(dep_to_git) def _build(self, install_dirs, reconfigure) -> None: # _prepare has been run already. 
Actually do the build.
        build_source_dir = self.build_source_dir()

        if self.manifests_to_build is None:
            self.run_cargo(
                install_dirs,
                "build",
                ["--out-dir", os.path.join(self.inst_dir, "bin"), "-Zunstable-options"],
            )
        else:
            for manifest in self.manifests_to_build:
                self.run_cargo(
                    install_dirs,
                    "build",
                    [
                        "--out-dir",
                        os.path.join(self.inst_dir, "bin"),
                        "-Zunstable-options",
                        "--manifest-path",
                        self.manifest_dir(manifest),
                    ],
                )

        self.recreate_dir(build_source_dir, os.path.join(self.inst_dir, "source"))

    def run_tests(
        self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
    ) -> None:
        if test_filter:
            args = ["--", test_filter]
        else:
            args = []

        if self.manifests_to_build is None:
            self.run_cargo(install_dirs, "test", args)
            if self.build_doc:
                self.run_cargo(install_dirs, "doc", ["--no-deps"])
        else:
            for manifest in self.manifests_to_build:
                margs = ["--manifest-path", self.manifest_dir(manifest)]
                self.run_cargo(install_dirs, "test", args + margs)
                if self.build_doc:
                    self.run_cargo(install_dirs, "doc", ["--no-deps"] + margs)

    def _patchup_workspace(self, dep_to_git) -> None:
        """
        This method makes some assumptions about the state of the project and
        its cargo dependencies:
        1. Crates from cargo dependencies can be extracted from Cargo.toml files
           using the _extract_crates_used function. It uses a heuristic, so
           check its code to understand how it is done.
        2. The extracted cargo dependency crates can be found in the
           dependency's install dir using the _resolve_crate_to_path function,
           which again uses a heuristic.

        Note that many things can go wrong here. E.g. if someone depends on
        another getdeps crate by writing in their Cargo.toml file:

            my-rename-of-crate = { package = "crate", git = "..." }

        they can count themselves lucky because the code will raise an
        Exception. There might be more cases where the code will silently pass,
        producing bad results.
        """
        workspace_dir = self.workspace_dir()
        config = self._resolve_config(dep_to_git)
        if config:
            patch_cargo = os.path.join(workspace_dir, "Cargo.toml")
            print(f"writing patch to {patch_cargo}")
            with open(patch_cargo, "r+") as f:
                manifest_content = f.read()
                if "[package]" not in manifest_content:
                    # A fake manifest has to be created to change the virtual
                    # manifest into a non-virtual one. Virtual manifests are
                    # limited in many ways, and the inability to define patches
                    # on them is one. Check
                    # https://github.com/rust-lang/cargo/issues/4934 to see if
                    # it is resolved.
                    null_file = "/dev/null"
                    if self.build_opts.is_windows():
                        null_file = "nul"
                    f.write(
                        f"""
[package]
name = "fake_manifest_of_{self.manifest.name}"
version = "0.0.0"

[lib]
path = "{null_file}"
"""
                    )
                else:
                    f.write("\n")
                f.write(config)

    def _resolve_config(self, dep_to_git) -> str:
        """
        Returns a configuration to be put inside the root Cargo.toml file which
        patches the dependencies' git code with local getdeps versions. 
See https://doc.rust-lang.org/cargo/reference/manifest.html#the-patch-section """ dep_to_crates = self._resolve_dep_to_crates(self.build_source_dir(), dep_to_git) config = [] git_url_to_crates_and_paths = {} for dep_name in sorted(dep_to_git.keys()): git_conf = dep_to_git[dep_name] req_crates = sorted(dep_to_crates.get(dep_name, [])) if not req_crates: continue # nothing to patch, move along git_url = git_conf.get("repo_url", None) crate_source_map = git_conf["crate_source_map"] if git_url and crate_source_map: crates_to_patch_path = git_url_to_crates_and_paths.get(git_url, {}) for c in req_crates: if c in crate_source_map and c not in crates_to_patch_path: crates_to_patch_path[c] = crate_source_map[c] print( f"{self.manifest.name}: Patching crate {c} via virtual manifest in {self.workspace_dir()}" ) if crates_to_patch_path: git_url_to_crates_and_paths[git_url] = crates_to_patch_path for git_url, crates_to_patch_path in git_url_to_crates_and_paths.items(): crates_patches = [ '{} = {{ path = "{}" }}'.format( crate, crates_to_patch_path[crate].replace("\\", "\\\\"), ) for crate in sorted(crates_to_patch_path.keys()) ] config.append(f'\n[patch."{git_url}"]\n' + "\n".join(crates_patches)) return "\n".join(config) def _resolve_dep_to_git(self): """ For each direct dependency of the currently build manifest check if it is also cargo-builded and if yes then extract it's git configs and install dir """ dependencies = self.manifest.get_dependencies(self.ctx) if not dependencies: return [] dep_to_git = {} for dep in dependencies: dep_manifest = self.loader.load_manifest(dep) dep_builder = dep_manifest.get("build", "builder", ctx=self.ctx) dep_cargo_conf = dep_manifest.get_section_as_dict("cargo", self.ctx) dep_crate_map = dep_manifest.get_section_as_dict("crate.pathmap", self.ctx) if ( not (dep_crate_map or dep_cargo_conf) and dep_builder not in ["cargo"] or dep == "rust" ): # This dependency has no cargo rust content so ignore it. # The "rust" dependency is an exception since it contains the # toolchain. continue git_conf = dep_manifest.get_section_as_dict("git", self.ctx) if dep != "rust" and "repo_url" not in git_conf: raise Exception( f"{dep}: A cargo dependency requires git.repo_url to be defined." 
)

            if dep_builder == "cargo":
                dep_source_dir = self.loader.get_project_install_dir(dep_manifest)
                dep_source_dir = os.path.join(dep_source_dir, "source")
            else:
                fetcher = self.loader.create_fetcher(dep_manifest)
                dep_source_dir = fetcher.get_src_dir()

            crate_source_map = {}
            if dep_crate_map:
                for (crate, subpath) in dep_crate_map.items():
                    if crate not in crate_source_map:
                        if self.build_opts.is_windows():
                            subpath = subpath.replace("/", "\\")
                        crate_path = os.path.join(dep_source_dir, subpath)
                        print(
                            f"{self.manifest.name}: Mapped crate {crate} to dep {dep} dir {crate_path}"
                        )
                        crate_source_map[crate] = crate_path
            elif dep_cargo_conf:
                # We don't know what crates are defined by the dep, so look for them
                search_pattern = re.compile('\\[package\\]\nname = "(.*)"')
                for crate_root, _, files in os.walk(dep_source_dir):
                    if "Cargo.toml" in files:
                        with open(os.path.join(crate_root, "Cargo.toml"), "r") as f:
                            content = f.read()
                            match = search_pattern.search(content)
                            if match:
                                crate = match.group(1)
                                if crate:
                                    print(
                                        f"{self.manifest.name}: Discovered crate {crate} in dep {dep} dir {crate_root}"
                                    )
                                    crate_source_map[crate] = crate_root
            git_conf["crate_source_map"] = crate_source_map

            if not dep_crate_map and dep_cargo_conf:
                dep_cargo_dir = self.loader.get_project_build_dir(dep_manifest)
                dep_cargo_dir = os.path.join(dep_cargo_dir, "source")
                dep_ws_dir = dep_cargo_conf.get("workspace_dir", None)
                if dep_ws_dir:
                    dep_cargo_dir = os.path.join(dep_cargo_dir, dep_ws_dir)
                git_conf["cargo_vendored_sources"] = dep_cargo_dir

            dep_to_git[dep] = git_conf
        return dep_to_git

    def _resolve_dep_to_crates(self, build_source_dir, dep_to_git):
        """
        This function traverses the build_source_dir in search of Cargo.toml
        files, extracts the crate names from them using the
        _extract_crates_used function and returns a merged result containing
        crate names per dependency name from all Cargo.toml files in the
        project.
        """
        if not dep_to_git:
            return {}  # no deps, so don't waste time traversing files

        dep_to_crates = {}

        # First populate explicit crate paths from dependencies
        for name, git_conf in dep_to_git.items():
            crates = git_conf["crate_source_map"].keys()
            if crates:
                dep_to_crates.setdefault(name, set()).update(crates)

        # Now find from Cargo.tomls
        for root, _, files in os.walk(build_source_dir):
            for f in files:
                if f == "Cargo.toml":
                    more_dep_to_crates = CargoBuilder._extract_crates_used(
                        os.path.join(root, f), dep_to_git
                    )
                    for dep_name, crates in more_dep_to_crates.items():
                        existing_crates = dep_to_crates.get(dep_name, set())
                        for c in crates:
                            if c not in existing_crates:
                                print(
                                    f"Patch {self.manifest.name} uses {dep_name} crate {crates}"
                                )
                                # sets have no insert(); add() is the correct call
                                existing_crates.add(c)
                        # key on the dep being processed, not the loop variable
                        # left over from the explicit-crate pass above
                        dep_to_crates.setdefault(dep_name, set()).update(
                            existing_crates
                        )
        return dep_to_crates

    @staticmethod
    def _extract_crates_used(cargo_toml_file, dep_to_git):
        """
        This function reads the content of the provided Cargo.toml file and
        extracts the crate names for each dependency. The extraction is done
        by a heuristic, so it might be incorrect. 
""" deps_to_crates = {} with open(cargo_toml_file, "r") as f: for line in f.readlines(): if line.startswith("#") or "git = " not in line: continue # filter out commented lines and ones without git deps for dep_name, conf in dep_to_git.items(): # Only redirect deps that point to git URLS if 'git = "{}"'.format(conf["repo_url"]) in line: pkg_template = ' package = "' if pkg_template in line: crate_name, _, _ = line.partition(pkg_template)[ 2 ].partition('"') else: crate_name, _, _ = line.partition("=") deps_to_crates.setdefault(dep_name, set()).add( crate_name.strip() ) return deps_to_crates def _resolve_crate_to_path(self, crate, crate_source_map): """ Tries to find <crate> in source_dir by searching a [package] keyword followed by name = "<crate>". """ search_pattern = '[package]\nname = "{}"'.format(crate) for (_crate, crate_source_dir) in crate_source_map.items(): for crate_root, _, files in os.walk(crate_source_dir): if "Cargo.toml" in files: with open(os.path.join(crate_root, "Cargo.toml"), "r") as f: content = f.read() if search_pattern in content: return crate_root raise Exception( f"{self.manifest.name}: Failed to find dep crate {crate} in paths {crate_source_map}" )
Python
hhvm/build/fbcode_builder/getdeps/copytree.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import os
import shutil
import subprocess

from .platform import is_windows


PREFETCHED_DIRS = set()


def containing_repo_type(path):
    while True:
        if os.path.exists(os.path.join(path, ".git")):
            return ("git", path)
        if os.path.exists(os.path.join(path, ".hg")):
            return ("hg", path)

        parent = os.path.dirname(path)
        if parent == path:
            return None, None
        path = parent


def find_eden_root(dirpath):
    """If the specified directory is inside an EdenFS checkout, returns
    the canonical absolute path to the root of that checkout.

    Returns None if the specified directory is not in an EdenFS checkout.
    """
    if is_windows():
        repo_type, repo_root = containing_repo_type(dirpath)
        if repo_root is not None:
            if os.path.exists(os.path.join(repo_root, ".eden", "config")):
                return repo_root
        return None

    try:
        return os.readlink(os.path.join(dirpath, ".eden", "root"))
    except OSError:
        return None


def prefetch_dir_if_eden(dirpath) -> None:
    """After an amend/rebase, Eden may need to fetch a large number
    of trees from the servers. The simplistic single threaded walk
    performed by copytree makes this more expensive than is desirable,
    so we help accelerate things by performing a prefetch on the
    source directory"""
    global PREFETCHED_DIRS
    if dirpath in PREFETCHED_DIRS:
        return
    root = find_eden_root(dirpath)
    if root is None:
        return
    glob = f"{os.path.relpath(dirpath, root).replace(os.sep, '/')}/**"
    print(f"Prefetching {glob}")
    subprocess.call(
        ["edenfsctl", "prefetch", "--repo", root, "--silent", glob, "--background"]
    )
    PREFETCHED_DIRS.add(dirpath)


def copytree(src_dir, dest_dir, ignore=None):
    """Recursively copy the src_dir to the dest_dir, filtering
    out entries using the ignore lambda. The behavior of the
    ignore lambda must match that described by `shutil.copytree`.
    This `copytree` function knows how to prefetch data when
    running in an eden repo.
    TODO: I'd like to either extend this or add a variant that
    uses watchman to mirror src_dir into dest_dir.
    """
    prefetch_dir_if_eden(src_dir)
    return shutil.copytree(src_dir, dest_dir, ignore=ignore)
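

# Minimal usage sketch (illustrative): report the repo type and root that
# contain the current directory. This module uses a relative import, so run
# it as `python -m getdeps.copytree` rather than as a plain script.
if __name__ == "__main__":
    repo_type, repo_root = containing_repo_type(os.getcwd())
    print("repo type:", repo_type)
    print("repo root:", repo_root)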
Python
hhvm/build/fbcode_builder/getdeps/dyndeps.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import errno import glob import os import re import shlex import shutil import stat import subprocess import sys from struct import unpack from typing import List, Optional OBJECT_SUBDIRS = ("bin", "lib", "lib64") def copyfile(src, dest) -> None: shutil.copyfile(src, dest) shutil.copymode(src, dest) class DepBase(object): def __init__(self, buildopts, install_dirs, strip) -> None: self.buildopts = buildopts self.env = buildopts.compute_env_for_install_dirs(install_dirs) self.install_dirs = install_dirs self.strip = strip # Deduplicates dependency processing. Keyed on the library # destination path. self.processed_deps = set() def list_dynamic_deps(self, objfile): raise RuntimeError("list_dynamic_deps not implemented") def interesting_dep(self, d) -> bool: return True # final_install_prefix must be the equivalent path to `destdir` on the # installed system. For example, if destdir is `/tmp/RANDOM/usr/local' which # is intended to map to `/usr/local` in the install image, then # final_install_prefix='/usr/local'. # If left unspecified, destdir will be used. def process_deps(self, destdir, final_install_prefix=None) -> None: if self.buildopts.is_windows(): lib_dir = "bin" else: lib_dir = "lib" # pyre-fixme[16]: `DepBase` has no attribute `munged_lib_dir`. self.munged_lib_dir = os.path.join(destdir, lib_dir) final_lib_dir = os.path.join(final_install_prefix or destdir, lib_dir) if not os.path.isdir(self.munged_lib_dir): os.makedirs(self.munged_lib_dir) # Look only at the things that got installed in the leaf package, # which will be the last entry in the install dirs list inst_dir = self.install_dirs[-1] print("Process deps under %s" % inst_dir, file=sys.stderr) for dir in OBJECT_SUBDIRS: src_dir = os.path.join(inst_dir, dir) if not os.path.isdir(src_dir): continue dest_dir = os.path.join(destdir, dir) if not os.path.exists(dest_dir): os.makedirs(dest_dir) for objfile in self.list_objs_in_dir(src_dir): print("Consider %s/%s" % (dir, objfile)) dest_obj = os.path.join(dest_dir, objfile) copyfile(os.path.join(src_dir, objfile), dest_obj) self.munge_in_place(dest_obj, final_lib_dir) def find_all_dependencies(self, build_dir): all_deps = set() for objfile in self.list_objs_in_dir( build_dir, recurse=True, output_prefix=build_dir ): for d in self.list_dynamic_deps(objfile): all_deps.add(d) interesting_deps = {d for d in all_deps if self.interesting_dep(d)} dep_paths = [] for dep in interesting_deps: dep_path = self.resolve_loader_path(dep) if dep_path: dep_paths.append(dep_path) return dep_paths def munge_in_place(self, objfile, final_lib_dir) -> None: print("Munging %s" % objfile) for d in self.list_dynamic_deps(objfile): if not self.interesting_dep(d): continue # Resolve this dep: does it exist in any of our installation # directories? If so, then it is a candidate for processing dep = self.resolve_loader_path(d) if dep: # pyre-fixme[16]: `DepBase` has no attribute `munged_lib_dir`. dest_dep = os.path.join(self.munged_lib_dir, os.path.basename(dep)) print("dep: %s -> %s" % (d, dest_dep)) if dest_dep in self.processed_deps: # A previous dependency with the same name has already # been installed at dest_dep, so there is no need to copy # or munge the dependency again. 
# TODO: audit that both source paths have the same inode number pass else: self.processed_deps.add(dest_dep) copyfile(dep, dest_dep) self.munge_in_place(dest_dep, final_lib_dir) self.rewrite_dep(objfile, d, dep, dest_dep, final_lib_dir) if self.strip: self.strip_debug_info(objfile) def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir): raise RuntimeError("rewrite_dep not implemented") def resolve_loader_path(self, dep: str) -> Optional[str]: if os.path.isabs(dep): return dep d = os.path.basename(dep) for inst_dir in self.install_dirs: for libdir in OBJECT_SUBDIRS: candidate = os.path.join(inst_dir, libdir, d) if os.path.exists(candidate): return candidate return None def list_objs_in_dir(self, dir, recurse: bool = False, output_prefix: str = ""): for entry in os.listdir(dir): entry_path = os.path.join(dir, entry) st = os.lstat(entry_path) if stat.S_ISREG(st.st_mode): if self.is_objfile(entry_path): relative_result = os.path.join(output_prefix, entry) yield os.path.normcase(relative_result) elif recurse and stat.S_ISDIR(st.st_mode): child_prefix = os.path.join(output_prefix, entry) for result in self.list_objs_in_dir( entry_path, recurse=recurse, output_prefix=child_prefix ): yield result def is_objfile(self, objfile) -> bool: return True def strip_debug_info(self, objfile) -> None: """override this to define how to remove debug information from an object file""" pass def check_call_verbose(self, args: List[str]) -> None: print(" ".join(map(shlex.quote, args))) subprocess.check_call(args) class WinDeps(DepBase): def __init__(self, buildopts, install_dirs, strip) -> None: super(WinDeps, self).__init__(buildopts, install_dirs, strip) self.dumpbin = self.find_dumpbin() def find_dumpbin(self) -> str: # Looking for dumpbin in the following hardcoded paths. # The registry option to find the install dir doesn't work anymore. globs = [ ( "C:/Program Files (x86)/" "Microsoft Visual Studio/" "*/*/VC/Tools/" "MSVC/*/bin/Hostx64/x64/dumpbin.exe" ), ( "C:/Program Files (x86)/" "Common Files/" "Microsoft/Visual C++ for Python/*/" "VC/bin/dumpbin.exe" ), ("c:/Program Files (x86)/Microsoft Visual Studio */VC/bin/dumpbin.exe"), ( "C:/Program Files/Microsoft Visual Studio/*/Professional/VC/Tools/MSVC/*/bin/HostX64/x64/dumpbin.exe" ), ] for pattern in globs: for exe in glob.glob(pattern): return exe raise RuntimeError("could not find dumpbin.exe") def list_dynamic_deps(self, exe): deps = [] print("Resolve deps for %s" % exe) output = subprocess.check_output( [self.dumpbin, "/nologo", "/dependents", exe] ).decode("utf-8") lines = output.split("\n") for line in lines: m = re.match("\\s+(\\S+.dll)", line, re.IGNORECASE) if m: deps.append(m.group(1).lower()) return deps def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir) -> None: # We can't rewrite on windows, but we will # place the deps alongside the exe so that # they end up in the search path pass # These are the Windows system dll, which we don't want to copy while # packaging. 
SYSTEM_DLLS = set( # noqa: C405 [ "advapi32.dll", "dbghelp.dll", "kernel32.dll", "msvcp140.dll", "vcruntime140.dll", "ws2_32.dll", "ntdll.dll", "shlwapi.dll", ] ) def interesting_dep(self, d) -> bool: if "api-ms-win-crt" in d: return False if d in self.SYSTEM_DLLS: return False return True def is_objfile(self, objfile) -> bool: if not os.path.isfile(objfile): return False if objfile.lower().endswith(".exe"): return True return False def emit_dev_run_script(self, script_path, dep_dirs) -> None: """Emit a script that can be used to run build artifacts directly from the build directory, without installing them. The dep_dirs parameter should be a list of paths that need to be added to $PATH. This can be computed by calling compute_dependency_paths() or compute_dependency_paths_fast(). This is only necessary on Windows, which does not have RPATH, and instead requires the $PATH environment variable be updated in order to find the proper library dependencies. """ contents = self._get_dev_run_script_contents(dep_dirs) with open(script_path, "w") as f: f.write(contents) def compute_dependency_paths(self, build_dir): """Return a list of all directories that need to be added to $PATH to ensure that library dependencies can be found correctly. This is computed by scanning binaries to determine exactly the right list of dependencies. The compute_dependency_paths_fast() is a alternative function that runs faster but may return additional extraneous paths. """ dep_dirs = set() # Find paths by scanning the binaries. for dep in self.find_all_dependencies(build_dir): dep_dirs.add(os.path.dirname(dep)) dep_dirs.update(self.read_custom_dep_dirs(build_dir)) return sorted(dep_dirs) def compute_dependency_paths_fast(self, build_dir): """Similar to compute_dependency_paths(), but rather than actually scanning binaries, just add all library paths from the specified installation directories. This is much faster than scanning the binaries, but may result in more paths being returned than actually necessary. """ dep_dirs = set() for inst_dir in self.install_dirs: for subdir in OBJECT_SUBDIRS: path = os.path.join(inst_dir, subdir) if os.path.exists(path): dep_dirs.add(path) dep_dirs.update(self.read_custom_dep_dirs(build_dir)) return sorted(dep_dirs) def read_custom_dep_dirs(self, build_dir): # The build system may also have included libraries from other locations that # we might not be able to find normally in find_all_dependencies(). # To handle this situation we support reading additional library paths # from a LIBRARY_DEP_DIRS.txt file that may have been generated in the build # output directory. dep_dirs = set() try: explicit_dep_dirs_path = os.path.join(build_dir, "LIBRARY_DEP_DIRS.txt") with open(explicit_dep_dirs_path, "r") as f: for line in f.read().splitlines(): dep_dirs.add(line) except OSError as ex: if ex.errno != errno.ENOENT: raise return dep_dirs def _get_dev_run_script_contents(self, path_dirs) -> str: path_entries = ["$env:PATH"] + path_dirs path_str = ";".join(path_entries) return """\ $orig_env = $env:PATH $env:PATH = "{path_str}" try {{ $cmd_args = $args[1..$args.length] & $args[0] @cmd_args }} finally {{ $env:PATH = $orig_env }} """.format( path_str=path_str ) class ElfDeps(DepBase): def __init__(self, buildopts, install_dirs, strip) -> None: super(ElfDeps, self).__init__(buildopts, install_dirs, strip) # We need patchelf to rewrite deps, so ensure that it is built... subprocess.check_call([sys.executable, sys.argv[0], "build", "patchelf"]) # ... 
and that we know where it lives self.patchelf = os.path.join( os.fsdecode( subprocess.check_output( [sys.executable, sys.argv[0], "show-inst-dir", "patchelf"] ).strip() ), "bin/patchelf", ) def list_dynamic_deps(self, objfile): out = ( subprocess.check_output( [self.patchelf, "--print-needed", objfile], env=dict(self.env.items()) ) .decode("utf-8") .strip() ) lines = out.split("\n") return lines def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir) -> None: final_dep = os.path.join( final_lib_dir, # pyre-fixme[16]: `ElfDeps` has no attribute `munged_lib_dir`. os.path.relpath(new_dep, self.munged_lib_dir), ) self.check_call_verbose( [self.patchelf, "--replace-needed", depname, final_dep, objfile] ) def is_objfile(self, objfile) -> bool: if not os.path.isfile(objfile): return False with open(objfile, "rb") as f: # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header magic = f.read(4) return magic == b"\x7fELF" def strip_debug_info(self, objfile) -> None: self.check_call_verbose(["strip", objfile]) # MACH-O magic number MACH_MAGIC = 0xFEEDFACF class MachDeps(DepBase): def interesting_dep(self, d) -> bool: if d.startswith("/usr/lib/") or d.startswith("/System/"): return False return True def is_objfile(self, objfile): if not os.path.isfile(objfile): return False with open(objfile, "rb") as f: # mach stores the magic number in native endianness, # so unpack as native here and compare header = f.read(4) if len(header) != 4: return False magic = unpack("I", header)[0] return magic == MACH_MAGIC def list_dynamic_deps(self, objfile): if not self.interesting_dep(objfile): return out = ( subprocess.check_output( ["otool", "-L", objfile], env=dict(self.env.items()) ) .decode("utf-8") .strip() ) lines = out.split("\n") deps = [] for line in lines: m = re.match("\t(\\S+)\\s", line) if m: if os.path.basename(m.group(1)) != os.path.basename(objfile): deps.append(os.path.normcase(m.group(1))) return deps def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir) -> None: if objfile.endswith(".dylib"): # Erase the original location from the id of the shared # object. It doesn't appear to hurt to retain it, but # it does look weird, so let's rewrite it to be sure. self.check_call_verbose( ["install_name_tool", "-id", os.path.basename(objfile), objfile] ) final_dep = os.path.join( final_lib_dir, # pyre-fixme[16]: `MachDeps` has no attribute `munged_lib_dir`. os.path.relpath(new_dep, self.munged_lib_dir), ) self.check_call_verbose( ["install_name_tool", "-change", depname, final_dep, objfile] ) def create_dyn_dep_munger( buildopts, install_dirs, strip: bool = False ) -> Optional[DepBase]: if buildopts.is_linux(): return ElfDeps(buildopts, install_dirs, strip) if buildopts.is_darwin(): return MachDeps(buildopts, install_dirs, strip) if buildopts.is_windows(): return WinDeps(buildopts, install_dirs, strip) if buildopts.is_freebsd(): return ElfDeps(buildopts, install_dirs, strip) return None
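

# Hedged usage sketch: given a BuildOptions instance `opts` and the ordered
# list of install dirs for a build (both produced elsewhere in getdeps), fix
# up an install image so its dynamic deps resolve from the image's own lib
# dir. The destination paths below are invented for illustration:
#
#   munger = create_dyn_dep_munger(opts, install_dirs, strip=False)
#   if munger is not None:
#       munger.process_deps("/tmp/image/usr/local", final_install_prefix="/usr/local")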
Python
hhvm/build/fbcode_builder/getdeps/envfuncs.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import os
import shlex
import sys
from typing import Optional


class Env(object):
    def __init__(self, src=None) -> None:
        self._dict = {}
        if src is None:
            self.update(os.environ)
        else:
            self.update(src)

    def update(self, src) -> None:
        for k, v in src.items():
            self.set(k, v)

    def copy(self) -> "Env":
        return Env(self._dict)

    def _key(self, key):
        # The `str` cast may not appear to be needed, but without it we run
        # into issues when passing the environment to subprocess. The main
        # issue is that in python2 `os.environ` (which is the initial source
        # of data for the environment) uses byte based strings, but this
        # project uses `unicode_literals`. `subprocess` will raise an error
        # if the environment that it is passed has a mixture of byte and
        # unicode strings.
        # It is simplest to force everything to be `str` for the sake of
        # consistency.
        key = str(key)
        if sys.platform.startswith("win"):
            # Windows env var names are case insensitive but case preserving.
            # An implementation of PAR files on windows gets confused if
            # the env block contains keys with conflicting case, so make a
            # pass over the contents to remove any.
            # While this O(n) scan is technically expensive and gross, it
            # is practically not a problem because the volume of calls is
            # relatively low and the cost of manipulating the env is dwarfed
            # by the cost of spawning a process on windows. In addition,
            # since the processes that we run are expensive anyway, this
            # overhead is not the worst thing to worry about.
            for k in list(self._dict.keys()):
                if str(k).lower() == key.lower():
                    return k
        elif key in self._dict:
            return key
        return None

    def get(self, key, defval=None):
        key = self._key(key)
        if key is None:
            return defval
        return self._dict[key]

    def __getitem__(self, key):
        val = self.get(key)
        # Check the looked-up value, not the key: get() only returns None
        # when the key is missing, since set() never stores a None value.
        if val is None:
            raise KeyError(key)
        return val

    def unset(self, key) -> None:
        if key is None:
            raise KeyError("attempting to unset env[None]")
        key = self._key(key)
        if key:
            del self._dict[key]

    def __delitem__(self, key) -> None:
        self.unset(key)

    def __repr__(self):
        return repr(self._dict)

    def set(self, key, value) -> None:
        if key is None:
            raise KeyError("attempting to assign env[None] = %r" % value)
        if value is None:
            raise ValueError("attempting to assign env[%s] = None" % key)

        # The `str` conversion is important to avoid triggering errors
        # with subprocess if we pass in a unicode value; see commentary
        # in the `_key` method.
        key = str(key)
        value = str(value)

        # The `unset` call is necessary on windows where the keys are
        # case insensitive. Since this dict is case sensitive, simply
        # assigning the value to the new key is not sufficient to remove
        # the old value. The `unset` call knows how to match keys and
        # remove any potential duplicates.
        self.unset(key)
        self._dict[key] = value

    def __setitem__(self, key, value) -> None:
        self.set(key, value)

    def __iter__(self):
        return self._dict.__iter__()

    def __len__(self) -> int:
        return len(self._dict)

    def keys(self):
        return self._dict.keys()

    def values(self):
        return self._dict.values()

    def items(self):
        return self._dict.items()


def add_path_entry(
    env, name, item, append: bool = True, separator: str = os.pathsep
) -> None:
    """Cause `item` to be added to the path style env var named
    `name` held in the `env` dict. 
`append` specifies whether the item is added to the end (the default) or should be prepended if `name` already exists.""" val = env.get(name, "") if len(val) > 0: val = val.split(separator) else: val = [] if append: val.append(item) else: val.insert(0, item) env.set(name, separator.join(val)) def add_flag(env, name, flag: str, append: bool = True) -> None: """Cause `flag` to be added to the CXXFLAGS-style env var named `name` held in the `env` dict. `append` specifies whether the flag is added to the end (the default) or should be prepended if `name` already exists.""" val = shlex.split(env.get(name, "")) if append: val.append(flag) else: val.insert(0, flag) env.set(name, " ".join(val)) _path_search_cache = {} _not_found = object() def tpx_path() -> str: return "xplat/testinfra/tpx/ctp.tpx" def path_search(env, exename: str, defval: Optional[str] = None) -> Optional[str]: """Search for exename in the PATH specified in env. exename is eg: `ninja` and this function knows to append a .exe to the end on windows. Returns the path to the exe if found, or None if either no PATH is set in env or no executable is found.""" path = env.get("PATH", None) if path is None: return defval # The project hash computation code searches for C++ compilers (g++, clang, etc) # repeatedly. Cache the result so we don't end up searching for these over and over # again. cache_key = (path, exename) result = _path_search_cache.get(cache_key, _not_found) if result is _not_found: result = _perform_path_search(path, exename) _path_search_cache[cache_key] = result return result def _perform_path_search(path, exename: str) -> Optional[str]: is_win = sys.platform.startswith("win") if is_win: exename = "%s.exe" % exename for bindir in path.split(os.pathsep): full_name = os.path.join(bindir, exename) if os.path.exists(full_name) and os.path.isfile(full_name): if not is_win and not os.access(full_name, os.X_OK): continue return full_name return None
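

# Minimal usage sketch showing how Env, add_path_entry and add_flag
# cooperate; the paths and flags are invented for illustration:
if __name__ == "__main__":
    env = Env()
    add_path_entry(env, "PKG_CONFIG_PATH", "/opt/example/lib/pkgconfig", append=False)
    add_flag(env, "CPPFLAGS", "-I/opt/example/include")
    print(env.get("PKG_CONFIG_PATH"))
    print(env.get("CPPFLAGS"))
    print(path_search(env, "python3"))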
Python
hhvm/build/fbcode_builder/getdeps/errors.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.


class TransientFailure(Exception):
    """Raising this error causes getdeps to return with an error code
    that Sandcastle will consider to be a retryable transient
    infrastructure error"""

    pass


class ManifestNotFound(Exception):
    def __init__(self, manifest_name) -> None:
        # The zero-argument super() form is equivalent here and idiomatic;
        # passing Exception explicitly skipped it in the MRO.
        super().__init__("Unable to find manifest '%s'" % manifest_name)
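

# Hedged sketch of how a caller might translate these exceptions into process
# exit codes; the specific codes are assumptions for illustration, not the
# values getdeps actually uses:
if __name__ == "__main__":
    try:
        raise ManifestNotFound("no-such-project")
    except TransientFailure:
        raise SystemExit(2)
    except ManifestNotFound as exc:
        print(exc)
        raise SystemExit(1)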
Python
hhvm/build/fbcode_builder/getdeps/expr.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import re import shlex def parse_expr(expr_text, valid_variables): """parses the simple criteria expression syntax used in dependency specifications. Returns an ExprNode instance that can be evaluated like this: ``` expr = parse_expr("os=windows") ok = expr.eval({ "os": "windows" }) ``` Whitespace is allowed between tokens. The following terms are recognized: KEY = VALUE # Evaluates to True if ctx[KEY] == VALUE not(EXPR) # Evaluates to True if EXPR evaluates to False # and vice versa all(EXPR1, EXPR2, ...) # Evaluates True if all of the supplied # EXPR's also evaluate True any(EXPR1, EXPR2, ...) # Evaluates True if any of the supplied # EXPR's also evaluate True, False if # none of them evaluated true. """ p = Parser(expr_text, valid_variables) return p.parse() class ExprNode(object): def eval(self, ctx) -> bool: return False class TrueExpr(ExprNode): def eval(self, ctx) -> bool: return True def __str__(self) -> str: return "true" class NotExpr(ExprNode): def __init__(self, node) -> None: self._node = node def eval(self, ctx) -> bool: return not self._node.eval(ctx) def __str__(self) -> str: return "not(%s)" % self._node class AllExpr(ExprNode): def __init__(self, nodes) -> None: self._nodes = nodes def eval(self, ctx) -> bool: for node in self._nodes: if not node.eval(ctx): return False return True def __str__(self) -> str: items = [] for node in self._nodes: items.append(str(node)) return "all(%s)" % ",".join(items) class AnyExpr(ExprNode): def __init__(self, nodes) -> None: self._nodes = nodes def eval(self, ctx) -> bool: for node in self._nodes: if node.eval(ctx): return True return False def __str__(self) -> str: items = [] for node in self._nodes: items.append(str(node)) return "any(%s)" % ",".join(items) class EqualExpr(ExprNode): def __init__(self, key, value) -> None: self._key = key self._value = value def eval(self, ctx): return ctx.get(self._key) == self._value def __str__(self) -> str: return "%s=%s" % (self._key, self._value) class Parser(object): def __init__(self, text, valid_variables) -> None: self.text = text self.lex = shlex.shlex(text) self.valid_variables = valid_variables def parse(self): expr = self.top() garbage = self.lex.get_token() if garbage != "": raise Exception( "Unexpected token %s after EqualExpr in %s" % (garbage, self.text) ) return expr def top(self): name = self.ident() op = self.lex.get_token() if op == "(": parsers = { "not": self.parse_not, "any": self.parse_any, "all": self.parse_all, } func = parsers.get(name) if not func: raise Exception("invalid term %s in %s" % (name, self.text)) return func() if op == "=": if name not in self.valid_variables: raise Exception("unknown variable %r in expression" % (name,)) # remove shell quote from value so can test things with period in them, e.g "18.04" unquoted = " ".join(shlex.split(self.lex.get_token())) return EqualExpr(name, unquoted) raise Exception( "Unexpected token sequence '%s %s' in %s" % (name, op, self.text) ) def ident(self) -> str: ident = self.lex.get_token() if not re.match("[a-zA-Z]+", ident): raise Exception("expected identifier found %s" % ident) return ident def parse_not(self) -> NotExpr: node = self.top() expr = NotExpr(node) tok = self.lex.get_token() if tok != ")": raise Exception("expected ')' found %s" % tok) return expr def parse_any(self) -> AnyExpr: nodes = [] while True: nodes.append(self.top()) tok = 
self.lex.get_token() if tok == ")": break if tok != ",": raise Exception("expected ',' or ')' but found %s" % tok) return AnyExpr(nodes) def parse_all(self) -> AllExpr: nodes = [] while True: nodes.append(self.top()) tok = self.lex.get_token() if tok == ")": break if tok != ",": raise Exception("expected ',' or ')' but found %s" % tok) return AllExpr(nodes)
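

# Minimal usage sketch mirroring the parse_expr docstring; "os" and "distro"
# are just example context keys:
if __name__ == "__main__":
    expr = parse_expr("all(os=linux, not(distro=ubuntu))", {"os", "distro"})
    print(expr)  # all(os=linux,not(distro=ubuntu))
    print(expr.eval({"os": "linux", "distro": "fedora"}))  # True
    print(expr.eval({"os": "windows", "distro": "none"}))  # False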
Python
hhvm/build/fbcode_builder/getdeps/fetcher.py
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import errno import hashlib import os import re import shutil import stat import subprocess import sys import tarfile import time import zipfile from datetime import datetime from typing import Dict, NamedTuple from urllib.parse import urlparse from urllib.request import Request, urlopen from .copytree import prefetch_dir_if_eden from .envfuncs import Env from .errors import TransientFailure from .platform import is_windows from .runcmd import run_cmd def file_name_is_cmake_file(file_name): file_name = file_name.lower() base = os.path.basename(file_name) return ( base.endswith(".cmake") or base.endswith(".cmake.in") or base == "cmakelists.txt" ) class ChangeStatus(object): """Indicates the nature of changes that happened while updating the source directory. There are two broad uses: * When extracting archives for third party software we want to know that we did something (eg: we either extracted code or we didn't do anything) * For 1st party code where we use shipit to transform the code, we want to know if we changed anything so that we can perform a build, but we generally want to be a little more nuanced and be able to distinguish between just changing a source file and whether we might need to reconfigure the build system. """ def __init__(self, all_changed: bool = False) -> None: """Construct a ChangeStatus object. The default is to create a status that indicates no changes, but passing all_changed=True will create one that indicates that everything changed""" if all_changed: self.source_files = 1 self.make_files = 1 else: self.source_files = 0 self.make_files = 0 def record_change(self, file_name) -> None: """Used by the shipit fetcher to record changes as it updates files in the destination. If the file name might be one used in the cmake build system that we use for 1st party code, then record that as a "make file" change. We could broaden this to match any file used by various build systems, but it is only really useful for our internal cmake stuff at this time. If the file isn't a build file and is under the `fbcode_builder` dir then we don't class that as an interesting change that we might need to rebuild, so we ignore it. Otherwise we record the file as a source file change.""" file_name = file_name.lower() if file_name_is_cmake_file(file_name): self.make_files += 1 elif "/fbcode_builder/cmake" in file_name: self.source_files += 1 elif "/fbcode_builder/" not in file_name: self.source_files += 1 def sources_changed(self) -> bool: """Returns true if any source files were changed during an update operation. This will typically be used to decide that the build system to be run on the source dir in an incremental mode""" return self.source_files > 0 def build_changed(self) -> bool: """Returns true if any build files were changed during an update operation. This will typically be used to decidfe that the build system should be reconfigured and re-run as a full build""" return self.make_files > 0 class Fetcher(object): """The Fetcher is responsible for fetching and extracting the sources for project. The Fetcher instance defines where the extracted data resides and reports this to the consumer via its `get_src_dir` method.""" def update(self) -> ChangeStatus: """Brings the src dir up to date, ideally minimizing changes so that a subsequent build doesn't over-build. 
Returns a ChangeStatus object that helps the caller to understand the nature of the changes required during the update.""" return ChangeStatus() def clean(self) -> None: """Reverts any changes that might have been made to the src dir""" pass def hash(self) -> None: """Returns a hash that identifies the version of the code in the working copy. For a git repo this is commit hash for the working copy. For other Fetchers this should relate to the version of the code in the src dir. The intent is that if a manifest changes the version/rev of a project that the hash be different. Importantly, this should be computable without actually fetching the code, as we want this to factor into a hash used to download a pre-built version of the code, without having to first download and extract its sources (eg: boost on windows is pretty painful). """ pass def get_src_dir(self) -> None: """Returns the source directory that the project was extracted into""" pass class LocalDirFetcher(object): """This class exists to override the normal fetching behavior, and use an explicit user-specified directory for the project sources. This fetcher cannot update or track changes. It always reports that the project has changed, forcing it to always be built.""" def __init__(self, path) -> None: self.path = os.path.realpath(path) def update(self) -> ChangeStatus: return ChangeStatus(all_changed=True) def hash(self) -> str: return "0" * 40 def get_src_dir(self): return self.path class SystemPackageFetcher(object): def __init__(self, build_options, packages) -> None: self.manager = build_options.host_type.get_package_manager() self.packages = packages.get(self.manager) self.host_type = build_options.host_type if self.packages: self.installed = None else: self.installed = False def packages_are_installed(self): if self.installed is not None: return self.installed cmd = None if self.manager == "rpm": cmd = ["rpm", "-q"] + sorted(self.packages) elif self.manager == "deb": cmd = ["dpkg", "-s"] + sorted(self.packages) elif self.manager == "homebrew": cmd = ["brew", "ls", "--versions"] + sorted(self.packages) if cmd: proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) if proc.returncode == 0: # captured as binary as we will hash this later self.installed = proc.stdout else: # Need all packages to be present to consider us installed self.installed = False else: self.installed = False return bool(self.installed) def update(self) -> ChangeStatus: assert self.installed return ChangeStatus(all_changed=False) def hash(self) -> str: if self.packages_are_installed(): return hashlib.sha256(self.installed).hexdigest() else: return "0" * 40 def get_src_dir(self) -> None: return None class PreinstalledNopFetcher(SystemPackageFetcher): def __init__(self) -> None: self.installed = True class GitFetcher(Fetcher): DEFAULT_DEPTH = 1 def __init__(self, build_options, manifest, repo_url, rev, depth) -> None: # Extract the host/path portions of the URL and generate a flattened # directory name. 
eg: # github.com/facebook/folly.git -> github.com-facebook-folly.git url = urlparse(repo_url) directory = "%s%s" % (url.netloc, url.path) for s in ["/", "\\", ":"]: directory = directory.replace(s, "-") # Place it in a repos dir in the scratch space repos_dir = os.path.join(build_options.scratch_dir, "repos") if not os.path.exists(repos_dir): os.makedirs(repos_dir) self.repo_dir = os.path.join(repos_dir, directory) if not rev and build_options.project_hashes: hash_file = os.path.join( build_options.project_hashes, re.sub("\\.git$", "-rev.txt", url.path[1:]), ) if os.path.exists(hash_file): with open(hash_file, "r") as f: data = f.read() m = re.match("Subproject commit ([a-fA-F0-9]{40})", data) if not m: raise Exception("Failed to parse rev from %s" % hash_file) rev = m.group(1) print("Using pinned rev %s for %s" % (rev, repo_url)) self.rev = rev or "main" self.origin_repo = repo_url self.manifest = manifest self.depth = depth if depth else GitFetcher.DEFAULT_DEPTH def _update(self) -> ChangeStatus: current_hash = ( subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=self.repo_dir) .strip() .decode("utf-8") ) target_hash = ( subprocess.check_output(["git", "rev-parse", self.rev], cwd=self.repo_dir) .strip() .decode("utf-8") ) if target_hash == current_hash: # It's up to date, so there are no changes. This doesn't detect eg: # if origin/main moved and rev='main', but that's ok for our purposes; # we should be using explicit hashes or eg: a stable branch for the cases # that we care about, and it isn't unreasonable to require that the user # explicitly perform a clean build if those have moved. For the most # part we prefer that folks build using a release tarball from github # rather than use the git protocol, as it is generally a bit quicker # to fetch and easier to hash and verify tarball downloads. return ChangeStatus() print("Updating %s -> %s" % (self.repo_dir, self.rev)) run_cmd(["git", "fetch", "origin", self.rev], cwd=self.repo_dir) run_cmd(["git", "checkout", self.rev], cwd=self.repo_dir) run_cmd(["git", "submodule", "update", "--init"], cwd=self.repo_dir) return ChangeStatus(True) def update(self) -> ChangeStatus: if os.path.exists(self.repo_dir): return self._update() self._clone() return ChangeStatus(True) def _clone(self) -> None: print("Cloning %s..." % self.origin_repo) # The basename/dirname stuff allows us to dance around issues where # eg: this python process is native win32, but the git.exe is cygwin # or msys and doesn't like the absolute windows path that we'd otherwise # pass to it. Careful use of cwd helps avoid headaches with cygpath. run_cmd( [ "git", "clone", "--depth=" + str(self.depth), "--", self.origin_repo, os.path.basename(self.repo_dir), ], cwd=os.path.dirname(self.repo_dir), ) self._update() def clean(self) -> None: if os.path.exists(self.repo_dir): run_cmd(["git", "clean", "-fxd"], cwd=self.repo_dir) def hash(self): return self.rev def get_src_dir(self): return self.repo_dir def does_file_need_update(src_name, src_st, dest_name): try: target_st = os.lstat(dest_name) except OSError as exc: if exc.errno != errno.ENOENT: raise return True if src_st.st_size != target_st.st_size: return True if stat.S_IFMT(src_st.st_mode) != stat.S_IFMT(target_st.st_mode): return True if stat.S_ISLNK(src_st.st_mode): return os.readlink(src_name) != os.readlink(dest_name) if not stat.S_ISREG(src_st.st_mode): return True # They might have the same content; compare. 
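    # (reading both files in fixed-size chunks keeps memory bounded for
    # large artifacts; the first differing chunk ends the comparison)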
    with open(src_name, "rb") as sf, open(dest_name, "rb") as df:
        chunk_size = 8192
        while True:
            src_data = sf.read(chunk_size)
            dest_data = df.read(chunk_size)
            if src_data != dest_data:
                return True
            if len(src_data) < chunk_size:
                # EOF
                break
    return False


def copy_if_different(src_name, dest_name) -> bool:
    """Copy src_name -> dest_name, but only touch dest_name
    if src_name is different from dest_name, making this a
    more build system friendly way to copy."""
    src_st = os.lstat(src_name)
    if not does_file_need_update(src_name, src_st, dest_name):
        return False

    dest_parent = os.path.dirname(dest_name)
    if not os.path.exists(dest_parent):
        os.makedirs(dest_parent)
    if stat.S_ISLNK(src_st.st_mode):
        try:
            os.unlink(dest_name)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise
        target = os.readlink(src_name)
        print("Symlinking %s -> %s" % (dest_name, target))
        os.symlink(target, dest_name)
    else:
        print("Copying %s -> %s" % (src_name, dest_name))
        shutil.copy2(src_name, dest_name)

    return True


def list_files_under_dir_newer_than_timestamp(dir_to_scan, ts):
    for root, _dirs, files in os.walk(dir_to_scan):
        for src_file in files:
            full_name = os.path.join(root, src_file)
            st = os.lstat(full_name)
            if st.st_mtime > ts:
                yield full_name


class ShipitPathMap(object):
    def __init__(self) -> None:
        self.roots = []
        self.mapping = []
        self.exclusion = []

    def add_mapping(self, fbsource_dir, target_dir) -> None:
        """Add a posix path or pattern.  We cannot normpath the input
        here because that would change the paths from posix to windows
        form and break the logic throughout this class."""
        self.roots.append(fbsource_dir)
        self.mapping.append((fbsource_dir, target_dir))

    def add_exclusion(self, pattern) -> None:
        self.exclusion.append(re.compile(pattern))

    def _minimize_roots(self) -> None:
        """compute the de-duplicated set of roots within fbsource.
        We take the shortest common directory prefix to make this
        determination"""
        self.roots.sort(key=len)
        minimized = []

        for r in self.roots:
            add_this_entry = True
            for existing in minimized:
                if r.startswith(existing + "/"):
                    add_this_entry = False
                    break
            if add_this_entry:
                minimized.append(r)

        self.roots = minimized

    def _sort_mapping(self) -> None:
        self.mapping.sort(reverse=True, key=lambda x: len(x[0]))

    def _map_name(self, norm_name, dest_root):
        if norm_name.endswith(".pyc") or norm_name.endswith(".swp"):
            # Ignore some incidental garbage while iterating
            return None

        for excl in self.exclusion:
            if excl.match(norm_name):
                return None

        for src_name, dest_name in self.mapping:
            if norm_name == src_name or norm_name.startswith(src_name + "/"):
                rel_name = os.path.relpath(norm_name, src_name)
                # We can have "." as a component of some paths, depending
                # on the contents of the shipit transformation section.
                # normpath doesn't always remove `.` as the final component
                # of the path, which can be problematic when we later mkdir
                # the dirname of the path that we return. Take care to avoid
                # returning a path with a `.` in it.
                rel_name = os.path.normpath(rel_name)
                if dest_name == ".":
                    return os.path.normpath(os.path.join(dest_root, rel_name))
                dest_name = os.path.normpath(dest_name)
                return os.path.normpath(os.path.join(dest_root, dest_name, rel_name))

        raise Exception("%s did not match any rules" % norm_name)

    def mirror(self, fbsource_root, dest_root) -> ChangeStatus:
        self._minimize_roots()
        self._sort_mapping()

        change_status = ChangeStatus()

        # Record the full set of files that should be in the tree
        full_file_list = set()

        if sys.platform == "win32":
            # Let's not assume st_dev has a consistent value on Windows.
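            # (drive letters and network shares make the device id
            # unreliable there, so the stub below effectively disables the
            # cross-device pruning used on POSIX)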
def st_dev(path): return 1 else: def st_dev(path): return os.lstat(path).st_dev for fbsource_subdir in self.roots: dir_to_mirror = os.path.join(fbsource_root, fbsource_subdir) root_dev = st_dev(dir_to_mirror) prefetch_dir_if_eden(dir_to_mirror) if not os.path.exists(dir_to_mirror): raise Exception( "%s doesn't exist; check your sparse profile!" % dir_to_mirror ) for root, dirs, files in os.walk(dir_to_mirror): dirs[:] = [d for d in dirs if root_dev == st_dev(os.path.join(root, d))] for src_file in files: full_name = os.path.join(root, src_file) rel_name = os.path.relpath(full_name, fbsource_root) norm_name = rel_name.replace("\\", "/") target_name = self._map_name(norm_name, dest_root) if target_name: full_file_list.add(target_name) if copy_if_different(full_name, target_name): change_status.record_change(target_name) # Compare the list of previously shipped files; if a file is # in the old list but not the new list then it has been # removed from the source and should be removed from the # destination. # Why don't we simply create this list by walking dest_root? # Some builds currently have to be in-source builds and # may legitimately need to keep some state in the source tree :-/ installed_name = os.path.join(dest_root, ".shipit_shipped") if os.path.exists(installed_name): with open(installed_name, "rb") as f: for name in f.read().decode("utf-8").splitlines(): name = name.strip() if name not in full_file_list: print("Remove %s" % name) os.unlink(name) change_status.record_change(name) with open(installed_name, "wb") as f: for name in sorted(list(full_file_list)): f.write(("%s\n" % name).encode("utf-8")) return change_status class FbsourceRepoData(NamedTuple): hash: str date: str FBSOURCE_REPO_DATA: Dict[str, FbsourceRepoData] = {} def get_fbsource_repo_data(build_options) -> FbsourceRepoData: """Returns the commit metadata for the fbsource repo. 
Since we may have multiple first party projects to hash, and because we don't mutate the repo, we cache this hash in a global.""" cached_data = FBSOURCE_REPO_DATA.get(build_options.fbsource_dir) if cached_data: return cached_data if "GETDEPS_HG_REPO_DATA" in os.environ: log_data = os.environ["GETDEPS_HG_REPO_DATA"] else: cmd = ["hg", "log", "-r.", "-T{node}\n{date|hgdate}"] env = Env() env.set("HGPLAIN", "1") log_data = subprocess.check_output( cmd, cwd=build_options.fbsource_dir, env=dict(env.items()) ).decode("ascii") (hash, datestr) = log_data.split("\n") # datestr is like "seconds fractionalseconds" # We want "20200324.113140" (unixtime, _fractional) = datestr.split(" ") date = datetime.fromtimestamp(int(unixtime)).strftime("%Y%m%d.%H%M%S") cached_data = FbsourceRepoData(hash=hash, date=date) FBSOURCE_REPO_DATA[build_options.fbsource_dir] = cached_data return cached_data class SimpleShipitTransformerFetcher(Fetcher): def __init__(self, build_options, manifest, ctx) -> None: self.build_options = build_options self.manifest = manifest self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", manifest.name) self.ctx = ctx def clean(self) -> None: if os.path.exists(self.repo_dir): shutil.rmtree(self.repo_dir) def update(self) -> ChangeStatus: mapping = ShipitPathMap() for src, dest in self.manifest.get_section_as_ordered_pairs( "shipit.pathmap", self.ctx ): mapping.add_mapping(src, dest) if self.manifest.shipit_fbcode_builder: mapping.add_mapping( "fbcode/opensource/fbcode_builder", "build/fbcode_builder" ) for pattern in self.manifest.get_section_as_args("shipit.strip", self.ctx): mapping.add_exclusion(pattern) return mapping.mirror(self.build_options.fbsource_dir, self.repo_dir) # pyre-fixme[15]: `hash` overrides method defined in `Fetcher` inconsistently. def hash(self) -> str: # We return a fixed non-hash string for in-fbsource builds. # We're relying on the `update` logic to correctly invalidate # the build in the case that files have changed. return "fbsource" def get_src_dir(self): return self.repo_dir class ShipitTransformerFetcher(Fetcher): SHIPIT = "/var/www/scripts/opensource/shipit/run_shipit.php" def __init__(self, build_options, project_name) -> None: self.build_options = build_options self.project_name = project_name self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", project_name) def update(self) -> ChangeStatus: if os.path.exists(self.repo_dir): return ChangeStatus() self.run_shipit() return ChangeStatus(True) def clean(self) -> None: if os.path.exists(self.repo_dir): shutil.rmtree(self.repo_dir) @classmethod def available(cls): return os.path.exists(cls.SHIPIT) def run_shipit(self) -> None: tmp_path = self.repo_dir + ".new" try: if os.path.exists(tmp_path): shutil.rmtree(tmp_path) # Run shipit run_cmd( [ "php", ShipitTransformerFetcher.SHIPIT, "--project=" + self.project_name, "--create-new-repo", "--source-repo-dir=" + self.build_options.fbsource_dir, "--source-branch=.", "--skip-source-init", "--skip-source-pull", "--skip-source-clean", "--skip-push", "--skip-reset", "--destination-use-anonymous-https", "--create-new-repo-output-path=" + tmp_path, ] ) # Remove the .git directory from the repository it generated. # There is no need to commit this. 
repo_git_dir = os.path.join(tmp_path, ".git") shutil.rmtree(repo_git_dir) os.rename(tmp_path, self.repo_dir) except Exception: # Clean up after a failed extraction if os.path.exists(tmp_path): shutil.rmtree(tmp_path) self.clean() raise # pyre-fixme[15]: `hash` overrides method defined in `Fetcher` inconsistently. def hash(self) -> str: # We return a fixed non-hash string for in-fbsource builds. return "fbsource" def get_src_dir(self): return self.repo_dir def download_url_to_file_with_progress(url: str, file_name) -> None: print("Download with %s -> %s ..." % (url, file_name)) class Progress(object): last_report = 0 def write_update(self, total, amount): if total == -1: total = "(Unknown)" if sys.stdout.isatty(): sys.stdout.write("\r downloading %s of %s " % (amount, total)) else: # When logging to CI logs, avoid spamming the logs and print # status every few seconds now = time.time() if now - self.last_report > 5: sys.stdout.write(".. %s of %s " % (amount, total)) self.last_report = now sys.stdout.flush() def progress_pycurl(self, total, amount, _uploadtotal, _uploadamount): self.write_update(total, amount) progress = Progress() start = time.time() try: if os.environ.get("GETDEPS_USE_WGET") is not None: subprocess.run( [ "wget", "-O", file_name, url, ] ) headers = None elif os.environ.get("GETDEPS_USE_LIBCURL") is not None: import pycurl with open(file_name, "wb") as f: c = pycurl.Curl() c.setopt(pycurl.URL, url) c.setopt(pycurl.WRITEDATA, f) # display progress c.setopt(pycurl.NOPROGRESS, False) c.setopt(pycurl.XFERINFOFUNCTION, progress.progress_pycurl) c.perform() c.close() headers = None else: req_header = {"Accept": "application/*"} res = urlopen(Request(url, None, req_header)) chunk_size = 8192 # urlretrieve uses this value headers = res.headers content_length = res.headers.get("Content-Length") total = int(content_length.strip()) if content_length else -1 amount = 0 with open(file_name, "wb") as f: chunk = res.read(chunk_size) while chunk: f.write(chunk) amount += len(chunk) progress.write_update(total, amount) chunk = res.read(chunk_size) except (OSError, IOError) as exc: # noqa: B014 raise TransientFailure( "Failed to download %s to %s: %s" % (url, file_name, str(exc)) ) end = time.time() sys.stdout.write(" [Complete in %f seconds]\n" % (end - start)) sys.stdout.flush() if headers is not None: print(f"{headers}") class ArchiveFetcher(Fetcher): def __init__(self, build_options, manifest, url, sha256) -> None: self.manifest = manifest self.url = url self.sha256 = sha256 self.build_options = build_options url = urlparse(self.url) basename = "%s-%s" % (manifest.name, os.path.basename(url.path)) self.file_name = os.path.join(build_options.scratch_dir, "downloads", basename) self.src_dir = os.path.join(build_options.scratch_dir, "extracted", basename) self.hash_file = self.src_dir + ".hash" def _verify_hash(self) -> None: h = hashlib.sha256() with open(self.file_name, "rb") as f: while True: block = f.read(8192) if not block: break h.update(block) digest = h.hexdigest() if digest != self.sha256: os.unlink(self.file_name) raise Exception( "%s: expected sha256 %s but got %s" % (self.url, self.sha256, digest) ) def _download_dir(self): """returns the download dir, creating it if it doesn't already exist""" download_dir = os.path.dirname(self.file_name) if not os.path.exists(download_dir): os.makedirs(download_dir) return download_dir def _download(self) -> None: self._download_dir() download_url_to_file_with_progress(self.url, self.file_name) self._verify_hash() def clean(self) -> None: if 
os.path.exists(self.src_dir): shutil.rmtree(self.src_dir) def update(self) -> ChangeStatus: try: with open(self.hash_file, "r") as f: saved_hash = f.read().strip() if saved_hash == self.sha256 and os.path.exists(self.src_dir): # Everything is up to date return ChangeStatus() print( "saved hash %s doesn't match expected hash %s, re-validating" % (saved_hash, self.sha256) ) os.unlink(self.hash_file) except EnvironmentError: pass # If we got here we know the contents of src_dir are either missing # or wrong, so blow away whatever happened to be there first. if os.path.exists(self.src_dir): shutil.rmtree(self.src_dir) # If we already have a file here, make sure it looks legit before # proceeding: any errors and we just remove it and re-download if os.path.exists(self.file_name): try: self._verify_hash() except Exception: if os.path.exists(self.file_name): os.unlink(self.file_name) if not os.path.exists(self.file_name): self._download() if tarfile.is_tarfile(self.file_name): opener = tarfile.open elif zipfile.is_zipfile(self.file_name): opener = zipfile.ZipFile else: raise Exception("don't know how to extract %s" % self.file_name) os.makedirs(self.src_dir) print("Extract %s -> %s" % (self.file_name, self.src_dir)) t = opener(self.file_name) if is_windows(): # Ensure that we don't fall over when dealing with long paths # on windows src = r"\\?\%s" % os.path.normpath(self.src_dir) else: src = self.src_dir # The `str` here is necessary to ensure that we don't pass a unicode # object down to tarfile.extractall on python2. When extracting # the boost tarball it makes some assumptions and tries to convert # a non-ascii path to ascii and throws. src = str(src) t.extractall(src) with open(self.hash_file, "w") as f: f.write(self.sha256) return ChangeStatus(True) def hash(self): return self.sha256 def get_src_dir(self): return self.src_dir def homebrew_package_prefix(package): cmd = ["brew", "--prefix", package] try: proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except FileNotFoundError: return if proc.returncode == 0: return proc.stdout.decode("utf-8").rstrip()
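
# A minimal illustrative sketch (not part of the getdeps API) showing how
# the helpers above compose: copy a file only when its content differs and
# record the result in a ChangeStatus. The temp-file paths are invented for
# the demo; run as a module (python -m getdeps.fetcher) so the relative
# imports at the top of this file resolve.
if __name__ == "__main__":
    import tempfile

    demo_src = os.path.join(tempfile.gettempdir(), "getdeps-demo-src.txt")
    demo_dst = os.path.join(tempfile.gettempdir(), "getdeps-demo-dst.txt")
    with open(demo_src, "w") as f:
        f.write("hello\n")
    if os.path.exists(demo_dst):
        os.unlink(demo_dst)

    status = ChangeStatus()
    if copy_if_different(demo_src, demo_dst):
        status.record_change(demo_dst)
    # An immediate second copy of identical content is a no-op.
    assert not copy_if_different(demo_src, demo_dst)
    print("sources changed:", status.sources_changed())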
Python
hhvm/build/fbcode_builder/getdeps/load.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import base64 import copy import hashlib import os from . import fetcher from .envfuncs import path_search from .errors import ManifestNotFound from .manifest import ManifestParser class Loader(object): """The loader allows our tests to patch the load operation""" def _list_manifests(self, build_opts): """Returns a generator that iterates all the available manifests""" for (path, _, files) in os.walk(build_opts.manifests_dir): for name in files: # skip hidden files if name.startswith("."): continue yield os.path.join(path, name) def _load_manifest(self, path): return ManifestParser(path) def load_project(self, build_opts, project_name): if "/" in project_name or "\\" in project_name: # Assume this is a path already return ManifestParser(project_name) for manifest in self._list_manifests(build_opts): if os.path.basename(manifest) == project_name: return ManifestParser(manifest) raise ManifestNotFound(project_name) def load_all(self, build_opts): manifests_by_name = {} for manifest in self._list_manifests(build_opts): m = self._load_manifest(manifest) if m.name in manifests_by_name: raise Exception("found duplicate manifest '%s'" % m.name) manifests_by_name[m.name] = m return manifests_by_name class ResourceLoader(Loader): def __init__(self, namespace, manifests_dir) -> None: self.namespace = namespace self.manifests_dir = manifests_dir def _list_manifests(self, _build_opts): import pkg_resources dirs = [self.manifests_dir] while dirs: current = dirs.pop(0) for name in pkg_resources.resource_listdir(self.namespace, current): path = "%s/%s" % (current, name) if pkg_resources.resource_isdir(self.namespace, path): dirs.append(path) else: yield "%s/%s" % (current, name) def _find_manifest(self, project_name): for name in self._list_manifests(): if name.endswith("/%s" % project_name): return name raise ManifestNotFound(project_name) def _load_manifest(self, path: str): import pkg_resources contents = pkg_resources.resource_string(self.namespace, path).decode("utf8") return ManifestParser(file_name=path, fp=contents) def load_project(self, build_opts, project_name): project_name = self._find_manifest(project_name) return self._load_resource_manifest(project_name) LOADER = Loader() def patch_loader(namespace, manifests_dir: str = "manifests") -> None: global LOADER LOADER = ResourceLoader(namespace, manifests_dir) def load_project(build_opts, project_name): """given the name of a project or a path to a manifest file, load up the ManifestParser instance for it and return it""" return LOADER.load_project(build_opts, project_name) def load_all_manifests(build_opts): return LOADER.load_all(build_opts) class ManifestLoader(object): """ManifestLoader stores information about project manifest relationships for a given set of (build options + platform) configuration. The ManifestLoader class primarily serves as a location to cache project dependency relationships and project hash values for this build configuration. 
""" def __init__(self, build_opts, ctx_gen=None) -> None: self._loader = LOADER self.build_opts = build_opts if ctx_gen is None: self.ctx_gen = self.build_opts.get_context_generator() else: self.ctx_gen = ctx_gen self.manifests_by_name = {} self._loaded_all = False self._project_hashes = {} self._fetcher_overrides = {} self._build_dir_overrides = {} self._install_dir_overrides = {} self._install_prefix_overrides = {} def load_manifest(self, name): manifest = self.manifests_by_name.get(name) if manifest is None: manifest = self._loader.load_project(self.build_opts, name) self.manifests_by_name[name] = manifest return manifest def load_all_manifests(self): if not self._loaded_all: all_manifests_by_name = self._loader.load_all(self.build_opts) if self.manifests_by_name: # To help ensure that we only ever have a single manifest object for a # given project, and that it can't change once we have loaded it, # only update our mapping for projects that weren't already loaded. for name, manifest in all_manifests_by_name.items(): self.manifests_by_name.setdefault(name, manifest) else: self.manifests_by_name = all_manifests_by_name self._loaded_all = True return self.manifests_by_name def manifests_in_dependency_order(self, manifest=None): """Compute all dependencies of the specified project. Returns a list of the dependencies plus the project itself, in topologically sorted order. Each entry in the returned list only depends on projects that appear before it in the list. If the input manifest is None, the dependencies for all currently loaded projects will be computed. i.e., if you call load_all_manifests() followed by manifests_in_dependency_order() this will return a global dependency ordering of all projects.""" # The list of deps that have been fully processed seen = set() # The list of deps which have yet to be evaluated. This # can potentially contain duplicates. if manifest is None: deps = list(self.manifests_by_name.values()) else: assert manifest.name in self.manifests_by_name deps = [manifest] # The list of manifests in dependency order dep_order = [] system_packages = {} while len(deps) > 0: m = deps.pop(0) if m.name in seen: continue # Consider its deps, if any. # We sort them for increased determinism; we'll produce # a correct order even if they aren't sorted, but we prefer # to produce the same order regardless of how they are listed # in the project manifest files. ctx = self.ctx_gen.get_context(m.name) dep_list = m.get_dependencies(ctx) dep_count = 0 for dep_name in dep_list: # If we're not sure whether it is done, queue it up if dep_name not in seen: dep = self.manifests_by_name.get(dep_name) if dep is None: dep = self._loader.load_project(self.build_opts, dep_name) self.manifests_by_name[dep.name] = dep deps.append(dep) dep_count += 1 if dep_count > 0: # If we queued anything, re-queue this item, as it depends # those new item(s) and their transitive deps. 
deps.append(m) continue # Its deps are done, so we can emit it seen.add(m.name) # Capture system packages as we may need to set PATHs to then later if ( self.build_opts.allow_system_packages and self.build_opts.host_type.get_package_manager() ): packages = m.get_required_system_packages(ctx) for pkg_type, v in packages.items(): merged = system_packages.get(pkg_type, []) if v not in merged: merged += v system_packages[pkg_type] = merged # A manifest depends on all system packages in it dependencies as well m.resolved_system_packages = copy.copy(system_packages) dep_order.append(m) return dep_order def set_project_src_dir(self, project_name, path) -> None: self._fetcher_overrides[project_name] = fetcher.LocalDirFetcher(path) def set_project_build_dir(self, project_name, path) -> None: self._build_dir_overrides[project_name] = path def set_project_install_dir(self, project_name, path) -> None: self._install_dir_overrides[project_name] = path def set_project_install_prefix(self, project_name, path) -> None: self._install_prefix_overrides[project_name] = path def create_fetcher(self, manifest): override = self._fetcher_overrides.get(manifest.name) if override is not None: return override ctx = self.ctx_gen.get_context(manifest.name) return manifest.create_fetcher(self.build_opts, ctx) def get_project_hash(self, manifest): h = self._project_hashes.get(manifest.name) if h is None: h = self._compute_project_hash(manifest) self._project_hashes[manifest.name] = h return h def _compute_project_hash(self, manifest) -> str: """This recursive function computes a hash for a given manifest. The hash takes into account some environmental factors on the host machine and includes the hashes of its dependencies. No caching of the computation is performed, which is theoretically wasteful but the computation is fast enough that it is not required to cache across multiple invocations.""" ctx = self.ctx_gen.get_context(manifest.name) hasher = hashlib.sha256() # Some environmental and configuration things matter env = {} env["install_dir"] = self.build_opts.install_dir env["scratch_dir"] = self.build_opts.scratch_dir env["vcvars_path"] = self.build_opts.vcvars_path env["os"] = self.build_opts.host_type.ostype env["distro"] = self.build_opts.host_type.distro env["distro_vers"] = self.build_opts.host_type.distrovers env["shared_libs"] = str(self.build_opts.shared_libs) for name in [ "CXXFLAGS", "CPPFLAGS", "LDFLAGS", "CXX", "CC", "GETDEPS_CMAKE_DEFINES", ]: env[name] = os.environ.get(name) for tool in ["cc", "c++", "gcc", "g++", "clang", "clang++"]: env["tool-%s" % tool] = path_search(os.environ, tool) for name in manifest.get_section_as_args("depends.environment", ctx): env[name] = os.environ.get(name) fetcher = self.create_fetcher(manifest) env["fetcher.hash"] = fetcher.hash() for name in sorted(env.keys()): hasher.update(name.encode("utf-8")) value = env.get(name) if value is not None: try: hasher.update(value.encode("utf-8")) except AttributeError as exc: raise AttributeError("name=%r, value=%r: %s" % (name, value, exc)) manifest.update_hash(hasher, ctx) dep_list = manifest.get_dependencies(ctx) for dep in dep_list: dep_manifest = self.load_manifest(dep) dep_hash = self.get_project_hash(dep_manifest) hasher.update(dep_hash.encode("utf-8")) # Use base64 to represent the hash, rather than the simple hex digest, # so that the string is shorter. Use the URL-safe encoding so that # the hash can also be safely used as a filename component. h = base64.urlsafe_b64encode(hasher.digest()).decode("ascii") # ... 
and because cmd.exe is troublesome with `=` signs, nerf those. # They tend to be padding characters at the end anyway, so we can # safely discard them. h = h.replace("=", "") return h def _get_project_dir_name(self, manifest): if manifest.is_first_party_project(): return manifest.name else: project_hash = self.get_project_hash(manifest) return "%s-%s" % (manifest.name, project_hash) def get_project_install_dir(self, manifest): override = self._install_dir_overrides.get(manifest.name) if override: return override project_dir_name = self._get_project_dir_name(manifest) return os.path.join(self.build_opts.install_dir, project_dir_name) def get_project_build_dir(self, manifest): override = self._build_dir_overrides.get(manifest.name) if override: return override project_dir_name = self._get_project_dir_name(manifest) return os.path.join(self.build_opts.scratch_dir, "build", project_dir_name) def get_project_install_prefix(self, manifest): return self._install_prefix_overrides.get(manifest.name) def get_project_install_dir_respecting_install_prefix(self, manifest): inst_dir = self.get_project_install_dir(manifest) prefix = self.get_project_install_prefix(manifest) if prefix: return inst_dir + prefix return inst_dir
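
# Illustrative sketch only: how a caller might walk a project and its
# dependencies in build order. `build_opts` comes from getdeps' command-line
# setup and "folly" is just an example project name.
#
#   loader = ManifestLoader(build_opts)
#   manifest = loader.load_manifest("folly")
#   for m in loader.manifests_in_dependency_order(manifest):
#       print(m.name, loader.get_project_build_dir(m))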
Python
hhvm/build/fbcode_builder/getdeps/manifest.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import configparser import io import os from typing import List from .builder import ( AutoconfBuilder, Boost, CMakeBootStrapBuilder, CMakeBuilder, Iproute2Builder, MakeBuilder, NinjaBootstrap, NopBuilder, OpenSSLBuilder, SqliteBuilder, ) from .cargo import CargoBuilder from .expr import parse_expr from .fetcher import ( ArchiveFetcher, GitFetcher, PreinstalledNopFetcher, ShipitTransformerFetcher, SimpleShipitTransformerFetcher, SystemPackageFetcher, ) from .py_wheel_builder import PythonWheelBuilder REQUIRED = "REQUIRED" OPTIONAL = "OPTIONAL" SCHEMA = { "manifest": { "optional_section": False, "fields": { "name": REQUIRED, "fbsource_path": OPTIONAL, "shipit_project": OPTIONAL, "shipit_fbcode_builder": OPTIONAL, }, }, "dependencies": {"optional_section": True, "allow_values": False}, "depends.environment": {"optional_section": True}, "git": { "optional_section": True, "fields": {"repo_url": REQUIRED, "rev": OPTIONAL, "depth": OPTIONAL}, }, "download": { "optional_section": True, "fields": {"url": REQUIRED, "sha256": REQUIRED}, }, "build": { "optional_section": True, "fields": { "builder": REQUIRED, "subdir": OPTIONAL, "make_binary": OPTIONAL, "build_in_src_dir": OPTIONAL, "job_weight_mib": OPTIONAL, "patchfile": OPTIONAL, "patchfile_opts": OPTIONAL, }, }, "msbuild": {"optional_section": True, "fields": {"project": REQUIRED}}, "cargo": { "optional_section": True, "fields": { "build_doc": OPTIONAL, "workspace_dir": OPTIONAL, "manifests_to_build": OPTIONAL, # Where to write cargo config (defaults to build_dir/.cargo/config.toml) "cargo_config_file": OPTIONAL, }, }, "github.actions": { "optional_section": True, "fields": { "run_tests": OPTIONAL, }, }, "crate.pathmap": {"optional_section": True}, "cmake.defines": {"optional_section": True}, "autoconf.args": {"optional_section": True}, "autoconf.envcmd.LDFLAGS": {"optional_section": True}, "rpms": {"optional_section": True}, "debs": {"optional_section": True}, "homebrew": {"optional_section": True}, "preinstalled.env": {"optional_section": True}, "bootstrap.args": {"optional_section": True}, "b2.args": {"optional_section": True}, "make.build_args": {"optional_section": True}, "make.install_args": {"optional_section": True}, "make.test_args": {"optional_section": True}, "header-only": {"optional_section": True, "fields": {"includedir": REQUIRED}}, "shipit.pathmap": {"optional_section": True}, "shipit.strip": {"optional_section": True}, "install.files": {"optional_section": True}, # fb-only "sandcastle": {"optional_section": True, "fields": {"run_tests": OPTIONAL}}, } # These sections are allowed to vary for different platforms # using the expression syntax to enable/disable sections ALLOWED_EXPR_SECTIONS = [ "autoconf.args", "autoconf.envcmd.LDFLAGS", "build", "cmake.defines", "dependencies", "make.build_args", "make.install_args", "bootstrap.args", "b2.args", "download", "git", "install.files", "rpms", "debs", "shipit.pathmap", "shipit.strip", "homebrew", "github.actions", ] def parse_conditional_section_name(name, section_def): expr = name[len(section_def) + 1 :] return parse_expr(expr, ManifestContext.ALLOWED_VARIABLES) def validate_allowed_fields(file_name, section, config, allowed_fields): for field in config.options(section): if not allowed_fields.get(field): raise Exception( ("manifest file %s section '%s' contains " "unknown field '%s'") % (file_name, section, 
field) ) for field in allowed_fields: if allowed_fields[field] == REQUIRED and not config.has_option(section, field): raise Exception( ("manifest file %s section '%s' is missing " "required field '%s'") % (file_name, section, field) ) def validate_allow_values(file_name, section, config): for field in config.options(section): value = config.get(section, field) if value is not None: raise Exception( ( "manifest file %s section '%s' has '%s = %s' but " "this section doesn't allow specifying values " "for its entries" ) % (file_name, section, field, value) ) def validate_section(file_name, section, config): section_def = SCHEMA.get(section) if not section_def: for name in ALLOWED_EXPR_SECTIONS: if section.startswith(name + "."): # Verify that the conditional parses, but discard it try: parse_conditional_section_name(section, name) except Exception as exc: raise Exception( ("manifest file %s section '%s' has invalid " "conditional: %s") % (file_name, section, str(exc)) ) section_def = SCHEMA.get(name) canonical_section_name = name break if not section_def: raise Exception( "manifest file %s contains unknown section '%s'" % (file_name, section) ) else: canonical_section_name = section allowed_fields = section_def.get("fields") if allowed_fields: validate_allowed_fields(file_name, section, config, allowed_fields) elif not section_def.get("allow_values", True): validate_allow_values(file_name, section, config) return canonical_section_name class ManifestParser(object): def __init__(self, file_name, fp=None): # allow_no_value enables listing parameters in the # autoconf.args section one per line config = configparser.RawConfigParser(allow_no_value=True) config.optionxform = str # make it case sensitive if fp is None: with open(file_name, "r") as fp: config.read_file(fp) elif isinstance(fp, type("")): # For testing purposes, parse from a string (str # or unicode) config.read_file(io.StringIO(fp)) else: config.read_file(fp) # validate against the schema seen_sections = set() for section in config.sections(): seen_sections.add(validate_section(file_name, section, config)) for section in SCHEMA.keys(): section_def = SCHEMA[section] if ( not section_def.get("optional_section", False) and section not in seen_sections ): raise Exception( "manifest file %s is missing required section %s" % (file_name, section) ) self._config = config self.name = config.get("manifest", "name") self.fbsource_path = self.get("manifest", "fbsource_path") self.shipit_project = self.get("manifest", "shipit_project") self.shipit_fbcode_builder = self.get("manifest", "shipit_fbcode_builder") self.resolved_system_packages = {} if self.name != os.path.basename(file_name): raise Exception( "filename of the manifest '%s' does not match the manifest name '%s'" % (file_name, self.name) ) def get(self, section, key, defval=None, ctx=None): ctx = ctx or {} for s in self._config.sections(): if s == section: if self._config.has_option(s, key): return self._config.get(s, key) return defval if s.startswith(section + "."): expr = parse_conditional_section_name(s, section) if not expr.eval(ctx): continue if self._config.has_option(s, key): return self._config.get(s, key) return defval def get_dependencies(self, ctx): dep_list = list(self.get_section_as_dict("dependencies", ctx).keys()) dep_list.sort() builder = self.get("build", "builder", ctx=ctx) if builder in ("cmake", "python-wheel"): dep_list.insert(0, "cmake") elif builder == "autoconf" and self.name not in ( "autoconf", "libtool", "automake", ): # they need libtool and its deps 
(automake, autoconf) so add # those as deps (but obviously not if we're building those # projects themselves) dep_list.insert(0, "libtool") return dep_list def get_section_as_args(self, section, ctx=None) -> List[str]: """Intended for use with the make.[build_args/install_args] and autoconf.args sections, this method collects the entries and returns an array of strings. If the manifest contains conditional sections, ctx is used to evaluate the condition and merge in the values. """ args = [] ctx = ctx or {} for s in self._config.sections(): if s != section: if not s.startswith(section + "."): continue expr = parse_conditional_section_name(s, section) if not expr.eval(ctx): continue for field in self._config.options(s): value = self._config.get(s, field) if value is None: args.append(field) else: args.append("%s=%s" % (field, value)) return args def get_section_as_ordered_pairs(self, section, ctx=None): """Used for eg: shipit.pathmap which has strong ordering requirements""" res = [] ctx = ctx or {} for s in self._config.sections(): if s != section: if not s.startswith(section + "."): continue expr = parse_conditional_section_name(s, section) if not expr.eval(ctx): continue for key in self._config.options(s): value = self._config.get(s, key) res.append((key, value)) return res def get_section_as_dict(self, section, ctx): d = {} for s in self._config.sections(): if s != section: if not s.startswith(section + "."): continue expr = parse_conditional_section_name(s, section) if not expr.eval(ctx): continue for field in self._config.options(s): value = self._config.get(s, field) d[field] = value return d def update_hash(self, hasher, ctx): """Compute a hash over the configuration for the given context. The goal is for the hash to change if the config for that context changes, but not if a change is made to the config only for a different platform than that expressed by ctx. The hash is intended to be used to help invalidate a future cache for the third party build products. The hasher argument is a hash object returned from hashlib.""" for section in sorted(SCHEMA.keys()): hasher.update(section.encode("utf-8")) # Note: at the time of writing, nothing in the implementation # relies on keys in any config section being ordered. # In theory we could have conflicting flags in different # config sections and later flags override earlier flags. # For the purposes of computing a hash we're not super # concerned about this: manifest changes should be rare # enough and we'd rather that this trigger an invalidation # than strive for a cache hit at this time. 
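            # Sorting by key keeps the hash stable regardless of the order
            # in which entries appear in the manifest file.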
pairs = self.get_section_as_ordered_pairs(section, ctx) pairs.sort(key=lambda pair: pair[0]) for key, value in pairs: hasher.update(key.encode("utf-8")) if value is not None: hasher.update(value.encode("utf-8")) def is_first_party_project(self): """returns true if this is an FB first-party project""" return self.shipit_project is not None def get_required_system_packages(self, ctx): """Returns dictionary of packager system -> list of packages""" return { "rpm": self.get_section_as_args("rpms", ctx), "deb": self.get_section_as_args("debs", ctx), "homebrew": self.get_section_as_args("homebrew", ctx), } def _is_satisfied_by_preinstalled_environment(self, ctx): envs = self.get_section_as_args("preinstalled.env", ctx) if not envs: return False for key in envs: val = os.environ.get(key, None) print(f"Testing ENV[{key}]: {repr(val)}") if val is None: return False if len(val) == 0: return False return True def get_repo_url(self, ctx): return self.get("git", "repo_url", ctx=ctx) def create_fetcher(self, build_options, ctx): use_real_shipit = ( ShipitTransformerFetcher.available() and build_options.use_shipit ) if ( not use_real_shipit and self.fbsource_path and build_options.fbsource_dir and self.shipit_project ): return SimpleShipitTransformerFetcher(build_options, self, ctx) if ( self.fbsource_path and build_options.fbsource_dir and self.shipit_project and ShipitTransformerFetcher.available() ): # We can use the code from fbsource return ShipitTransformerFetcher(build_options, self.shipit_project) # Can we satisfy this dep with system packages? if build_options.allow_system_packages: if self._is_satisfied_by_preinstalled_environment(ctx): return PreinstalledNopFetcher() packages = self.get_required_system_packages(ctx) package_fetcher = SystemPackageFetcher(build_options, packages) if package_fetcher.packages_are_installed(): return package_fetcher repo_url = self.get_repo_url(ctx) if repo_url: rev = self.get("git", "rev") depth = self.get("git", "depth") return GitFetcher(build_options, self, repo_url, rev, depth) url = self.get("download", "url", ctx=ctx) if url: # We need to defer this import until now to avoid triggering # a cycle when the facebook/__init__.py is loaded. try: from .facebook.lfs import LFSCachingArchiveFetcher return LFSCachingArchiveFetcher( build_options, self, url, self.get("download", "sha256", ctx=ctx) ) except ImportError: # This FB internal module isn't shippped to github, # so just use its base class return ArchiveFetcher( build_options, self, url, self.get("download", "sha256", ctx=ctx) ) raise KeyError( "project %s has no fetcher configuration matching %s" % (self.name, ctx) ) def get_builder_name(self, ctx): builder = self.get("build", "builder", ctx=ctx) if not builder: raise Exception("project %s has no builder for %r" % (self.name, ctx)) return builder def create_builder( # noqa:C901 self, build_options, src_dir, build_dir, inst_dir, ctx, loader, final_install_prefix=None, extra_cmake_defines=None, cmake_target=None, extra_b2_args=None, ): builder = self.get_builder_name(ctx) build_in_src_dir = self.get("build", "build_in_src_dir", "false", ctx=ctx) if build_in_src_dir == "true": # Some scripts don't work when they are configured and build in # a different directory than source (or when the build directory # is not a subdir of source). 
build_dir = src_dir subdir = self.get("build", "subdir", None, ctx=ctx) if subdir is not None: build_dir = os.path.join(build_dir, subdir) print("build_dir is %s" % build_dir) # just to quiet lint if builder == "make" or builder == "cmakebootstrap": build_args = self.get_section_as_args("make.build_args", ctx) install_args = self.get_section_as_args("make.install_args", ctx) test_args = self.get_section_as_args("make.test_args", ctx) if builder == "cmakebootstrap": return CMakeBootStrapBuilder( build_options, ctx, self, src_dir, None, inst_dir, build_args, install_args, test_args, ) else: return MakeBuilder( build_options, ctx, self, src_dir, None, inst_dir, build_args, install_args, test_args, ) if builder == "autoconf": args = self.get_section_as_args("autoconf.args", ctx) conf_env_args = {} ldflags_cmd = self.get_section_as_args("autoconf.envcmd.LDFLAGS", ctx) if ldflags_cmd: conf_env_args["LDFLAGS"] = ldflags_cmd return AutoconfBuilder( build_options, ctx, self, src_dir, build_dir, inst_dir, args, conf_env_args, ) if builder == "boost": args = self.get_section_as_args("b2.args", ctx) if extra_b2_args is not None: args += extra_b2_args return Boost(build_options, ctx, self, src_dir, build_dir, inst_dir, args) if builder == "cmake": defines = self.get_section_as_dict("cmake.defines", ctx) return CMakeBuilder( build_options, ctx, self, src_dir, build_dir, inst_dir, defines, loader, final_install_prefix, extra_cmake_defines, cmake_target, ) if builder == "python-wheel": return PythonWheelBuilder( build_options, ctx, self, src_dir, build_dir, inst_dir ) if builder == "sqlite": return SqliteBuilder(build_options, ctx, self, src_dir, build_dir, inst_dir) if builder == "ninja_bootstrap": return NinjaBootstrap( build_options, ctx, self, build_dir, src_dir, inst_dir ) if builder == "nop": return NopBuilder(build_options, ctx, self, src_dir, inst_dir) if builder == "openssl": return OpenSSLBuilder( build_options, ctx, self, build_dir, src_dir, inst_dir ) if builder == "iproute2": return Iproute2Builder( build_options, ctx, self, src_dir, build_dir, inst_dir ) if builder == "cargo": return self.create_cargo_builder( build_options, ctx, src_dir, build_dir, inst_dir, loader ) raise KeyError("project %s has no known builder" % (self.name)) def create_prepare_builders( self, build_options, ctx, src_dir, build_dir, inst_dir, loader ): """Create builders that have a prepare step run, e.g. to write config files""" prepare_builders = [] builder = self.get_builder_name(ctx) cargo = self.get_section_as_dict("cargo", ctx) if not builder == "cargo" and cargo: cargo_builder = self.create_cargo_builder( build_options, ctx, src_dir, build_dir, inst_dir, loader ) prepare_builders.append(cargo_builder) return prepare_builders def create_cargo_builder( self, build_options, ctx, src_dir, build_dir, inst_dir, loader ): build_doc = self.get("cargo", "build_doc", False, ctx) workspace_dir = self.get("cargo", "workspace_dir", None, ctx) manifests_to_build = self.get("cargo", "manifests_to_build", None, ctx) cargo_config_file = self.get("cargo", "cargo_config_file", None, ctx) return CargoBuilder( build_options, ctx, self, src_dir, build_dir, inst_dir, build_doc, workspace_dir, manifests_to_build, loader, cargo_config_file, ) class ManifestContext(object): """ProjectContext contains a dictionary of values to use when evaluating boolean expressions in a project manifest. This object should be passed as the `ctx` parameter in ManifestParser.get() calls. 
""" ALLOWED_VARIABLES = { "os", "distro", "distro_vers", "fb", "fbsource", "test", "shared_libs", } def __init__(self, ctx_dict): assert set(ctx_dict.keys()) == self.ALLOWED_VARIABLES self.ctx_dict = ctx_dict def get(self, key): return self.ctx_dict[key] def set(self, key, value): assert key in self.ALLOWED_VARIABLES self.ctx_dict[key] = value def copy(self): return ManifestContext(dict(self.ctx_dict)) def __str__(self): s = ", ".join( "%s=%s" % (key, value) for key, value in sorted(self.ctx_dict.items()) ) return "{" + s + "}" class ContextGenerator(object): """ContextGenerator allows creating ManifestContext objects on a per-project basis. This allows us to evaluate different projects with slightly different contexts. For instance, this can be used to only enable tests for some projects.""" def __init__(self, default_ctx): self.default_ctx = ManifestContext(default_ctx) self.ctx_by_project = {} def set_value_for_project(self, project_name, key, value): project_ctx = self.ctx_by_project.get(project_name) if project_ctx is None: project_ctx = self.default_ctx.copy() self.ctx_by_project[project_name] = project_ctx project_ctx.set(key, value) def set_value_for_all_projects(self, key, value): self.default_ctx.set(key, value) for ctx in self.ctx_by_project.values(): ctx.set(key, value) def get_context(self, project_name): return self.ctx_by_project.get(project_name, self.default_ctx)
Python
hhvm/build/fbcode_builder/getdeps/platform.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os import platform import re import shlex import sys from typing import Optional, Tuple def is_windows() -> bool: """Returns true if the system we are currently running on is a Windows system""" return sys.platform.startswith("win") def get_linux_type() -> Tuple[Optional[str], Optional[str], Optional[str]]: try: with open("/etc/os-release") as f: data = f.read() except EnvironmentError: return (None, None, None) os_vars = {} for line in data.splitlines(): parts = line.split("=", 1) if len(parts) != 2: continue key = parts[0].strip() value_parts = shlex.split(parts[1].strip()) if not value_parts: value = "" else: value = value_parts[0] os_vars[key] = value name = os_vars.get("NAME") if name: name = name.lower() name = re.sub("linux", "", name) name = name.strip().replace(" ", "_") version_id = os_vars.get("VERSION_ID") if version_id: version_id = version_id.lower() return "linux", name, version_id # Ideally we'd use a common library like `psutil` to read system information, # but getdeps can't take third-party dependencies. def _get_available_ram_linux() -> int: # TODO: Ideally, this function would inspect the current cgroup for any # limits, rather than solely relying on system RAM. meminfo_path = "/proc/meminfo" try: with open(meminfo_path) as f: for line in f: try: key, value = line.split(":", 1) except ValueError: continue suffix = " kB\n" if key == "MemAvailable" and value.endswith(suffix): value = value[: -len(suffix)] try: return int(value) // 1024 except ValueError: continue except OSError: print("error opening {}".format(meminfo_path), end="", file=sys.stderr) else: print( "{} had no valid MemAvailable".format(meminfo_path), end="", file=sys.stderr ) guess = 8 print(", guessing {} GiB".format(guess), file=sys.stderr) return guess * 1024 def _get_available_ram_macos() -> int: import ctypes.util libc = ctypes.CDLL(ctypes.util.find_library("libc"), use_errno=True) sysctlbyname = libc.sysctlbyname sysctlbyname.restype = ctypes.c_int sysctlbyname.argtypes = [ ctypes.c_char_p, ctypes.c_void_p, ctypes.POINTER(ctypes.c_size_t), ctypes.c_void_p, ctypes.c_size_t, ] # TODO: There may be some way to approximate an availability # metric, but just use total RAM for now. memsize = ctypes.c_int64() memsizesize = ctypes.c_size_t(8) res = sysctlbyname( b"hw.memsize", ctypes.byref(memsize), ctypes.byref(memsizesize), None, 0 ) if res != 0: raise NotImplementedError( f"failed to retrieve hw.memsize sysctl: {ctypes.get_errno()}" ) return memsize.value // (1024 * 1024) def _get_available_ram_windows() -> int: import ctypes DWORD = ctypes.c_uint32 QWORD = ctypes.c_uint64 class MEMORYSTATUSEX(ctypes.Structure): _fields_ = [ ("dwLength", DWORD), ("dwMemoryLoad", DWORD), ("ullTotalPhys", QWORD), ("ullAvailPhys", QWORD), ("ullTotalPageFile", QWORD), ("ullAvailPageFile", QWORD), ("ullTotalVirtual", QWORD), ("ullAvailVirtual", QWORD), ("ullExtendedVirtual", QWORD), ] ms = MEMORYSTATUSEX() ms.dwLength = ctypes.sizeof(ms) # pyre-ignore[16] res = ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(ms)) if res == 0: raise NotImplementedError("error calling GlobalMemoryStatusEx") # This is fuzzy, but AvailPhys is too conservative, and AvailTotal is too # aggressive, so average the two. It's okay for builds to use some swap. 
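    # (i.e. the mean of ullAvailPhys and ullTotalPhys, expressed in MiB)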
    return (ms.ullAvailPhys + ms.ullTotalPhys) // (2 * 1024 * 1024)


def _get_available_ram_freebsd() -> int:
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library("libc"), use_errno=True)
    sysctlbyname = libc.sysctlbyname
    sysctlbyname.restype = ctypes.c_int
    sysctlbyname.argtypes = [
        ctypes.c_char_p,
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_size_t),
        ctypes.c_void_p,
        ctypes.c_size_t,
    ]
    # hw.usermem is pretty close to what we want.
    memsize = ctypes.c_int64()
    memsizesize = ctypes.c_size_t(8)
    res = sysctlbyname(
        b"hw.usermem", ctypes.byref(memsize), ctypes.byref(memsizesize), None, 0
    )
    if res != 0:
        raise NotImplementedError(
            f"failed to retrieve hw.usermem sysctl: {ctypes.get_errno()}"
        )
    return memsize.value // (1024 * 1024)


def get_available_ram() -> int:
    """
    Returns a platform-appropriate available RAM metric in MiB.
    """
    if sys.platform == "linux":
        return _get_available_ram_linux()
    elif sys.platform == "darwin":
        return _get_available_ram_macos()
    elif sys.platform == "win32":
        return _get_available_ram_windows()
    elif sys.platform.startswith("freebsd"):
        return _get_available_ram_freebsd()
    else:
        raise NotImplementedError(
            f"platform {sys.platform} does not have an implementation of get_available_ram"
        )


def is_current_host_arm() -> bool:
    if sys.platform.startswith("darwin"):
        # platform.machine() can be fooled by rosetta for python < 3.9.2
        return "ARM64" in os.uname().version
    else:
        machine = platform.machine().lower()
        return "arm" in machine or "aarch" in machine


class HostType(object):
    def __init__(self, ostype=None, distro=None, distrovers=None) -> None:
        # Maybe we should allow callers to indicate whether this machine uses
        # an ARM architecture, but we need to change HostType serialization
        # and deserialization in that case and hunt down anywhere that is
        # persisting that serialized data.
        isarm = False
        if ostype is None:
            distro = None
            distrovers = None
            if sys.platform.startswith("linux"):
                ostype, distro, distrovers = get_linux_type()
            elif sys.platform.startswith("darwin"):
                ostype = "darwin"
            elif is_windows():
                ostype = "windows"
                # pyre-fixme[16]: Module `sys` has no attribute `getwindowsversion`.
                distrovers = str(sys.getwindowsversion().major)
            elif sys.platform.startswith("freebsd"):
                ostype = "freebsd"
            else:
                ostype = sys.platform
            isarm = is_current_host_arm()

        # The operating system type
        self.ostype = ostype
        # The distribution, if applicable
        self.distro = distro
        # The OS/distro version if known
        self.distrovers = distrovers
        # Does the CPU use an ARM architecture? ARM includes Apple Silicon
        # Macs as well as other ARM systems that might be running Linux or
        # something.
        self.isarm = isarm

    def is_windows(self):
        return self.ostype == "windows"

    # is_arm is kinda half implemented at the moment. This method is only
    # intended to be used when HostType represents information about the
    # current machine we are running on.
    # When HostType is being used to enumerate platform types (represent
    # information about machine types that we may or may not be running on)
    # the result could be nonsense (under the current implementation it's
    # always false.)
def is_arm(self): return self.isarm def is_darwin(self): return self.ostype == "darwin" def is_linux(self): return self.ostype == "linux" def is_freebsd(self): return self.ostype == "freebsd" def as_tuple_string(self) -> str: return "%s-%s-%s" % ( self.ostype, self.distro or "none", self.distrovers or "none", ) def get_package_manager(self): if not self.is_linux() and not self.is_darwin(): return None if self.is_darwin(): return "homebrew" if self.distro in ("fedora", "centos", "centos_stream"): return "rpm" if self.distro.startswith(("debian", "ubuntu")): return "deb" return None @staticmethod def from_tuple_string(s) -> "HostType": ostype, distro, distrovers = s.split("-") return HostType(ostype=ostype, distro=distro, distrovers=distrovers) def __eq__(self, b): return ( self.ostype == b.ostype and self.distro == b.distro and self.distrovers == b.distrovers )
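
# Quick illustrative check of the current host; HostType() with no arguments
# probes the machine it runs on. get_available_ram() may raise
# NotImplementedError on platforms not handled above. This module has no
# relative imports, so it can be run directly.
if __name__ == "__main__":
    host = HostType()
    print("host:", host.as_tuple_string())
    print("package manager:", host.get_package_manager())
    print("available RAM (MiB):", get_available_ram())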
Python
hhvm/build/fbcode_builder/getdeps/py_wheel_builder.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import codecs import collections import email import os import re import stat from typing import Dict, List from .builder import BuilderBase, CMakeBuilder WheelNameInfo = collections.namedtuple( "WheelNameInfo", ("distribution", "version", "build", "python", "abi", "platform") ) CMAKE_HEADER = """ cmake_minimum_required(VERSION 3.8) project("{manifest_name}" LANGUAGES C) set(CMAKE_MODULE_PATH "{cmake_dir}" ${{CMAKE_MODULE_PATH}} ) include(FBPythonBinary) set(CMAKE_INSTALL_DIR lib/cmake/{manifest_name} CACHE STRING "The subdirectory where CMake package config files should be installed") """ CMAKE_FOOTER = """ install_fb_python_library({lib_name} EXPORT all) install( EXPORT all FILE {manifest_name}-targets.cmake NAMESPACE {namespace}:: DESTINATION ${{CMAKE_INSTALL_DIR}} ) include(CMakePackageConfigHelpers) configure_package_config_file( ${{CMAKE_BINARY_DIR}}/{manifest_name}-config.cmake.in {manifest_name}-config.cmake INSTALL_DESTINATION ${{CMAKE_INSTALL_DIR}} PATH_VARS CMAKE_INSTALL_DIR ) install( FILES ${{CMAKE_CURRENT_BINARY_DIR}}/{manifest_name}-config.cmake DESTINATION ${{CMAKE_INSTALL_DIR}} ) """ CMAKE_CONFIG_FILE = """ @PACKAGE_INIT@ include(CMakeFindDependencyMacro) set_and_check({upper_name}_CMAKE_DIR "@PACKAGE_CMAKE_INSTALL_DIR@") if (NOT TARGET {namespace}::{lib_name}) include("${{{upper_name}_CMAKE_DIR}}/{manifest_name}-targets.cmake") endif() set({upper_name}_LIBRARIES {namespace}::{lib_name}) {find_dependency_lines} if (NOT {manifest_name}_FIND_QUIETLY) message(STATUS "Found {manifest_name}: ${{PACKAGE_PREFIX_DIR}}") endif() """ # Note: for now we are manually manipulating the wheel packet contents. # The wheel format is documented here: # https://www.python.org/dev/peps/pep-0491/#file-format # # We currently aren't particularly smart about correctly handling the full wheel # functionality, but this is good enough to handle simple pure-python wheels, # which is the main thing we care about right now. # # We could potentially use pip to install the wheel to a temporary location and # then copy its "installed" files, but this has its own set of complications. # This would require pip to already be installed and available, and we would # need to correctly find the right version of pip or pip3 to use. # If we did ever want to go down that path, we would probably want to use # something like the following pip3 command: # pip3 --isolated install --no-cache-dir --no-index --system \ # --target <install_dir> <wheel_file> # pyre-fixme[13] fields initialized in _build class PythonWheelBuilder(BuilderBase): """This Builder can take Python wheel archives and install them as python libraries that can be used by add_fb_python_library()/add_fb_python_executable() CMake rules. """ dist_info_dir: str template_format_dict: Dict[str, str] def _build(self, install_dirs: List[str], reconfigure: bool) -> None: # When we are invoked, self.src_dir contains the unpacked wheel contents. # # Since a wheel file is just a zip file, the Fetcher code recognizes it as such # and goes ahead and unpacks it. (We could disable that Fetcher behavior in the # future if we ever wanted to, say if we wanted to call pip here.) 
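        # e.g. for a wheel named "six-1.16.0-py2.py3-none-any.whl" the
        # unpacked tree contains "six-1.16.0.dist-info" (metadata) and
        # possibly "six-1.16.0.data" next to the package sources.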
        wheel_name = self._parse_wheel_name()
        name_version_prefix = "-".join((wheel_name.distribution, wheel_name.version))
        dist_info_name = name_version_prefix + ".dist-info"
        data_dir_name = name_version_prefix + ".data"
        self.dist_info_dir = os.path.join(self.src_dir, dist_info_name)
        wheel_metadata = self._read_wheel_metadata(wheel_name)

        # Check that we can understand the wheel version.
        # We don't really care about wheel_metadata["Root-Is-Purelib"] since
        # we are generating our own standalone python archives rather than
        # installing into site-packages.
        version = wheel_metadata["Wheel-Version"]
        if not version.startswith("1."):
            raise Exception("unsupported wheel version %s" % (version,))

        # Add a find_dependency() call for each of our dependencies.
        # The dependencies are also listed in the wheel METADATA file, but it is
        # simpler to pull this directly from the getdeps manifest.
        dep_list = sorted(
            self.manifest.get_section_as_dict("dependencies", self.ctx).keys()
        )
        find_dependency_lines = ["find_dependency({})".format(dep) for dep in dep_list]

        getdeps_cmake_dir = os.path.join(
            os.path.dirname(os.path.dirname(__file__)), "CMake"
        )
        self.template_format_dict = {
            # Note that CMake files always use forward slash separators in path
            # names, even on Windows. Therefore replace path separators here.
            "cmake_dir": _to_cmake_path(getdeps_cmake_dir),
            "lib_name": self.manifest.name,
            "manifest_name": self.manifest.name,
            "namespace": self.manifest.name,
            "upper_name": self.manifest.name.upper().replace("-", "_"),
            "find_dependency_lines": "\n".join(find_dependency_lines),
        }

        # Find sources from the root directory
        path_mapping = {}
        for entry in os.listdir(self.src_dir):
            if entry in (dist_info_name, data_dir_name):
                continue
            self._add_sources(path_mapping, os.path.join(self.src_dir, entry), entry)

        # Files under the .data directory also need to be installed in the
        # correct locations. The .data directory lives inside src_dir, so the
        # existence check is anchored there rather than at the current working
        # directory.
        if os.path.exists(os.path.join(self.src_dir, data_dir_name)):
            # TODO: process the subdirectories of data_dir_name
            # This isn't implemented yet since for now we have only needed
            # dependencies on some simple pure Python wheels, so I haven't tested
            # against wheels with additional files in the .data directory.
            raise Exception(
                "handling of the subdirectories inside %s is not implemented yet"
                % data_dir_name
            )

        # Emit CMake files
        self._write_cmakelists(path_mapping, dep_list)
        self._write_cmake_config_template()

        # Run the build
        self._run_cmake_build(install_dirs, reconfigure)

    def _run_cmake_build(self, install_dirs: List[str], reconfigure: bool) -> None:
        cmake_builder = CMakeBuilder(
            build_opts=self.build_opts,
            ctx=self.ctx,
            manifest=self.manifest,
            # Note that we intentionally supply src_dir=build_dir,
            # since we wrote out our generated CMakeLists.txt in the build directory
            src_dir=self.build_dir,
            build_dir=self.build_dir,
            inst_dir=self.inst_dir,
            loader=None,
            defines={},
            final_install_prefix=None,
        )
        cmake_builder.build(install_dirs=install_dirs, reconfigure=reconfigure)

    def _write_cmakelists(self, path_mapping: Dict[str, str], dependencies) -> None:
        cmake_path = os.path.join(self.build_dir, "CMakeLists.txt")
        with open(cmake_path, "w") as f:
            f.write(CMAKE_HEADER.format(**self.template_format_dict))

            for dep in dependencies:
                f.write("find_package({0} REQUIRED)\n".format(dep))

            f.write(
                "add_fb_python_library({lib_name}\n".format(**self.template_format_dict)
            )
            f.write('  BASE_DIR "%s"\n' % _to_cmake_path(self.src_dir))
            f.write("  SOURCES\n")
            for src_path, install_path in path_mapping.items():
                f.write(
                    '    "%s=%s"\n'
                    % (_to_cmake_path(src_path), _to_cmake_path(install_path))
                )
            if dependencies:
                f.write("  DEPENDS\n")
                for dep in dependencies:
                    f.write('    "{0}::{0}"\n'.format(dep))
            f.write(")\n")

            f.write(CMAKE_FOOTER.format(**self.template_format_dict))

    def _write_cmake_config_template(self) -> None:
        config_path_name = self.manifest.name + "-config.cmake.in"
        output_path = os.path.join(self.build_dir, config_path_name)

        with open(output_path, "w") as f:
            f.write(CMAKE_CONFIG_FILE.format(**self.template_format_dict))

    def _add_sources(
        self, path_mapping: Dict[str, str], src_path: str, install_path: str
    ) -> None:
        s = os.lstat(src_path)
        if not stat.S_ISDIR(s.st_mode):
            path_mapping[src_path] = install_path
            return

        for entry in os.listdir(src_path):
            self._add_sources(
                path_mapping,
                os.path.join(src_path, entry),
                os.path.join(install_path, entry),
            )

    def _parse_wheel_name(self) -> WheelNameInfo:
        # The ArchiveFetcher prepends "manifest_name-", so strip that off first.
        wheel_name = os.path.basename(self.src_dir)
        prefix = self.manifest.name + "-"
        if not wheel_name.startswith(prefix):
            raise Exception(
                "expected wheel source directory to be of the form %s-NAME.whl"
                % (prefix,)
            )
        wheel_name = wheel_name[len(prefix) :]

        wheel_name_re = re.compile(
            r"(?P<distribution>[^-]+)"
            r"-(?P<version>\d+[^-]*)"
            r"(-(?P<build>\d+[^-]*))?"
            r"-(?P<python>\w+\d+(\.\w+\d+)*)"
            r"-(?P<abi>\w+)"
            r"-(?P<platform>\w+(\.\w+)*)"
            r"\.whl"
        )
        match = wheel_name_re.match(wheel_name)
        if not match:
            raise Exception(
                "bad python wheel name %s: expected to have the form "
                "DISTRIBUTION-VERSION[-BUILD]-PYTAG-ABI-PLATFORM" % (wheel_name,)
            )

        return WheelNameInfo(
            distribution=match.group("distribution"),
            version=match.group("version"),
            build=match.group("build"),
            python=match.group("python"),
            abi=match.group("abi"),
            platform=match.group("platform"),
        )

    def _read_wheel_metadata(self, wheel_name):
        metadata_path = os.path.join(self.dist_info_dir, "WHEEL")
        with codecs.open(metadata_path, "r", encoding="utf-8") as f:
            return email.message_from_file(f)


def _to_cmake_path(path):
    # CMake always uses forward slashes to separate paths in CMakeLists.txt files,
    # even on Windows. It treats backslashes as character escapes, so using
    # backslashes in the path will cause problems.
    # Therefore replace all path separators with forward slashes to make sure
    # the paths are correct on Windows.
    # e.g. "C:\foo\bar.txt" becomes "C:/foo/bar.txt"
    return path.replace(os.path.sep, "/")
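# A hedged, self-contained sketch of the filename parsing performed by
# _parse_wheel_name() above. The regex is copied from that method; the sample
# wheel filename is an illustrative assumption, not something this builder
# ships. This demo is not part of the original file.
if __name__ == "__main__":
    _demo_re = re.compile(
        r"(?P<distribution>[^-]+)"
        r"-(?P<version>\d+[^-]*)"
        r"(-(?P<build>\d+[^-]*))?"
        r"-(?P<python>\w+\d+(\.\w+\d+)*)"
        r"-(?P<abi>\w+)"
        r"-(?P<platform>\w+(\.\w+)*)"
        r"\.whl"
    )
    m = _demo_re.match("six-1.16.0-py2.py3-none-any.whl")
    assert m is not None
    # Pure-python wheels typically report abi "none" and platform "any"
    assert m.group("version") == "1.16.0"
    assert m.group("python") == "py2.py3"
    assert m.group("abi") == "none" and m.group("platform") == "any"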
Python
hhvm/build/fbcode_builder/getdeps/runcmd.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os import select import subprocess import sys from .envfuncs import Env from .platform import is_windows try: from shlex import quote as shellquote except ImportError: from pipes import quote as shellquote class RunCommandError(Exception): pass def _print_env_diff(env, log_fn) -> None: current_keys = set(os.environ.keys()) wanted_env = set(env.keys()) unset_keys = current_keys.difference(wanted_env) for k in sorted(unset_keys): log_fn("+ unset %s\n" % k) added_keys = wanted_env.difference(current_keys) for k in wanted_env.intersection(current_keys): if os.environ[k] != env[k]: added_keys.add(k) for k in sorted(added_keys): if ("PATH" in k) and (os.pathsep in env[k]): log_fn("+ %s=\\\n" % k) for elem in env[k].split(os.pathsep): log_fn("+ %s%s\\\n" % (shellquote(elem), os.pathsep)) else: log_fn("+ %s=%s \\\n" % (k, shellquote(env[k]))) def run_cmd(cmd, env=None, cwd=None, allow_fail: bool = False, log_file=None) -> int: def log_to_stdout(msg): sys.stdout.buffer.write(msg.encode(errors="surrogateescape")) if log_file is not None: with open(log_file, "a", encoding="utf-8", errors="surrogateescape") as log: def log_function(msg): log.write(msg) log_to_stdout(msg) return _run_cmd( cmd, env=env, cwd=cwd, allow_fail=allow_fail, log_fn=log_function ) else: return _run_cmd( cmd, env=env, cwd=cwd, allow_fail=allow_fail, log_fn=log_to_stdout ) def _run_cmd(cmd, env, cwd, allow_fail, log_fn) -> int: log_fn("---\n") try: cmd_str = " \\\n+ ".join(shellquote(arg) for arg in cmd) except TypeError: # eg: one of the elements is None raise RunCommandError("problem quoting cmd: %r" % cmd) if env: assert isinstance(env, Env) _print_env_diff(env, log_fn) # Convert from our Env type to a regular dict. # This is needed because python3 looks up b'PATH' and 'PATH' # and emits an error if both are present. In our Env type # we'll return the same value for both requests, but we don't # have duplicate potentially conflicting values which is the # spirit of the check. env = dict(env.items()) if cwd: log_fn("+ cd %s && \\\n" % shellquote(cwd)) # Our long path escape sequence may confuse cmd.exe, so if the cwd # is short enough, strip that off. 
if is_windows() and (len(cwd) < 250) and cwd.startswith("\\\\?\\"): cwd = cwd[4:] log_fn("+ %s\n" % cmd_str) isinteractive = os.isatty(sys.stdout.fileno()) if isinteractive: stdout = None sys.stdout.buffer.flush() else: stdout = subprocess.PIPE try: p = subprocess.Popen( cmd, env=env, cwd=cwd, stdout=stdout, stderr=subprocess.STDOUT ) except (TypeError, ValueError, OSError) as exc: log_fn("error running `%s`: %s" % (cmd_str, exc)) raise RunCommandError( "%s while running `%s` with env=%r\nos.environ=%r" % (str(exc), cmd_str, env, os.environ) ) if not isinteractive: _pipe_output(p, log_fn) p.wait() if p.returncode != 0 and not allow_fail: raise subprocess.CalledProcessError(p.returncode, cmd) return p.returncode if hasattr(select, "poll"): def _pipe_output(p, log_fn): """Read output from p.stdout and call log_fn() with each chunk of data as it becomes available.""" # Perform non-blocking reads import fcntl fcntl.fcntl(p.stdout.fileno(), fcntl.F_SETFL, os.O_NONBLOCK) poll = select.poll() poll.register(p.stdout.fileno(), select.POLLIN) buffer_size = 4096 while True: poll.poll() data = p.stdout.read(buffer_size) if not data: break # log_fn() accepts arguments as str (binary in Python 2, unicode in # Python 3). In Python 3 the subprocess output will be plain bytes, # and need to be decoded. if not isinstance(data, str): data = data.decode("utf-8", errors="surrogateescape") log_fn(data) else: def _pipe_output(p, log_fn): """Read output from p.stdout and call log_fn() with each chunk of data as it becomes available.""" # Perform blocking reads. Use a smaller buffer size to avoid blocking # for very long when data is available. buffer_size = 64 while True: data = p.stdout.read(buffer_size) if not data: break # log_fn() accepts arguments as str (binary in Python 2, unicode in # Python 3). In Python 3 the subprocess output will be plain bytes, # and need to be decoded. if not isinstance(data, str): data = data.decode("utf-8", errors="surrogateescape") log_fn(data)
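# A minimal usage sketch for run_cmd() above; this demo is not part of the
# original file, and the command and log path are illustrative assumptions.
if __name__ == "__main__":
    status = run_cmd(
        ["git", "--version"],  # any simple command works here
        allow_fail=True,  # return the exit status instead of raising
        log_file="/tmp/getdeps-demo.log",  # output is both logged and echoed
    )
    print("exit status: %d" % status)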
Python
hhvm/build/fbcode_builder/getdeps/subcmd.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.


class SubCmd(object):
    NAME = None
    HELP = None

    def run(self, args) -> int:
        """perform the command"""
        return 0

    def setup_parser(self, parser) -> None:
        # Subclasses should override setup_parser() if they have any
        # command line options or arguments.
        pass


CmdTable = []


def add_subcommands(parser, common_args, cmd_table=CmdTable) -> None:
    """Register parsers for the defined commands with the provided parser"""
    for cls in cmd_table:
        command = cls()
        command_parser = parser.add_parser(
            command.NAME, help=command.HELP, parents=[common_args]
        )
        command.setup_parser(command_parser)
        command_parser.set_defaults(func=command.run)


def cmd(name, help=None, cmd_table=CmdTable):
    """
    @cmd() is a decorator that can be used to help define SubCmd instances

    Example usage:

        @cmd("list", "Show the result list")
        class ListCmd(SubCmd):
            def run(self, args):
                # Perform the command actions here...
                pass
    """

    def wrapper(cls):
        class SubclassedCmd(cls):
            NAME = name
            HELP = help

        cmd_table.append(SubclassedCmd)
        return SubclassedCmd

    return wrapper
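# A hedged sketch of wiring the helpers above into argparse; this demo is not
# part of the original file, and the "fetch" command name and its argument are
# illustrative assumptions.
if __name__ == "__main__":
    import argparse

    @cmd("fetch", "Fetch the sources for a project")
    class FetchCmd(SubCmd):
        def setup_parser(self, parser) -> None:
            parser.add_argument("project")

        def run(self, args) -> int:
            print("fetching %s" % args.project)
            return 0

    common_args = argparse.ArgumentParser(add_help=False)
    ap = argparse.ArgumentParser()
    add_subcommands(ap.add_subparsers(), common_args)
    args = ap.parse_args(["fetch", "folly"])
    args.func(args)  # prints "fetching folly" and returns 0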
Python
hhvm/build/fbcode_builder/getdeps/test/expr_test.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import unittest from ..expr import parse_expr class ExprTest(unittest.TestCase): def test_equal(self) -> None: valid_variables = {"foo", "some_var", "another_var"} e = parse_expr("foo=bar", valid_variables) self.assertTrue(e.eval({"foo": "bar"})) self.assertFalse(e.eval({"foo": "not-bar"})) self.assertFalse(e.eval({"not-foo": "bar"})) def test_not_equal(self) -> None: valid_variables = {"foo"} e = parse_expr("not(foo=bar)", valid_variables) self.assertFalse(e.eval({"foo": "bar"})) self.assertTrue(e.eval({"foo": "not-bar"})) def test_bad_not(self) -> None: valid_variables = {"foo"} with self.assertRaises(Exception): parse_expr("foo=not(bar)", valid_variables) def test_bad_variable(self) -> None: valid_variables = {"bar"} with self.assertRaises(Exception): parse_expr("foo=bar", valid_variables) def test_all(self) -> None: valid_variables = {"foo", "baz"} e = parse_expr("all(foo = bar, baz = qux)", valid_variables) self.assertTrue(e.eval({"foo": "bar", "baz": "qux"})) self.assertFalse(e.eval({"foo": "bar", "baz": "nope"})) self.assertFalse(e.eval({"foo": "nope", "baz": "nope"})) def test_any(self) -> None: valid_variables = {"foo", "baz"} e = parse_expr("any(foo = bar, baz = qux)", valid_variables) self.assertTrue(e.eval({"foo": "bar", "baz": "qux"})) self.assertTrue(e.eval({"foo": "bar", "baz": "nope"})) self.assertFalse(e.eval({"foo": "nope", "baz": "nope"}))
Python
hhvm/build/fbcode_builder/getdeps/test/manifest_test.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import sys import unittest from ..load import load_all_manifests, patch_loader from ..manifest import ManifestParser class ManifestTest(unittest.TestCase): def test_missing_section(self) -> None: with self.assertRaisesRegex( Exception, "manifest file test is missing required section manifest" ): ManifestParser("test", "") def test_missing_name(self) -> None: with self.assertRaisesRegex( Exception, "manifest file test section 'manifest' is missing required field 'name'", ): ManifestParser( "test", """ [manifest] """, ) def test_minimal(self) -> None: p = ManifestParser( "test", """ [manifest] name = test """, ) self.assertEqual(p.name, "test") self.assertEqual(p.fbsource_path, None) def test_minimal_with_fbsource_path(self) -> None: p = ManifestParser( "test", """ [manifest] name = test fbsource_path = fbcode/wat """, ) self.assertEqual(p.name, "test") self.assertEqual(p.fbsource_path, "fbcode/wat") def test_unknown_field(self) -> None: with self.assertRaisesRegex( Exception, ( "manifest file test section 'manifest' contains " "unknown field 'invalid.field'" ), ): ManifestParser( "test", """ [manifest] name = test invalid.field = woot """, ) def test_invalid_section_name(self) -> None: with self.assertRaisesRegex( Exception, "manifest file test contains unknown section 'invalid.section'" ): ManifestParser( "test", """ [manifest] name = test [invalid.section] foo = bar """, ) def test_value_in_dependencies_section(self) -> None: with self.assertRaisesRegex( Exception, ( "manifest file test section 'dependencies' has " "'foo = bar' but this section doesn't allow " "specifying values for its entries" ), ): ManifestParser( "test", """ [manifest] name = test [dependencies] foo = bar """, ) def test_invalid_conditional_section_name(self) -> None: with self.assertRaisesRegex( Exception, ( "manifest file test section 'dependencies.=' " "has invalid conditional: expected " "identifier found =" ), ): ManifestParser( "test", """ [manifest] name = test [dependencies.=] """, ) def test_section_as_args(self) -> None: p = ManifestParser( "test", """ [manifest] name = test [dependencies] a b c [dependencies.test=on] foo """, ) self.assertEqual(p.get_section_as_args("dependencies"), ["a", "b", "c"]) self.assertEqual( p.get_section_as_args("dependencies", {"test": "off"}), ["a", "b", "c"] ) self.assertEqual( p.get_section_as_args("dependencies", {"test": "on"}), ["a", "b", "c", "foo"], ) p2 = ManifestParser( "test", """ [manifest] name = test [autoconf.args] --prefix=/foo --with-woot """, ) self.assertEqual( p2.get_section_as_args("autoconf.args"), ["--prefix=/foo", "--with-woot"] ) def test_section_as_dict(self) -> None: p = ManifestParser( "test", """ [manifest] name = test [cmake.defines] foo = bar [cmake.defines.test=on] foo = baz """, ) self.assertEqual(p.get_section_as_dict("cmake.defines", {}), {"foo": "bar"}) self.assertEqual( p.get_section_as_dict("cmake.defines", {"test": "on"}), {"foo": "baz"} ) p2 = ManifestParser( "test", """ [manifest] name = test [cmake.defines.test=on] foo = baz [cmake.defines] foo = bar """, ) self.assertEqual( p2.get_section_as_dict("cmake.defines", {"test": "on"}), {"foo": "bar"}, msg="sections cascade in the order they appear in the manifest", ) def test_parse_common_manifests(self) -> None: patch_loader(__name__) manifests = load_all_manifests(None) self.assertNotEqual(0, len(manifests), 
msg="parsed some number of manifests") def test_mismatch_name(self) -> None: with self.assertRaisesRegex( Exception, "filename of the manifest 'foo' does not match the manifest name 'bar'", ): ManifestParser( "foo", """ [manifest] name = bar """, ) def test_duplicate_manifest(self) -> None: patch_loader(__name__, "fixtures/duplicate") with self.assertRaisesRegex(Exception, "found duplicate manifest 'foo'"): load_all_manifests(None) if sys.version_info < (3, 2): def assertRaisesRegex(self, *args, **kwargs): return self.assertRaisesRegexp(*args, **kwargs)
Python
hhvm/build/fbcode_builder/getdeps/test/platform_test.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import unittest from ..platform import HostType class PlatformTest(unittest.TestCase): def test_create(self) -> None: p = HostType() self.assertNotEqual(p.ostype, None, msg="probed and returned something") tuple_string = p.as_tuple_string() round_trip = HostType.from_tuple_string(tuple_string) self.assertEqual(round_trip, p) def test_rendering_of_none(self) -> None: p = HostType(ostype="foo") self.assertEqual(p.as_tuple_string(), "foo-none-none") def test_is_methods(self) -> None: p = HostType(ostype="windows") self.assertTrue(p.is_windows()) self.assertFalse(p.is_darwin()) self.assertFalse(p.is_linux()) p = HostType(ostype="darwin") self.assertFalse(p.is_windows()) self.assertTrue(p.is_darwin()) self.assertFalse(p.is_linux()) p = HostType(ostype="linux") self.assertFalse(p.is_windows()) self.assertFalse(p.is_darwin()) self.assertTrue(p.is_linux())
Python
hhvm/build/fbcode_builder/getdeps/test/scratch_test.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import unittest from ..buildopts import find_existing_win32_subst_for_path class Win32SubstTest(unittest.TestCase): def test_no_existing_subst(self) -> None: self.assertIsNone( find_existing_win32_subst_for_path( r"C:\users\alice\appdata\local\temp\fbcode_builder_getdeps", subst_mapping={}, ) ) self.assertIsNone( find_existing_win32_subst_for_path( r"C:\users\alice\appdata\local\temp\fbcode_builder_getdeps", subst_mapping={"X:\\": r"C:\users\alice\appdata\local\temp\other"}, ) ) def test_exact_match_returns_drive_path(self) -> None: self.assertEqual( find_existing_win32_subst_for_path( r"C:\temp\fbcode_builder_getdeps", subst_mapping={"X:\\": r"C:\temp\fbcode_builder_getdeps"}, ), "X:\\", ) self.assertEqual( find_existing_win32_subst_for_path( r"C:/temp/fbcode_builder_getdeps", subst_mapping={"X:\\": r"C:/temp/fbcode_builder_getdeps"}, ), "X:\\", ) def test_multiple_exact_matches_returns_arbitrary_drive_path(self) -> None: self.assertIn( find_existing_win32_subst_for_path( r"C:\temp\fbcode_builder_getdeps", subst_mapping={ "X:\\": r"C:\temp\fbcode_builder_getdeps", "Y:\\": r"C:\temp\fbcode_builder_getdeps", "Z:\\": r"C:\temp\fbcode_builder_getdeps", }, ), ("X:\\", "Y:\\", "Z:\\"), ) def test_drive_letter_is_case_insensitive(self) -> None: self.assertEqual( find_existing_win32_subst_for_path( r"C:\temp\fbcode_builder_getdeps", subst_mapping={"X:\\": r"c:\temp\fbcode_builder_getdeps"}, ), "X:\\", ) def test_path_components_are_case_insensitive(self) -> None: self.assertEqual( find_existing_win32_subst_for_path( r"C:\TEMP\FBCODE_builder_getdeps", subst_mapping={"X:\\": r"C:\temp\fbcode_builder_getdeps"}, ), "X:\\", ) self.assertEqual( find_existing_win32_subst_for_path( r"C:\temp\fbcode_builder_getdeps", subst_mapping={"X:\\": r"C:\TEMP\FBCODE_builder_getdeps"}, ), "X:\\", )
hhvm/build/fbcode_builder/manifests/airstore
[manifest] name = airstore fbsource_path = fbcode/fair_infra/data/airstore/ shipit_project = AIRStore shipit_fbcode_builder = true [git] repo_url = https://github.com/fairinternal/AIRStore.git [build.os=linux] builder = cmake [build.not(os=linux)] # We only support Linux builder = nop [dependencies] boost libcurl fizz fmt folly googletest libsodium libevent double-conversion proxygen wangle zstd zlib xz [shipit.pathmap] fbcode/fair_infra/data/airstore = . fbcode/deeplearning/projects/fairstore/cpp = deeplearning/projects/fairstore/cpp fbcode/proxygen/lib/utils = proxygen/lib/utils [shipit.strip]
hhvm/build/fbcode_builder/manifests/autoconf
[manifest] name = autoconf [debs] autoconf [homebrew] autoconf [rpms] autoconf [download] url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz sha256 = 954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969 [build] builder = autoconf subdir = autoconf-2.69
hhvm/build/fbcode_builder/manifests/automake
[manifest] name = automake [homebrew] automake [debs] automake [rpms] automake [download] url = http://ftp.gnu.org/gnu/automake/automake-1.16.1.tar.gz sha256 = 608a97523f97db32f1f5d5615c98ca69326ced2054c9f82e65bade7fc4c9dea8 [build] builder = autoconf subdir = automake-1.16.1 [dependencies] autoconf
hhvm/build/fbcode_builder/manifests/benchmark
[manifest] name = benchmark [download] url = https://github.com/google/benchmark/archive/refs/tags/v1.8.0.tar.gz sha256 = ea2e94c24ddf6594d15c711c06ccd4486434d9cf3eca954e2af8a20c88f9f172 [build] builder = cmake subdir = benchmark-1.8.0/ [cmake.defines] BENCHMARK_ENABLE_TESTING=OFF
hhvm/build/fbcode_builder/manifests/blake3
[manifest] name = blake3 [download] url = https://github.com/BLAKE3-team/BLAKE3/archive/refs/tags/1.3.3.tar.gz sha256 = 27d2bc4ee5945ba75434859521042c949463ee7514ff17aaef328e23ef83fec0 [build] builder = cmake subdir = BLAKE3-1.3.3/c patchfile = blake3_CMakeLists_txt.patch
hhvm/build/fbcode_builder/manifests/boost
[manifest] name = boost [download.not(os=windows)] url = https://boostorg.jfrog.io/artifactory/main/release/1.78.0/source/boost_1_78_0.tar.gz sha256 = 94ced8b72956591c4775ae2207a9763d3600b30d9d7446562c552f0a14a63be7 [download.os=windows] url = https://boostorg.jfrog.io/artifactory/main/release/1.78.0/source/boost_1_78_0.zip sha256 = f22143b5528e081123c3c5ed437e92f648fe69748e95fa6e2bd41484e2986cc3 [preinstalled.env] # Here we list the acceptable versions that cmake needs a hint to find BOOST_ROOT_1_69_0 BOOST_ROOT_1_78_0 [debs] libboost-all-dev [homebrew] boost # Boost cmake detection on homebrew adds this as requirement: https://github.com/Homebrew/homebrew-core/issues/67427#issuecomment-754187345 icu4c [rpms.all(distro=centos_stream,distro_vers=8)] boost169 boost169-math boost169-test boost169-fiber boost169-graph boost169-log boost169-openmpi boost169-timer boost169-chrono boost169-locale boost169-thread boost169-atomic boost169-random boost169-static boost169-contract boost169-date-time boost169-iostreams boost169-container boost169-coroutine boost169-filesystem boost169-system boost169-stacktrace boost169-regex boost169-devel boost169-context boost169-python3-devel boost169-type_erasure boost169-wave boost169-python3 boost169-serialization boost169-program-options [rpms.not(all(distro=centos_stream,distro_vers=8))] boost-devel boost-static [build] builder = boost job_weight_mib = 512 [b2.args] --with-atomic --with-chrono --with-container --with-context --with-contract --with-coroutine --with-date_time --with-exception --with-fiber --with-filesystem --with-graph --with-graph_parallel --with-iostreams --with-locale --with-log --with-math --with-mpi --with-program_options --with-python --with-random --with-regex --with-serialization --with-stacktrace --with-system --with-test --with-thread --with-timer --with-type_erasure [bootstrap.args.os=darwin] # Not really gcc, but CI puts a broken clang in the PATH, and saying gcc # here selects the correct one from Xcode. --with-toolset=gcc [b2.args.os=linux] # RHEL hardened gcc is not compatible with PCH # https://bugzilla.redhat.com/show_bug.cgi?id=1806545 pch=off [b2.args.os=darwin] toolset=clang [b2.args.all(os=windows,fb=on)] toolset=msvc-14.2
hhvm/build/fbcode_builder/manifests/bz2
[manifest] name = bz2 [debs] libbz2-dev [homebrew] bzip2 [rpms] bzip2-devel [download] url = https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz sha256 = ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269 [build.not(os=windows)] builder = make subdir = bzip2-1.0.8 [make.build_args.os=linux] # python bz2 support on linux needs dynamic library -f Makefile-libbz2_so [make.install_args] install [build.os=windows] builder = nop
hhvm/build/fbcode_builder/manifests/CLI11
[manifest] name = CLI11 [download] url = https://github.com/CLIUtils/CLI11/archive/v2.0.0.tar.gz sha256 = 2c672f17bf56e8e6223a3bfb74055a946fa7b1ff376510371902adb9cb0ab6a3 [build] builder = cmake subdir = CLI11-2.0.0 [cmake.defines] CLI11_BUILD_TESTS = OFF CLI11_BUILD_EXAMPLES = OFF
hhvm/build/fbcode_builder/manifests/cmake
[manifest] name = cmake [homebrew] cmake # 18.04 cmake is too old [debs.not(all(distro=ubuntu,distro_vers="18.04"))] cmake [rpms] cmake [dependencies] ninja [download.os=windows] url = https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2-windows-x86_64.zip sha256 = 15a49e2ab81c1822d75b1b1a92f7863f58e31f6d6aac1c4103eef2b071be3112 [download.os=darwin] url = https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2-macos-universal.tar.gz sha256 = 0100663380a3bd977b001183cd487412db7aad9de6859927bde97e1e6e44e645 [download.any(os=linux,os=freebsd)] url = https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2.tar.gz sha256 = aecf6ecb975179eb3bb6a4a50cae192d41e92b9372b02300f9e8f1d5f559544e [build.os=windows] builder = nop subdir = cmake-3.20.2-windows-x86_64 [build.os=darwin] builder = nop subdir = cmake-3.20.2-macos-universal [install.files.os=darwin] CMake.app/Contents/bin = bin CMake.app/Contents/share = share [build.any(os=linux,os=freebsd)] builder = cmakebootstrap subdir = cmake-3.20.2 [make.install_args.any(os=linux,os=freebsd)] install
hhvm/build/fbcode_builder/manifests/cpptoml
[manifest] name = cpptoml [homebrew] cpptoml [download] url = https://github.com/chadaustin/cpptoml/archive/refs/tags/v0.1.2.tar.gz sha256 = beda37e94f9746874436c8090c045fd80ae6f8a51f7c668c932a2b110a4fc277 [build] builder = cmake subdir = cpptoml-0.1.2 [cmake.defines.os=freebsd] ENABLE_LIBCXX=NO
hhvm/build/fbcode_builder/manifests/date
[manifest] name = date [download] url = https://github.com/HowardHinnant/date/archive/refs/tags/v3.0.1.tar.gz sha256 = 7a390f200f0ccd207e8cff6757e04817c1a0aec3e327b006b7eb451c57ee3538 [build] builder = cmake subdir = date-3.0.1
hhvm/build/fbcode_builder/manifests/delos_core
[manifest] name = delos_core fbsource_path = fbcode/delos_core shipit_project = delos_core shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookincubator/delos_core.git [build.os=linux] builder = cmake [build.not(os=linux)] builder = nop [dependencies] glog googletest folly fbthrift fb303 re2 [shipit.pathmap] fbcode/delos_core = .
hhvm/build/fbcode_builder/manifests/double-conversion
[manifest] name = double-conversion [download] url = https://github.com/google/double-conversion/archive/v3.1.4.tar.gz sha256 = 95004b65e43fefc6100f337a25da27bb99b9ef8d4071a36a33b5e83eb1f82021 [homebrew] double-conversion [debs] libdouble-conversion-dev [rpms] double-conversion double-conversion-devel [build] builder = cmake subdir = double-conversion-3.1.4
hhvm/build/fbcode_builder/manifests/eden
[manifest] name = eden fbsource_path = fbcode/eden shipit_project = eden shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookexperimental/eden.git [github.actions] run_tests = off [sandcastle] run_tests = off [build] builder = cmake [dependencies] blake3 googletest folly fbthrift fb303 cpptoml rocksdb re2 libgit2 pexpect python-toml python-filelock edencommon [dependencies.fbsource=on] rust # macOS ships with sqlite3, and some of the core system # frameworks require that that version be linked rather # than the one we might build for ourselves here, so we # skip building it on macos. [dependencies.not(os=darwin)] sqlite3 [dependencies.os=darwin] osxfuse [dependencies.not(os=windows)] # TODO: teach getdeps to compile curl on Windows. # Enabling curl on Windows requires us to find a way to compile libcurl with # msvc. libcurl # Added so that OSS doesn't see system "python" which is python 2 on darwin and some linux python [shipit.pathmap.fb=on] # for internal builds that use getdeps fbcode/fb303 = fb303 fbcode/common/rust/fbwhoami = common/rust/fbwhoami fbcode/common/rust/shed = common/rust/shed fbcode/thrift/lib/rust = thrift/lib/rust [shipit.pathmap] # Map hostcaps for now as eden C++ includes its .h. Rust-shed should install it fbcode/common/rust/shed/hostcaps = common/rust/shed/hostcaps fbcode/configerator/structs/scm/hg = configerator/structs/scm/hg fbcode/eden/oss = . fbcode/eden = eden fbcode/tools/lfs = tools/lfs [shipit.pathmap.fb=off] fbcode/eden/fs/public_autocargo = eden/fs fbcode/eden/scm/public_autocargo = eden/scm fbcode/common/rust/shed/hostcaps/public_cargo = common/rust/shed/hostcaps fbcode/configerator/structs/scm/hg/public_autocargo = configerator/structs/scm/hg [shipit.strip] ^fbcode/eden/addons/.*$ ^fbcode/eden/fs/eden-config\.h$ ^fbcode/eden/fs/py/eden/config\.py$ ^fbcode/eden/hg-server/.*$ ^fbcode/eden/mononoke/(?!lfs_protocol) ^fbcode/eden/scm/build/.*$ ^fbcode/eden/scm/lib/third-party/rust/.*/Cargo.toml$ ^fbcode/eden/website/.*$ ^fbcode/eden/.*/\.cargo/.*$ /Cargo\.lock$ \.pyc$ [shipit.strip.fb=off] ^fbcode/common/rust/shed(?!/public_autocargo).*/Cargo\.toml$ ^fbcode/configerator/structs/scm/hg(?!/public_autocargo).*/Cargo\.toml$ ^fbcode/eden/fs(?!/public_autocargo).*/Cargo\.toml$ ^fbcode/eden/scm(?!/public_autocargo|/edenscmnative).*/Cargo\.toml$ ^.*/facebook/.*$ ^.*/fb/.*$ [cmake.defines.all(fb=on,os=windows)] ENABLE_GIT=OFF INSTALL_PYTHON_LIB=ON [cmake.defines.all(not(fb=on),os=windows)] ENABLE_GIT=OFF [cmake.defines.fbsource=on] USE_CARGO_VENDOR=ON [cmake.defines.fb=on] IS_FB_BUILD=ON [depends.environment] EDEN_VERSION_OVERRIDE
hhvm/build/fbcode_builder/manifests/edencommon
[manifest] name = edencommon fbsource_path = fbcode/eden/common shipit_project = edencommon shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookexperimental/edencommon.git [build] builder = cmake [dependencies] fmt folly gflags glog [cmake.defines.test=on] BUILD_TESTS=ON [cmake.defines.test=off] BUILD_TESTS=OFF [shipit.pathmap] fbcode/eden/common = eden/common fbcode/eden/common/oss = . [shipit.strip] @README.facebook@
hhvm/build/fbcode_builder/manifests/exprtk
[manifest] name = exprtk [download] url = https://github.com/ArashPartow/exprtk/archive/refs/tags/0.0.1.tar.gz sha256 = fb72791c88ae3b3426e14fdad630027715682584daf56b973569718c56e33f28 [build.not(os=windows)] builder = nop subdir = exprtk-0.0.1 [install.files] exprtk.hpp = exprtk.hpp [dependencies]
hhvm/build/fbcode_builder/manifests/f4d
[manifest] name = f4d fbsource_path = fbcode/f4d shipit_project = f4d shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookexternal/f4d.git rev = master [build.os=windows] builder = nop [build.not(os=windows)] builder = cmake [dependencies] double-conversion folly glog googletest boost protobuf lzo libicu re2 [shipit.pathmap] fbcode/f4d/public_tld = . fbcode/f4d = f4d
hhvm/build/fbcode_builder/manifests/fatal
[manifest] name = fatal fbsource_path = fbcode/fatal shipit_project = fatal [git] repo_url = https://github.com/facebook/fatal.git [shipit.pathmap] fbcode/fatal = fatal fbcode/fatal/public_tld = . [build] builder = nop subdir = . [install.files] fatal/portability.h = fatal/portability.h fatal/preprocessor.h = fatal/preprocessor.h fatal/container = fatal/container fatal/functional = fatal/functional fatal/math = fatal/math fatal/string = fatal/string fatal/type = fatal/type
hhvm/build/fbcode_builder/manifests/fb303
[manifest] name = fb303 fbsource_path = fbcode/fb303 shipit_project = fb303 shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/fb303.git [cargo] cargo_config_file = source/fb303/thrift/.cargo/config.toml [crate.pathmap] fb303_core = fb303/thrift [build] builder = cmake [dependencies] folly gflags glog fbthrift [cmake.defines.test=on] BUILD_TESTS=ON [cmake.defines.test=off] BUILD_TESTS=OFF [shipit.pathmap] fbcode/fb303/github = . fbcode/fb303/public_autocargo = fb303 fbcode/fb303 = fb303 [shipit.strip] ^fbcode/fb303/(?!public_autocargo).+/Cargo\.toml$
hhvm/build/fbcode_builder/manifests/fb303-source
[manifest] name = fb303-source fbsource_path = fbcode/fb303 shipit_project = fb303 shipit_fbcode_builder = false [git] repo_url = https://github.com/facebook/fb303.git [build] builder = nop [shipit.pathmap] fbcode/fb303/github = . fbcode/fb303/public_autocargo = fb303 fbcode/fb303 = fb303 [shipit.strip] ^fbcode/fb303/(?!public_autocargo).+/Cargo\.toml$
hhvm/build/fbcode_builder/manifests/fboss
[manifest] name = fboss fbsource_path = fbcode/fboss shipit_project = fboss shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/fboss.git [build.os=linux] builder = cmake # fboss files take a lot of RAM to compile. job_weight_mib = 3072 [build.not(os=linux)] builder = nop [dependencies] folly fb303 wangle fizz fmt libsodium googletest zstd fatal fbthrift iproute2 libmnl libusb libcurl libnl libsai re2 python yaml-cpp libyaml CLI11 exprtk nlohmann-json [shipit.pathmap] fbcode/fboss/github = . fbcode/fboss/common = common fbcode/fboss = fboss [sandcastle] run_tests = off
hhvm/build/fbcode_builder/manifests/fbthrift
[manifest] name = fbthrift fbsource_path = fbcode/thrift shipit_project = fbthrift shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/fbthrift.git [cargo] cargo_config_file = source/thrift/lib/rust/.cargo/config.toml [crate.pathmap] fbthrift = thrift/lib/rust [build] builder = cmake job_weight_mib = 2048 [dependencies] fizz fmt folly googletest libsodium python-six wangle zstd mvfst # Thrift also depends on openssl but since the latter requires a platform- # specific configuration we rely on the folly manifest to provide this # dependency to avoid duplication. [dependencies.os=linux] # python doesn't build on Windows yet and this causes python3 shebangs to # expand to a non-portable path on macOS python [shipit.pathmap] fbcode/thrift/public_tld = . fbcode/thrift = thrift [shipit.strip] ^fbcode/thrift/thrift-config\.h$ ^fbcode/thrift/perf/canary.py$ ^fbcode/thrift/perf/loadtest.py$ ^fbcode/thrift/.castle/.*
hhvm/build/fbcode_builder/manifests/fbthrift-source
[manifest] name = fbthrift-source fbsource_path = fbcode/thrift shipit_project = fbthrift shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/fbthrift.git [build] builder = nop [shipit.pathmap] fbcode/thrift/public_tld = . fbcode/thrift = thrift [shipit.strip] ^fbcode/thrift/thrift-config\.h$ ^fbcode/thrift/perf/canary.py$ ^fbcode/thrift/perf/loadtest.py$ ^fbcode/thrift/.castle/.*
hhvm/build/fbcode_builder/manifests/fbzmq
[manifest] name = fbzmq fbsource_path = facebook/fbzmq shipit_project = fbzmq shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/fbzmq.git [build.os=linux] builder = cmake [build.not(os=linux)] # boost.fiber is required and that is not available on macos. # libzmq doesn't currently build on windows. builder = nop [dependencies] boost folly fbthrift googletest libzmq [shipit.pathmap] fbcode/fbzmq = fbzmq fbcode/fbzmq/public_tld = . [shipit.strip]
hhvm/build/fbcode_builder/manifests/fizz
[manifest] name = fizz fbsource_path = fbcode/fizz shipit_project = fizz shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookincubator/fizz.git [build] builder = cmake subdir = fizz [cmake.defines] BUILD_EXAMPLES = OFF [cmake.defines.test=on] BUILD_TESTS = ON [cmake.defines.all(os=windows, test=on)] BUILD_TESTS = OFF [cmake.defines.test=off] BUILD_TESTS = OFF [dependencies] folly libsodium zlib zstd [dependencies.all(test=on, not(os=windows))] googletest [shipit.pathmap] fbcode/fizz/public_tld = . fbcode/fizz = fizz
hhvm/build/fbcode_builder/manifests/fmt
[manifest] name = fmt [download] url = https://github.com/fmtlib/fmt/archive/refs/tags/9.1.0.tar.gz sha256 = 5dea48d1fcddc3ec571ce2058e13910a0d4a6bab4cc09a809d8b1dd1c88ae6f2 [build] builder = cmake subdir = fmt-9.1.0 [cmake.defines] FMT_TEST = OFF FMT_DOC = OFF
hhvm/build/fbcode_builder/manifests/folly
[manifest] name = folly fbsource_path = fbcode/folly shipit_project = folly shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/folly.git [build] builder = cmake job_weight_mib = 1024 [dependencies] gflags glog googletest boost libevent libsodium double-conversion fmt lz4 snappy zstd # no openssl or zlib in the linux case, why? # these are usually installed on the system # and are the easiest system deps to pull in. # In the future we want to be able to express # that a system dep is sufficient in the manifest # for eg: openssl and zlib, but for now we don't # have it. # macOS doesn't expose the openssl api so we need # to build our own. [dependencies.os=darwin] openssl # Windows has neither openssl nor zlib, so we get # to provide both [dependencies.os=windows] openssl zlib # xz depends on autoconf which does not build on # Windows [dependencies.not(os=windows)] xz [shipit.pathmap] fbcode/folly/public_tld = . fbcode/folly = folly [shipit.strip] ^fbcode/folly/folly-config\.h$ ^fbcode/folly/public_tld/build/facebook_.* [cmake.defines] BUILD_SHARED_LIBS=OFF BOOST_LINK_STATIC=ON [cmake.defines.os=freebsd] LIBDWARF_FOUND=NO [cmake.defines.test=on] BUILD_TESTS=ON BUILD_BENCHMARKS=OFF [cmake.defines.test=off] BUILD_TESTS=OFF BUILD_BENCHMARKS=OFF
hhvm/build/fbcode_builder/manifests/gflags
[manifest] name = gflags [download] url = https://github.com/gflags/gflags/archive/v2.2.2.tar.gz sha256 = 34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf [build] builder = cmake subdir = gflags-2.2.2 [cmake.defines] BUILD_SHARED_LIBS = ON BUILD_STATIC_LIBS = ON #BUILD_gflags_nothreads_LIB = OFF BUILD_gflags_LIB = ON [debs] libgflags-dev [rpms] gflags-devel
hhvm/build/fbcode_builder/manifests/git-lfs
[manifest] name = git-lfs [download.os=linux] url = https://github.com/git-lfs/git-lfs/releases/download/v2.9.1/git-lfs-linux-amd64-v2.9.1.tar.gz sha256 = 2a8e60cf51ec45aa0f4332aa0521d60ec75c76e485d13ebaeea915b9d70ea466 [build] builder = nop [install.files] git-lfs = bin/git-lfs
hhvm/build/fbcode_builder/manifests/glog
[manifest] name = glog [download] url = https://github.com/google/glog/archive/v0.5.0.tar.gz sha256 = eede71f28371bf39aa69b45de23b329d37214016e2055269b3b5e7cfd40b59f5 [build] builder = cmake subdir = glog-0.5.0 [dependencies] gflags [cmake.defines] BUILD_SHARED_LIBS=ON BUILD_TESTING=NO WITH_PKGCONFIG=ON [cmake.defines.os=freebsd] HAVE_TR1_UNORDERED_MAP=OFF HAVE_TR1_UNORDERED_SET=OFF [debs] libgoogle-glog-dev [rpms] glog-devel
hhvm/build/fbcode_builder/manifests/gnu-bash
[manifest]
name = gnu-bash

[download.os=darwin]
url = https://ftp.gnu.org/gnu/bash/bash-5.1-rc1.tar.gz
sha256 = 0b2684eb1990329d499c96decfe2459f3e150deb915b0a9d03cf1be692b1d6d3

[build.os=darwin]
# The built-in FreeBSD bash on OSX is both outdated and incompatible with the
# modern GNU bash, so for the sake of being cross-platform friendly this
# manifest provides GNU bash.
# NOTE: This is the 5.1-rc1 version, which is almost the same as what Homebrew
# uses (Homebrew installs 5.0 with the 18 patches that effectively make it the
# 5.1-rc1 version).
builder = autoconf
subdir = bash-5.1-rc1
build_in_src_dir = true

[build.not(os=darwin)]
builder = nop
hhvm/build/fbcode_builder/manifests/gnu-coreutils
[manifest]
name = gnu-coreutils

[download.os=darwin]
url = https://ftp.gnu.org/gnu/coreutils/coreutils-8.32.tar.gz
sha256 = d5ab07435a74058ab69a2007e838be4f6a90b5635d812c2e26671e3972fca1b8

[build.os=darwin]
# The built-in FreeBSD version is incompatible with the GNU one, so for the
# sake of being cross-platform friendly this manifest provides the GNU version.
builder = autoconf
subdir = coreutils-8.32

[build.not(os=darwin)]
builder = nop
hhvm/build/fbcode_builder/manifests/gnu-grep
[manifest]
name = gnu-grep

[download.os=darwin]
url = https://ftp.gnu.org/gnu/grep/grep-3.5.tar.gz
sha256 = 9897220992a8fd38a80b70731462defa95f7ff2709b235fb54864ddd011141dd

[build.os=darwin]
# The built-in FreeBSD version is incompatible with the GNU one, so for the
# sake of being cross-platform friendly this manifest provides the GNU version.
builder = autoconf
subdir = grep-3.5

[build.not(os=darwin)]
builder = nop
hhvm/build/fbcode_builder/manifests/gnu-sed
[manifest]
name = gnu-sed

[download.os=darwin]
url = https://ftp.gnu.org/gnu/sed/sed-4.8.tar.gz
sha256 = 53cf3e14c71f3a149f29d13a0da64120b3c1d3334fba39c4af3e520be053982a

[build.os=darwin]
# The built-in FreeBSD version is incompatible with the GNU one, so for the
# sake of being cross-platform friendly this manifest provides the GNU version.
builder = autoconf
subdir = sed-4.8

[build.not(os=darwin)]
builder = nop
hhvm/build/fbcode_builder/manifests/googletest
[manifest] name = googletest [download] url = https://github.com/google/googletest/archive/refs/tags/release-1.12.1.tar.gz sha256 = 81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2 [build] builder = cmake subdir = googletest-release-1.12.1 [cmake.defines] # Everything else defaults to the shared runtime, so tell gtest that # it should not use its choice of the static runtime gtest_force_shared_crt=ON [cmake.defines.os=windows] BUILD_SHARED_LIBS=ON # 18.04 googletest is too old [debs.not(all(distro=ubuntu,distro_vers="18.04"))] libgtest-dev libgmock-dev
hhvm/build/fbcode_builder/manifests/googletest_1_8
[manifest] name = googletest_1_8 [download] url = https://github.com/google/googletest/archive/release-1.8.0.tar.gz sha256 = 58a6f4277ca2bc8565222b3bbd58a177609e9c488e8a72649359ba51450db7d8 [build] builder = cmake subdir = googletest-release-1.8.0 [cmake.defines] # Everything else defaults to the shared runtime, so tell gtest that # it should not use its choice of the static runtime gtest_force_shared_crt=ON [cmake.defines.os=windows] BUILD_SHARED_LIBS=ON
hhvm/build/fbcode_builder/manifests/gperf
[manifest] name = gperf [download] url = http://ftp.gnu.org/pub/gnu/gperf/gperf-3.1.tar.gz sha256 = 588546b945bba4b70b6a3a616e80b4ab466e3f33024a352fc2198112cdbb3ae2 [build.not(os=windows)] builder = autoconf subdir = gperf-3.1 [build.os=windows] builder = nop
hhvm/build/fbcode_builder/manifests/iproute2
[manifest] name = iproute2 [download] url = https://mirrors.edge.kernel.org/pub/linux/utils/net/iproute2/iproute2-4.12.0.tar.gz sha256 = 46612a1e2d01bb31932557bccdb1b8618cae9a439dfffc08ef35ed8e197f14ce [build.os=linux] builder = iproute2 subdir = iproute2-4.12.0 [build.not(os=linux)] builder = nop
hhvm/build/fbcode_builder/manifests/jq
[manifest]
name = jq

[rpms]
jq

[debs]
jq

[download.not(os=windows)]
url = https://github.com/stedolan/jq/releases/download/jq-1.5/jq-1.5.tar.gz
sha256 = c4d2bfec6436341113419debf479d833692cc5cdab7eb0326b5a4d4fbe9f493c

[build.not(os=windows)]
builder = autoconf
subdir = jq-1.5

[build.os=windows]
builder = nop

[autoconf.args]
# This argument turns off some developer tooling and is recommended in jq's
# README
--disable-maintainer-mode
hhvm/build/fbcode_builder/manifests/katran
[manifest] name = katran fbsource_path = fbcode/katran shipit_project = katran shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookincubator/katran.git [build.not(os=linux)] builder = nop [build.os=linux] builder = cmake subdir = . [cmake.defines.test=on] BUILD_TESTS=ON [cmake.defines.test=off] BUILD_TESTS=OFF [dependencies] folly fizz libbpf libmnl zlib googletest fmt [debs] libssl-dev [shipit.pathmap] fbcode/katran/public_root = . fbcode/katran = katran [shipit.strip] ^fbcode/katran/facebook ^fbcode/katran/OSS_SYNC
hhvm/build/fbcode_builder/manifests/libbpf
[manifest] name = libbpf [download] url = https://github.com/libbpf/libbpf/archive/refs/tags/v0.7.0.tar.gz sha256 = 5083588ce5a3a620e395ee1e596af77b4ec5771ffc71cff2af49dfee38c06361 # BPF only builds on linux, so make it a NOP on other platforms [build.not(os=linux)] builder = nop [build.os=linux] builder = make subdir = libbpf-0.7.0/src [make.build_args] BUILD_STATIC_ONLY=y # libbpf-0.3 requires uapi headers >= 5.8 [make.install_args] install install_uapi_headers BUILD_STATIC_ONLY=y [dependencies] libelf
hhvm/build/fbcode_builder/manifests/libbpf_0_2_0_beta
[manifest] name = libbpf_0_2_0_beta [download] url = https://github.com/libbpf/libbpf/archive/b6dd2f2.tar.gz sha256 = 8db9dca90f5c445ef2362e3c6a00f3d6c4bf36e8782f8e27704109c78e541497 # BPF only builds on linux, so make it a NOP on other platforms [build.not(os=linux)] builder = nop [build.os=linux] builder = make subdir = libbpf-b6dd2f2b7df4d3bd35d64aaf521d9ad18d766f53/src [make.build_args] BUILD_STATIC_ONLY=y # libbpf now requires uapi headers >= 5.8 [make.install_args] install install_uapi_headers BUILD_STATIC_ONLY=y [dependencies] libelf
hhvm/build/fbcode_builder/manifests/libcurl
[manifest] name = libcurl [rpms] libcurl-devel libcurl [debs] libcurl4-openssl-dev [download] url = https://curl.haxx.se/download/curl-7.65.1.tar.gz sha256 = 821aeb78421375f70e55381c9ad2474bf279fc454b791b7e95fc83562951c690 [dependencies] nghttp2 # We use system OpenSSL on Linux (see folly's manifest for details) [dependencies.not(os=linux)] openssl [build.not(os=windows)] builder = autoconf subdir = curl-7.65.1 [autoconf.args] # fboss (which added the libcurl dep) doesn't need ldap so it is disabled here. # if someone in the future wants to add ldap for something else, it won't hurt # fboss. However, that would require adding an ldap manifest. # # For the same reason, we disable libssh2 and libidn2 which aren't really used # but would require adding manifests if we don't disable them. --disable-ldap --without-libssh2 --without-libidn2 [build.os=windows] builder = cmake subdir = curl-7.65.1
hhvm/build/fbcode_builder/manifests/libelf
[manifest] name = libelf [rpms] elfutils-libelf-devel-static [debs] libelf-dev [download] url = https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz sha256 = 591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d # libelf only makes sense on linux, so make it a NOP on other platforms [build.not(os=linux)] builder = nop [build.os=linux] builder = autoconf subdir = libelf-0.8.13
hhvm/build/fbcode_builder/manifests/libevent
[manifest] name = libevent [debs] libevent-dev [homebrew] libevent [rpms] libevent-devel # Note that the CMakeLists.txt file is present only in # git repo and not in the release tarball, so take care # to use the github generated source tarball rather than # the explicitly uploaded source tarball [download] url = https://github.com/libevent/libevent/releases/download/release-2.1.12-stable/libevent-2.1.12-stable.tar.gz sha256 = 92e6de1be9ec176428fd2367677e61ceffc2ee1cb119035037a27d346b0403bb [build] builder = cmake subdir = libevent-2.1.12-stable [cmake.defines] EVENT__DISABLE_TESTS = ON EVENT__DISABLE_BENCHMARK = ON EVENT__DISABLE_SAMPLES = ON EVENT__DISABLE_REGRESS = ON [cmake.defines.shared_libs=on] EVENT__BUILD_SHARED_LIBRARIES = ON [cmake.defines.os=windows] EVENT__LIBRARY_TYPE = STATIC [dependencies.not(any(os=linux, os=freebsd))] openssl
hhvm/build/fbcode_builder/manifests/libffi
[manifest] name = libffi [debs] libffi-dev [homebrew] libffi [rpms] libffi-devel libffi [download] url = https://github.com/libffi/libffi/releases/download/v3.4.2/libffi-3.4.2.tar.gz sha256 = 540fb721619a6aba3bdeef7d940d8e9e0e6d2c193595bc243241b77ff9e93620 [build] builder = autoconf subdir = libffi-3.4.2
hhvm/build/fbcode_builder/manifests/libgit2
[manifest]
name = libgit2

[homebrew]
libgit2

[rpms]
libgit2-devel

# Ubuntu 18.04 libgit2 clashes with libcurl4-openssl-dev, as it depends on
# libcurl4-gnutls-dev. Should be OK from 20.04 onward.
# There is a description at https://github.com/r-hub/sysreqsdb/issues/77
# [debs]
# libgit2-dev

[download]
url = https://github.com/libgit2/libgit2/archive/v0.28.1.tar.gz
sha256 = 0ca11048795b0d6338f2e57717370208c2c97ad66c6d5eac0c97a8827d13936b

[build]
builder = cmake
subdir = libgit2-0.28.1

[cmake.defines]
# Could turn this on if we also wanted to add a manifest for libssh2
USE_SSH = OFF
BUILD_CLAR = OFF
# Have to build shared to work around annoying problems with cmake
# mis-parsing the frameworks required to link this on macos :-/
BUILD_SHARED_LIBS = ON
hhvm/build/fbcode_builder/manifests/libicu
[manifest] name = libicu [rpms] libicu-devel [debs] libicu-dev [download] url = https://github.com/unicode-org/icu/releases/download/release-68-2/icu4c-68_2-src.tgz sha256 = c79193dee3907a2199b8296a93b52c5cb74332c26f3d167269487680d479d625 [build.not(os=windows)] builder = autoconf subdir = icu/source [build.os=windows] builder = nop
hhvm/build/fbcode_builder/manifests/libmnl
[manifest]
name = libmnl

[rpms]
libmnl-devel
# all centos 8 distros are missing this,
# but it's in fedora so it may be back in a later version
[rpms.not(all(any(distro=centos_stream,distro=centos),distro_vers=8))]
libmnl-static

[debs]
libmnl-dev

[download]
url = http://www.netfilter.org/pub/libmnl/libmnl-1.0.4.tar.bz2
sha256 = 171f89699f286a5854b72b91d06e8f8e3683064c5901fb09d954a9ab6f551f81

[build.os=linux]
builder = autoconf
subdir = libmnl-1.0.4
hhvm/build/fbcode_builder/manifests/libnl
[manifest] name = libnl [rpms] libnl3-devel libnl3 [debs] libnl-3-dev libnl-route-3-dev [download] url = https://www.infradead.org/~tgr/libnl/files/libnl-3.2.25.tar.gz sha256 = 8beb7590674957b931de6b7f81c530b85dc7c1ad8fbda015398bc1e8d1ce8ec5 [build.os=linux] builder = autoconf subdir = libnl-3.2.25
hhvm/build/fbcode_builder/manifests/libsai
[manifest] name = libsai [download] url = https://github.com/opencomputeproject/SAI/archive/v1.12.0.tar.gz sha256 = 1e7f43599baf1dcca122bbbb2baaeb9b20e5632d2ca6aaa61a568d1d58afaa97 [build] builder = nop subdir = SAI-1.12.0 [install.files] inc = include experimental = experimental
hhvm/build/fbcode_builder/manifests/libsodium
[manifest] name = libsodium [debs] libsodium-dev [homebrew] libsodium [rpms] libsodium-devel libsodium-static [download.not(os=windows)] url = https://github.com/jedisct1/libsodium/releases/download/1.0.17/libsodium-1.0.17.tar.gz sha256 = 0cc3dae33e642cc187b5ceb467e0ad0e1b51dcba577de1190e9ffa17766ac2b1 [build.not(os=windows)] builder = autoconf subdir = libsodium-1.0.17 [download.os=windows] url = https://download.libsodium.org/libsodium/releases/old/libsodium-1.0.17-msvc.zip sha256 = f0f32ad8ebd76eee99bb039f843f583f2babca5288a8c26a7261db9694c11467 [build.os=windows] builder = nop [install.files.os=windows] x64/Release/v141/dynamic/libsodium.dll = bin/libsodium.dll x64/Release/v141/dynamic/libsodium.lib = lib/libsodium.lib x64/Release/v141/dynamic/libsodium.exp = lib/libsodium.exp x64/Release/v141/dynamic/libsodium.pdb = lib/libsodium.pdb include = include [autoconf.args]
hhvm/build/fbcode_builder/manifests/libtool
[manifest] name = libtool [homebrew] libtool [rpms] libtool [debs] libtool [download] url = http://ftp.gnu.org/gnu/libtool/libtool-2.4.6.tar.gz sha256 = e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3 [build] builder = autoconf subdir = libtool-2.4.6 [dependencies] automake [autoconf.args] --enable-ltdl-install
hhvm/build/fbcode_builder/manifests/libusb
[manifest] name = libusb [debs] libusb-1.0-0-dev [homebrew] libusb [rpms] libusb-devel libusb [download] url = https://github.com/libusb/libusb/releases/download/v1.0.22/libusb-1.0.22.tar.bz2 sha256 = 75aeb9d59a4fdb800d329a545c2e6799f732362193b465ea198f2aa275518157 [build.os=linux] builder = autoconf subdir = libusb-1.0.22 [autoconf.args] # fboss (which added the libusb dep) doesn't need udev so it is disabled here. # if someone in the future wants to add udev for something else, it won't hurt # fboss. --disable-udev
hhvm/build/fbcode_builder/manifests/libyaml
[manifest] name = libyaml [download] url = http://pyyaml.org/download/libyaml/yaml-0.1.7.tar.gz sha256 = 8088e457264a98ba451a90b8661fcb4f9d6f478f7265d48322a196cec2480729 [build.os=linux] builder = autoconf subdir = yaml-0.1.7 [build.not(os=linux)] builder = nop
hhvm/build/fbcode_builder/manifests/libzmq
[manifest] name = libzmq [debs] libzmq3-dev [homebrew] zeromq [rpms] zeromq-devel zeromq [download] url = https://github.com/zeromq/libzmq/releases/download/v4.3.1/zeromq-4.3.1.tar.gz sha256 = bcbabe1e2c7d0eec4ed612e10b94b112dd5f06fcefa994a0c79a45d835cd21eb [build] builder = autoconf subdir = zeromq-4.3.1 [autoconf.args] [dependencies] autoconf libtool
hhvm/build/fbcode_builder/manifests/lz4
[manifest] name = lz4 [homebrew] lz4 [rpms] lz4-devel # centos 8 and centos_stream 9 are missing this rpm [rpms.not(any(all(distro=centos,distro_vers=8),all(distro=centos_stream,distro_vers=9)))] lz4-static [debs] liblz4-dev [download] url = https://github.com/lz4/lz4/archive/v1.8.3.tar.gz sha256 = 33af5936ac06536805f9745e0b6d61da606a1f8b4cc5c04dd3cbaca3b9b4fc43 [build] builder = cmake subdir = lz4-1.8.3/contrib/cmake_unofficial
hhvm/build/fbcode_builder/manifests/lzo
[manifest] name = lzo [debs] liblzo2-dev [homebrew] lzo [rpms] lzo-devel [download] url = http://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz sha256 = c0f892943208266f9b6543b3ae308fab6284c5c90e627931446fb49b4221a072 [build.not(os=windows)] builder = autoconf subdir = lzo-2.10 [build.os=windows] builder = nop
hhvm/build/fbcode_builder/manifests/mononoke
[manifest] name = mononoke fbsource_path = fbcode/eden shipit_project = eden shipit_fbcode_builder = true [git] repo_url = https://github.com/facebookexperimental/eden.git [build.not(os=windows)] builder = cargo [build.os=windows] # building Mononoke on windows is not supported builder = nop [cargo] build_doc = true workspace_dir = eden/mononoke [shipit.pathmap] fbcode/configerator/structs/scm/hg = configerator/structs/scm/hg fbcode/configerator/structs/scm/hg/public_autocargo = configerator/structs/scm/hg fbcode/configerator/structs/scm/mononoke/public_autocargo = configerator/structs/scm/mononoke fbcode/configerator/structs/scm/mononoke = configerator/structs/scm/mononoke fbcode/eden/oss = . fbcode/eden = eden fbcode/eden/fs/public_autocargo = eden/fs fbcode/eden/mononoke/public_autocargo = eden/mononoke fbcode/eden/scm/public_autocargo = eden/scm fbcode/tools/lfs = tools/lfs tools/rust/ossconfigs = . [shipit.strip] ^fbcode/configerator/structs/scm/hg(?!/public_autocargo).*/Cargo\.toml$ ^fbcode/configerator/structs/scm/mononoke(?!/public_autocargo).*/Cargo\.toml$ ^fbcode/eden/fs(?!/public_autocargo).*/Cargo\.toml$ ^fbcode/eden/scm/lib/third-party/rust/.*/Cargo\.toml$ ^fbcode/eden/mononoke(?!/public_autocargo).*/Cargo\.toml$ # strip other scm code unrelated to mononoke to prevent triggering unnecessary checks ^fbcode/eden(?!/mononoke|/scm/(lib|public_autocargo))/.*$ ^.*/facebook/.*$ ^.*/fb/.*$ [dependencies] fb303 fbthrift rust-shed [dependencies.fb=on] rust
hhvm/build/fbcode_builder/manifests/mvfst
[manifest] name = mvfst fbsource_path = fbcode/quic shipit_project = mvfst shipit_fbcode_builder = true [git] repo_url = https://github.com/facebook/mvfst.git [build] builder = cmake subdir = . [cmake.defines.test=on] BUILD_TESTS = ON [cmake.defines.all(os=windows, test=on)] BUILD_TESTS = OFF [cmake.defines.test=off] BUILD_TESTS = OFF [dependencies] folly fizz [dependencies.all(test=on, not(os=windows))] googletest [shipit.pathmap] fbcode/quic/public_root = . fbcode/quic = quic
hhvm/build/fbcode_builder/manifests/ncurses
[manifest] name = ncurses [debs] libncurses-dev [homebrew] ncurses [rpms] ncurses-devel [download] url = https://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.3.tar.gz sha256 = 97fc51ac2b085d4cde31ef4d2c3122c21abc217e9090a43a30fc5ec21684e059 [build.not(os=windows)] builder = autoconf subdir = ncurses-6.3 [autoconf.args] --without-cxx-binding --without-ada [autoconf.args.os=linux] --enable-shared --with-shared [build.os=windows] builder = nop
hhvm/build/fbcode_builder/manifests/nghttp2
[manifest] name = nghttp2 [rpms] libnghttp2-devel libnghttp2 [debs] libnghttp2-dev [download] url = https://github.com/nghttp2/nghttp2/releases/download/v1.47.0/nghttp2-1.47.0.tar.gz sha256 = 62f50f0e9fc479e48b34e1526df8dd2e94136de4c426b7680048181606832b7c [build] builder = autoconf subdir = nghttp2-1.47.0 [autoconf.args] --enable-lib-only --disable-dependency-tracking
hhvm/build/fbcode_builder/manifests/ninja
[manifest]
name = ninja

[debs]
ninja-build

[homebrew]
ninja

[rpms]
ninja-build

[download.os=windows]
url = https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-win.zip
sha256 = bbde850d247d2737c5764c927d1071cbb1f1957dcabda4a130fa8547c12c695f

[build.os=windows]
builder = nop

[install.files.os=windows]
ninja.exe = bin/ninja.exe

[download.not(os=windows)]
url = https://github.com/ninja-build/ninja/archive/v1.10.2.tar.gz
sha256 = ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed

[build.not(os=windows)]
builder = ninja_bootstrap
subdir = ninja-1.10.2
hhvm/build/fbcode_builder/manifests/nlohmann-json
[manifest]
name = nlohmann-json

[download]
url = https://github.com/nlohmann/json/archive/refs/tags/v3.10.5.tar.gz
sha256 = 5daca6ca216495edf89d167f808d1d03c4a4d929cef7da5e10f135ae1540c7e4

[dependencies]

[build]
builder = cmake
subdir = json-3.10.5
hhvm/build/fbcode_builder/manifests/nmap
[manifest]
name = nmap

[rpms]
nmap

[debs]
nmap

[download.not(os=windows)]
url = https://api.github.com/repos/nmap/nmap/tarball/ef8213a36c2e89233c806753a57b5cd473605408
sha256 = eda39e5a8ef4964fac7db16abf91cc11ff568eac0fa2d680b0bfa33b0ed71f4a

[build.not(os=windows)]
builder = autoconf
subdir = nmap-nmap-ef8213a
build_in_src_dir = true

[build.os=windows]
builder = nop

[autoconf.args]
# Without this option the build was failing to find some third party libraries
# that we don't need
enable_rdma=no
hhvm/build/fbcode_builder/manifests/openr
[manifest]
name = openr
fbsource_path = facebook/openr
shipit_project = openr
shipit_fbcode_builder = true

[git]
repo_url = https://github.com/facebook/openr.git

[build.os=linux]
builder = cmake
# openr files take a lot of RAM to compile.
job_weight_mib = 3072

[build.not(os=linux)]
# boost.fiber is required and that is not available on macos.
builder = nop

[dependencies]
boost
fb303
fbthrift
folly
googletest
re2
range-v3

[cmake.defines.test=on]
BUILD_TESTS=ON
ADD_ROOT_TESTS=OFF

[cmake.defines.test=off]
BUILD_TESTS=OFF

[shipit.pathmap]
fbcode/openr = openr
fbcode/openr/public_tld = .
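`job_weight_mib = 3072` tells the build scheduler to treat each openr compile job as needing about 3 GiB of RAM, so parallelism is bounded by available memory as well as by core count. A plausible reading of that heuristic, sketched in Python (the formula is an assumption for illustration, not getdeps' exact scheduler):

import multiprocessing

def max_jobs(job_weight_mib, avail_ram_mib, ncores=None):
    # Each job is assumed to need job_weight_mib of RAM; never run
    # more jobs than fit in memory, and never more than the core count.
    ncores = ncores or multiprocessing.cpu_count()
    fit_in_ram = max(1, avail_ram_mib // job_weight_mib)
    return min(ncores, fit_in_ram)

# With 16 GiB free and openr's job_weight_mib of 3072, at most five
# compile jobs run at once, even on a 32-core machine.
print(max_jobs(3072, 16 * 1024, ncores=32))  # 5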
hhvm/build/fbcode_builder/manifests/openssl
[manifest]
name = openssl

[debs]
libssl-dev

[homebrew]
[email protected]
# on homebrew need the matching curl and ca-

[rpms]
openssl
openssl-devel
openssl-libs

[download]
url = https://www.openssl.org/source/openssl-1.1.1l.tar.gz
sha256 = 0b7a3e5e59c34827fe0c3a74b7ec8baef302b98fa80088d7f9153aa16fa76bd1

# We use the system openssl on linux
[build.not(any(os=linux, os=freebsd))]
builder = openssl
subdir = openssl-1.1.1l

[dependencies.os=windows]
perl
hhvm/build/fbcode_builder/manifests/osxfuse
[manifest]
name = osxfuse

[download]
url = https://github.com/osxfuse/osxfuse/archive/osxfuse-3.8.3.tar.gz
sha256 = 93bab6731bdfe8dc1ef069483437270ce7fe5a370f933d40d8d0ef09ba846c0c

[build]
builder = nop

[install.files]
osxfuse-osxfuse-3.8.3/common = include
hhvm/build/fbcode_builder/manifests/patchelf
[manifest]
name = patchelf

[rpms]
patchelf

[debs]
patchelf

[download]
url = https://github.com/NixOS/patchelf/archive/0.10.tar.gz
sha256 = b3cb6bdedcef5607ce34a350cf0b182eb979f8f7bc31eae55a93a70a3f020d13

[build]
builder = autoconf
subdir = patchelf-0.10
hhvm/build/fbcode_builder/manifests/pcre
[manifest]
name = pcre

[homebrew]
pcre

[rpms]
pcre-devel
pcre-static

[debs]
libpcre3-dev

[download]
url = https://versaweb.dl.sourceforge.net/project/pcre/pcre/8.43/pcre-8.43.tar.gz
sha256 = 0b8e7465dc5e98c757cc3650a20a7843ee4c3edf50aaf60bb33fd879690d2c73

[build]
builder = cmake
subdir = pcre-8.43
hhvm/build/fbcode_builder/manifests/pcre2
[manifest]
name = pcre2

[homebrew]
pcre2

[rpms]
pcre2-devel
pcre-static

[debs]
libpcre2-dev

[download]
url = https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.40/pcre2-10.40.tar.bz2
sha256 = 14e4b83c4783933dc17e964318e6324f7cae1bc75d8f3c79bc6969f00c159d68

[build]
builder = cmake
subdir = pcre2-10.40
hhvm/build/fbcode_builder/manifests/perl
[manifest]
name = perl

[download.os=windows]
url = http://strawberryperl.com/download/5.28.1.1/strawberry-perl-5.28.1.1-64bit-portable.zip
sha256 = 935c95ba096fa11c4e1b5188732e3832d330a2a79e9882ab7ba8460ddbca810d

[build.os=windows]
builder = nop
subdir = perl
hhvm/build/fbcode_builder/manifests/pexpect
[manifest]
name = pexpect

[download]
url = https://files.pythonhosted.org/packages/0e/3e/377007e3f36ec42f1b84ec322ee12141a9e10d808312e5738f52f80a232c/pexpect-4.7.0-py2.py3-none-any.whl
sha256 = 2094eefdfcf37a1fdbfb9aa090862c1a4878e5c7e0e7e7088bdb511c558e5cd1

[build]
builder = python-wheel

[dependencies]
python-ptyprocess
hhvm/build/fbcode_builder/manifests/protobuf
[manifest]
name = protobuf

[rpms]
protobuf-devel

[debs]
libprotobuf-dev

[git]
repo_url = https://github.com/protocolbuffers/protobuf.git
rev = master

[build.not(os=windows)]
builder = autoconf

[build.os=windows]
builder = nop
hhvm/build/fbcode_builder/manifests/proxygen
[manifest]
name = proxygen
fbsource_path = fbcode/proxygen
shipit_project = proxygen
shipit_fbcode_builder = true

[git]
repo_url = https://github.com/facebook/proxygen.git

[build.os=windows]
builder = nop

[build]
builder = cmake
subdir = .
job_weight_mib = 3072

[cmake.defines]
BUILD_QUIC = ON

[cmake.defines.test=on]
BUILD_TESTS = ON

[cmake.defines.test=off]
BUILD_TESTS = OFF

[dependencies]
zlib
gperf
folly
fizz
wangle
mvfst

[dependencies.test=on]
googletest

[shipit.pathmap]
fbcode/proxygen/public_tld = .
fbcode/proxygen = proxygen
hhvm/build/fbcode_builder/manifests/python
[manifest]
name = python

[homebrew]
[email protected]

[rpms]
python3
python3-devel

# sapling needs dataclasses which arrive in 3.7, and the bionic python is 3.6
[debs.all(distro=ubuntu,distro_vers="18.04")]
python3.8-dev

[debs.not(all(distro=ubuntu,distro_vers="18.04"))]
python3-all-dev

[download]
url = https://www.python.org/ftp/python/3.8.13/Python-3.8.13.tgz
sha256 = 903b92d76354366b1d9c4434d0c81643345cef87c1600adfa36095d7b00eede4

[build]
builder = autoconf
subdir = Python-3.8.13

[autoconf.args]
--enable-shared
--with-ensurepip=install

# python's pkg-config libffi detection is broken
# See https://bugs.python.org/issue34823 for clearest description
# and pending PR https://github.com/python/cpython/pull/20451
# The documented workaround requires an environment variable derived from
# pkg-config to be passed into its configure step
[autoconf.envcmd.LDFLAGS]
pkg-config --libs-only-L libffi

[dependencies]
libffi
# eden tests expect the python bz2 support
bz2
# eden tests expect the python curses support
ncurses
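The `[autoconf.envcmd.LDFLAGS]` stanza means: run the listed command and export its standard output as `LDFLAGS` in the environment of the configure step, which is how the libffi workaround described in the comments above gets applied. In isolation, the mechanism looks roughly like this Python sketch (assumed to run inside the Python-3.8.13 source tree with pkg-config and libffi installed):

import os
import subprocess

# Run the manifest's envcmd and export its stdout as LDFLAGS, then
# invoke configure with the augmented environment.
env = dict(os.environ)
env["LDFLAGS"] = subprocess.check_output(
    ["pkg-config", "--libs-only-L", "libffi"], text=True
).strip()
subprocess.check_call(
    ["./configure", "--enable-shared", "--with-ensurepip=install"], env=env
)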
hhvm/build/fbcode_builder/manifests/python-click
[manifest]
name = python-click

[download]
url = https://files.pythonhosted.org/packages/d2/3d/fa76db83bf75c4f8d338c2fd15c8d33fdd7ad23a9b5e57eb6c5de26b430e/click-7.1.2-py2.py3-none-any.whl
sha256 = dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc

[build]
builder = python-wheel