docstring: string, lengths 52-499
function: string, lengths 67-35.2k
__index_level_0__: int64, range 52.6k-1.16M
Write package data to `buf`. Args: data (dict): Data source - must conform to `package_serialise_schema`. buf (file-like object): Destination stream. format_ (`FileFormat`): Format to dump data in. skip_attributes (list of str): List of attributes to not print.
def dump_package_data(data, buf, format_=FileFormat.py, skip_attributes=None): if format_ == FileFormat.txt: raise ValueError("'txt' format not supported for packages.") data_ = dict((k, v) for k, v in data.iteritems() if v is not None) data_ = package_serialise_schema.validate(data_) skip = set(skip_attributes or []) items = [] for key in package_key_order: if key not in skip: value = data_.pop(key, None) if value is not None: items.append((key, value)) # remaining are arbitrary keys for key, value in data_.iteritems(): if key not in skip: items.append((key, value)) dump_func = dump_functions[format_] dump_func(items, buf)
232,028
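A minimal usage sketch for dump_package_data above; the import locations and the data dict are assumptions for illustration only (the dict must conform to package_serialise_schema):

    import sys
    # Assumed import locations - adjust to wherever these live in your checkout.
    from rez.package_serialise import dump_package_data
    from rez.serialise import FileFormat

    data = {
        "name": "foo",
        "version": "1.0.0",
        "requires": ["python-2.7"],
    }

    # Write the definition to stdout as YAML, omitting the 'requires' attribute.
    dump_package_data(data, buf=sys.stdout, format_=FileFormat.yaml,
                      skip_attributes=["requires"])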
Create a package maker. Args: name (str): Package name.
def __init__(self, name, data=None, package_cls=None): super(PackageMaker, self).__init__(data) self.name = name self.package_cls = package_cls or Package # set by `make_package` self.installed_variants = [] self.skipped_variants = []
232,047
Convert a dict of Schemas into a Schema. Args: required (bool): Whether to make schema keys optional or required. allow_custom_keys (bool, optional): If True, creates a schema that allows custom items in dicts. modifier (callable): Functor to apply to dict values - it is applied via `Schema.Use`. Returns: A `Schema` object.
def dict_to_schema(schema_dict, required, allow_custom_keys=True, modifier=None): if modifier: modifier = Use(modifier) def _to(value): if isinstance(value, dict): d = {} for k, v in value.iteritems(): if isinstance(k, basestring): k = Required(k) if required else Optional(k) d[k] = _to(v) if allow_custom_keys: d[Optional(basestring)] = modifier or object schema = Schema(d) elif modifier: schema = And(value, modifier) else: schema = value return schema return _to(schema_dict)
232,086
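A small sketch of dict_to_schema in use; the import path is an assumption and the schema layout is made up for illustration:

    from rez.utils.schema import dict_to_schema  # assumed module path

    schema_dict = {
        "name": basestring,
        "tools": [basestring],
    }

    # Top-level keys are required, unknown keys are allowed (the default).
    schema = dict_to_schema(schema_dict, required=True)
    data = schema.validate({"name": "foo", "tools": ["fooify"], "extra": 1})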
Enter diff mode. Args: context_model (`ContextModel`): Context to diff against. If None, a copy of the current context is used.
def enter_diff_mode(self, context_model=None): assert not self.diff_mode self.diff_mode = True if context_model is None: self.diff_from_source = True self.diff_context_model = self.context_model.copy() else: self.diff_from_source = False self.diff_context_model = context_model self.clear() self.setColumnCount(5) self.refresh()
232,104
Return the string wrapped with the appropriate styling for the message level. The styling will be determined based on the rez configuration. Args: str_ (str): The string to be wrapped. level (str): The message level. Should be one of 'critical', 'error', 'warning', 'info' or 'debug'. Returns: str: The string styled with the appropriate escape sequences.
def _color_level(str_, level): fore_color, back_color, styles = _get_style_from_config(level) return _color(str_, fore_color, back_color, styles)
232,129
Iterate over package families, in no particular order. Note that multiple package families with the same name can be returned. Unlike packages, families later in the searchpath are not hidden by earlier families. Args: paths (list of str, optional): paths to search for package families, defaults to `config.packages_path`. Returns: `PackageFamily` iterator.
def iter_package_families(paths=None): for path in (paths or config.packages_path): repo = package_repository_manager.get_repository(path) for resource in repo.iter_package_families(): yield PackageFamily(resource)
232,159
Get an exact version of a package. Args: name (str): Name of the package, eg 'maya'. version (Version or str): Version of the package, eg '1.0.0' paths (list of str, optional): paths to search for package, defaults to `config.packages_path`. Returns: `Package` object, or None if the package was not found.
def get_package(name, version, paths=None): if isinstance(version, basestring): range_ = VersionRange("==%s" % version) else: range_ = VersionRange.from_version(version, "==") it = iter_packages(name, range_, paths) try: return it.next() except StopIteration: return None
232,161
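A usage sketch for get_package; the module path is an assumption (these loaders sit in rez's packages module, rez.packages_ in codebases of this vintage):

    from rez.packages_ import get_package  # assumed module path

    pkg = get_package("maya", "2018.0.1")
    if pkg is None:
        print("package not found")
    else:
        print(pkg.qualified_name)  # e.g. 'maya-2018.0.1'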
Create a package given its handle (or serialized dict equivalent) Args: package_handle (`ResourceHandle` or dict): Resource handle, or equivalent serialized dict representation from ResourceHandle.to_dict Returns: `Package`.
def get_package_from_handle(package_handle): if isinstance(package_handle, dict): package_handle = ResourceHandle.from_dict(package_handle) package_resource = package_repository_manager.get_resource_from_handle(package_handle) package = Package(package_resource) return package
232,162
Get a package given a string. Args: txt (str): String such as 'foo', 'bah-1.3'. paths (list of str, optional): paths to search for package, defaults to `config.packages_path`. Returns: `Package` instance, or None if no package was found.
def get_package_from_string(txt, paths=None): o = VersionedObject(txt) return get_package(o.name, o.version, paths=paths)
232,163
Create a developer package. Args: path (str): Path to dir containing package definition file. format (str): Package definition file format, detected if None. Returns: `DeveloperPackage`.
def get_developer_package(path, format=None): from rez.developer_package import DeveloperPackage return DeveloperPackage.from_path(path, format=format)
232,164
Create a package given package data. Args: name (str): Package name. data (dict): Package data. Must conform to `package_maker.package_schema`. Returns: `Package` object.
def create_package(name, data, package_cls=None): from rez.package_maker__ import PackageMaker maker = PackageMaker(name, data, package_cls=package_cls) return maker.get_package()
232,165
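A sketch of create_package, which wraps the PackageMaker shown earlier; the import path is an assumption and the data fields are illustrative (the dict must conform to package_maker.package_schema):

    from rez.packages_ import create_package  # assumed module path

    package = create_package("foo", data={
        "version": "1.0.0",
        "description": "an example in-memory package",
    })
    print(package.qualified_name)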
Create a variant given its handle (or serialized dict equivalent) Args: variant_handle (`ResourceHandle` or dict): Resource handle, or equivalent serialized dict representation from ResourceHandle.to_dict context (`ResolvedContext`): The context this variant is associated with, if any. Returns: `Variant`.
def get_variant(variant_handle, context=None): if isinstance(variant_handle, dict): variant_handle = ResourceHandle.from_dict(variant_handle) variant_resource = package_repository_manager.get_resource_from_handle(variant_handle) variant = Variant(variant_resource, context=context) return variant
232,166
Get the latest package for a given package name. Args: name (str): Package name. range_ (`VersionRange`): Version range to search within. paths (list of str, optional): paths to search for package families, defaults to `config.packages_path`. error (bool): If True, raise an error if no package is found. Returns: `Package` object, or None if no package is found.
def get_latest_package(name, range_=None, paths=None, error=False): it = iter_packages(name, range_=range_, paths=paths) try: return max(it, key=lambda x: x.version) except ValueError: # empty sequence if error: # FIXME this isn't correct, since the pkg fam may exist but a pkg # in the range does not. raise PackageFamilyNotFoundError("No such package family %r" % name) return None
232,169
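A short sketch for get_latest_package; the import paths are assumptions:

    from rez.packages_ import get_latest_package          # assumed module path
    from rez.vendor.version.version import VersionRange   # assumed vendored path

    latest = get_latest_package("python")
    latest_2x = get_latest_package("python", range_=VersionRange("2"), error=False)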
Get the latest package found within the given request string. Args: txt (str): Request, eg 'foo-1.2+' paths (list of str, optional): paths to search for package families, defaults to `config.packages_path`. error (bool): If True, raise an error if no package is found. Returns: `Package` object, or None if no package is found.
def get_latest_package_from_string(txt, paths=None, error=False): from rez.utils.formatting import PackageRequest req = PackageRequest(txt) return get_latest_package(name=req.name, range_=req.range_, paths=paths, error=error)
232,170
Print the contents of the package. Args: buf (file-like object): Stream to write to. format_ (`FileFormat`): Format to write in. skip_attributes (list of str): List of attributes to not print. include_release (bool): If True, include release-related attributes, such as 'timestamp' and 'changelog'
def print_info(self, buf=None, format_=FileFormat.yaml, skip_attributes=None, include_release=False): data = self.validated_data().copy() # config is a special case. We only really want to show any config settings # that were in the package.py, not the entire Config contents that get # grafted onto the Package/Variant instance. However Variant has an empty # 'data' dict property, since it forwards data from its parent package. data.pop("config", None) if self.config: if isinstance(self, Package): config_dict = self.data.get("config") else: config_dict = self.parent.data.get("config") data["config"] = config_dict if not include_release: skip_attributes = list(skip_attributes or []) + list(package_release_keys) buf = buf or sys.stdout dump_package_data(data, buf=buf, format_=format_, skip_attributes=skip_attributes)
232,178
Get the requirements of the variant. Args: build_requires (bool): If True, include build requirements. private_build_requires (bool): If True, include private build requirements. Returns: List of `Requirement` objects.
def get_requires(self, build_requires=False, private_build_requires=False): requires = self.requires or [] if build_requires: requires = requires + (self.build_requires or []) if private_build_requires: requires = requires + (self.private_build_requires or []) return requires
232,192
Writes to both the given filepath and a tmpdir location. This works around a problem on some NFS setups where a file cannot reliably be read immediately after it has been written. Any file we write is therefore also written to /tmp, and reads of these files are redirected there. Args: filepath (str): File to write. mode (int): Same mode arg as you would pass to `os.chmod`. Yields: File-like object.
def open_file_for_write(filepath, mode=None): stream = StringIO() yield stream content = stream.getvalue() filepath = os.path.realpath(filepath) tmpdir = tmpdir_manager.mkdtemp() cache_filepath = os.path.join(tmpdir, os.path.basename(filepath)) debug_print("Writing to %s (local cache of %s)", cache_filepath, filepath) with atomic_write(filepath, overwrite=True) as f: f.write(content) if mode is not None: os.chmod(filepath, mode) with open(cache_filepath, 'w') as f: f.write(content) file_cache[filepath] = cache_filepath
232,195
Load data from a file. Note: Any functions from a .py file will be converted to `SourceCode` objects. Args: filepath (str): File to load. format_ (`FileFormat`): Format of file contents. update_data_callback (callable): Used to change data before it is returned or cached. disable_memcache (bool): If True, don't r/w to memcache. Returns: dict.
def load_from_file(filepath, format_=FileFormat.py, update_data_callback=None, disable_memcache=False): filepath = os.path.realpath(filepath) cache_filepath = file_cache.get(filepath) if cache_filepath: # file has been written by this process, read it from /tmp to avoid # potential write-then-read issues over NFS return _load_file(filepath=cache_filepath, format_=format_, update_data_callback=update_data_callback, original_filepath=filepath) elif disable_memcache: return _load_file(filepath=filepath, format_=format_, update_data_callback=update_data_callback) else: return _load_from_file(filepath=filepath, format_=format_, update_data_callback=update_data_callback)
232,196
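A usage sketch for load_from_file; the module path is an assumption and the filepath is illustrative:

    from rez.serialise import load_from_file, FileFormat  # assumed module path

    # Load a package.py definition, bypassing memcache.
    data = load_from_file("/path/to/package.py",
                          format_=FileFormat.py,
                          disable_memcache=True)
    print(data.get("name"))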
Load python-formatted data from a stream. Args: stream (file-like object). Returns: dict.
def load_py(stream, filepath=None): with add_sys_paths(config.package_definition_build_python_paths): return _load_py(stream, filepath=filepath)
232,199
Load yaml-formatted data from a stream. Args: stream (file-like object). Returns: dict.
def load_yaml(stream, **kwargs): # if there's an error parsing the yaml, and you pass yaml.load a string, # it will print lines of context, but will print "<string>" instead of a # filename; if you pass a stream, it will print the filename, but no lines # of context. # Get the best of both worlds, by passing it a string, then replacing # "<string>" with the filename if there's an error... content = stream.read() try: return yaml.load(content) or {} except Exception, e: if stream.name and stream.name != '<string>': for mark_name in 'context_mark', 'problem_mark': mark = getattr(e, mark_name, None) if mark is None: continue if getattr(mark, 'name') == '<string>': mark.name = stream.name raise e
232,202
Security mechanism response This method attempts to authenticate, passing a block of SASL data for the security mechanism at the server side. PARAMETERS: response: longstr security response data A block of opaque data passed to the security mechanism. The contents of this data are defined by the SASL security mechanism.
def _x_secure_ok(self, response): args = AMQPWriter() args.write_longstr(response) self._send_method((10, 21), args)
232,214
Print a list of visible tools. Args: pattern (str): Only list tools that match this glob pattern.
def print_tools(self, pattern=None, buf=sys.stdout): seen = set() rows = [] context = self.context if context: data = context.get_tools() conflicts = set(context.get_conflicting_tools().keys()) for _, (variant, tools) in sorted(data.items()): pkg_str = variant.qualified_package_name for tool in tools: if pattern and not fnmatch(tool, pattern): continue if tool in conflicts: label = "(in conflict)" color = critical else: label = '' color = None rows.append([tool, '-', pkg_str, "active context", label, color]) seen.add(tool) for suite in self.suites: for tool, d in suite.get_tools().iteritems(): if tool in seen: continue if pattern and not fnmatch(tool, pattern): continue label = [] color = None path = which(tool) if path: path_ = os.path.join(suite.tools_path, tool) if path != path_: label.append("(hidden by unknown tool '%s')" % path) color = warning variant = d["variant"] if isinstance(variant, set): pkg_str = ", ".join(variant) label.append("(in conflict)") color = critical else: pkg_str = variant.qualified_package_name orig_tool = d["tool_name"] if orig_tool == tool: orig_tool = '-' label = ' '.join(label) source = ("context '%s' in suite '%s'" % (d["context_name"], suite.load_path)) rows.append([tool, orig_tool, pkg_str, source, label, color]) seen.add(tool) _pr = Printer(buf) if not rows: _pr("No matching tools.") return False headers = [["TOOL", "ALIASING", "PACKAGE", "SOURCE", "", None], ["----", "--------", "-------", "------", "", None]] rows = headers + sorted(rows, key=lambda x: x[0].lower()) print_colored_columns(_pr, rows) return True
232,221
Load a developer package. A developer package may for example be a package.yaml or package.py in a user's source directory. Args: path: Directory containing the package definition file, or file path for the package file itself format: which FileFormat to use, or None to check both .py and .yaml Returns: `Package` object.
def from_path(cls, path, format=None): name = None data = None if format is None: formats = (FileFormat.py, FileFormat.yaml) else: formats = (format,) try: mode = os.stat(path).st_mode except (IOError, OSError): raise PackageMetadataError( "Path %r did not exist, or was not accessible" % path) is_dir = stat.S_ISDIR(mode) for name_ in config.plugins.package_repository.filesystem.package_filenames: for format_ in formats: if is_dir: filepath = os.path.join(path, "%s.%s" % (name_, format_.extension)) exists = os.path.isfile(filepath) else: # if format was not specified, verify that it has the # right extension before trying to load if format is None: if os.path.splitext(path)[1] != format_.extension: continue filepath = path exists = True if exists: data = load_from_file(filepath, format_, disable_memcache=True) break if data: name = data.get("name") if name is not None or isinstance(name, basestring): break if data is None: raise PackageMetadataError("No package definition file found at %s" % path) if name is None or not isinstance(name, basestring): raise PackageMetadataError( "Error in %r - missing or non-string field 'name'" % filepath) package = create_package(name, data, package_cls=cls) # preprocessing result = package._get_preprocessed(data) if result: package, data = result package.filepath = filepath # find all includes, this is needed at install time to copy the right # py sourcefiles into the package installation package.includes = set() def visit(d): for k, v in d.iteritems(): if isinstance(v, SourceCode): package.includes |= (v.includes or set()) elif isinstance(v, dict): visit(v) visit(data) package._validate_includes() return package
232,231
Get a list of running instances. Args: context (`ResolvedContext`): Context the process is running in. process_name (str): Name of the process. Returns: List of (`subprocess.Popen`, start-time) 2-tuples, where start_time is the epoch time the process was added.
def running_instances(self, context, process_name): handle = (id(context), process_name) it = self.processes.get(handle, {}).itervalues() entries = [x for x in it if x[0].poll() is None] return entries
232,248
Return the string with non-literal parts formatted. Args: func (callable): Callable that translates a string into a formatted string. Returns: `EscapedString` object.
def formatted(self, func): other = EscapedString.__new__(EscapedString) other.strings = [] for is_literal, value in self.strings: if not is_literal: value = func(value) other.strings.append((is_literal, value)) return other
232,295
Creates an `EnvironmentDict`. Args: override_existing_lists (bool): If True, the first call to append or prepend will override the value in `environ` and effectively act as a setenv operation. If False, pre-existing values will be appended/prepended to as usual.
def __init__(self, manager): self.manager = manager self._var_cache = dict((k, EnvironmentVariable(k, self)) for k in manager.parent_environ.iterkeys())
232,302
Compile and possibly execute rex code. Args: code (str or SourceCode): The python code to compile. filename (str): File to associate with the code, will default to '<string>'. exec_namespace (dict): Namespace to execute the code in. If None, the code is not executed. Returns: Compiled code object.
def compile_code(cls, code, filename=None, exec_namespace=None): if filename is None: if isinstance(code, SourceCode): filename = code.sourcename else: filename = "<string>" # compile try: if isinstance(code, SourceCode): pyc = code.compiled else: pyc = compile(code, filename, 'exec') except SourceCodeError as e: reraise(e, RexError) except Exception as e: stack = traceback.format_exc() raise RexError("Failed to compile %s:\n\n%s" % (filename, stack)) error_class = Exception if config.catch_rex_errors else None # execute if exec_namespace is not None: try: if isinstance(code, SourceCode): code.exec_(globals_=exec_namespace) else: exec pyc in exec_namespace except RexError: raise except SourceCodeError as e: reraise(e, RexError) except error_class as e: stack = traceback.format_exc() raise RexError("Failed to exec %s:\n\n%s" % (filename, stack)) return pyc
232,315
Execute code within the execution context. Args: code (str or SourceCode): Rex code to execute. filename (str): Filename to report if there are syntax errors. isolate (bool): If True, do not affect `self.globals` by executing this code.
def execute_code(self, code, filename=None, isolate=False): def _apply(): self.compile_code(code=code, filename=filename, exec_namespace=self.globals) # we want to execute the code using self.globals - if for no other # reason that self.formatter is pointing at self.globals, so if we # passed in a copy, we would also need to make self.formatter "look" at # the same copy - but we don't want to "pollute" our namespace, because # the same executor may be used to run multiple packages. Therefore, # we save a copy of self.globals before execution, and restore it after # if isolate: saved_globals = dict(self.globals) try: _apply() finally: self.globals.clear() self.globals.update(saved_globals) else: _apply()
232,316
Same as iter_packages in packages.py, but also applies this filter. Args: name (str): Name of the package, eg 'maya'. range_ (VersionRange or str): If provided, limits the versions returned to those in `range_`. paths (list of str, optional): paths to search for packages, defaults to `config.packages_path`. Returns: `Package` iterator.
def iter_packages(self, name, range_=None, paths=None): for package in iter_packages(name, range_, paths): if not self.excludes(package): yield package
232,333
Add a filter to the list. Args: package_filter (`PackageFilter`): Filter to add.
def add_filter(self, package_filter): filters = self.filters + [package_filter] self.filters = sorted(filters, key=lambda x: x.cost)
232,342
Parse a rule from a string. See rezconfig.package_filter for an overview of valid strings. Args: txt (str): String to parse. Returns: `Rule` instance.
def parse_rule(cls, txt): types = {"glob": GlobRule, "regex": RegexRule, "range": RangeRule, "before": TimestampRule, "after": TimestampRule} # parse form 'x(y)' into x, y label, txt = Rule._parse_label(txt) if label is None: if '*' in txt: label = "glob" else: label = "range" elif label not in types: raise ConfigurationError( "'%s' is not a valid package filter type" % label) rule_cls = types[label] txt_ = "%s(%s)" % (label, txt) try: rule = rule_cls._parse(txt_) except Exception as e: raise ConfigurationError("Error parsing package filter '%s': %s: %s" % (txt_, e.__class__.__name__, str(e))) return rule
232,349
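A sketch of Rule.parse_rule using the filter-string forms described in the docstring; the import path is an assumption:

    from rez.package_filter import Rule  # assumed module path

    r1 = Rule.parse_rule("*.beta")             # wildcard, so a glob rule is inferred
    r2 = Rule.parse_rule("foo-5.6")            # no wildcard, so a range rule is inferred
    r3 = Rule.parse_rule("after(1528000000)")  # explicit timestamp rule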
Create a regex rule. Args: s (str): Regex pattern. Eg '.*\\.beta$'.
def __init__(self, s): self.txt = s self._family = self._extract_family(s) self.regex = re.compile(s)
232,352
Create a timestamp rule. Args: timestamp (int): Epoch time. family (str): Package family to apply the rule to. reverse (bool): If True, reverse the logic so that packages released *after* the timestamp are matched.
def __init__(self, timestamp, family=None, reverse=False): self.timestamp = timestamp self.reverse = reverse self._family = family
232,356
Create an executable script. Args: filepath (str): File to create. body (str or callable): Contents of the script. If a callable, its code is used as the script body. program (str): Name of program to launch the script, 'python' if None
def create_executable_script(filepath, body, program=None): program = program or "python" if callable(body): from rez.utils.sourcecode import SourceCode code = SourceCode(func=body) body = code.source if not body.endswith('\n'): body += '\n' with open(filepath, 'w') as f: # TODO: make cross platform f.write("#!/usr/bin/env %s\n" % program) f.write(body) # TODO: Although Windows supports os.chmod you can only set the readonly # flag. Setting the file readonly breaks the unit tests that expect to # clean up the files once the test has run. Temporarily we don't bother # setting the permissions, but this will need to change. if os.name == "posix": os.chmod(filepath, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
232,384
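A sketch of create_executable_script; the import path is an assumption and the filepath/body are illustrative:

    from rez.utils.filesystem import create_executable_script  # assumed module path

    def body():
        print("hello from the generated script")

    # Writes /tmp/hello with a '#!/usr/bin/env python' line, and chmod +x on posix.
    create_executable_script("/tmp/hello", body)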
Create a PackageHelp object. Args: package_name (str): Package to search. version_range (`VersionRange`): Versions to search.
def __init__(self, package_name, version_range=None, paths=None, verbose=False): self.package = None self._verbose = verbose self._sections = [] # find latest package with a help entry package = None it = iter_packages(package_name, range_=version_range) packages = sorted(it, key=lambda x: x.version, reverse=True) for package_ in packages: if self._verbose: print "searching for help in %s..." % package_.uri if package_.help: package = package_ break if package: help_ = package.help if isinstance(help_, basestring): sections = [["Help", help_]] elif isinstance(help_, list): sections = help_ if self._verbose: print "found %d help entries in %s." % (len(sections), package.uri) # create string formatter for help entries if package.num_variants == 0: base = package.base root = base else: variant = package.get_variant(0) base = variant.base root = variant.root formatter = scoped_formatter( base=base, root=root, config=config, version=VersionBinding(package.version), system=system) # format sections for section in sections: uri = section[1] uri = convert_old_command_expansions(uri) uri = uri.replace("$BROWSER", "").strip() uri = formatter.format(uri) section[1] = uri self.package = package self._sections = sections
232,393
Create a standalone, in-memory repository. Using this function bypasses the `package_repository_manager` singleton. This is usually desirable, since in-memory repositories are for temporarily storing programmatically created packages, which we do not want to cache and which do not persist. Args: repository_data (dict): Repository data, see class docstring. Returns: `MemoryPackageRepository` object.
def create_repository(cls, repository_data): location = "memory{%s}" % hex(id(repository_data)) resource_pool = ResourcePool(cache_size=None) repo = MemoryPackageRepository(location, resource_pool) repo.data = repository_data return repo
232,440
Create an in-memory package repository. Args: location (str): Path containing the package repository.
def __init__(self, location, resource_pool): super(MemoryPackageRepository, self).__init__(location, resource_pool) self.data = {} self.register_resource(MemoryPackageFamilyResource) self.register_resource(MemoryPackageResource) self.register_resource(MemoryVariantResource)
232,441
Create a widget containing an aligned set of widgets. Args: widgets (list of `QWidget`). horizontal (bool). align (str): One of: - 'left', 'right' (horizontal); - 'top', 'bottom' (vertical) parent_widget (`QWidget`): Owner widget, QWidget is created if this is not provided. Returns: `QWidget`
def create_pane(widgets, horizontal, parent_widget=None, compact=False, compact_spacing=2): pane = parent_widget or QtGui.QWidget() type_ = QtGui.QHBoxLayout if horizontal else QtGui.QVBoxLayout layout = type_() if compact: layout.setSpacing(compact_spacing) layout.setContentsMargins(compact_spacing, compact_spacing, compact_spacing, compact_spacing) for widget in widgets: stretch = 0 if isinstance(widget, tuple): widget, stretch = widget if isinstance(widget, int): layout.addSpacing(widget) elif widget: layout.addWidget(widget, stretch) else: layout.addStretch() pane.setLayout(layout) return pane
232,447
Create a toolbutton. Args: entries: List of (label, slot) tuples. Returns: `QtGui.QToolBar`.
def create_toolbutton(entries, parent=None): btn = QtGui.QToolButton(parent) menu = QtGui.QMenu() actions = [] for label, slot in entries: action = add_menu_action(menu, label, slot) actions.append(action) btn.setPopupMode(QtGui.QToolButton.MenuButtonPopup) btn.setDefaultAction(actions[0]) btn.setMenu(menu) return btn, actions
232,453
Find packages that are plugins of the given package. Args: package_name (str): Name of the package. paths (list of str): Paths to search for packages, defaults to `config.packages_path`. Returns: list of str: The packages that are plugins of the given package.
def get_plugins(package_name, paths=None): pkg = get_latest_package(package_name, paths=paths, error=True) if not pkg.has_plugins: return [] it = iter_package_families(paths) package_names = set(x.name for x in it) bar = ProgressBar("Searching", len(package_names)) plugin_pkgs = [] for package_name_ in package_names: bar.next() if package_name_ == package_name: continue # not a plugin of itself plugin_pkg = get_latest_package(package_name_, paths=paths) if not plugin_pkg.plugin_for: continue for plugin_for in plugin_pkg.plugin_for: if plugin_for == pkg.name: plugin_pkgs.append(package_name_) bar.finish() return plugin_pkgs
232,479
Search for resources. Args: resources_request (str): Resource to search, glob-style patterns are supported. If None, returns all matching resource types. Returns: 2-tuple: - str: resource type (family, package, variant); - List of `ResourceSearchResult`: Matching resources. Will be in alphabetical order if families, and version ascending for packages or variants.
def search(self, resources_request=None): # Find matching package families name_pattern, version_range = self._parse_request(resources_request) family_names = set( x.name for x in iter_package_families(paths=self.package_paths) if fnmatch.fnmatch(x.name, name_pattern) ) family_names = sorted(family_names) # determine what type of resource we're searching for if self.resource_type: resource_type = self.resource_type elif version_range or len(family_names) == 1: resource_type = "package" else: resource_type = "family" if not family_names: return resource_type, [] # return list of family names (validation is n/a in this case) if resource_type == "family": results = [ResourceSearchResult(x, "family") for x in family_names] return "family", results results = [] # iterate over packages/variants for name in family_names: it = iter_packages(name, version_range, paths=self.package_paths) packages = sorted(it, key=lambda x: x.version) if self.latest and packages: packages = [packages[-1]] for package in packages: # validate and check time (accessing timestamp may cause # validation fail) try: if package.timestamp: if self.after_time and package.timestamp < self.after_time: continue if self.before_time and package.timestamp >= self.before_time: continue if self.validate: package.validate_data() except ResourceContentError as e: if resource_type == "package": result = ResourceSearchResult(package, "package", str(e)) results.append(result) continue if resource_type == "package": result = ResourceSearchResult(package, "package") results.append(result) continue # iterate variants try: for variant in package.iter_variants(): if self.validate: try: variant.validate_data() except ResourceContentError as e: result = ResourceSearchResult( variant, "variant", str(e)) results.append(result) continue result = ResourceSearchResult(variant, "variant") results.append(result) except ResourceContentError: # this may happen if 'variants' in package is malformed continue return resource_type, results
232,482
Print formatted search results. Args: search_results (list of `ResourceSearchResult`): Search to format.
def print_search_results(self, search_results, buf=sys.stdout): formatted_lines = self.format_search_results(search_results) pr = Printer(buf) for txt, style in formatted_lines: pr(txt, style)
232,485
Format search results. Args: search_results (list of `ResourceSearchResult`): Search to format. Returns: List of 2-tuple: Text and color to print in.
def format_search_results(self, search_results): formatted_lines = [] for search_result in search_results: lines = self._format_search_result(search_result) formatted_lines.extend(lines) return formatted_lines
232,486
Create a memcached client. Args: servers (str or list of str): Server URI(s), eg '127.0.0.1:11211'. debug (bool): If True, quasi human readable keys are used. This helps debugging - run 'memcached -vv' in the foreground to see the keys being get/set/stored.
def __init__(self, servers, debug=False): self.servers = [servers] if isinstance(servers, basestring) else servers self.key_hasher = self._debug_key_hash if debug else self._key_hash self._client = None self.debug = debug self.current = ''
232,544
Drop existing entries from the cache. Args: hard (bool): If True, all current entries are flushed from the server(s), which affects all users. If False, only the local process is affected.
def flush(self, hard=False): if not self.servers: return if hard: self.client.flush_all() self.reset_stats() else: from uuid import uuid4 tag = uuid4().hex if self.debug: tag = "flushed" + tag self.current = tag
232,550
Create a reorderer. Args: order_dict (dict of (str, `PackageOrder`)): Orderers to apply to each package family. default_order (`PackageOrder`): Orderer to apply to any packages not specified in `order_dict`.
def __init__(self, order_dict, default_order=None): self.order_dict = order_dict.copy() self.default_order = default_order
232,588
Create a reorderer. Args: timestamp (int): Epoch time of timestamp. Packages before this time are preferred. rank (int): If non-zero, allow version changes at this rank or above past the timestamp.
def __init__(self, timestamp, rank=0): self.timestamp = timestamp self.rank = rank
232,594
Find an executable. Args: name (str): Program name. check_syspaths (bool): If True, check the standard system paths as well, if program was not found on current $PATH. Returns: str: Full filepath of executable.
def find_executable(cls, name, check_syspaths=False): exe = which(name) if not exe and check_syspaths: paths = cls.get_syspaths() env = os.environ.copy() env["PATH"] = os.pathsep.join(paths) exe = which(name, env=env) if not exe: raise RuntimeError("Couldn't find executable '%s'." % name) return exe
232,626
Create a package variant. Args: variant (`Variant`): Package variant. building (bool): True if a build is occurring.
def __init__(self, variant, building): self.variant = variant self.building = building
232,657
Get a list of variants that intersect with the given range. Args: range_ (`VersionRange`): Package version range. Returns: List of `_PackageEntry` objects.
def get_intersection(self, range_): result = [] for entry in self.entries: package, value = entry if value is None: continue # package was blocked by package filters if package.version not in range_: continue if isinstance(value, list): variants = value entry_ = _PackageEntry(package, variants, self.solver) result.append(entry_) continue # apply package filter if self.solver.package_filter: rule = self.solver.package_filter.excludes(package) if rule: if config.debug_package_exclusions: print_debug("Package '%s' was excluded by rule '%s'" % (package.qualified_name, str(rule))) entry[1] = None continue # expand package entry into list of variants if self.solver.package_load_callback: self.solver.package_load_callback(package) variants_ = [] for var in package.iter_variants(): variant = PackageVariant(var, self.solver.building) variants_.append(variant) entry[1] = variants_ entry_ = _PackageEntry(package, variants_, self.solver) result.append(entry_) return result or None
232,665
Get a list of variants from the cache. Args: package_name (str): Name of package. range_ (`VersionRange`): Package version range. Returns: `_PackageVariantSlice` object.
def get_variant_slice(self, package_name, range_): variant_list = self.variant_lists.get(package_name) if variant_list is None: variant_list = _PackageVariantList(package_name, self.solver) self.variant_lists[package_name] = variant_list entries = variant_list.get_intersection(range_) if not entries: return None slice_ = _PackageVariantSlice(package_name, entries=entries, solver=self.solver) return slice_
232,681
Get packages involved in a failure. Args: failure_index: See `failure_reason`. Returns: A list of Requirement objects.
def failure_packages(self, failure_index=None): phase, _ = self._get_failed_phase(failure_index) fr = phase.failure_reason return fr.involved_requirements() if fr else None
232,709
Returns a graph showing a solve failure. Args: failure_index: See `failure_reason` Returns: A pygraph.digraph object.
def get_fail_graph(self, failure_index=None): phase, _ = self._get_failed_phase(failure_index) return phase.get_graph()
232,711
Create a filesystem package repository. Args: location (str): Path containing the package repository.
def __init__(self, location, resource_pool): super(FileSystemPackageRepository, self).__init__(location, resource_pool) global _settings _settings = config.plugins.package_repository.filesystem self.register_resource(FileSystemPackageFamilyResource) self.register_resource(FileSystemPackageResource) self.register_resource(FileSystemVariantResource) self.register_resource(FileSystemCombinedPackageFamilyResource) self.register_resource(FileSystemCombinedPackageResource) self.register_resource(FileSystemCombinedVariantResource) self.get_families = lru_cache(maxsize=None)(self._get_families) self.get_family = lru_cache(maxsize=None)(self._get_family) self.get_packages = lru_cache(maxsize=None)(self._get_packages) self.get_variants = lru_cache(maxsize=None)(self._get_variants) self.get_file = lru_cache(maxsize=None)(self._get_file)
232,746
Temporarily make `path` writable, if possible. Does nothing if: - config setting 'make_package_temporarily_writable' is False; - this can't be done (eg we don't own `path`). Args: path (str): Path to make temporarily writable
def make_path_writable(path): from rez.config import config try: orig_mode = os.stat(path).st_mode new_mode = orig_mode if config.make_package_temporarily_writable and \ not os.access(path, os.W_OK): new_mode = orig_mode | stat.S_IWUSR # make writable if new_mode != orig_mode: os.chmod(path, new_mode) except OSError: # ignore access errors here, and just do nothing. It will be more # intuitive for the calling code to fail on access instead. # orig_mode = None new_mode = None # yield, then reset mode back to original try: yield finally: if new_mode != orig_mode: os.chmod(path, orig_mode)
232,764
Get the longest parent path in `path` that exists. If `path` exists, it is returned. Args: path (str): Path to test topmost_path (str): Do not test this path or above Returns: str: Existing path, or None if no path was found.
def get_existing_path(path, topmost_path=None): prev_path = None if topmost_path: topmost_path = os.path.normpath(topmost_path) while True: if os.path.exists(path): return path path = os.path.dirname(path) if path == prev_path: return None if topmost_path and os.path.normpath(path) == topmost_path: return None prev_path = path
232,765
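A small sketch of get_existing_path; the import path is an assumption and the paths are illustrative:

    from rez.utils.filesystem import get_existing_path  # assumed module path

    # If /tmp/a/b/c does not exist but /tmp/a does, this returns '/tmp/a';
    # the search stops at (and excludes) topmost_path.
    path = get_existing_path("/tmp/a/b/c", topmost_path="/tmp")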
Check that the found software version is within supplied range. Args: version: Version of the package as a Version object. range_: Allowable version range as a VersionRange object.
def check_version(version, range_=None): if range_ and version not in range_: raise RezBindError("found version %s is not within range %s" % (str(version), str(range_)))
232,808
Find an executable. Args: name: Name of the program, eg 'python'. filepath: Path to executable, a search is performed if None. Returns: Path to the executable if found, otherwise an error is raised.
def find_exe(name, filepath=None): if filepath: if not os.path.exists(filepath): open(filepath) # raise IOError elif not os.path.isfile(filepath): raise RezBindError("not a file: %s" % filepath) else: filepath = which(name) if not filepath: raise RezBindError("could not find executable: %s" % name) return filepath
232,809
Run an executable and get the program version. Args: exepath: Filepath to executable. version_arg: Arg to pass to program, eg "-V". Can also be a list. word_index: Expect the Nth word of output to be the version. version_rank: Cap the version to this many tokens. Returns: `Version` object.
def extract_version(exepath, version_arg, word_index=-1, version_rank=3): if isinstance(version_arg, basestring): version_arg = [version_arg] args = [exepath] + version_arg stdout, stderr, returncode = _run_command(args) if returncode: raise RezBindError("failed to execute %s: %s\n(error code %d)" % (exepath, stderr, returncode)) stdout = stdout.strip().split('\n')[0].strip() log("extracting version from output: '%s'" % stdout) try: strver = stdout.split()[word_index] toks = strver.replace('.', ' ').replace('-', ' ').split() strver = '.'.join(toks[:version_rank]) version = Version(strver) except Exception as e: raise RezBindError("failed to parse version from output '%s': %s" % (stdout, str(e))) log("extracted version: '%s'" % str(version)) return version
232,810
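A usage sketch for the bind utilities above (find_exe, extract_version); the module path is an assumption:

    from rez.bind._utils import find_exe, extract_version  # assumed module path

    exe = find_exe("cmake")
    # 'cmake version 3.10.2' -> last word is the version; keep at most 3 tokens.
    version = extract_version(exe, "--version", word_index=-1, version_rank=3)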
Create a VersionedObject directly from an object name and version. Args: name: Object name string. version: Version object.
def construct(cls, name, version=None): other = VersionedObject(None) other.name_ = name other.version_ = Version() if version is None else version return other
232,820
Create a requirement directly from an object name and VersionRange. Args: name: Object name string. range: VersionRange object. If None, an unversioned requirement is created.
def construct(cls, name, range=None): other = Requirement(None) other.name_ = name other.range_ = VersionRange() if range is None else range return other
232,824
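A sketch of the two construct() classmethods above; the vendored import paths are assumptions:

    from rez.vendor.version.version import Version, VersionRange  # assumed paths
    from rez.vendor.version.requirement import VersionedObject, Requirement

    obj = VersionedObject.construct("foo", Version("1.2.3"))  # str(obj) == 'foo-1.2.3'
    req = Requirement.construct("foo", VersionRange("1.2+"))  # any foo >= 1.2
    any_bah = Requirement.construct("bah")                    # unversioned requirement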
Create a RequirementList. Args: requirements: List of Requirement objects.
def __init__(self, requirements): self.requirements_ = [] self.conflict_ = None self.requirements_dict = {} self.names_ = set() self.conflict_names_ = set() for req in requirements: existing_req = self.requirements_dict.get(req.name) if existing_req is None: self.requirements_dict[req.name] = req else: merged_req = existing_req.merged(req) if merged_req is None: self.conflict_ = (existing_req, req) return else: self.requirements_dict[req.name] = merged_req seen = set() # build optimised list, this intends to match original request order # as closely as possible for req in requirements: if req.name not in seen: seen.add(req.name) req_ = self.requirements_dict[req.name] self.requirements_.append(req_) if req_.conflict: self.conflict_names_.add(req.name) else: self.names_.add(req.name)
232,829
Replacement for pygraph.readwrite.dot.write, which is dog slow. Note: This isn't a general replacement. It will work for the graphs that Rez generates, but there are no guarantees beyond that. Args: g (`pygraph.digraph`): Input graph. Returns: str: Graph in dot format.
def write_dot(g): lines = ["digraph g {"] def attrs_txt(items): if items: txt = ", ".join(('%s="%s"' % (k, str(v).strip('"'))) for k, v in items) return '[' + txt + ']' else: return '' for node in g.nodes(): atxt = attrs_txt(g.node_attributes(node)) txt = "%s %s;" % (node, atxt) lines.append(txt) for e in g.edges(): edge_from, edge_to = e attrs = g.edge_attributes(e) label = str(g.edge_label(e)) if label: attrs.append(("label", label)) atxt = attrs_txt(attrs) txt = "%s -> %s %s;" % (edge_from, edge_to, atxt) lines.append(txt) lines.append("}") return '\n'.join(lines)
232,834
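A sketch feeding write_dot a tiny digraph; the import paths are assumptions (rez vendors python-graph):

    from rez.vendor.pygraph.classes.digraph import digraph  # assumed vendored path
    from rez.utils.graph_utils import write_dot             # assumed module path

    g = digraph()
    g.add_nodes(["a", "b"])
    g.add_edge(("a", "b"), label="requires")
    print(write_dot(g))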
Prune a package graph so it only contains nodes accessible from the given package. Args: graph_str (str): Dot-language graph string. package_name (str): Name of package of interest. Returns: Pruned graph, as a string.
def prune_graph(graph_str, package_name): # find nodes of interest g = read_dot(graph_str) nodes = set() for node, attrs in g.node_attr.iteritems(): attr = [x for x in attrs if x[0] == "label"] if attr: label = attr[0][1] try: req_str = _request_from_label(label) request = PackageRequest(req_str) except PackageRequestError: continue if request.name == package_name: nodes.add(node) if not nodes: raise ValueError("The package %r does not appear in the graph." % package_name) # find nodes upstream from these nodes g_rev = g.reverse() accessible_nodes = set() access = accessibility(g_rev) for node in nodes: nodes_ = access.get(node, []) accessible_nodes |= set(nodes_) # remove inaccessible nodes inaccessible_nodes = set(g.nodes()) - accessible_nodes for node in inaccessible_nodes: g.del_node(node) return write_dot(g)
232,835
Render a graph to an image file. Args: graph_str (str): Dot-language graph string. dest_file (str): Filepath to save the graph to. fmt (str): Format, eg "png", "jpg". image_ratio (float): Image ratio. Returns: String representing format that was written, such as 'png'.
def save_graph(graph_str, dest_file, fmt=None, image_ratio=None): g = pydot.graph_from_dot_data(graph_str) # determine the dest format if fmt is None: fmt = os.path.splitext(dest_file)[1].lower().strip('.') or "png" if hasattr(g, "write_" + fmt): write_fn = getattr(g, "write_" + fmt) else: raise Exception("Unsupported graph format: '%s'" % fmt) if image_ratio: g.set_ratio(str(image_ratio)) write_fn(dest_file) return fmt
232,836
Confirm a flow method Confirms to the peer that a flow command was received and processed. PARAMETERS: active: boolean current flow setting Confirms the setting of the processed flow method: True means the peer will start sending or continue to send content frames; False means it will not.
def _x_flow_ok(self, active): args = AMQPWriter() args.write_bit(active) self._send_method((20, 21), args)
232,901
Open a channel for use This method opens a virtual connection (a channel). RULE: This method MUST NOT be called when the channel is already open. PARAMETERS: out_of_band: shortstr (DEPRECATED) out-of-band settings Configures out-of-band transfers on this channel. The syntax and meaning of this field will be formally defined at a later date.
def _x_open(self): if self.is_open: return args = AMQPWriter() args.write_shortstr('') # out_of_band: deprecated self._send_method((20, 10), args) return self.wait(allowed_methods=[ (20, 11), # Channel.open_ok ])
232,902
Return the installation path for a package (where its payload goes). Args: path (str): Package repository path.
def get_package_install_path(self, path): from rez.package_repository import package_repository_manager pkg_repo = package_repository_manager.get_repository(path) return pkg_repo.get_package_payload_path( package_name=self.package.name, package_version=self.package.version )
232,931
Find the bind module matching the given name. Args: name (str): Name of package to find bind module for. verbose (bool): If True, print extra output. Returns: str: Filepath to bind module .py file, or None if not found.
def find_bind_module(name, verbose=False): bindnames = get_bind_modules(verbose=verbose) bindfile = bindnames.get(name) if bindfile: return bindfile if not verbose: return None # suggest close matches fuzzy_matches = get_close_pkgs(name, bindnames.keys()) if fuzzy_matches: rows = [(x[0], bindnames[x[0]]) for x in fuzzy_matches] print "'%s' not found. Close matches:" % name print '\n'.join(columnise(rows)) else: print "No matches." return None
232,973
Create a release hook. Args: source_path: Path containing source that was released.
def __init__(self, source_path): self.source_path = source_path self.package = get_developer_package(source_path) self.type_settings = self.package.config.plugins.release_hook self.settings = self.type_settings.get(self.name())
232,991
Recursively convert dict and UserDict types. Note that `d` is unchanged. Args: to_class (type): Dict-like type to convert values to, usually UserDict subclass, or dict. from_class (type): Dict-like type to convert values from. If a tuple, multiple types are converted. Returns: Converted data as `to_class` instance.
def convert_dicts(d, to_class=AttrDictWrapper, from_class=dict): d_ = to_class() for key, value in d.iteritems(): if isinstance(value, from_class): d_[key] = convert_dicts(value, to_class=to_class, from_class=from_class) else: d_[key] = value return d_
233,059
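A small sketch of convert_dicts; the import path is an assumption:

    from rez.utils.data_utils import convert_dicts, AttrDictWrapper  # assumed module path

    d = {"a": {"b": 1}, "c": 2}
    d_ = convert_dicts(d, to_class=AttrDictWrapper, from_class=dict)
    # nested dicts now support attribute access; the original d is unchanged
    print(d_.a.b)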
Create a package repository. Args: location (str): A string specifying the location of the repository. This could be a filesystem path, or a database uri, etc. resource_pool (`ResourcePool`): The pool used to manage package resources.
def __init__(self, location, resource_pool): self.location = location self.pool = resource_pool
233,083
Get a resource. Attempts to get and return a cached version of the resource if available, otherwise a new resource object is created and returned. Args: resource_key (`str`): Name of the type of `Resources` to find. variables: Data to identify / store on the resource. Returns: `PackageRepositoryResource` instance.
def get_resource(self, resource_key, **variables): handle = self.make_resource_handle(resource_key, **variables) return self.get_resource_from_handle(handle, verify_repo=False)
233,087
Get a resource. Args: resource_handle (`ResourceHandle`): Handle of the resource. Returns: `PackageRepositoryResource` instance.
def get_resource_from_handle(self, resource_handle, verify_repo=True): if verify_repo: # we could fix the handle at this point, but handles should # always be made from repo.make_resource_handle... for now, # at least, error to catch any "incorrect" construction of # handles... if resource_handle.variables.get("repository_type") != self.name(): raise ResourceError("repository_type mismatch - requested %r, " "repository_type is %r" % (resource_handle.variables["repository_type"], self.name())) if resource_handle.variables.get("location") != self.location: raise ResourceError("location mismatch - requested %r, " "repository location is %r " % (resource_handle.variables["location"], self.location)) resource = self.pool.get_resource_from_handle(resource_handle) resource._repository = self return resource
233,088
Get a package repository. Args: path (str): Entry from the 'packages_path' config setting. This may simply be a path (which is managed by the 'filesystem' package repository plugin), or a string in the form "type@location", where 'type' identifies the repository plugin type to use. Returns: `PackageRepository` instance.
def get_repository(self, path): # normalise parts = path.split('@', 1) if len(parts) == 1: parts = ("filesystem", parts[0]) repo_type, location = parts if repo_type == "filesystem": # choice of abspath here vs realpath is deliberate. Realpath gives # canonical path, which can be a problem if two studios are sharing # packages, and have mirrored package paths, but some are actually # different paths, symlinked to look the same. It happened! location = os.path.abspath(location) normalised_path = "%s@%s" % (repo_type, location) return self._get_repository(normalised_path)
233,090
Get a resource. Args: resource_handle (`ResourceHandle`): Handle of the resource. Returns: `PackageRepositoryResource` instance.
def get_resource_from_handle(self, resource_handle): repo_type = resource_handle.get("repository_type") location = resource_handle.get("location") if not (repo_type and location): raise ValueError("PackageRepositoryManager requires " "resource_handle objects to have a " "repository_type and location defined") path = "%s@%s" % (repo_type, location) repo = self.get_repository(path) resource = repo.get_resource_from_handle(resource_handle) return resource
233,093
Get a context. Args: name (str): Name to store the context under. Returns: `ResolvedContext` object.
def context(self, name): data = self._context(name) context = data.get("context") if context: return context assert self.load_path context_path = os.path.join(self.load_path, "contexts", "%s.rxt" % name) context = ResolvedContext.load(context_path) data["context"] = context data["loaded"] = True return context
233,287
Add a context to the suite. Args: name (str): Name to store the context under. context (ResolvedContext): Context to add.
def add_context(self, name, context, prefix_char=None): if name in self.contexts: raise SuiteError("Context already in suite: %r" % name) if not context.success: raise SuiteError("Context is not resolved: %r" % name) self.contexts[name] = dict(name=name, context=context.copy(), tool_aliases={}, hidden_tools=set(), priority=self._next_priority, prefix_char=prefix_char) self._flush_tools()
233,288
Remove a context from the suite. Args: name (str): Name of the context to remove.
def remove_context(self, name): self._context(name) del self.contexts[name] self._flush_tools()
233,290
Set a context's prefix. This will be applied to all wrappers for the tools in this context. For example, a tool called 'foo' would appear as '<prefix>foo' in the suite's bin path. Args: name (str): Name of the context to prefix. prefix (str): Prefix to apply to tools.
def set_context_prefix(self, name, prefix): data = self._context(name) data["prefix"] = prefix self._flush_tools()
233,291
Set a context's suffix. This will be applied to all wrappers for the tools in this context. For example, a tool called 'foo' would appear as 'foo<suffix>' in the suite's bin path. Args: name (str): Name of the context to suffix. suffix (str): Suffix to apply to tools.
def set_context_suffix(self, name, suffix): data = self._context(name) data["suffix"] = suffix self._flush_tools()
233,292
Hide a tool so that it is not exposed in the suite. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to hide.
def hide_tool(self, context_name, tool_name): data = self._context(context_name) hidden_tools = data["hidden_tools"] if tool_name not in hidden_tools: self._validate_tool(context_name, tool_name) hidden_tools.add(tool_name) self._flush_tools()
233,294
Unhide a tool so that it may be exposed in a suite. Note that unhiding a tool doesn't guarantee it can be seen - a tool of the same name from a different context may be overriding it. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to unhide.
def unhide_tool(self, context_name, tool_name): data = self._context(context_name) hidden_tools = data["hidden_tools"] if tool_name in hidden_tools: hidden_tools.remove(tool_name) self._flush_tools()
233,295
Register an alias for a specific tool. Note that a tool alias takes precedence over a context prefix/suffix. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to alias. tool_alias (str): Alias to give the tool.
def alias_tool(self, context_name, tool_name, tool_alias): data = self._context(context_name) aliases = data["tool_aliases"] if tool_name in aliases: raise SuiteError("Tool %r in context %r is already aliased to %r" % (tool_name, context_name, aliases[tool_name])) self._validate_tool(context_name, tool_name) aliases[tool_name] = tool_alias self._flush_tools()
233,296
Deregister an alias for a specific tool. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to unalias.
def unalias_tool(self, context_name, tool_name): data = self._context(context_name) aliases = data["tool_aliases"] if tool_name in aliases: del aliases[tool_name] self._flush_tools()
233,297
Given a visible tool alias, return the full path to the executable. Args: tool_alias (str): Tool alias to search for. Returns: (str): Filepath of the executable, or None if the tool is not in the suite. None may also be returned if the suite has not been saved to disk, in which case no filepath has yet been established.
def get_tool_filepath(self, tool_alias): tools_dict = self.get_tools() if tool_alias in tools_dict: if self.tools_path is None: return None else: return os.path.join(self.tools_path, tool_alias) else: return None
233,298
Given a visible tool alias, return the name of the context it belongs to. Args: tool_alias (str): Tool alias to search for. Returns: (str): Name of the context that exposes a visible instance of this tool alias, or None if the alias is not available.
def get_tool_context(self, tool_alias): tools_dict = self.get_tools() data = tools_dict.get(tool_alias) if data: return data["context_name"] return None
233,299
Save the suite to disk. Args: path (str): Path to save the suite to. If a suite is already saved at `path`, then it will be overwritten. Otherwise, if `path` exists, an error is raised.
def save(self, path, verbose=False): path = os.path.realpath(path) if os.path.exists(path): if self.load_path and self.load_path == path: if verbose: print "saving over previous suite..." for context_name in self.context_names: self.context(context_name) # load before dir deleted shutil.rmtree(path) else: raise SuiteError("Cannot save, path exists: %r" % path) contexts_path = os.path.join(path, "contexts") os.makedirs(contexts_path) # write suite data data = self.to_dict() filepath = os.path.join(path, "suite.yaml") with open(filepath, "w") as f: f.write(dump_yaml(data)) # write contexts for context_name in self.context_names: context = self.context(context_name) context._set_parent_suite(path, context_name) filepath = self._context_path(context_name, path) if verbose: print "writing %r..." % filepath context.save(filepath) # create alias wrappers tools_path = os.path.join(path, "bin") os.makedirs(tools_path) if verbose: print "creating alias wrappers in %r..." % tools_path tools = self.get_tools() for tool_alias, d in tools.iteritems(): tool_name = d["tool_name"] context_name = d["context_name"] data = self._context(context_name) prefix_char = data.get("prefix_char") if verbose: print ("creating %r -> %r (%s context)..." % (tool_alias, tool_name, context_name)) filepath = os.path.join(tools_path, tool_alias) create_forwarding_script(filepath, module="suite", func_name="_FWD__invoke_suite_tool_alias", context_name=context_name, tool_name=tool_name, prefix_char=prefix_char)
233,303
Print table of tools available in the suite. Args: context_name (str): If provided, only print the tools from this context.
def print_tools(self, buf=sys.stdout, verbose=False, context_name=None): def _get_row(entry): context_name_ = entry["context_name"] tool_alias = entry["tool_alias"] tool_name = entry["tool_name"] properties = [] col = None variant = entry["variant"] if isinstance(variant, set): properties.append("(in conflict)") col = critical if verbose: package = ", ".join(x.qualified_package_name for x in variant) else: v = iter(variant).next() package = "%s (+%d more)" % (v.qualified_package_name, len(variant) - 1) else: package = variant.qualified_package_name if tool_name == tool_alias: tool_name = "-" else: properties.append("(aliased)") if col is None: col = alias_col msg = " ".join(properties) row = [tool_alias, tool_name, package, context_name_, msg] return row, col if context_name: self._context(context_name) # check context exists context_names = [context_name] else: context_names = sorted(self.contexts.iterkeys()) rows = [["TOOL", "ALIASING", "PACKAGE", "CONTEXT", ""], ["----", "--------", "-------", "-------", ""]] colors = [None, None] entries_dict = defaultdict(list) for d in self.get_tools().itervalues(): entries_dict[d["context_name"]].append(d) if verbose: # add hidden entries for d in self.hidden_tools: d_ = d.copy() d_["hidden"] = True entries_dict[d["context_name"]].append(d_) # add conflicting tools for docs in self.tool_conflicts.itervalues(): for d in docs: d_ = d.copy() d_["conflicting"] = True entries_dict[d["context_name"]].append(d_) for i, context_name in enumerate(context_names): entries = entries_dict.get(context_name, []) if entries: if i: rows.append(('', '', '', '', '')) colors.append(None) entries = sorted(entries, key=lambda x: x["tool_alias"].lower()) for entry in entries: row, col = _get_row(entry) if "hidden" in entry: row[-1] = "(hidden)" rows.append(row) colors.append(warning) elif "conflicting" in entry: row[-1] = "(not visible)" rows.append(row) colors.append(warning) else: rows.append(row) colors.append(col) if rows: _pr = Printer(buf) for col, line in zip(colors, columnise(rows)): _pr(line, col) else: _pr("No tools available.")
233,308
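As a hedged follow-on, the tool table of a saved suite can be printed like this; `Suite.load` is assumed to exist as a classmethod and the suite path is illustrative.
# Sketch: print the tool table of the suite saved above.
import sys
from rez.suite import Suite   # assumed module location

suite = Suite.load("/tmp/my_suite")               # assumed classmethod
suite.print_tools(buf=sys.stdout, verbose=True)   # all contexts, extra detail
suite.print_tools(context_name="py")              # restrict to one context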
Run a pip command. Args: command_args (list of str): Args to pip. pip_version (str or `Version`): Version of pip to use, or latest if None. python_version (str or `Version`): Python version to use, or latest if None. Returns: `subprocess.Popen`: Pip process.
def run_pip_command(command_args, pip_version=None, python_version=None): pip_exe, context = find_pip(pip_version, python_version) command = [pip_exe] + list(command_args) if context is None: return popen(command) else: return context.execute_shell(command=command, block=False)
233,321
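A small sketch of `run_pip_command`. The `rez.pip` import path is an assumption; since the function returns a `subprocess.Popen`, the caller is responsible for waiting on the process.
# Sketch: run 'pip --version' via whatever pip/python rez resolves.
from rez.pip import run_pip_command   # assumed module location

p = run_pip_command(["--version"])
p.wait()
if p.returncode != 0:
    print "pip command failed with code %d" % p.returncode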
Create a context containing the specified pip and python. Args: pip_version (str or `Version`): Version of pip to use, or latest if None. python_version (str or `Version`): Python version to use, or latest if None. Returns: `ResolvedContext`: Context containing pip and python.
def create_context(pip_version=None, python_version=None): # determine pip pkg to use for install, and python variants to install on if pip_version: pip_req = "pip-%s" % str(pip_version) else: pip_req = "pip" if python_version: ver = Version(str(python_version)) major_minor_ver = ver.trim(2) py_req = "python-%s" % str(major_minor_ver) else: # use latest major.minor package = get_latest_package("python") if package: major_minor_ver = package.version.trim(2) else: # no python package. We're gonna fail, let's just choose current # python version (and fail at context creation time) major_minor_ver = '.'.join(map(str, sys.version_info[:2])) py_req = "python-%s" % str(major_minor_ver) # use pip + latest python to perform pip download operations request = [pip_req, py_req] with convert_errors(from_=(PackageFamilyNotFoundError, PackageNotFoundError), to=BuildError, msg="Cannot run - pip or python rez " "package is not present"): context = ResolvedContext(request) # print pip package used to perform the install pip_variant = context.get_resolved_package("pip") pip_package = pip_variant.parent print_info("Using %s (%s)" % (pip_package.qualified_name, pip_variant.uri)) return context
233,323
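A hedged sketch of `create_context`; the import path is assumed to sit alongside `run_pip_command`, and the python version is illustrative - adjust both to your rez installation.
# Sketch: resolve a pip+python context, then run a command inside it.
from rez.pip import create_context   # assumed module location

context = create_context(python_version="2.7")
p = context.execute_shell(command=["pip", "--version"], block=False)
p.wait()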
Get packages in the request. Args: include_implicit (bool): If True, implicit packages are appended to the result. Returns: List of `PackageRequest` objects.
def requested_packages(self, include_implicit=False): if include_implicit: return self._package_requests + self.implicit_packages else: return self._package_requests
233,399
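A brief sketch showing what `include_implicit` changes; the "python-2.7" request is illustrative and assumes such a rez package exists.
# Sketch: compare explicit requests with explicit + implicit requests.
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7"])
explicit = context.requested_packages()
with_implicit = context.requested_packages(include_implicit=True)
print "explicit:       %s" % ", ".join(map(str, explicit))
print "with implicits: %s" % ", ".join(map(str, with_implicit))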
Get the resolve graph. Args: as_dot (bool): If True, get the graph as a dot-language string. Otherwise, a pygraph.digraph object is returned. Returns: A string or `pygraph.digraph` object, or None if there is no graph associated with the resolve.
def graph(self, as_dot=False): if not self.has_graph: return None if not as_dot: if self.graph_ is None: # reads either dot format or our compact format self.graph_ = read_graph_from_string(self.graph_string) return self.graph_ if self.graph_string: if self.graph_string.startswith('{'): # compact format self.graph_ = read_graph_from_string(self.graph_string) else: # already in dot format. Note that this will only happen in # old rez contexts where the graph is not stored in the newer # compact format. return self.graph_string return write_dot(self.graph_)
233,404
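A sketch that dumps the resolve graph as dot so it can be rendered with graphviz; the request string and output path are illustrative.
# Sketch: write the resolve graph in dot format, e.g. for
# `dot -Tpng resolve.dot -o resolve.png`.
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7"])   # illustrative request
dot_str = context.graph(as_dot=True)
if dot_str is None:
    print "no graph associated with this resolve"
else:
    with open("resolve.dot", "w") as f:
        f.write(dot_str)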
Print the difference between the resolve of two contexts. Args: other (`ResolvedContext`): Context to compare to. heading: One of: - None: Do not display a heading; - True: Display the filename of each context as a heading, if both contexts have a filepath; - 2-tuple: Use the given two strings as headings - the first is the heading for `self`, the second for `other`.
def print_resolve_diff(self, other, heading=None): d = self.get_resolve_diff(other) if not d: return rows = [] if heading is True and self.load_path and other.load_path: a = os.path.basename(self.load_path) b = os.path.basename(other.load_path) heading = (a, b) if isinstance(heading, tuple): rows.append(list(heading) + [""]) rows.append(('-' * len(heading[0]), '-' * len(heading[1]), "")) newer_packages = d.get("newer_packages", {}) older_packages = d.get("older_packages", {}) added_packages = d.get("added_packages", set()) removed_packages = d.get("removed_packages", set()) if newer_packages: for name, pkgs in newer_packages.iteritems(): this_pkg = pkgs[0] other_pkg = pkgs[-1] diff_str = "(+%d versions)" % (len(pkgs) - 1) rows.append((this_pkg.qualified_name, other_pkg.qualified_name, diff_str)) if older_packages: for name, pkgs in older_packages.iteritems(): this_pkg = pkgs[0] other_pkg = pkgs[-1] diff_str = "(-%d versions)" % (len(pkgs) - 1) rows.append((this_pkg.qualified_name, other_pkg.qualified_name, diff_str)) if added_packages: for pkg in sorted(added_packages, key=lambda x: x.name): rows.append(("-", pkg.qualified_name, "")) if removed_packages: for pkg in sorted(removed_packages, key=lambda x: x.name): rows.append((pkg.qualified_name, "-", "")) print '\n'.join(columnise(rows))
233,412
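A hedged sketch comparing two saved contexts; `ResolvedContext.load` and the .rxt paths are assumptions.
# Sketch: show what changed between two saved resolves.
from rez.resolved_context import ResolvedContext

old = ResolvedContext.load("/tmp/old.rxt")   # assumed paths
new = ResolvedContext.load("/tmp/new.rxt")
old.print_resolve_diff(new, heading=True)    # headings = the .rxt filenames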
Get a data key value for each resolved package. Args: key (str): String key of property, eg 'tools'. request_only (bool): If True, only return the key from resolved packages that were also present in the request. Returns: Dict of {pkg-name: (variant, value)}.
def get_key(self, key, request_only=False): values = {} requested_names = [x.name for x in self._package_requests if not x.conflict] for pkg in self.resolved_packages: if (not request_only) or (pkg.name in requested_names): value = getattr(pkg, key) if value is not None: values[pkg.name] = (pkg, value) return values
233,417
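A sketch of `get_key`, listing the tools declared by each explicitly requested package; the request string is illustrative.
# Sketch: list tools provided by each requested (non-conflict) package.
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7"])
tools_by_pkg = context.get_key("tools", request_only=True)
for pkg_name, (variant, tools) in tools_by_pkg.iteritems():
    print "%s: %s" % (pkg_name, ", ".join(tools))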
Get the variant(s) that provide the named tool. If more than one variant provides the tool, the tool is in conflict, and Rez does not know which variant's tool is actually exposed. Args: tool_name (str): Name of the tool to search for. Returns: Set of `Variant` objects. If no variant provides the tool, an empty set is returned.
def get_tool_variants(self, tool_name): variants = set() tools_dict = self.get_tools(request_only=False) for variant, tools in tools_dict.itervalues(): if tool_name in tools: variants.add(variant) return variants
233,418
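A sketch that checks whether a tool is missing, unique, or in conflict; the 'python' tool name and the request are illustrative.
# Sketch: report where the 'python' tool comes from.
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7"])
variants = context.get_tool_variants("python")
if not variants:
    print "no package provides 'python'"
elif len(variants) > 1:
    print "'python' is in conflict across %d packages" % len(variants)
else:
    print "'python' comes from %s" % iter(variants).next().qualified_package_name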
Returns tools of the same name provided by more than one package. Args: request_only (bool): If True, only consider tools from resolved packages that were also present in the request. Returns: Dict of {tool-name: set([Variant])}.
def get_conflicting_tools(self, request_only=False): from collections import defaultdict tool_sets = defaultdict(set) tools_dict = self.get_tools(request_only=request_only) for variant, tools in tools_dict.itervalues(): for tool in tools: tool_sets[tool].add(variant) conflicts = dict((k, v) for k, v in tool_sets.iteritems() if len(v) > 1) return conflicts
233,419
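A sketch reporting every conflicting tool in a resolve; the request strings are illustrative.
# Sketch: report tools exposed by more than one package.
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7", "maya"])   # illustrative request
for tool, variants in context.get_conflicting_tools().iteritems():
    names = ", ".join(x.qualified_package_name for x in variants)
    print "%s is provided by: %s" % (tool, names)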
Get the shell code resulting from interpreting this context. Args: shell (str): Shell type, for eg 'bash'. If None, the current shell type is used. parent_environ (dict): Environment to interpret the context within, defaults to os.environ if None. style (`OutputStyle`): Style to format shell code in.
def get_shell_code(self, shell=None, parent_environ=None, style=OutputStyle.file): executor = self._create_executor(interpreter=create_shell(shell), parent_environ=parent_environ) if self.load_path and os.path.isfile(self.load_path): executor.env.REZ_RXT_FILE = self.load_path self._execute(executor) return executor.get_output(style)
233,420
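A sketch that bakes the context's environment into a sourceable bash script; the request and output path are illustrative.
# Sketch: write shell code that reproduces this environment when sourced.
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7"])
code = context.get_shell_code(shell="bash")
with open("env.sh", "w") as f:
    f.write(code)
# later, in bash:  source env.sh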
Get the list of rex.Action objects resulting from interpreting this context. This is provided mainly for testing purposes. Args: parent_environ (dict): Environment to interpret the context within, defaults to os.environ if None. Returns: A list of rex.Action subclass instances.
def get_actions(self, parent_environ=None): interp = Python(target_environ={}, passive=True) executor = self._create_executor(interp, parent_environ) self._execute(executor) return executor.actions
233,421
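A sketch that inspects the interpreted actions; `Setenv` is assumed to be one of the `rez.rex` Action subclasses, and the request is illustrative.
# Sketch: count how many environment variables the context would set.
from rez.resolved_context import ResolvedContext
from rez.rex import Setenv   # assumed Action subclass

context = ResolvedContext(["python-2.7"])
setenvs = [a for a in context.get_actions() if isinstance(a, Setenv)]
print "%d Setenv actions in this context" % len(setenvs)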
Apply the context to the current python session. Note that this updates os.environ and possibly sys.path, if `parent_environ` is not provided. Args: parent_environ: Environment to interpret the context within, defaults to os.environ if None.
def apply(self, parent_environ=None): interpreter = Python(target_environ=os.environ) executor = self._create_executor(interpreter, parent_environ) self._execute(executor) interpreter.apply_environ()
233,422
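A closing sketch applying a context to the running interpreter; `REZ_USED_RESOLVE` is assumed to be one of the variables rez sets, and the request is illustrative.
# Sketch: mutate os.environ (and possibly sys.path) with the resolve.
import os
from rez.resolved_context import ResolvedContext

context = ResolvedContext(["python-2.7"])
context.apply()
print os.environ.get("REZ_USED_RESOLVE", "")   # assumed rez-set variable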